diff --git a/.gitea/workflows/build-test-deploy.yml b/.gitea/workflows/build-test-deploy.yml
index 3ee81d74..e5b90aff 100644
--- a/.gitea/workflows/build-test-deploy.yml
+++ b/.gitea/workflows/build-test-deploy.yml
@@ -85,6 +85,29 @@ jobs:
             --logger "trx;LogFileName=stellaops-feedser-tests.trx" \
             --results-directory "$TEST_RESULTS_DIR"

+      - name: Build scanner language analyzer projects
+        run: |
+          dotnet restore src/StellaOps.sln
+          for project in \
+            src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj \
+            src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj
+          do
+            dotnet build "$project" --configuration $BUILD_CONFIGURATION --no-restore -warnaserror
+          done
+
+      - name: Run scanner language analyzer tests
+        run: |
+          dotnet test src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj \
+            --configuration $BUILD_CONFIGURATION \
+            --no-build \
+            --logger "trx;LogFileName=stellaops-scanner-lang-tests.trx" \
+            --results-directory "$TEST_RESULTS_DIR"
+
       - name: Publish BuildX SBOM generator
         run: |
           dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \
@@ -152,6 +175,25 @@ PY
           if-no-files-found: error
           retention-days: 7

+      - name: Package OS analyzer plug-ins
+        run: |
+          if [ ! -d "plugins/scanner/analyzers/os" ]; then
+            echo "OS analyzer plug-in directory not found" >&2
+            exit 1
+          fi
+
+          mkdir -p artifacts/plugins/os
+          tar -czf artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz -C plugins/scanner/analyzers/os .
+          sha256sum artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz > artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz.sha256
+
+      - name: Upload OS analyzer plug-ins
+        uses: actions/upload-artifact@v4
+        with:
+          name: scanner-os-analyzers
+          path: artifacts/plugins/os
+          if-no-files-found: error
+          retention-days: 7
+
       - name: Publish Feedser web service
         run: |
           mkdir -p "$PUBLISH_DIR"
@@ -224,7 +266,7 @@ PY
     runs-on: ubuntu-22.04
     env:
       DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-site
-    steps:
+    steps:
       - name: Checkout repository
         uses: actions/checkout@v4

@@ -246,18 +288,100 @@ PY
         uses: actions/upload-artifact@v4
         with:
           name: feedser-docs-site
-          path: ${{ env.DOCS_OUTPUT_DIR }}
-          if-no-files-found: error
-          retention-days: 7
-
-  deploy:
-    runs-on: ubuntu-22.04
-    needs: [build-test, docs]
-    if: >-
-      needs.build-test.result == 'success' &&
-      needs.docs.result == 'success' &&
-      (
-        (github.event_name == 'push' && github.ref == 'refs/heads/main') ||
+          path: ${{ env.DOCS_OUTPUT_DIR }}
+          if-no-files-found: error
+          retention-days: 7
+
+  scanner-perf:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    env:
+      BENCH_DIR: bench/Scanner.Analyzers
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Run analyzer microbench
+        working-directory: ${{ env.BENCH_DIR }}
+        run: |
+          node run-bench.js \
+            --repo-root "${{ github.workspace }}" \
+            --out latest.csv \
+            --threshold-ms 5000
+
+      - name: Compare against baseline
+        working-directory: ${{ env.BENCH_DIR }}
+        run: |
+          node - <<'NODE'
+          const fs = require('fs');
+          const path = require('path');
+
+          function parseCsv(file) {
+            const rows = fs.readFileSync(file, 'utf8').trim().split(/\r?\n/);
+            rows.shift();
+            const data = {};
+            for (const row of rows) {
+              const [id, iterations, sampleCount, mean, p95, max] = row.split(',');
+              data[id] = {
+                iterations: Number(iterations),
+                sampleCount: Number(sampleCount),
+                mean: Number(mean),
+                p95: Number(p95),
+                max: Number(max),
+              };
+            }
+            return data;
+          }
+
+          const baseline = parseCsv('baseline.csv');
+          const latest = parseCsv('latest.csv');
+          const allowedMultiplier = 1.20;
+          const regressions = [];
+
+          for (const [id, baseMetrics] of Object.entries(baseline)) {
+            const current = latest[id];
+            if (!current) {
+              regressions.push(`Scenario ${id} missing from latest run`);
+              continue;
+            }
+            if (current.mean > baseMetrics.mean * allowedMultiplier) {
+              regressions.push(`Scenario ${id} mean ${current.mean.toFixed(2)}ms exceeded baseline ${baseMetrics.mean.toFixed(2)}ms by >20%`);
+            }
+            if (current.max > baseMetrics.max * allowedMultiplier) {
+              regressions.push(`Scenario ${id} max ${current.max.toFixed(2)}ms exceeded baseline ${baseMetrics.max.toFixed(2)}ms by >20%`);
+            }
+          }
+
+          if (regressions.length > 0) {
+            console.error('Performance regression detected:');
+            for (const msg of regressions) {
+              console.error(` - ${msg}`);
+            }
+            process.exit(1);
+          }
+          NODE
+
+      - name: Upload bench report
+        uses: actions/upload-artifact@v4
+        with:
+          name: scanner-analyzers-bench
+          path: ${{ env.BENCH_DIR }}/latest.csv
+          retention-days: 7
+
+  deploy:
+    runs-on: ubuntu-22.04
+    needs: [build-test, docs, scanner-perf]
+    if: >-
+      needs.build-test.result == 'success' &&
+      needs.docs.result == 'success' &&
+      needs.scanner-perf.result == 'success' &&
+      (
+        (github.event_name == 'push' && github.ref == 'refs/heads/main') ||
         github.event_name == 'workflow_dispatch'
       )
     environment: staging
diff --git a/.gitea/workflows/docs.yml b/.gitea/workflows/docs.yml
index e73fb71e..3812c428 100755
--- a/.gitea/workflows/docs.yml
+++ b/.gitea/workflows/docs.yml
@@ -49,9 +49,19 @@ jobs:
       - name: Validate event schemas
         run: |
+          set -euo pipefail
           for schema in docs/events/*.json; do
             npx ajv compile -c ajv-formats -s "$schema"
           done
+          for sample in docs/events/samples/*.json; do
+            schema_name=$(basename "$sample" .sample.json)
+            schema_path="docs/events/${schema_name}.json"
+            if [ ! -f "$schema_path" ]; then
+              echo "Missing schema for sample ${sample}" >&2
+              exit 1
+            fi
+            npx ajv validate -c ajv-formats -s "$schema_path" -d "$sample"
+          done

       - name: Setup Python
         uses: actions/setup-python@v5
diff --git a/EXECPLAN.md b/EXECPLAN.md
new file mode 100644
index 00000000..35388cce
--- /dev/null
+++ b/EXECPLAN.md
@@ -0,0 +1,1270 @@
+# Execution Tree for Open Backlog
+Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster tasks by dependency depth; Wave 0 has no unresolved blockers and later waves depend on earlier ones.
+
+## Wave Instructions
+### Wave 0
+- Team Attestor Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Attestor/TASKS.md`. Focus on ATTESTOR-API-11-201 (TODO), ATTESTOR-VERIFY-11-202 (TODO), ATTESTOR-OBS-11-203 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team Authority Core & Security Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/TASKS.md`. Focus on AUTH-DPOP-11-001 (TODO), AUTH-MTLS-11-002 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team Authority Core & Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/TASKS.md`. Focus on AUTHSTORAGE-MONGO-08-001 (BLOCKED). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team DevEx/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-002 (TODO), CLI-RUNTIME-13-005 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001) before starting and report status in module TASKS.md.
+- Team DevOps Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SEC-10-301 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team Diff Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Diff/TASKS.md`. Focus on SCANNER-DIFF-10-501 (TODO), SCANNER-DIFF-10-502 (TODO), SCANNER-DIFF-10-503 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team Docs Guild, Plugin Team: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on DOC4.AUTH-PDG (REVIEW). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team Docs/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001) before starting and report status in module TASKS.md.
+- Team Emit Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Emit/TASKS.md`. Focus on SCANNER-EMIT-10-601 (TODO), SCANNER-EMIT-10-602 (TODO), SCANNER-EMIT-10-603 (TODO), SCANNER-EMIT-10-604 (TODO), SCANNER-EMIT-10-605 (TODO), SCANNER-EMIT-10-606 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md.
+- Team EntryTrace Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.EntryTrace/TASKS.md`. Focus on SCANNER-ENTRYTRACE-10-401 (TODO), SCANNER-ENTRYTRACE-10-402 (TODO), SCANNER-ENTRYTRACE-10-403 (TODO), SCANNER-ENTRYTRACE-10-404 (TODO), SCANNER-ENTRYTRACE-10-405 (TODO), SCANNER-ENTRYTRACE-10-406 (TODO), SCANNER-ENTRYTRACE-10-407 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Language Analyzer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md`, `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-301 (TODO), SCANNER-ANALYZERS-LANG-10-307 (TODO), SCANNER-ANALYZERS-LANG-10-308 (TODO), SCANNER-ANALYZERS-LANG-10-302..309 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Notify Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.Models/TASKS.md`. Focus on NOTIFY-MODELS-15-101 (TODO), NOTIFY-MODELS-15-102 (TODO), NOTIFY-MODELS-15-103 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Notify Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.Storage.Mongo/TASKS.md`. Focus on NOTIFY-STORAGE-15-201 (TODO), NOTIFY-STORAGE-15-202 (TODO), NOTIFY-STORAGE-15-203 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Notify WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-101 (TODO), NOTIFY-WEB-15-102 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Platform Events Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on PLATFORM-EVENTS-09-401 (TODO). Confirm prerequisites (external: DOCS-EVENTS-09-003) before starting and report status in module TASKS.md. +- Team Plugin Platform Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Plugin/TASKS.md`. Focus on PLUGIN-DI-08-001 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Plugin Platform Guild, Authority Core: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Plugin/TASKS.md`. Focus on PLUGIN-DI-08-002 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Policy Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Policy/TASKS.md`. Focus on POLICY-CORE-09-004 (TODO), POLICY-CORE-09-005 (TODO), POLICY-CORE-09-006 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Runtime Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on RUNTIME-GUILD-09-402 (TODO). Confirm prerequisites (external: SCANNER-POLICY-09-107) before starting and report status in module TASKS.md. +- Team Scanner WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-EVENTS-15-201 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-300 (DOING). Confirm prerequisites (external: SAMPLES-10-001) before starting and report status in module TASKS.md. 
+- Team Scheduler Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Models/TASKS.md`. Focus on SCHED-MODELS-16-103 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Scheduler Queue Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Queue/TASKS.md`. Focus on SCHED-QUEUE-16-401 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Scheduler Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. Focus on SCHED-STORAGE-16-201 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-101 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Signer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Signer/TASKS.md`. Focus on SIGNER-API-11-101 (TODO), SIGNER-REF-11-102 (TODO), SIGNER-QUOTA-11-103 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-302C (TODO). Confirm prerequisites (external: SCANNER-ANALYZERS-LANG-10-302B) before starting and report status in module TASKS.md. +- Team Team Connector Resumption – CERT/RedHat: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md`. Focus on FEEDCONN-REDHAT-02-001 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Team Excititor Attestation: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Attestation/TASKS.md`. Focus on EXCITITOR-ATTEST-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-ATTEST-01-002) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Cisco: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-CISCO-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-CISCO-01-002, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-001 (DOING). Confirm prerequisites (external: EXCITITOR-CONN-ABS-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-SUSE-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. 
+- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-UBUNTU-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Export: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-005 (TODO). Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-EXPORT-01-004) before starting and report status in module TASKS.md. +- Team Team Excititor Formats: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Formats.CSAF/TASKS.md`, `src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md`, `src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md`. Focus on EXCITITOR-FMT-CSAF-01-002 (TODO), EXCITITOR-FMT-CSAF-01-003 (TODO), EXCITITOR-FMT-CYCLONE-01-002 (TODO), EXCITITOR-FMT-CYCLONE-01-003 (TODO), EXCITITOR-FMT-OPENVEX-01-002 (TODO), EXCITITOR-FMT-OPENVEX-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CSAF-01-001, EXCITITOR-FMT-CYCLONE-01-001, EXCITITOR-FMT-OPENVEX-01-001, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Storage: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Storage.Mongo/TASKS.md`. Focus on EXCITITOR-STORAGE-MONGO-08-001 (TODO), EXCITITOR-STORAGE-03-001 (TODO). Confirm prerequisites (external: EXCITITOR-STORAGE-01-003, EXCITITOR-STORAGE-02-001) before starting and report status in module TASKS.md. +- Team Team Excititor WebService: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.WebService/TASKS.md`. Focus on EXCITITOR-WEB-01-002 (TODO), EXCITITOR-WEB-01-003 (TODO), EXCITITOR-WEB-01-004 (TODO). Confirm prerequisites (external: EXCITITOR-ATTEST-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-WEB-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Worker: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-002 (TODO), EXCITITOR-WORKER-01-004 (TODO), EXCITITOR-WORKER-02-001 (TODO). Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. +- Team Team Merge & QA Enforcement: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Merge/TASKS.md`. Focus on FEEDMERGE-COORD-02-900 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-MONGO-08-001 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Team WebService & Authority: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md`, `src/StellaOps.Concelier.WebService/TASKS.md`. Focus on SEC2.PLG (DOING), SEC3.PLG (DOING), SEC5.PLG (DOING), PLG4-6.CAPABILITIES (BLOCKED), PLG6.DIAGRAM (TODO), PLG7.RFC (REVIEW), FEEDWEB-DOCS-01-001 (DOING), FEEDWEB-OPS-01-006 (TODO), FEEDWEB-OPS-01-007 (BLOCKED). Confirm prerequisites (none) before starting and report status in module TASKS.md. 
+- Team Tools Guild, BE-Conn-MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.Common/TASKS.md`. Focus on FEEDCONN-SHARED-STATE-003 (**TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team UX Specialist, Angular Eng: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Web/TASKS.md`. Focus on WEB1.TRIVY-SETTINGS (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Zastava Core Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Zastava.Core/TASKS.md`. Focus on ZASTAVA-CORE-12-201 (TODO), ZASTAVA-CORE-12-202 (TODO), ZASTAVA-CORE-12-203 (TODO), ZASTAVA-OPS-12-204 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Zastava Webhook Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Zastava.Webhook/TASKS.md`. Focus on ZASTAVA-WEBHOOK-12-101 (TODO), ZASTAVA-WEBHOOK-12-102 (TODO), ZASTAVA-WEBHOOK-12-103 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. + +### Wave 1 +- Team Bench Guild, Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `bench/TASKS.md`. Focus on BENCH-SCANNER-10-002 (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-301 (Wave 0)) before starting and report status in module TASKS.md. +- Team DevEx/CLI, QA Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-RUNTIME-13-009 (TODO). Confirm prerequisites (internal: CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team DevOps Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-REL-14-001 (TODO). Confirm prerequisites (internal: ATTESTOR-API-11-201 (Wave 0), SIGNER-API-11-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team DevOps Guild, Scanner WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SCANNER-09-204 (TODO). Confirm prerequisites (internal: SCANNER-EVENTS-15-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Emit Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Emit/TASKS.md`. Focus on SCANNER-EMIT-10-607 (TODO), SCANNER-EMIT-17-701 (TODO). Confirm prerequisites (internal: POLICY-CORE-09-005 (Wave 0), SCANNER-EMIT-10-602 (Wave 0), SCANNER-EMIT-10-604 (Wave 0)) before starting and report status in module TASKS.md. +- Team Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-309 (DOING), SCANNER-ANALYZERS-LANG-10-306 (TODO), SCANNER-ANALYZERS-LANG-10-302 (DOING), SCANNER-ANALYZERS-LANG-10-304 (TODO), SCANNER-ANALYZERS-LANG-10-305 (TODO), SCANNER-ANALYZERS-LANG-10-303 (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-301 (Wave 0), SCANNER-ANALYZERS-LANG-10-307 (Wave 0)) before starting and report status in module TASKS.md. +- Team Licensing Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/licensing/TASKS.md`. Focus on DEVOPS-LIC-14-004 (TODO). Confirm prerequisites (internal: AUTH-MTLS-11-002 (Wave 0)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-301 (TODO). 
Confirm prerequisites (internal: NOTIFY-MODELS-15-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team Notify Queue Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Notify.Queue/TASKS.md`. Focus on NOTIFY-QUEUE-15-401 (TODO). Confirm prerequisites (internal: NOTIFY-MODELS-15-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team Notify WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-103 (TODO). Confirm prerequisites (internal: NOTIFY-WEB-15-102 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scanner WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-RUNTIME-12-301 (TODO). Confirm prerequisites (internal: ZASTAVA-CORE-12-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-301 (TODO). Confirm prerequisites (internal: SCANNER-EMIT-10-605 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler Queue Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Queue/TASKS.md`. Focus on SCHED-QUEUE-16-402 (TODO), SCHED-QUEUE-16-403 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. Focus on SCHED-STORAGE-16-203 (TODO), SCHED-STORAGE-16-202 (TODO). Confirm prerequisites (internal: SCHED-STORAGE-16-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-104 (TODO), SCHED-WEB-16-102 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0), SCHED-STORAGE-16-201 (Wave 0), SCHED-WEB-16-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-201 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-305A (TODO), SCANNER-ANALYZERS-LANG-10-304A (TODO), SCANNER-ANALYZERS-LANG-10-307N (TODO), SCANNER-ANALYZERS-LANG-10-303A (TODO), SCANNER-ANALYZERS-LANG-10-306A (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-302C (Wave 0), SCANNER-ANALYZERS-LANG-10-307 (Wave 0)) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-MS-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. 
+- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-001 (Wave 0); external: EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-SUSE-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-UBUNTU-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Export: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-006 (TODO). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-005 (Wave 0), POLICY-CORE-09-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team Team Excititor Worker: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-ATTEST-01-003 (Wave 0); external: EXCITITOR-EXPORT-01-002, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. +- Team UI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.UI/TASKS.md`. Focus on UI-ATTEST-11-005 (TODO), UI-VEX-13-003 (TODO), UI-POLICY-13-007 (TODO), UI-ADMIN-13-004 (TODO), UI-AUTH-13-001 (TODO), UI-SCANS-13-002 (TODO), UI-NOTIFY-13-006 (TODO), UI-SCHED-13-005 (TODO). Confirm prerequisites (internal: ATTESTOR-API-11-201 (Wave 0), AUTH-DPOP-11-001 (Wave 0), AUTH-MTLS-11-002 (Wave 0), EXCITITOR-EXPORT-01-005 (Wave 0), NOTIFY-WEB-15-101 (Wave 0), POLICY-CORE-09-006 (Wave 0), SCHED-WEB-16-101 (Wave 0), SIGNER-API-11-101 (Wave 0); external: EXCITITOR-CORE-02-001, SCANNER-WEB-09-102, SCANNER-WEB-09-103) before starting and report status in module TASKS.md. +- Team Zastava Observer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Zastava.Observer/TASKS.md`. Focus on ZASTAVA-OBS-12-001 (TODO). Confirm prerequisites (internal: ZASTAVA-CORE-12-201 (Wave 0)) before starting and report status in module TASKS.md. + +### Wave 2 +- Team Bench Guild, Notify Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `bench/TASKS.md`. Focus on BENCH-NOTIFY-15-001 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Bench Guild, Scheduler Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `bench/TASKS.md`. Focus on BENCH-IMPACT-16-001 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Deployment Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/deployment/TASKS.md`. Focus on DEVOPS-OPS-14-003 (TODO). Confirm prerequisites (internal: DEVOPS-REL-14-001 (Wave 1)) before starting and report status in module TASKS.md. 
+- Team DevOps Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-MIRROR-08-001 (TODO), DEVOPS-PERF-10-002 (TODO), DEVOPS-REL-17-002 (TODO). Confirm prerequisites (internal: BENCH-SCANNER-10-002 (Wave 1), DEVOPS-REL-14-001 (Wave 1), SCANNER-EMIT-17-701 (Wave 1)) before starting and report status in module TASKS.md. +- Team DevOps Guild, Notify Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SCANNER-09-205 (TODO). Confirm prerequisites (internal: DEVOPS-SCANNER-09-204 (Wave 1)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-302 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Notify Queue Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Notify.Queue/TASKS.md`. Focus on NOTIFY-QUEUE-15-403 (TODO), NOTIFY-QUEUE-15-402 (TODO). Confirm prerequisites (internal: NOTIFY-QUEUE-15-401 (Wave 1)) before starting and report status in module TASKS.md. +- Team Notify WebService Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-104 (TODO). Confirm prerequisites (internal: NOTIFY-QUEUE-15-401 (Wave 1), NOTIFY-STORAGE-15-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Notify Worker Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-201 (TODO), NOTIFY-WORKER-15-202 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1), NOTIFY-QUEUE-15-401 (Wave 1)) before starting and report status in module TASKS.md. +- Team Offline Kit Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/offline-kit/TASKS.md`. Focus on DEVOPS-OFFLINE-14-002 (TODO). Confirm prerequisites (internal: DEVOPS-REL-14-001 (Wave 1)) before starting and report status in module TASKS.md. +- Team Samples Guild, Policy Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `samples/TASKS.md`. Focus on SAMPLES-13-004 (TODO). Confirm prerequisites (internal: POLICY-CORE-09-006 (Wave 0), UI-POLICY-13-007 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scanner WebService Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-RUNTIME-12-302 (TODO). Confirm prerequisites (internal: SCANNER-RUNTIME-12-301 (Wave 1), ZASTAVA-CORE-12-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-303 (TODO), SCHED-IMPACT-16-302 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-103 (TODO). Confirm prerequisites (internal: SCHED-WEB-16-102 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-202 (TODO), SCHED-WORKER-16-205 (TODO). 
Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1), SCHED-WORKER-16-201 (Wave 1)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-305B (TODO), SCANNER-ANALYZERS-LANG-10-304B (TODO), SCANNER-ANALYZERS-LANG-10-308N (TODO), SCANNER-ANALYZERS-LANG-10-303B (TODO), SCANNER-ANALYZERS-LANG-10-306B (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303A (Wave 1), SCANNER-ANALYZERS-LANG-10-304A (Wave 1), SCANNER-ANALYZERS-LANG-10-305A (Wave 1), SCANNER-ANALYZERS-LANG-10-306A (Wave 1), SCANNER-ANALYZERS-LANG-10-307N (Wave 1)) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-002 (Wave 1); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Export: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-007 (TODO). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-006 (Wave 1)) before starting and report status in module TASKS.md. +- Team Zastava Observer Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Zastava.Observer/TASKS.md`. Focus on ZASTAVA-OBS-12-002 (TODO). Confirm prerequisites (internal: ZASTAVA-OBS-12-001 (Wave 1)) before starting and report status in module TASKS.md. + +### Wave 3 +- Team DevEx/CLI: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-OFFLINE-13-006 (TODO). Confirm prerequisites (internal: DEVOPS-OFFLINE-14-002 (Wave 2)) before starting and report status in module TASKS.md. +- Team DevEx/CLI, Scanner WebService Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-RUNTIME-13-008 (TODO). Confirm prerequisites (internal: SCANNER-RUNTIME-12-302 (Wave 2)) before starting and report status in module TASKS.md. +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-001 (TODO). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-007 (Wave 2)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-303 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. +- Team Notify Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-203 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-203 (TODO). 
Confirm prerequisites (internal: SCHED-WORKER-16-202 (Wave 2)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-305C (TODO), SCANNER-ANALYZERS-LANG-10-304C (TODO), SCANNER-ANALYZERS-LANG-10-309N (TODO), SCANNER-ANALYZERS-LANG-10-303C (TODO), SCANNER-ANALYZERS-LANG-10-306C (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303B (Wave 2), SCANNER-ANALYZERS-LANG-10-304B (Wave 2), SCANNER-ANALYZERS-LANG-10-305B (Wave 2), SCANNER-ANALYZERS-LANG-10-306B (Wave 2), SCANNER-ANALYZERS-LANG-10-308N (Wave 2)) before starting and report status in module TASKS.md. +- Team Zastava Observer Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Zastava.Observer/TASKS.md`. Focus on ZASTAVA-OBS-12-003 (TODO), ZASTAVA-OBS-12-004 (TODO), ZASTAVA-OBS-17-005 (TODO). Confirm prerequisites (internal: ZASTAVA-OBS-12-002 (Wave 2)) before starting and report status in module TASKS.md. + +### Wave 4 +- Team DevEx/CLI: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-PLUGIN-13-007 (TODO). Confirm prerequisites (internal: CLI-OFFLINE-13-006 (Wave 3), CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team Docs Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `docs/TASKS.md`. Focus on DOCS-RUNTIME-17-004 (TODO). Confirm prerequisites (internal: DEVOPS-REL-17-002 (Wave 2), SCANNER-EMIT-17-701 (Wave 1), ZASTAVA-OBS-17-005 (Wave 3)) before starting and report status in module TASKS.md. +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-001 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Connectors Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-501 (TODO), NOTIFY-CONN-TEAMS-15-601 (TODO), NOTIFY-CONN-EMAIL-15-701 (TODO), NOTIFY-CONN-WEBHOOK-15-801 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-304 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-204 (TODO). Confirm prerequisites (internal: NOTIFY-WORKER-15-203 (Wave 3)) before starting and report status in module TASKS.md. +- Team Policy Guild, Scanner WebService Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Policy/TASKS.md`. Focus on POLICY-RUNTIME-17-201 (TODO). 
Confirm prerequisites (internal: ZASTAVA-OBS-17-005 (Wave 3)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-204 (TODO). Confirm prerequisites (internal: SCHED-WORKER-16-203 (Wave 3)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-307D (TODO), SCANNER-ANALYZERS-LANG-10-307G (TODO), SCANNER-ANALYZERS-LANG-10-307P (TODO), SCANNER-ANALYZERS-LANG-10-307R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303C (Wave 3), SCANNER-ANALYZERS-LANG-10-304C (Wave 3), SCANNER-ANALYZERS-LANG-10-305C (Wave 3), SCANNER-ANALYZERS-LANG-10-306C (Wave 3)) before starting and report status in module TASKS.md. + +### Wave 5 +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-002 (Wave 4)) before starting and report status in module TASKS.md. +- Team Notify Connectors Guild: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-502 (TODO), NOTIFY-CONN-TEAMS-15-602 (TODO), NOTIFY-CONN-EMAIL-15-702 (TODO), NOTIFY-CONN-WEBHOOK-15-802 (TODO). Confirm prerequisites (internal: NOTIFY-CONN-EMAIL-15-701 (Wave 4), NOTIFY-CONN-SLACK-15-501 (Wave 4), NOTIFY-CONN-TEAMS-15-601 (Wave 4), NOTIFY-CONN-WEBHOOK-15-801 (Wave 4)) before starting and report status in module TASKS.md. +- Team Scanner WebService Guild: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-RUNTIME-17-401 (TODO). Confirm prerequisites (internal: POLICY-RUNTIME-17-201 (Wave 4), SCANNER-EMIT-17-701 (Wave 1), SCANNER-RUNTIME-12-301 (Wave 1), ZASTAVA-OBS-17-005 (Wave 3)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-308D (TODO), SCANNER-ANALYZERS-LANG-10-308G (TODO), SCANNER-ANALYZERS-LANG-10-308P (TODO), SCANNER-ANALYZERS-LANG-10-308R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-307D (Wave 4), SCANNER-ANALYZERS-LANG-10-307G (Wave 4), SCANNER-ANALYZERS-LANG-10-307P (Wave 4), SCANNER-ANALYZERS-LANG-10-307R (Wave 4)) before starting and report status in module TASKS.md. + +### Wave 6 +- Team Notify Connectors Guild: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. 
Focus on NOTIFY-CONN-SLACK-15-503 (TODO), NOTIFY-CONN-TEAMS-15-603 (TODO), NOTIFY-CONN-EMAIL-15-703 (TODO), NOTIFY-CONN-WEBHOOK-15-803 (TODO). Confirm prerequisites (internal: NOTIFY-CONN-EMAIL-15-702 (Wave 5), NOTIFY-CONN-SLACK-15-502 (Wave 5), NOTIFY-CONN-TEAMS-15-602 (Wave 5), NOTIFY-CONN-WEBHOOK-15-802 (Wave 5)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-309D (TODO), SCANNER-ANALYZERS-LANG-10-309G (TODO), SCANNER-ANALYZERS-LANG-10-309P (TODO), SCANNER-ANALYZERS-LANG-10-309R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-308D (Wave 5), SCANNER-ANALYZERS-LANG-10-308G (Wave 5), SCANNER-ANALYZERS-LANG-10-308P (Wave 5), SCANNER-ANALYZERS-LANG-10-308R (Wave 5)) before starting and report status in module TASKS.md. + +### Wave 7 +- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 7 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-001 (TODO). Confirm prerequisites (internal: FEEDSTORAGE-DATA-07-001 (Wave 10)) before starting and report status in module TASKS.md. + +### Wave 8 +- Team Team Core Engine & Data Science: read EXECPLAN.md Wave 8 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-002 (TODO). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md. + +### Wave 9 +- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 9 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-003 (TODO). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md. + +### Wave 10 +- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 10 and SPRINTS.md rows for `src/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-DATA-07-001 (TODO). Confirm prerequisites (internal: FEEDMERGE-ENGINE-07-001 (Wave 11)) before starting and report status in module TASKS.md. + +### Wave 11 +- Team BE-Merge: read EXECPLAN.md Wave 11 and SPRINTS.md rows for `src/StellaOps.Concelier.Merge/TASKS.md`. Focus on FEEDMERGE-ENGINE-07-001 (TODO). Confirm prerequisites (internal: FEEDSTORAGE-DATA-07-001 (Wave 10)) before starting and report status in module TASKS.md. + +### Wave 12 +- Team Concelier Export Guild: read EXECPLAN.md Wave 12 and SPRINTS.md rows for `src/StellaOps.Concelier.Exporter.Json/TASKS.md`. Focus on CONCELIER-EXPORT-08-201 (TODO). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md. + +### Wave 13 +- Team Concelier Export Guild: read EXECPLAN.md Wave 13 and SPRINTS.md rows for `src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md`. Focus on CONCELIER-EXPORT-08-202 (TODO). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md. + +### Wave 14 +- Team Concelier WebService Guild: read EXECPLAN.md Wave 14 and SPRINTS.md rows for `src/StellaOps.Concelier.WebService/TASKS.md`. Focus on CONCELIER-WEB-08-201 (TODO). 
Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12), DEVOPS-MIRROR-08-001 (Wave 2)) before starting and report status in module TASKS.md. + +### Wave 15 +- Team BE-Conn-Stella: read EXECPLAN.md Wave 15 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-001 (TODO). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md. + +### Wave 16 +- Team BE-Conn-Stella: read EXECPLAN.md Wave 16 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-002 (TODO). Confirm prerequisites (internal: FEEDCONN-STELLA-08-001 (Wave 15)) before starting and report status in module TASKS.md. + +### Wave 17 +- Team BE-Conn-Stella: read EXECPLAN.md Wave 17 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-003 (TODO). Confirm prerequisites (internal: FEEDCONN-STELLA-08-002 (Wave 16)) before starting and report status in module TASKS.md. + +## Wave 0 — 98 task(s) ready now +- **Sprint 1** · Backlog + - Team: UX Specialist, Angular Eng + - Path: `src/StellaOps.Web/TASKS.md` + 1. [TODO] WEB1.TRIVY-SETTINGS — Implement Trivy DB exporter settings panel with `publishFull`, `publishDelta`, `includeFull`, `includeDelta` toggles and “Run export now” action using future `/exporters/trivy-db/settings` API. + • Prereqs: — + • Current: TODO +- **Sprint 1** · Developer Tooling + - Team: DevEx/CLI + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] EXCITITOR-CLI-01-002 — EXCITITOR-CLI-01-002 – Export download & attestation UX + • Prereqs: EXCITITOR-CLI-01-001 (external/completed), EXCITITOR-EXPORT-01-001 (external/completed) + • Current: TODO – Display export metadata (sha256, size, Rekor link), support optional artifact download path, and handle cache hits gracefully. + - Team: Docs/CLI + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] EXCITITOR-CLI-01-003 — EXCITITOR-CLI-01-003 – CLI docs & examples for Excititor + • Prereqs: EXCITITOR-CLI-01-001 (external/completed) + • Current: TODO – Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow. +- **Sprint 1** · Stabilize In-Progress Foundations + - Team: Team Connector Resumption – CERT/RedHat + - Path: `src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md` + 1. [DOING] FEEDCONN-REDHAT-02-001 — Fixture validation sweep — Instructions to work: — Regenerating RHSA fixtures awaits remaining range provenance patches; review snapshot diffs and update docs once upstream helpers land. Conflict resolver deltas logged in src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. + • Prereqs: — + • Current: DOING (2025-10-10) + - Team: Team WebService & Authority + - Path: `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md` + 1. [DOING] SEC2.PLG — Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`; Serilog enrichment complete, storage durability tests in flight. + • Prereqs: — + • Current: DOING (2025-10-14) + 2. [DOING] SEC3.PLG — Ensure lockout responses carry rate-limit metadata through plugin logs/events; retry-after propagation and limiter tests underway. + • Prereqs: — + • Current: DOING (2025-10-14) + 3. 
[DOING] SEC5.PLG — Address plugin-specific mitigations in threat model backlog; mitigation items tracked, docs updates pending. + • Prereqs: — + • Current: DOING (2025-10-14) + 4. [BLOCKED] PLG4-6.CAPABILITIES — Finalise capability metadata exposure and docs once Authority rate-limiter stream (CORE8/SEC3) is stable; awaiting dependency unblock. + • Prereqs: — + • Current: BLOCKED (2025-10-12) + 5. [TODO] PLG6.DIAGRAM — Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. + • Prereqs: — + • Current: TODO + 6. [REVIEW] PLG7.RFC — Socialize LDAP plugin RFC and capture guild feedback; awaiting final review sign-off and follow-up issue tracking. + • Prereqs: — + • Current: REVIEW (2025-10-13) + - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + 1. [DOING] FEEDWEB-DOCS-01-001 — Document authority toggle & scope requirements — Quickstart updates are staged; awaiting Docs guild review before publishing operator guide refresh. + • Prereqs: — + • Current: DOING (2025-10-10) + 2. [TODO] FEEDWEB-OPS-01-006 — Rename plugin drop directory to namespaced path — Repoint build outputs to `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`, update PluginHost defaults, Offline Kit packaging, and operator docs. + • Prereqs: — + • Current: TODO + 3. [BLOCKED] FEEDWEB-OPS-01-007 — Authority resilience adoption — Roll out retry/offline knobs to deployment docs and align CLI parity once LIB5 resilience options land; unblock when library release is available and docs review completes. + • Prereqs: — + • Current: BLOCKED (2025-10-10) +- **Sprint 2** · Connector & Data Implementation Wave + - Team: Docs Guild, Plugin Team + - Path: `docs/TASKS.md` + 1. [REVIEW] DOC4.AUTH-PDG — Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. + • Prereqs: — + • Current: REVIEW + - Team: Team Merge & QA Enforcement + - Path: `src/StellaOps.Concelier.Merge/TASKS.md` + 1. [DOING] FEEDMERGE-COORD-02-900 — Range primitives rollout coordination — Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical range primitives with provenance tags; fixtures tracked in `RANGE_PRIMITIVES_COORDINATION.md`. + • Prereqs: — + • Current: DOING (2025-10-12) +- **Sprint 3** · Backlog + - Team: Tools Guild, BE-Conn-MSRC + - Path: `src/StellaOps.Concelier.Connector.Common/TASKS.md` + 1. [**TODO] FEEDCONN-SHARED-STATE-003 — FEEDCONN-SHARED-STATE-003 Source state seeding helper + • Prereqs: — + • Current: **TODO (2025-10-15)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. +- **Sprint 5** · Excititor Core Foundations + - Team: Team Excititor Attestation + - Path: `src/StellaOps.Excititor.Attestation/TASKS.md` + 1. [TODO] EXCITITOR-ATTEST-01-003 — EXCITITOR-ATTEST-01-003 – Verification suite & observability + • Prereqs: EXCITITOR-ATTEST-01-002 (external/completed) + • Current: TODO – Add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression tests. + - Team: Team Excititor WebService + - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + 1. 
[TODO] EXCITITOR-WEB-01-002 — EXCITITOR-WEB-01-002 – Ingest & reconcile endpoints + • Prereqs: EXCITITOR-WEB-01-001 (external/completed) + • Current: TODO – Implement `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile` with token scope enforcement and structured run telemetry. + 2. [TODO] EXCITITOR-WEB-01-003 — EXCITITOR-WEB-01-003 – Export & verify endpoints + • Prereqs: EXCITITOR-WEB-01-001 (external/completed), EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-ATTEST-01-001 (external/completed) + • Current: TODO – Add `/excititor/export`, `/excititor/export/{id}`, `/excititor/export/{id}/download`, `/excititor/verify`, returning artifact + attestation metadata with cache awareness. +- **Sprint 6** · Excititor Ingest & Formats + - Team: Team Excititor Connectors – Cisco + - Path: `src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-CISCO-01-003 — EXCITITOR-CONN-CISCO-01-003 – Provider trust metadata + • Prereqs: EXCITITOR-CONN-CISCO-01-002 (external/completed), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Emit cosign/PGP trust metadata and advisory provenance hints for policy weighting. + - Team: Team Excititor Connectors – MSRC + - Path: `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-MS-01-002 — EXCITITOR-CONN-MS-01-002 – CSAF download pipeline + • Prereqs: EXCITITOR-CONN-MS-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) + • Current: TODO – Fetch CSAF packages with retry/backoff, checksum verification, and raw document persistence plus quarantine for schema failures. + - Team: Team Excititor Connectors – Oracle + - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + 1. [DOING] EXCITITOR-CONN-ORACLE-01-001 — EXCITITOR-CONN-ORACLE-01-001 – Oracle CSAF catalogue discovery + • Prereqs: EXCITITOR-CONN-ABS-01-001 (external/completed) + • Current: DOING (2025-10-17) – Implement catalogue discovery, CPU calendar awareness, and offline snapshot import for Oracle CSAF feeds. + - Team: Team Excititor Connectors – SUSE + - Path: `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` + 1. [TODO] EXCITITOR-CONN-SUSE-01-002 — EXCITITOR-CONN-SUSE-01-002 – Checkpointed event ingestion + • Prereqs: EXCITITOR-CONN-SUSE-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) + • Current: TODO – Process hub events with resume checkpoints, deduplication, and quarantine path for malformed payloads. + - Team: Team Excititor Connectors – Ubuntu + - Path: `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-UBUNTU-01-002 — EXCITITOR-CONN-UBUNTU-01-002 – Incremental fetch & deduplication + • Prereqs: EXCITITOR-CONN-UBUNTU-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) + • Current: TODO – Fetch CSAF bundles with ETag handling, checksum validation, deduplication, and raw persistence. + - Team: Team Excititor Formats + - Path: `src/StellaOps.Excititor.Formats.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-FMT-CSAF-01-002 — EXCITITOR-FMT-CSAF-01-002 – Status/justification mapping + • Prereqs: EXCITITOR-FMT-CSAF-01-001 (external/completed), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Normalize CSAF `product_status` + `justification` values into policy-aware enums with audit diagnostics for unsupported codes. + 2. 
[TODO] EXCITITOR-FMT-CSAF-01-003 — EXCITITOR-FMT-CSAF-01-003 – CSAF export adapter + • Prereqs: EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-FMT-CSAF-01-001 (external/completed) + • Current: TODO – Provide CSAF export writer producing deterministic documents (per vuln/product) and manifest metadata for attestation. + - Path: `src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md` + 1. [TODO] EXCITITOR-FMT-CYCLONE-01-002 — EXCITITOR-FMT-CYCLONE-01-002 – Component reference reconciliation + • Prereqs: EXCITITOR-FMT-CYCLONE-01-001 (external/completed) + • Current: TODO – Implement helpers to reconcile component/service references against policy expectations and emit diagnostics for missing SBOM links. + 2. [TODO] EXCITITOR-FMT-CYCLONE-01-003 — EXCITITOR-FMT-CYCLONE-01-003 – CycloneDX export serializer + • Prereqs: EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-FMT-CYCLONE-01-001 (external/completed) + • Current: TODO – Provide exporters producing CycloneDX VEX output with canonical ordering and hash-stable manifests. + - Path: `src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md` + 1. [TODO] EXCITITOR-FMT-OPENVEX-01-002 — EXCITITOR-FMT-OPENVEX-01-002 – Statement merge utilities + • Prereqs: EXCITITOR-FMT-OPENVEX-01-001 (external/completed) + • Current: TODO – Add reducers merging multiple OpenVEX statements, resolving conflicts deterministically, and emitting policy diagnostics. + 2. [TODO] EXCITITOR-FMT-OPENVEX-01-003 — EXCITITOR-FMT-OPENVEX-01-003 – OpenVEX export writer + • Prereqs: EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-FMT-OPENVEX-01-001 (external/completed) + • Current: TODO – Provide export serializer generating canonical OpenVEX documents with optional SBOM references and hash-stable ordering. + - Team: Team Excititor Worker + - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + 1. [TODO] EXCITITOR-WORKER-01-002 — EXCITITOR-WORKER-01-002 – Resume tokens & retry policy + • Prereqs: EXCITITOR-WORKER-01-001 (external/completed) + • Current: TODO – Implement durable resume markers, exponential backoff with jitter, and quarantine for failing connectors per architecture spec. +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Excititor Export + - Path: `src/StellaOps.Excititor.Export/TASKS.md` + 1. [TODO] EXCITITOR-EXPORT-01-005 — EXCITITOR-EXPORT-01-005 – Score & resolve envelope surfaces + • Prereqs: EXCITITOR-EXPORT-01-004 (external/completed), EXCITITOR-CORE-02-001 (external/completed) + • Current: TODO – Emit consensus+score envelopes in export manifests, include policy/scoring digests, and update offline bundle/ORAS layouts to carry signed VEX responses. + - Team: Team Excititor WebService + - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + 1. [TODO] EXCITITOR-WEB-01-004 — Resolve API & signed responses – expose `/excititor/resolve`, return signed consensus/score envelopes, document auth. + • Prereqs: — + • Current: TODO + - Team: Team Excititor Worker + - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + 1. [TODO] EXCITITOR-WORKER-01-004 — EXCITITOR-WORKER-01-004 – TTL refresh & stability damper + • Prereqs: EXCITITOR-WORKER-01-001 (external/completed), EXCITITOR-CORE-02-001 (external/completed) + • Current: TODO – Monitor consensus/VEX TTLs, apply 24–48h dampers before flipping published status/score, and trigger re-resolve when base image or kernel fingerprints change. +- **Sprint 8** · Mongo strengthening + - Team: Authority Core & Storage Guild + - Path: `src/StellaOps.Authority/TASKS.md` + 1. 
[BLOCKED] AUTHSTORAGE-MONGO-08-001 — Harden Authority Mongo usage — Scoped sessions with causal consistency pending rate-limiter stream updates; resume once plugin lockout telemetry stabilises. + • Prereqs: — + • Current: BLOCKED (2025-10-19) + - Team: Team Excititor Storage + - Path: `src/StellaOps.Excititor.Storage.Mongo/TASKS.md` + 1. [TODO] EXCITITOR-STORAGE-MONGO-08-001 — EXCITITOR-STORAGE-MONGO-08-001 – Session + causal consistency hardening + • Prereqs: EXCITITOR-STORAGE-01-003 (external/completed) + • Current: TODO – Register Mongo client/database with majority read/write concerns, expose scoped session helper enabling causal consistency, thread session handles through raw/export/consensus/cache stores (including GridFS reads), and extend integration tests to verify read-your-write semantics during replica-set failover. + - Team: Team Normalization & Storage Backbone + - Path: `src/StellaOps.Concelier.Storage.Mongo/TASKS.md` + 1. [TODO] FEEDSTORAGE-MONGO-08-001 — Causal-consistent Concelier storage sessions — Ensure `AddMongoStorage` registers a scoped session facilitator (causal consistency + majority concerns), update repositories to accept optional session handles, and add integration coverage proving read-your-write and monotonic reads across a replica set/election scenario. + • Prereqs: — + • Current: TODO +- **Sprint 8** · Platform Maintenance + - Team: Team Excititor Storage + - Path: `src/StellaOps.Excititor.Storage.Mongo/TASKS.md` + 1. [TODO] EXCITITOR-STORAGE-03-001 — EXCITITOR-STORAGE-03-001 – Statement backfill tooling + • Prereqs: EXCITITOR-STORAGE-02-001 (external/completed) + • Current: TODO – Provide CLI/scripted tooling to replay historical statements into `vex.statements` (leveraging `/excititor/statements`), document operational runbook, and add smoke test verifying replayed data includes severity/KEV/EPSS signals. + - Team: Team Excititor Worker + - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + 1. [TODO] EXCITITOR-WORKER-02-001 — EXCITITOR-WORKER-02-001 – Resolve Microsoft.Extensions.Caching.Memory advisory + • Prereqs: EXCITITOR-WORKER-01-001 (external/completed) + • Current: TODO – Bump `Microsoft.Extensions.Caching.Memory` (and related packages) to the latest .NET 10 preview, regenerate lockfiles, and re-run worker/webservice tests to clear NU1903 high severity warning. +- **Sprint 8** · Plugin Infrastructure + - Team: Plugin Platform Guild + - Path: `src/StellaOps.Plugin/TASKS.md` + 1. [TODO] PLUGIN-DI-08-001 — Scoped service support in plugin bootstrap — Teach the plugin loader/registrar to surface services with scoped lifetimes, honour `StellaOps.DependencyInjection` metadata, and document the new contract. + • Prereqs: — + • Current: TODO + - Team: Plugin Platform Guild, Authority Core + - Path: `src/StellaOps.Plugin/TASKS.md` + 1. [TODO] PLUGIN-DI-08-002 — Update Authority plugin integration — Flow scoped services through identity-provider registrars, bootstrap flows, and background jobs; add regression coverage around scoped lifetimes. + • Prereqs: — + • Current: TODO +- **Sprint 9** · Docs & Governance + - Team: Platform Events Guild + - Path: `docs/TASKS.md` + 1. [TODO] PLATFORM-EVENTS-09-401 — Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. + • Prereqs: DOCS-EVENTS-09-003 (external/completed) + • Current: TODO + - Team: Runtime Guild + - Path: `docs/TASKS.md` + 1. 
[TODO] RUNTIME-GUILD-09-402 — Confirm Scanner WebService surfaces `quietedFindingCount` and progress hints to runtime consumers; document readiness checklist. + • Prereqs: SCANNER-POLICY-09-107 (external/completed) + • Current: TODO +- **Sprint 9** · Policy Foundations + - Team: Policy Guild + - Path: `src/StellaOps.Policy/TASKS.md` + 1. [TODO] POLICY-CORE-09-004 — Versioned scoring config with schema validation, trust table, and golden fixtures. + • Prereqs: — + • Current: TODO + 2. [TODO] POLICY-CORE-09-005 — Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. + • Prereqs: — + • Current: TODO + 3. [TODO] POLICY-CORE-09-006 — Unknown state & confidence decay – deterministic bands surfaced in policy outputs. + • Prereqs: — + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-302C — Surface script metadata (postinstall/preinstall) and policy hints; emit telemetry counters and evidence records. + • Prereqs: SCANNER-ANALYZERS-LANG-10-302B (external/completed) + • Current: TODO +- **Sprint 10** · DevOps Perf + - Team: DevOps Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-SEC-10-301 — Address NU1902/NU1903 advisories for `MongoDB.Driver` 2.12.0 and `SharpCompress` 0.23.0 surfaced during scanner cache and worker test runs. + • Prereqs: — + • Current: TODO +- **Sprint 10** · Scanner Analyzers & SBOM + - Team: Diff Guild + - Path: `src/StellaOps.Scanner.Diff/TASKS.md` + 1. [TODO] SCANNER-DIFF-10-501 — Build component differ tracking add/remove/version changes with deterministic ordering. + • Prereqs: — + • Current: TODO + 2. [TODO] SCANNER-DIFF-10-502 — Attribute diffs to introducing/removing layers including provenance evidence. + • Prereqs: — + • Current: TODO + 3. [TODO] SCANNER-DIFF-10-503 — Produce JSON diff output for inventory vs usage views aligned with API contract. + • Prereqs: — + • Current: TODO + - Team: Emit Guild + - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + 1. [TODO] SCANNER-EMIT-10-601 — Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments. + • Prereqs: — + • Current: TODO + 2. [TODO] SCANNER-EMIT-10-602 — Compose usage SBOM leveraging EntryTrace to flag actual usage. + • Prereqs: — + • Current: TODO + 3. [TODO] SCANNER-EMIT-10-603 — Generate BOM index sidecar (purl table + roaring bitmap + usage flag). + • Prereqs: — + • Current: TODO + 4. [TODO] SCANNER-EMIT-10-604 — Package artifacts for export + attestation with deterministic manifests. + • Prereqs: — + • Current: TODO + 5. [TODO] SCANNER-EMIT-10-605 — Emit BOM-Index sidecar schema/fixtures (CRITICAL PATH for SP16). + • Prereqs: — + • Current: TODO + 6. [TODO] SCANNER-EMIT-10-606 — Usage view bit flags integrated with EntryTrace. + • Prereqs: — + • Current: TODO + - Team: EntryTrace Guild + - Path: `src/StellaOps.Scanner.EntryTrace/TASKS.md` + 1. [TODO] SCANNER-ENTRYTRACE-10-401 — POSIX shell AST parser with deterministic output. + • Prereqs: — + • Current: TODO + 2. [TODO] SCANNER-ENTRYTRACE-10-402 — Command resolution across layered rootfs with evidence attribution. + • Prereqs: — + • Current: TODO + 3. [TODO] SCANNER-ENTRYTRACE-10-403 — Interpreter tracing for shell wrappers to Python/Node/Java launchers. + • Prereqs: — + • Current: TODO + 4. [TODO] SCANNER-ENTRYTRACE-10-404 — Python entry analyzer (venv shebang, module invocation, usage flag). + • Prereqs: — + • Current: TODO + 5. 
[TODO] SCANNER-ENTRYTRACE-10-405 — Node/Java launcher analyzer capturing script/jar targets. + • Prereqs: — + • Current: TODO + 6. [TODO] SCANNER-ENTRYTRACE-10-406 — Explainability + diagnostics for unresolved constructs with metrics. + • Prereqs: — + • Current: TODO + 7. [TODO] SCANNER-ENTRYTRACE-10-407 — Package EntryTrace analyzers as restart-time plug-ins (manifest + host registration). + • Prereqs: — + • Current: TODO + - Team: Language Analyzer Guild + - Path: `src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-302..309 — Detailed per-language sprint plan (Node, Python, Go, .NET, Rust) with gates and benchmarks. + • Prereqs: — + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-301 — Java analyzer emitting `pkg:maven` with provenance. + • Prereqs: — + • Current: TODO + 2. [TODO] SCANNER-ANALYZERS-LANG-10-307 — Shared language evidence helpers + usage flag propagation. + • Prereqs: — + • Current: TODO + 3. [TODO] SCANNER-ANALYZERS-LANG-10-308 — Determinism + fixture harness for language analyzers. + • Prereqs: — + • Current: TODO +- **Sprint 11** · Signing Chain Bring-up + - Team: Attestor Guild + - Path: `src/StellaOps.Attestor/TASKS.md` + 1. [TODO] ATTESTOR-API-11-201 — `/rekor/entries` submission pipeline with dedupe, proof acquisition, and persistence. + • Prereqs: — + • Current: TODO + 2. [TODO] ATTESTOR-VERIFY-11-202 — `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. + • Prereqs: — + • Current: TODO + 3. [TODO] ATTESTOR-OBS-11-203 — Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. + • Prereqs: — + • Current: TODO + - Team: Authority Core & Security Guild + - Path: `src/StellaOps.Authority/TASKS.md` + 1. [TODO] AUTH-DPOP-11-001 — Implement DPoP proof validation + nonce handling for high-value audiences per architecture. + • Prereqs: — + • Current: TODO + 2. [TODO] AUTH-MTLS-11-002 — Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. + • Prereqs: — + • Current: TODO + - Team: Signer Guild + - Path: `src/StellaOps.Signer/TASKS.md` + 1. [TODO] SIGNER-API-11-101 — `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. + • Prereqs: — + • Current: TODO + 2. [TODO] SIGNER-REF-11-102 — `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. + • Prereqs: — + • Current: TODO + 3. [TODO] SIGNER-QUOTA-11-103 — Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. + • Prereqs: — + • Current: TODO +- **Sprint 12** · Runtime Guardrails + - Team: Zastava Core Guild + - Path: `src/StellaOps.Zastava.Core/TASKS.md` + 1. [TODO] ZASTAVA-CORE-12-201 — Define runtime event/admission DTOs, hashing helpers, and versioning strategy. + • Prereqs: — + • Current: TODO + 2. [TODO] ZASTAVA-CORE-12-202 — Provide configuration/logging/metrics utilities shared by Observer/Webhook. + • Prereqs: — + • Current: TODO + 3. [TODO] ZASTAVA-CORE-12-203 — Authority client helpers, OpTok caching, and security guardrails for runtime services. + • Prereqs: — + • Current: TODO + 4. [TODO] ZASTAVA-OPS-12-204 — Operational runbooks, alert rules, and dashboard exports for runtime plane. + • Prereqs: — + • Current: TODO + - Team: Zastava Webhook Guild + - Path: `src/StellaOps.Zastava.Webhook/TASKS.md` + 1. [TODO] ZASTAVA-WEBHOOK-12-101 — Admission controller host with TLS bootstrap and Authority auth. 
+ • Prereqs: — + • Current: TODO + 2. [TODO] ZASTAVA-WEBHOOK-12-102 — Query Scanner `/policy/runtime`, resolve digests, enforce verdicts. + • Prereqs: — + • Current: TODO + 3. [TODO] ZASTAVA-WEBHOOK-12-103 — Caching, fail-open/closed toggles, metrics/logging for admission decisions. + • Prereqs: — + • Current: TODO +- **Sprint 13** · UX & CLI Experience + - Team: DevEx/CLI + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] CLI-RUNTIME-13-005 — Add runtime policy test verbs that consume `/policy/runtime` and display verdicts. + • Prereqs: — + • Current: TODO +- **Sprint 15** · Notify Foundations + - Team: Notify Models Guild + - Path: `src/StellaOps.Notify.Models/TASKS.md` + 1. [TODO] NOTIFY-MODELS-15-101 — Define core Notify DTOs, validation helpers, canonical serialization. + • Prereqs: — + • Current: TODO + 2. [TODO] NOTIFY-MODELS-15-102 — Publish schema docs and sample payloads for Notify. + • Prereqs: — + • Current: TODO + 3. [TODO] NOTIFY-MODELS-15-103 — Versioning/migration helpers for rules/templates/deliveries. + • Prereqs: — + • Current: TODO + - Team: Notify Storage Guild + - Path: `src/StellaOps.Notify.Storage.Mongo/TASKS.md` + 1. [TODO] NOTIFY-STORAGE-15-201 — Mongo schemas/indexes for rules, channels, deliveries, digests, locks, audit. + • Prereqs: — + • Current: TODO + 2. [TODO] NOTIFY-STORAGE-15-202 — Repositories with tenant scoping, soft delete, TTL, causal consistency options. + • Prereqs: — + • Current: TODO + 3. [TODO] NOTIFY-STORAGE-15-203 — Delivery history retention and query APIs. + • Prereqs: — + • Current: TODO + - Team: Notify WebService Guild + - Path: `src/StellaOps.Notify.WebService/TASKS.md` + 1. [TODO] NOTIFY-WEB-15-101 — Minimal API host with Authority enforcement and plug-in loading. + • Prereqs: — + • Current: TODO + 2. [TODO] NOTIFY-WEB-15-102 — Rules/channel/template CRUD with audit logging. + • Prereqs: — + • Current: TODO + - Team: Scanner WebService Guild + - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + 1. [TODO] SCANNER-EVENTS-15-201 — Emit `scanner.report.ready` + `scanner.scan.completed` events. + • Prereqs: — + • Current: TODO +- **Sprint 16** · Scheduler Intelligence + - Team: Scheduler ImpactIndex Guild + - Path: `src/StellaOps.Scheduler.ImpactIndex/TASKS.md` + 1. [DOING] SCHED-IMPACT-16-300 — **STUB** ingest/query using fixtures to unblock Scheduler planning (remove by SP16 end). + • Prereqs: SAMPLES-10-001 (external/completed) + • Current: DOING + - Team: Scheduler Models Guild + - Path: `src/StellaOps.Scheduler.Models/TASKS.md` + 1. [TODO] SCHED-MODELS-16-103 — Versioning/migration helpers (schedule evolution, run state transitions). + • Prereqs: SCHED-MODELS-16-101 (external/completed) + • Current: TODO + - Team: Scheduler Queue Guild + - Path: `src/StellaOps.Scheduler.Queue/TASKS.md` + 1. [TODO] SCHED-QUEUE-16-401 — Implement queue abstraction + Redis Streams adapter (planner inputs, runner segments) with ack/lease semantics. + • Prereqs: SCHED-MODELS-16-101 (external/completed) + • Current: TODO + - Team: Scheduler Storage Guild + - Path: `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md` + 1. [TODO] SCHED-STORAGE-16-201 — Create Mongo collections (schedules, runs, impact_cursors, locks, audit) with indexes/migrations per architecture. + • Prereqs: SCHED-MODELS-16-101 (external/completed) + • Current: TODO + - Team: Scheduler WebService Guild + - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + 1. 
[TODO] SCHED-WEB-16-101 — Bootstrap Minimal API host with Authority OpTok + DPoP, health endpoints, plug-in discovery per architecture §§1–2. + • Prereqs: SCHED-MODELS-16-101 (external/completed) + • Current: TODO + +## Wave 1 — 45 task(s) ready after Wave 0 +- **Sprint 6** · Excititor Ingest & Formats + - Team: Team Excititor Connectors – MSRC + - Path: `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-MS-01-003 — EXCITITOR-CONN-MS-01-003 – Trust metadata & provenance hints + • Prereqs: EXCITITOR-CONN-MS-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Emit cosign/AAD issuer metadata, attach provenance details, and document policy integration. + - Team: Team Excititor Connectors – Oracle + - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-ORACLE-01-002 — EXCITITOR-CONN-ORACLE-01-002 – CSAF download & dedupe pipeline + • Prereqs: EXCITITOR-CONN-ORACLE-01-001 (Wave 0), EXCITITOR-STORAGE-01-003 (external/completed) + • Current: TODO – Fetch CSAF documents with retry/backoff, checksum validation, revision deduplication, and raw persistence. + - Team: Team Excititor Connectors – SUSE + - Path: `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` + 1. [TODO] EXCITITOR-CONN-SUSE-01-003 — EXCITITOR-CONN-SUSE-01-003 – Trust metadata & policy hints + • Prereqs: EXCITITOR-CONN-SUSE-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Emit provider trust configuration (signers, weight overrides) and attach provenance hints for consensus engine. + - Team: Team Excititor Connectors – Ubuntu + - Path: `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-UBUNTU-01-003 — EXCITITOR-CONN-UBUNTU-01-003 – Trust metadata & provenance + • Prereqs: EXCITITOR-CONN-UBUNTU-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Emit Ubuntu signing metadata (GPG fingerprints) plus provenance hints for policy weighting and diagnostics. + - Team: Team Excititor Worker + - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + 1. [TODO] EXCITITOR-WORKER-01-003 — EXCITITOR-WORKER-01-003 – Verification & cache GC loops + • Prereqs: EXCITITOR-WORKER-01-001 (external/completed), EXCITITOR-ATTEST-01-003 (Wave 0), EXCITITOR-EXPORT-01-002 (external/completed) + • Current: TODO – Add scheduled attestation re-verification and cache pruning routines, surfacing metrics for export reuse ratios. +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Excititor Export + - Path: `src/StellaOps.Excititor.Export/TASKS.md` + 1. [TODO] EXCITITOR-EXPORT-01-006 — EXCITITOR-EXPORT-01-006 – Quiet provenance packaging + • Prereqs: EXCITITOR-EXPORT-01-005 (Wave 0), POLICY-CORE-09-005 (Wave 0) + • Current: TODO – Attach `quietedBy` statement IDs, signers, and justification codes to exports/offline bundles, mirror metadata into attested manifest, and add regression fixtures. +- **Sprint 9** · DevOps Foundations + - Team: DevOps Guild, Scanner WebService Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-SCANNER-09-204 — Surface `SCANNER__EVENTS__*` environment variables across docker-compose (dev/stage/airgap) and Helm values, defaulting to share the Redis queue DSN. + • Prereqs: SCANNER-EVENTS-15-201 (Wave 0) + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. 
[TODO] SCANNER-ANALYZERS-LANG-10-305A — Parse `*.deps.json` + `runtimeconfig.json`, build RID graph, and normalize to `pkg:nuget` components. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-304A — Parse Go build info blob (`runtime/debug` format) and `.note.go.buildid`; map to module/version and evidence. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-307N — Integrate shared helpers for license/licence evidence, canonical JSON serialization, and usage flag propagation. + • Prereqs: SCANNER-ANALYZERS-LANG-10-302C (Wave 0) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-303A — STREAM-based parser for `*.dist-info` (`METADATA`, `WHEEL`, `entry_points.txt`) with normalization + evidence capture. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-306A — Parse Cargo metadata (`Cargo.lock`, `.fingerprint`, `.metadata`) and map crates to components with evidence. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO +- **Sprint 10** · Benchmarks + - Team: Bench Guild, Language Analyzer Guild + - Path: `bench/TASKS.md` + 1. [TODO] BENCH-SCANNER-10-002 — Wire real language analyzers into bench harness & refresh baselines post-implementation. + • Prereqs: SCANNER-ANALYZERS-LANG-10-301 (Wave 0) + • Current: TODO +- **Sprint 10** · Scanner Analyzers & SBOM + - Team: Emit Guild + - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + 1. [TODO] SCANNER-EMIT-10-607 — Embed scoring inputs, confidence band, and `quietedBy` provenance into CycloneDX 1.6 and DSSE predicates; verify deterministic serialization. + • Prereqs: SCANNER-EMIT-10-604 (Wave 0), POLICY-CORE-09-005 (Wave 0) + • Current: TODO + - Team: Language Analyzer Guild + - Path: `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md` + 1. [DOING] SCANNER-ANALYZERS-LANG-10-309 — Package language analyzers as restart-time plug-ins (manifest + host registration). + • Prereqs: SCANNER-ANALYZERS-LANG-10-301 (Wave 0) + • Current: DOING (2025-10-19) + 2. [TODO] SCANNER-ANALYZERS-LANG-10-306 — Rust analyzer detecting crate provenance or falling back to `bin:{sha256}`. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + 3. [DOING] SCANNER-ANALYZERS-LANG-10-302 — Node analyzer resolving workspaces/symlinks into `pkg:npm` identities. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: DOING (2025-10-19) + 4. [TODO] SCANNER-ANALYZERS-LANG-10-304 — Go analyzer leveraging buildinfo for `pkg:golang` components. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + 5. [TODO] SCANNER-ANALYZERS-LANG-10-305 — .NET analyzer parsing `*.deps.json`, assembly metadata, and RID variants. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO + 6. [TODO] SCANNER-ANALYZERS-LANG-10-303 — Python analyzer consuming `*.dist-info` metadata and RECORD hashes. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) + • Current: TODO +- **Sprint 11** · UI Integration + - Team: UI Guild + - Path: `src/StellaOps.UI/TASKS.md` + 1. [TODO] UI-ATTEST-11-005 — Attestation visibility (Rekor id, status) on Scan Detail. 
+ • Prereqs: SIGNER-API-11-101 (Wave 0), ATTESTOR-API-11-201 (Wave 0) + • Current: TODO +- **Sprint 12** · Runtime Guardrails + - Team: Scanner WebService Guild + - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + 1. [TODO] SCANNER-RUNTIME-12-301 — Implement `/runtime/events` ingestion endpoint with validation, batching, and storage hooks per Zastava contract. + • Prereqs: ZASTAVA-CORE-12-201 (Wave 0) + • Current: TODO + - Team: Zastava Observer Guild + - Path: `src/StellaOps.Zastava.Observer/TASKS.md` + 1. [TODO] ZASTAVA-OBS-12-001 — Build container lifecycle watcher that tails CRI (containerd/cri-o/docker) events and emits deterministic runtime records with buffering + backoff. + • Prereqs: ZASTAVA-CORE-12-201 (Wave 0) + • Current: TODO +- **Sprint 13** · UX & CLI Experience + - Team: DevEx/CLI, QA Guild + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] CLI-RUNTIME-13-009 — CLI-RUNTIME-13-009 – Runtime policy smoke fixture + • Prereqs: CLI-RUNTIME-13-005 (Wave 0) + • Current: TODO – Build Spectre test harness exercising `runtime policy test` against a stubbed backend to lock output shape (table + `--json`) and guard regressions. Integrate into `dotnet test` suite. + - Team: UI Guild + - Path: `src/StellaOps.UI/TASKS.md` + 1. [TODO] UI-VEX-13-003 — Implement VEX explorer + policy editor with preview integration. + • Prereqs: EXCITITOR-CORE-02-001 (external/completed), EXCITITOR-EXPORT-01-005 (Wave 0) + • Current: TODO + 2. [TODO] UI-POLICY-13-007 — Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. + • Prereqs: POLICY-CORE-09-006 (Wave 0), SCANNER-WEB-09-103 (external/completed) + • Current: TODO + 3. [TODO] UI-ADMIN-13-004 — Deliver admin area (tenants/clients/quotas/licensing) with RBAC + audit hooks. + • Prereqs: AUTH-MTLS-11-002 (Wave 0) + • Current: TODO + 4. [TODO] UI-AUTH-13-001 — Integrate Authority OIDC + DPoP flows with session management. + • Prereqs: AUTH-DPOP-11-001 (Wave 0), AUTH-MTLS-11-002 (Wave 0) + • Current: TODO + 5. [TODO] UI-SCANS-13-002 — Build scans module (list/detail/SBOM/diff/attestation) with performance + accessibility targets. + • Prereqs: SCANNER-WEB-09-102 (external/completed), SIGNER-API-11-101 (Wave 0) + • Current: TODO + 6. [TODO] UI-NOTIFY-13-006 — Notify panel: channels/rules CRUD, deliveries view, test send integration. + • Prereqs: NOTIFY-WEB-15-101 (Wave 0) + • Current: TODO + 7. [TODO] UI-SCHED-13-005 — Scheduler panel: schedules CRUD, run history, dry-run preview using API/mocks. + • Prereqs: SCHED-WEB-16-101 (Wave 0) + • Current: TODO +- **Sprint 14** · Release & Offline Ops + - Team: DevOps Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-REL-14-001 — Deterministic build/release pipeline with SBOM/provenance, signing, manifest generation. + • Prereqs: SIGNER-API-11-101 (Wave 0), ATTESTOR-API-11-201 (Wave 0) + • Current: TODO + - Team: Licensing Guild + - Path: `ops/licensing/TASKS.md` + 1. [TODO] DEVOPS-LIC-14-004 — Implement registry token service tied to Authority (DPoP/mTLS), plan gating, revocation handling, and monitoring per architecture. + • Prereqs: AUTH-MTLS-11-002 (Wave 0) + • Current: TODO +- **Sprint 15** · Notify Foundations + - Team: Notify Engine Guild + - Path: `src/StellaOps.Notify.Engine/TASKS.md` + 1. [TODO] NOTIFY-ENGINE-15-301 — Rules evaluation core: tenant/kind filters, severity/delta gates, VEX gating, throttling, idempotency key generation. 
+ • Prereqs: NOTIFY-MODELS-15-101 (Wave 0) + • Current: TODO + - Team: Notify Queue Guild + - Path: `src/StellaOps.Notify.Queue/TASKS.md` + 1. [TODO] NOTIFY-QUEUE-15-401 — Build queue abstraction + Redis Streams adapter with ack/claim APIs, idempotency tokens, serialization contracts. + • Prereqs: NOTIFY-MODELS-15-101 (Wave 0) + • Current: TODO + - Team: Notify WebService Guild + - Path: `src/StellaOps.Notify.WebService/TASKS.md` + 1. [TODO] NOTIFY-WEB-15-103 — Delivery history + test-send endpoints with rate limits. + • Prereqs: NOTIFY-WEB-15-102 (Wave 0) + • Current: TODO +- **Sprint 16** · Scheduler Intelligence + - Team: Scheduler ImpactIndex Guild + - Path: `src/StellaOps.Scheduler.ImpactIndex/TASKS.md` + 1. [TODO] SCHED-IMPACT-16-301 — Implement ingestion of per-image BOM-Index sidecars into roaring bitmap store (contains/usedBy). + • Prereqs: SCANNER-EMIT-10-605 (Wave 0) + • Current: TODO + - Team: Scheduler Queue Guild + - Path: `src/StellaOps.Scheduler.Queue/TASKS.md` + 1. [TODO] SCHED-QUEUE-16-402 — Add NATS JetStream adapter with configuration binding, health probes, failover. + • Prereqs: SCHED-QUEUE-16-401 (Wave 0) + • Current: TODO + 2. [TODO] SCHED-QUEUE-16-403 — Dead-letter handling + metrics (queue depth, retry counts), configuration toggles. + • Prereqs: SCHED-QUEUE-16-401 (Wave 0) + • Current: TODO + - Team: Scheduler Storage Guild + - Path: `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md` + 1. [TODO] SCHED-STORAGE-16-203 — Audit/logging pipeline + run stats materialized views for UI. + • Prereqs: SCHED-STORAGE-16-201 (Wave 0) + • Current: TODO + 2. [TODO] SCHED-STORAGE-16-202 — Implement repositories/services with tenant scoping, soft delete, TTL for completed runs, and causal consistency options. + • Prereqs: SCHED-STORAGE-16-201 (Wave 0) + • Current: TODO + - Team: Scheduler WebService Guild + - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + 1. [TODO] SCHED-WEB-16-104 — Webhook endpoints for Feedser/Vexer exports with mTLS/HMAC validation and rate limiting. + • Prereqs: SCHED-QUEUE-16-401 (Wave 0), SCHED-STORAGE-16-201 (Wave 0) + • Current: TODO + 2. [TODO] SCHED-WEB-16-102 — Implement schedules CRUD (tenant-scoped) with cron validation, pause/resume, audit logging. + • Prereqs: SCHED-WEB-16-101 (Wave 0) + • Current: TODO + - Team: Scheduler Worker Guild + - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + 1. [TODO] SCHED-WORKER-16-201 — Planner loop (cron + event triggers) with lease management, fairness, and rate limiting (§6). + • Prereqs: SCHED-QUEUE-16-401 (Wave 0) + • Current: TODO +- **Sprint 17** · Symbol Intelligence & Forensics + - Team: Emit Guild + - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + 1. [TODO] SCANNER-EMIT-17-701 — Record GNU build-id for ELF components and surface it in inventory/usage SBOM plus diff payloads with deterministic ordering. + • Prereqs: SCANNER-EMIT-10-602 (Wave 0) + • Current: TODO + +## Wave 2 — 29 task(s) ready after Wave 1 +- **Sprint 6** · Excititor Ingest & Formats + - Team: Team Excititor Connectors – Oracle + - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + 1. [TODO] EXCITITOR-CONN-ORACLE-01-003 — EXCITITOR-CONN-ORACLE-01-003 – Trust metadata + provenance + • Prereqs: EXCITITOR-CONN-ORACLE-01-002 (Wave 1), EXCITITOR-POLICY-01-001 (external/completed) + • Current: TODO – Emit Oracle signing metadata (PGP/cosign) and provenance hints for consensus weighting. +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Excititor Export + - Path: `src/StellaOps.Excititor.Export/TASKS.md` + 1. 
[TODO] EXCITITOR-EXPORT-01-007 — EXCITITOR-EXPORT-01-007 – Mirror bundle + domain manifest + • Prereqs: EXCITITOR-EXPORT-01-006 (Wave 1) + • Current: TODO – Create per-domain mirror bundles with consensus/score artifacts, publish signed index for downstream Excititor sync, and ensure deterministic digests + fixtures. +- **Sprint 8** · Mirror Distribution + - Team: DevOps Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-MIRROR-08-001 — Stand up managed mirror profiles for `*.stella-ops.org` (Concelier/Excititor), including Helm/Compose overlays, multi-tenant secrets, CDN caching, and sync documentation. + • Prereqs: DEVOPS-REL-14-001 (Wave 1) + • Current: TODO +- **Sprint 9** · DevOps Foundations + - Team: DevOps Guild, Notify Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-SCANNER-09-205 — Add Notify smoke stage that tails the Redis stream and asserts `scanner.report.ready`/`scanner.scan.completed` reach Notify WebService in staging. + • Prereqs: DEVOPS-SCANNER-09-204 (Wave 1) + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-305B — Extract assembly metadata (strong name, file/product info) and optional Authenticode details when offline cert bundle provided. + • Prereqs: SCANNER-ANALYZERS-LANG-10-305A (Wave 1) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-304B — Implement DWARF-lite reader for VCS metadata + dirty flag; add cache to avoid re-reading identical binaries. + • Prereqs: SCANNER-ANALYZERS-LANG-10-304A (Wave 1) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-308N — Author determinism harness + fixtures for Node analyzer; add benchmark suite. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307N (Wave 1) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-303B — RECORD hash verifier with chunked hashing, Zip64 support, and mismatch diagnostics. + • Prereqs: SCANNER-ANALYZERS-LANG-10-303A (Wave 1) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-306B — Implement heuristic classifier using ELF section names, symbol mangling, and `.comment` data for stripped binaries. + • Prereqs: SCANNER-ANALYZERS-LANG-10-306A (Wave 1) + • Current: TODO +- **Sprint 10** · DevOps Perf + - Team: DevOps Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-PERF-10-002 — Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. + • Prereqs: BENCH-SCANNER-10-002 (Wave 1) + • Current: TODO +- **Sprint 10** · Samples + - Team: Samples Guild, Policy Guild + - Path: `samples/TASKS.md` + 1. [TODO] SAMPLES-13-004 — Add policy preview/report fixtures showing confidence bands and unknown-age tags. + • Prereqs: POLICY-CORE-09-006 (Wave 0), UI-POLICY-13-007 (Wave 1) + • Current: TODO +- **Sprint 12** · Runtime Guardrails + - Team: Scanner WebService Guild + - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + 1. [TODO] SCANNER-RUNTIME-12-302 — Implement `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. Coordinate with CLI (`CLI-RUNTIME-13-008`) before GA to lock response field names/metadata. 
+ • Prereqs: SCANNER-RUNTIME-12-301 (Wave 1), ZASTAVA-CORE-12-201 (Wave 0) + • Current: TODO + - Team: Zastava Observer Guild + - Path: `src/StellaOps.Zastava.Observer/TASKS.md` + 1. [TODO] ZASTAVA-OBS-12-002 — Capture entrypoint traces and loaded libraries, hashing binaries and correlating to SBOM baseline per architecture sections 2.1 and 10. + • Prereqs: ZASTAVA-OBS-12-001 (Wave 1) + • Current: TODO +- **Sprint 14** · Release & Offline Ops + - Team: Deployment Guild + - Path: `ops/deployment/TASKS.md` + 1. [TODO] DEVOPS-OPS-14-003 — Document and script upgrade/rollback flows, channel management, and compatibility matrices per architecture. + • Prereqs: DEVOPS-REL-14-001 (Wave 1) + • Current: TODO + - Team: Offline Kit Guild + - Path: `ops/offline-kit/TASKS.md` + 1. [TODO] DEVOPS-OFFLINE-14-002 — Build offline kit packaging workflow (artifact bundling, manifest generation, signature verification). + • Prereqs: DEVOPS-REL-14-001 (Wave 1) + • Current: TODO +- **Sprint 15** · Benchmarks + - Team: Bench Guild, Notify Team + - Path: `bench/TASKS.md` + 1. [TODO] BENCH-NOTIFY-15-001 — Notify dispatch throughput bench (vary rule density) with results CSV. + • Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) + • Current: TODO +- **Sprint 15** · Notify Foundations + - Team: Notify Engine Guild + - Path: `src/StellaOps.Notify.Engine/TASKS.md` + 1. [TODO] NOTIFY-ENGINE-15-302 — Action planner + digest coalescer with window management and dedupe per architecture §4. + • Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) + • Current: TODO + - Team: Notify Queue Guild + - Path: `src/StellaOps.Notify.Queue/TASKS.md` + 1. [TODO] NOTIFY-QUEUE-15-403 — Delivery queue for channel actions with retry schedules, poison queues, and metrics instrumentation. + • Prereqs: NOTIFY-QUEUE-15-401 (Wave 1) + • Current: TODO + 2. [TODO] NOTIFY-QUEUE-15-402 — Add NATS JetStream adapter with configuration binding, health probes, failover. + • Prereqs: NOTIFY-QUEUE-15-401 (Wave 1) + • Current: TODO + - Team: Notify WebService Guild + - Path: `src/StellaOps.Notify.WebService/TASKS.md` + 1. [TODO] NOTIFY-WEB-15-104 — Configuration binding for Mongo/queue/secrets; startup diagnostics. + • Prereqs: NOTIFY-STORAGE-15-201 (Wave 0), NOTIFY-QUEUE-15-401 (Wave 1) + • Current: TODO + - Team: Notify Worker Guild + - Path: `src/StellaOps.Notify.Worker/TASKS.md` + 1. [TODO] NOTIFY-WORKER-15-201 — Implement bus subscription + leasing loop with correlation IDs, backoff, dead-letter handling (§1–§5). + • Prereqs: NOTIFY-QUEUE-15-401 (Wave 1) + • Current: TODO + 2. [TODO] NOTIFY-WORKER-15-202 — Wire rules evaluation pipeline (tenant scoping, filters, throttles, digests, idempotency) with deterministic decisions. + • Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) + • Current: TODO +- **Sprint 16** · Benchmarks + - Team: Bench Guild, Scheduler Team + - Path: `bench/TASKS.md` + 1. [TODO] BENCH-IMPACT-16-001 — ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. + • Prereqs: SCHED-IMPACT-16-301 (Wave 1) + • Current: TODO +- **Sprint 16** · Scheduler Intelligence + - Team: Scheduler ImpactIndex Guild + - Path: `src/StellaOps.Scheduler.ImpactIndex/TASKS.md` + 1. [TODO] SCHED-IMPACT-16-303 — Snapshot/compaction + invalidation for removed images; persistence to RocksDB/Redis per architecture. + • Prereqs: SCHED-IMPACT-16-301 (Wave 1) + • Current: TODO + 2. [TODO] SCHED-IMPACT-16-302 — Provide query APIs (ResolveByPurls, ResolveByVulns, ResolveAll, selectors) with tenant/namespace filters. 
+ • Prereqs: SCHED-IMPACT-16-301 (Wave 1) + • Current: TODO + - Team: Scheduler WebService Guild + - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + 1. [TODO] SCHED-WEB-16-103 — Runs API (list/detail/cancel), ad-hoc run POST, and impact preview endpoints. + • Prereqs: SCHED-WEB-16-102 (Wave 1) + • Current: TODO + - Team: Scheduler Worker Guild + - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + 1. [TODO] SCHED-WORKER-16-202 — Wire ImpactIndex targeting (ResolveByPurls/vulns), dedupe, shard planning. + • Prereqs: SCHED-IMPACT-16-301 (Wave 1) + • Current: TODO + 2. [TODO] SCHED-WORKER-16-205 — Metrics/telemetry: run stats, queue depth, planner latency, delta counts. + • Prereqs: SCHED-WORKER-16-201 (Wave 1) + • Current: TODO +- **Sprint 17** · Symbol Intelligence & Forensics + - Team: DevOps Guild + - Path: `ops/devops/TASKS.md` + 1. [TODO] DEVOPS-REL-17-002 — Persist stripped-debug artifacts organised by GNU build-id and bundle them into release/offline kits with checksum manifests. + • Prereqs: DEVOPS-REL-14-001 (Wave 1), SCANNER-EMIT-17-701 (Wave 1) + • Current: TODO + +## Wave 3 — 14 task(s) ready after Wave 2 +- **Sprint 7** · Contextual Truth Foundations + - Team: Excititor Connectors – Stella + - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + 1. [TODO] EXCITITOR-CONN-STELLA-07-001 — Implement mirror fetch client consuming `https://.stella-ops.org/excititor/exports/index.json`, validating signatures/digests, storing raw consensus bundles with provenance. + • Prereqs: EXCITITOR-EXPORT-01-007 (Wave 2) + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-305C — Handle self-contained apps and native assets; merge with EntryTrace usage hints. + • Prereqs: SCANNER-ANALYZERS-LANG-10-305B (Wave 2) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-304C — Fallback heuristics for stripped binaries with deterministic `bin:{sha256}` labeling and quiet provenance. + • Prereqs: SCANNER-ANALYZERS-LANG-10-304B (Wave 2) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-309N — Package Node analyzer as restart-time plug-in (manifest, DI registration, Offline Kit notes). + • Prereqs: SCANNER-ANALYZERS-LANG-10-308N (Wave 2) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-303C — Editable install + pip cache detection; integrate EntryTrace hints for runtime usage flags. + • Prereqs: SCANNER-ANALYZERS-LANG-10-303B (Wave 2) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-306C — Integrate binary hash fallback (`bin:{sha256}`) and tie into shared quiet provenance helpers. + • Prereqs: SCANNER-ANALYZERS-LANG-10-306B (Wave 2) + • Current: TODO +- **Sprint 12** · Runtime Guardrails + - Team: Zastava Observer Guild + - Path: `src/StellaOps.Zastava.Observer/TASKS.md` + 1. [TODO] ZASTAVA-OBS-12-003 — Implement runtime posture checks (signature/SBOM/attestation presence) with offline caching and warning surfaces. + • Prereqs: ZASTAVA-OBS-12-002 (Wave 2) + • Current: TODO + 2. [TODO] ZASTAVA-OBS-12-004 — Batch `/runtime/events` submissions with disk-backed buffer, rate limits, and deterministic envelopes. 
+ • Prereqs: ZASTAVA-OBS-12-002 (Wave 2) + • Current: TODO +- **Sprint 13** · UX & CLI Experience + - Team: DevEx/CLI + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] CLI-OFFLINE-13-006 — CLI-OFFLINE-13-006 – Offline kit workflows + • Prereqs: DEVOPS-OFFLINE-14-002 (Wave 2) + • Current: TODO – Implement `offline kit pull/import/status` commands with integrity checks, resumable downloads, and doc updates. + - Team: DevEx/CLI, Scanner WebService Guild + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] CLI-RUNTIME-13-008 — CLI-RUNTIME-13-008 – Runtime policy contract sync + • Prereqs: SCANNER-RUNTIME-12-302 (Wave 2) + • Current: TODO – Once `/api/v1/scanner/policy/runtime` exits TODO, verify CLI output against final schema (field names, metadata) and update formatter/tests if the contract moves. Capture joint review notes in docs/09 and link Scanner task sign-off. +- **Sprint 15** · Notify Foundations + - Team: Notify Engine Guild + - Path: `src/StellaOps.Notify.Engine/TASKS.md` + 1. [TODO] NOTIFY-ENGINE-15-303 — Template rendering engine (Slack, Teams, Email, Webhook) with helpers and i18n support. + • Prereqs: NOTIFY-ENGINE-15-302 (Wave 2) + • Current: TODO + - Team: Notify Worker Guild + - Path: `src/StellaOps.Notify.Worker/TASKS.md` + 1. [TODO] NOTIFY-WORKER-15-203 — Channel dispatch orchestration: invoke connectors, manage retries/jitter, record delivery outcomes. + • Prereqs: NOTIFY-ENGINE-15-302 (Wave 2) + • Current: TODO +- **Sprint 16** · Scheduler Intelligence + - Team: Scheduler Worker Guild + - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + 1. [TODO] SCHED-WORKER-16-203 — Runner execution: call Scanner `/reports` (analysis-only) or `/scans` when configured; collect deltas; handle retries. + • Prereqs: SCHED-WORKER-16-202 (Wave 2) + • Current: TODO +- **Sprint 17** · Symbol Intelligence & Forensics + - Team: Zastava Observer Guild + - Path: `src/StellaOps.Zastava.Observer/TASKS.md` + 1. [TODO] ZASTAVA-OBS-17-005 — Collect GNU build-id for ELF processes and attach it to emitted runtime events to enable symbol lookup + debug-store correlation. + • Prereqs: ZASTAVA-OBS-12-002 (Wave 2) + • Current: TODO + +## Wave 4 — 15 task(s) ready after Wave 3 +- **Sprint 7** · Contextual Truth Foundations + - Team: Excititor Connectors – Stella + - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + 1. [TODO] EXCITITOR-CONN-STELLA-07-002 — Normalize mirror bundles into VexClaim sets referencing original provider metadata and mirror provenance. + • Prereqs: EXCITITOR-CONN-STELLA-07-001 (Wave 3) + • Current: TODO +- **Sprint 9** · Policy Foundations + - Team: Policy Guild, Scanner WebService Guild + - Path: `src/StellaOps.Policy/TASKS.md` + 1. [TODO] POLICY-RUNTIME-17-201 — Define runtime reachability feed contract and alignment plan for `SCANNER-RUNTIME-17-401` once Zastava endpoints land; document policy expectations for reachability tags. + • Prereqs: ZASTAVA-OBS-17-005 (Wave 3) + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-307D — Integrate shared helpers (license mapping, quiet provenance) and concurrency-safe caches. + • Prereqs: SCANNER-ANALYZERS-LANG-10-305C (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-307G — Wire shared helpers (license mapping, usage flags) and ensure concurrency-safe buffer reuse. 
+ • Prereqs: SCANNER-ANALYZERS-LANG-10-304C (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-307P — Shared helper integration (license metadata, quiet provenance, component merging). + • Prereqs: SCANNER-ANALYZERS-LANG-10-303C (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-307R — Finalize shared helper usage (license, usage flags) and concurrency-safe caches. + • Prereqs: SCANNER-ANALYZERS-LANG-10-306C (Wave 3) + • Current: TODO +- **Sprint 13** · UX & CLI Experience + - Team: DevEx/CLI + - Path: `src/StellaOps.Cli/TASKS.md` + 1. [TODO] CLI-PLUGIN-13-007 — CLI-PLUGIN-13-007 – Plugin packaging + • Prereqs: CLI-RUNTIME-13-005 (Wave 0), CLI-OFFLINE-13-006 (Wave 3) + • Current: TODO – Package non-core verbs as restart-time plug-ins (manifest + loader updates, tests ensuring no hot reload). +- **Sprint 15** · Notify Foundations + - Team: Notify Connectors Guild + - Path: `src/StellaOps.Notify.Connectors.Email/TASKS.md` + 1. [TODO] NOTIFY-CONN-EMAIL-15-701 — Implement SMTP connector with STARTTLS/implicit TLS support, HTML+text rendering, attachment policy enforcement. + • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Slack/TASKS.md` + 1. [TODO] NOTIFY-CONN-SLACK-15-501 — Implement Slack connector with bot token auth, message rendering (blocks), rate limit handling, retries/backoff. + • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Teams/TASKS.md` + 1. [TODO] NOTIFY-CONN-TEAMS-15-601 — Implement Teams connector using Adaptive Cards 1.5, handle webhook auth, size limits, retries. + • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Webhook/TASKS.md` + 1. [TODO] NOTIFY-CONN-WEBHOOK-15-801 — Implement webhook connector: JSON payload, signature (HMAC/Ed25519), retries/backoff, status code handling. + • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) + • Current: TODO + - Team: Notify Engine Guild + - Path: `src/StellaOps.Notify.Engine/TASKS.md` + 1. [TODO] NOTIFY-ENGINE-15-304 — Test-send sandbox + preview utilities for WebService. + • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) + • Current: TODO + - Team: Notify Worker Guild + - Path: `src/StellaOps.Notify.Worker/TASKS.md` + 1. [TODO] NOTIFY-WORKER-15-204 — Metrics/telemetry: `notify.sent_total`, `notify.dropped_total`, latency histograms, tracing integration. + • Prereqs: NOTIFY-WORKER-15-203 (Wave 3) + • Current: TODO +- **Sprint 16** · Scheduler Intelligence + - Team: Scheduler Worker Guild + - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + 1. [TODO] SCHED-WORKER-16-204 — Emit events (`scheduler.rescan.delta`, `scanner.report.ready`) for Notify/UI with summaries. + • Prereqs: SCHED-WORKER-16-203 (Wave 3) + • Current: TODO +- **Sprint 17** · Symbol Intelligence & Forensics + - Team: Docs Guild + - Path: `docs/TASKS.md` + 1. [TODO] DOCS-RUNTIME-17-004 — Document build-id workflows: SBOM exposure, runtime event payloads, debug-store layout, and operator guidance for symbol retrieval. + • Prereqs: SCANNER-EMIT-17-701 (Wave 1), ZASTAVA-OBS-17-005 (Wave 3), DEVOPS-REL-17-002 (Wave 2) + • Current: TODO + +## Wave 5 — 10 task(s) ready after Wave 4 +- **Sprint 7** · Contextual Truth Foundations + - Team: Excititor Connectors – Stella + - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + 1. 
[TODO] EXCITITOR-CONN-STELLA-07-003 — Implement incremental cursor handling per-export digest, support resume, and document configuration for downstream Excititor mirrors. + • Prereqs: EXCITITOR-CONN-STELLA-07-002 (Wave 4) + • Current: TODO +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-308D — Determinism fixtures + benchmark harness; compare to competitor scanners for accuracy/perf. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307D (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-308G — Determinism fixtures + benchmark harness (Vs competitor). + • Prereqs: SCANNER-ANALYZERS-LANG-10-307G (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-308P — Golden fixtures + determinism harness for Python analyzer; add benchmark and hash throughput reporting. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307P (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-308R — Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. + • Prereqs: SCANNER-ANALYZERS-LANG-10-307R (Wave 4) + • Current: TODO +- **Sprint 15** · Notify Foundations + - Team: Notify Connectors Guild + - Path: `src/StellaOps.Notify.Connectors.Email/TASKS.md` + 1. [TODO] NOTIFY-CONN-EMAIL-15-702 — Add DKIM signing optional support and health/test-send flows. + • Prereqs: NOTIFY-CONN-EMAIL-15-701 (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Slack/TASKS.md` + 1. [TODO] NOTIFY-CONN-SLACK-15-502 — Health check & test-send support with minimal scopes and redacted tokens. + • Prereqs: NOTIFY-CONN-SLACK-15-501 (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Teams/TASKS.md` + 1. [TODO] NOTIFY-CONN-TEAMS-15-602 — Provide health/test-send support with fallback text for legacy clients. + • Prereqs: NOTIFY-CONN-TEAMS-15-601 (Wave 4) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Webhook/TASKS.md` + 1. [TODO] NOTIFY-CONN-WEBHOOK-15-802 — Health/test-send support with signature validation hints and secret management. + • Prereqs: NOTIFY-CONN-WEBHOOK-15-801 (Wave 4) + • Current: TODO +- **Sprint 17** · Symbol Intelligence & Forensics + - Team: Scanner WebService Guild + - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + 1. [TODO] SCANNER-RUNTIME-17-401 — Persist runtime build-id observations and expose them via `/runtime/events` + policy joins for debug-symbol correlation. + • Prereqs: SCANNER-RUNTIME-12-301 (Wave 1), ZASTAVA-OBS-17-005 (Wave 3), SCANNER-EMIT-17-701 (Wave 1), POLICY-RUNTIME-17-201 (Wave 4) + • Current: TODO + +## Wave 6 — 8 task(s) ready after Wave 5 +- **Sprint 10** · Backlog + - Team: TBD + - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-309D — Package plug-in (manifest, DI registration) and update Offline Kit instructions. + • Prereqs: SCANNER-ANALYZERS-LANG-10-308D (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-309G — Package plug-in manifest + Offline Kit notes; ensure Worker DI registration. + • Prereqs: SCANNER-ANALYZERS-LANG-10-308G (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + 1. 
[TODO] SCANNER-ANALYZERS-LANG-10-309P — Package plug-in (manifest, DI registration) and document Offline Kit bundling of Python stdlib metadata if needed. + • Prereqs: SCANNER-ANALYZERS-LANG-10-308P (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + 1. [TODO] SCANNER-ANALYZERS-LANG-10-309R — Package plug-in manifest + Offline Kit documentation; ensure Worker integration. + • Prereqs: SCANNER-ANALYZERS-LANG-10-308R (Wave 5) + • Current: TODO +- **Sprint 15** · Notify Foundations + - Team: Notify Connectors Guild + - Path: `src/StellaOps.Notify.Connectors.Email/TASKS.md` + 1. [TODO] NOTIFY-CONN-EMAIL-15-703 — Package Email connector as restart-time plug-in (manifest + host registration). + • Prereqs: NOTIFY-CONN-EMAIL-15-702 (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Slack/TASKS.md` + 1. [TODO] NOTIFY-CONN-SLACK-15-503 — Package Slack connector as restart-time plug-in (manifest + host registration). + • Prereqs: NOTIFY-CONN-SLACK-15-502 (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Teams/TASKS.md` + 1. [TODO] NOTIFY-CONN-TEAMS-15-603 — Package Teams connector as restart-time plug-in (manifest + host registration). + • Prereqs: NOTIFY-CONN-TEAMS-15-602 (Wave 5) + • Current: TODO + - Path: `src/StellaOps.Notify.Connectors.Webhook/TASKS.md` + 1. [TODO] NOTIFY-CONN-WEBHOOK-15-803 — Package Webhook connector as restart-time plug-in (manifest + host registration). + • Prereqs: NOTIFY-CONN-WEBHOOK-15-802 (Wave 5) + • Current: TODO + +## Wave 7 — 1 task(s) ready after Wave 6 +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Core Engine & Storage Analytics + - Path: `src/StellaOps.Concelier.Core/TASKS.md` + 1. [TODO] FEEDCORE-ENGINE-07-001 — FEEDCORE-ENGINE-07-001 – Advisory event log & asOf queries + • Prereqs: FEEDSTORAGE-DATA-07-001 (Wave 10) + • Current: TODO – Introduce immutable advisory statement events, expose `asOf` query surface for merge/export pipelines, and document determinism guarantees for replay. + +## Wave 8 — 1 task(s) ready after Wave 7 +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Core Engine & Data Science + - Path: `src/StellaOps.Concelier.Core/TASKS.md` + 1. [TODO] FEEDCORE-ENGINE-07-002 — FEEDCORE-ENGINE-07-002 – Noise prior computation service + • Prereqs: FEEDCORE-ENGINE-07-001 (Wave 7) + • Current: TODO – Build rule-based learner capturing false-positive priors per package/env, persist summaries, and expose APIs for Excititor/scan suppressors with reproducible statistics. + +## Wave 9 — 1 task(s) ready after Wave 8 +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Core Engine & Storage Analytics + - Path: `src/StellaOps.Concelier.Core/TASKS.md` + 1. [TODO] FEEDCORE-ENGINE-07-003 — FEEDCORE-ENGINE-07-003 – Unknown state ledger & confidence seeding + • Prereqs: FEEDCORE-ENGINE-07-001 (Wave 7) + • Current: TODO – Persist `unknown_vuln_range/unknown_origin/ambiguous_fix` markers with initial confidence bands, expose query surface for Policy, and add fixtures validating canonical serialization. + +## Wave 10 — 1 task(s) ready after Wave 9 +- **Sprint 7** · Contextual Truth Foundations + - Team: Team Normalization & Storage Backbone + - Path: `src/StellaOps.Concelier.Storage.Mongo/TASKS.md` + 1. 
[TODO] FEEDSTORAGE-DATA-07-001 — FEEDSTORAGE-DATA-07-001 Advisory statement & conflict collections + • Prereqs: FEEDMERGE-ENGINE-07-001 (Wave 11) + • Current: TODO – Create `advisory_statements` (immutable) and `advisory_conflicts` collections, define `asOf`/`vulnerabilityKey` indexes, and document migration/rollback steps for event-sourced merge. + +## Wave 11 — 1 task(s) ready after Wave 10 +- **Sprint 7** · Contextual Truth Foundations + - Team: BE-Merge + - Path: `src/StellaOps.Concelier.Merge/TASKS.md` + 1. [TODO] FEEDMERGE-ENGINE-07-001 — FEEDMERGE-ENGINE-07-001 Conflict sets & explainers + • Prereqs: FEEDSTORAGE-DATA-07-001 (Wave 10) + • Current: TODO – Persist conflict sets referencing advisory statements, output rule/explainer payloads with replay hashes, and add integration tests covering deterministic `asOf` evaluations. + +## Wave 12 — 1 task(s) ready after Wave 11 +- **Sprint 8** · Mirror Distribution + - Team: Concelier Export Guild + - Path: `src/StellaOps.Concelier.Exporter.Json/TASKS.md` + 1. [TODO] CONCELIER-EXPORT-08-201 — CONCELIER-EXPORT-08-201 – Mirror bundle + domain manifest + • Prereqs: FEEDCORE-ENGINE-07-001 (Wave 7) + • Current: TODO – Produce per-domain aggregate bundles (JSON + manifest) with deterministic digests, include upstream source metadata, and publish index consumed by mirror endpoints/tests. + +## Wave 13 — 1 task(s) ready after Wave 12 +- **Sprint 8** · Mirror Distribution + - Team: Concelier Export Guild + - Path: `src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md` + 1. [TODO] CONCELIER-EXPORT-08-202 — CONCELIER-EXPORT-08-202 – Mirror-ready Trivy DB bundles + • Prereqs: CONCELIER-EXPORT-08-201 (Wave 12) + • Current: TODO – Generate domain-specific Trivy DB archives + metadata manifest, ensure deterministic digests, and document sync process for downstream Concelier nodes. + +## Wave 14 — 1 task(s) ready after Wave 13 +- **Sprint 8** · Mirror Distribution + - Team: Concelier WebService Guild + - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + 1. [TODO] CONCELIER-WEB-08-201 — CONCELIER-WEB-08-201 – Mirror distribution endpoints + • Prereqs: CONCELIER-EXPORT-08-201 (Wave 12), DEVOPS-MIRROR-08-001 (Wave 2) + • Current: TODO – Add domain-scoped mirror configuration (`*.stella-ops.org`), expose signed export index/download APIs with quota and auth, and document sync workflow for downstream Concelier instances. + +## Wave 15 — 1 task(s) ready after Wave 14 +- **Sprint 8** · Mirror Distribution + - Team: BE-Conn-Stella + - Path: `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md` + 1. [TODO] FEEDCONN-STELLA-08-001 — Implement Concelier mirror fetcher hitting `https://.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. + • Prereqs: CONCELIER-EXPORT-08-201 (Wave 12) + • Current: TODO + +## Wave 16 — 1 task(s) ready after Wave 15 +- **Sprint 8** · Mirror Distribution + - Team: BE-Conn-Stella + - Path: `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md` + 1. [TODO] FEEDCONN-STELLA-08-002 — Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. + • Prereqs: FEEDCONN-STELLA-08-001 (Wave 15) + • Current: TODO + +## Wave 17 — 1 task(s) ready after Wave 16 +- **Sprint 8** · Mirror Distribution + - Team: BE-Conn-Stella + - Path: `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md` + 1. 
[TODO] FEEDCONN-STELLA-08-003 — Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. + • Prereqs: FEEDCONN-STELLA-08-002 (Wave 16) + • Current: TODO diff --git a/Mongo2Go-4.1.0.tar.gz b/Mongo2Go-4.1.0.tar.gz new file mode 100644 index 00000000..6f154723 Binary files /dev/null and b/Mongo2Go-4.1.0.tar.gz differ diff --git a/Mongo2Go-4.1.0/.gitattributes b/Mongo2Go-4.1.0/.gitattributes new file mode 100644 index 00000000..adea2ff2 --- /dev/null +++ b/Mongo2Go-4.1.0/.gitattributes @@ -0,0 +1 @@ +*.nuspec text eol=lf \ No newline at end of file diff --git a/Mongo2Go-4.1.0/.github/workflows/continuous-integration.yml b/Mongo2Go-4.1.0/.github/workflows/continuous-integration.yml new file mode 100644 index 00000000..25458161 --- /dev/null +++ b/Mongo2Go-4.1.0/.github/workflows/continuous-integration.yml @@ -0,0 +1,76 @@ +name: Continuous Integration + +on: + push: + branches: + - '**' # Trigger on all branches for commits + tags: + - 'v*' # Trigger only on version tags for deployments + +env: + Configuration: Release + ContinuousIntegrationBuild: true + DOTNET_CLI_TELEMETRY_OPTOUT: true + DOTNET_NOLOGO: true + +jobs: + build: + strategy: + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + name: Build and Test + steps: + - name: Install libssl1.1 (restores libcrypto.so.1.1 which is required by MongoDB binaries v4.4.4) + if: runner.os == 'Linux' + run: | + echo "deb http://security.ubuntu.com/ubuntu focal-security main" | sudo tee /etc/apt/sources.list.d/focal-security.list + sudo apt update + sudo apt install -y libssl1.1 + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Retrieve cached NuGet packages + uses: actions/cache@v4 + with: + path: ~/.nuget/packages + key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} + - name: Restore NuGet packages + run: dotnet restore --locked-mode --verbosity normal + - name: Build solution + run: dotnet build --configuration ${{ env.Configuration }} --verbosity normal + - name: Run tests + run: dotnet test --configuration ${{ env.Configuration }} --no-build --verbosity normal + + publish: + runs-on: macos-latest + needs: build + if: startsWith(github.ref, 'refs/tags/') + name: Deploy NuGet and GitHub Release + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Restore NuGet packages + run: dotnet restore --locked-mode --verbosity normal + - name: Build solution + run: dotnet build --configuration ${{ env.Configuration }} --verbosity normal + - name: Create NuGet package + run: dotnet pack --output ./artifacts --configuration ${{ env.Configuration }} --verbosity normal + - name: Upload NuGet package artifact + uses: actions/upload-artifact@v4 + with: + name: mongo2go-nuget-package + path: ./artifacts/*.nupkg + - name: Publish NuGet package + run: dotnet nuget push ./artifacts/*.nupkg --source https://api.nuget.org/v3/index.json --api-key "${{ secrets.NUGET_API_KEY }}" --skip-duplicate + - name: Create GitHub Release + run: | + gh release create ${{ github.ref_name }} ./artifacts/*.nupkg \ + --title "${{ github.ref_name }}" \ + --notes "A new release has been created. Please update the release notes manually with details about changes and improvements." 
\ + --draft + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/.gitignore b/Mongo2Go-4.1.0/.gitignore new file mode 100644 index 00000000..9b19ad42 --- /dev/null +++ b/Mongo2Go-4.1.0/.gitignore @@ -0,0 +1,14 @@ +src/Mongo2Go/bin/ +src/Mongo2GoTests/bin/ +src/MongoDownloader/bin/ +src/packages/ +obj/ +*ReSharper* +*.suo +*.dotCover +*.user +~$* +*/StyleCop.Cache +*.nupkg +**/.vs +.idea/ diff --git a/Mongo2Go-4.1.0/LICENSE b/Mongo2Go-4.1.0/LICENSE new file mode 100644 index 00000000..07f9ff60 --- /dev/null +++ b/Mongo2Go-4.1.0/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2012-2025 Johannes Hoppe and many ❤️ contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Mongo2Go-4.1.0/Mongo2Go.sln b/Mongo2Go-4.1.0/Mongo2Go.sln new file mode 100644 index 00000000..6e6f949e --- /dev/null +++ b/Mongo2Go-4.1.0/Mongo2Go.sln @@ -0,0 +1,48 @@ +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.27004.2005 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{0B557702-3C09-4514-BDD5-55A44F22113F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Mongo2Go", "src\Mongo2Go\Mongo2Go.csproj", "{040A1626-1D04-40D6-BCCF-2D207AE648FC}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Mongo2GoTests", "src\Mongo2GoTests\Mongo2GoTests.csproj", "{ADE5A672-6A00-4561-BCC1-E5497016DE24}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MongoDownloader", "src\MongoDownloader\MongoDownloader.csproj", "{7E10E0DE-8092-4ECB-B05A-0A15472AB8D2}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{56AB91A3-555C-4D59-BB92-570465DC2CA0}" + ProjectSection(SolutionItems) = preProject + README.md = README.md + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {040A1626-1D04-40D6-BCCF-2D207AE648FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {040A1626-1D04-40D6-BCCF-2D207AE648FC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {040A1626-1D04-40D6-BCCF-2D207AE648FC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {040A1626-1D04-40D6-BCCF-2D207AE648FC}.Release|Any CPU.Build.0 = Release|Any CPU + {ADE5A672-6A00-4561-BCC1-E5497016DE24}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{ADE5A672-6A00-4561-BCC1-E5497016DE24}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ADE5A672-6A00-4561-BCC1-E5497016DE24}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ADE5A672-6A00-4561-BCC1-E5497016DE24}.Release|Any CPU.Build.0 = Release|Any CPU + {7E10E0DE-8092-4ECB-B05A-0A15472AB8D2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7E10E0DE-8092-4ECB-B05A-0A15472AB8D2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7E10E0DE-8092-4ECB-B05A-0A15472AB8D2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7E10E0DE-8092-4ECB-B05A-0A15472AB8D2}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {040A1626-1D04-40D6-BCCF-2D207AE648FC} = {0B557702-3C09-4514-BDD5-55A44F22113F} + {ADE5A672-6A00-4561-BCC1-E5497016DE24} = {0B557702-3C09-4514-BDD5-55A44F22113F} + {7E10E0DE-8092-4ECB-B05A-0A15472AB8D2} = {0B557702-3C09-4514-BDD5-55A44F22113F} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {08364BFD-1801-4718-83F5-F6F99950B05E} + EndGlobalSection +EndGlobal diff --git a/Mongo2Go-4.1.0/README.md b/Mongo2Go-4.1.0/README.md new file mode 100644 index 00000000..3d5f3c94 --- /dev/null +++ b/Mongo2Go-4.1.0/README.md @@ -0,0 +1,565 @@ +Mongo2Go - MongoDB for integration tests & local debugging +======== + +![Logo](src/mongo2go_200_200.png) + +[![NuGet](https://img.shields.io/nuget/v/Mongo2Go.svg?label=NuGet&logo=NuGet)](https://www.nuget.org/packages/Mongo2Go/) + + +Mongo2Go is a managed wrapper around MongoDB binaries. +It targets **.NET Framework 4.7.2** and **.NET Standard 2.1.** and works with Windows, Linux and macOS. +This Nuget package contains the executables of _mongod_, _mongoimport_ and _mongoexport_ **for Windows, Linux and macOS** . + +__Brought to you by [Johannes Hoppe](https://twitter.com/johanneshoppe) with the help of many ❤️ contributors!__ + +Mongo2Go has two use cases: + +1. Providing multiple, temporary and isolated MongoDB databases for integration tests +2. Providing a quick to set up MongoDB database for a local developer environment + + +Integration test +------------------------------------- +With each call of the static method **MongoDbRunner.Start()** a new MongoDB instance will be set up. +A free port will be used (starting with port 27018) and a corresponding data directory will be created. +The method returns an instance of MongoDbRunner, which implements IDisposable. +As soon as the MongoDbRunner is disposed (or if the Finalizer is called by the GC), +the wrapped MongoDB process will be killed and all data in the data directory will be deleted. + + +Local debugging +------------------------ +In this mode a single MongoDB instance will be started on the default port (27017). +No data will be deleted and the MongoDB instance won’t be killed automatically. +Multiple calls to **MongoDbRunner.StartForDebugging()** will return an instance with the State “AlreadyRunning”. +You can ignore the IDisposable interface, as it won’t have any effect. +**I highly recommend to not use this mode on productive machines!** +Here you should set up a MongoDB as it is described in the manual. +For you convenience the MongoDbRunner also exposes _mongoexport_ and _mongoimport_ +which allow you to quickly set up a working environment. + + +Single server replica set mode to enable transactions +------------------------- +`MongoDbRunner.Start()` can be set up to take in an optional boolean parameter called `singleNodeReplSet`. 
+When passed in with the value `true` - (**`MongoDbRunner.Start(singleNodeReplSet: true)`**) +- a single node mongod instance will be started as a replica set with the name `singleNodeReplSet`. +Replica set mode is required for transactions to work in MongoDB 4.0 or greater + +Replica set initialization requires the use of a short delay to allow for the replica set to stabilize. This delay is linked to a timeout value of 5 seconds. + +If the timeout expires before the replica set has stabilized a `TimeoutException` will be thrown. + +The default timeout can be changed through the optional parameter `singleNodeReplSetWaitTimeout`, which allows values between 0 and 65535 seconds: **`MongoDbRunner.Start(singleNodeReplSet: true, singleNodeReplSetWaitTimeout: 10)`** + +Additional mongod arguments +--------------------------- +`MongoDbRunner.Start()` can be set up to consume additional `mongod` arguments. This can be done using the string parameter called `additionalMongodArguments`. + +The list of additional arguments cannot contain arguments already defined internally by Mongo2Go. An `ArgumentException` will be thrown in this case, specifying which additional arguments are required to be discarded. + +Example of usage of the additional `mongod` arguments: **`MongoDbRunner.Start(additionalMongodArguments: "--quiet")`** + +Installation +-------------- +The Mongo2Go Nuget package can be found at [https://nuget.org/packages/Mongo2Go/](https://nuget.org/packages/Mongo2Go/) +To install it via the .NET CLI, simply enter: + +```sh +dotnet add package Mongo2Go +``` + +* The new 4.x branch targets __.NET Framework 4.7.2__ and __.NET Standard 2.1__. Please use this version if possible. +* The old 3.x branch targets __.NET Standard 2.0__. No new features will be added, only bugfixes might be made. +* The old 2.x branch targets __.NET Standard 1.6__. No new features will be added, only bugfixes might be made. +* The old 1.x branch targets good-old classic __.NET 4.6.1__. This is for legacy environments only. No changes will be made. 
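+
+The options described above can be combined in a single `MongoDbRunner.Start` call. The following is a minimal sketch (the wrapper class, database name and the extra `--quiet` argument are illustrative only):
+
+```c#
+using Mongo2Go;
+using MongoDB.Driver;
+
+public static class QuickStartSketch
+{
+    public static void Run()
+    {
+        // Start a disposable mongod as a single-node replica set (required for transactions),
+        // wait up to 10 seconds for it to stabilize and pass one additional mongod argument.
+        using (MongoDbRunner runner = MongoDbRunner.Start(
+            singleNodeReplSet: true,
+            singleNodeReplSetWaitTimeout: 10,
+            additionalMongodArguments: "--quiet"))
+        {
+            var client = new MongoClient(runner.ConnectionString);
+            var database = client.GetDatabase("QuickStart"); // illustrative database name
+
+            // ... exercise 'database' in your integration test here ...
+        } // disposing the runner stops mongod and deletes the temporary data directory
+    }
+}
+```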
+ + +Examples +-------- + +**Example: Integration Test (here: Machine.Specifications & Fluent Assertions)** + +```c# +[Subject("Runner Integration Test")] +public class when_using_the_inbuild_serialization : MongoIntegrationTest +{ + static TestDocument findResult; + + Establish context = () => + { + CreateConnection(); + _collection.Insert(TestDocument.DummyData1()); + }; + + Because of = () => findResult = _collection.FindOneAs(); + + It should_return_a_result = () => findResult.ShouldNotBeNull(); + It should_hava_expected_data = () => findResult.ShouldHave().AllPropertiesBut(d => d.Id).EqualTo(TestDocument.DummyData1()); + + Cleanup stuff = () => _runner.Dispose(); +} + +public class MongoIntegrationTest +{ + internal static MongoDbRunner _runner; + internal static MongoCollection _collection; + + internal static void CreateConnection() + { + _runner = MongoDbRunner.Start(); + + MongoClient client = new MongoClient(_runner.ConnectionString); + MongoDatabase database = client.GetDatabase("IntegrationTest"); + _collection = database.GetCollection("TestCollection"); + } +} +``` + +More tests can be found at https://github.com/Mongo2Go/Mongo2Go/tree/master/src/Mongo2GoTests/Runner + +**Example: Exporting seed data** + +```c# +using (MongoDbRunner runner = MongoDbRunner.StartForDebugging()) { + + runner.Export("TestDatabase", "TestCollection", @"..\..\App_Data\test.json"); +} +``` + +**Example: Importing for local debugging (compatible with ASP.NET MVC 4 Web API as well as ASP.NET Core)** + +```c# +public class WebApiApplication : System.Web.HttpApplication +{ + private MongoDbRunner _runner; + + protected void Application_Start() + { + _runner = MongoDbRunner.StartForDebugging(); + _runner.Import("TestDatabase", "TestCollection", @"..\..\App_Data\test.json", true); + + MongoClient client = new MongoClient(_runner.ConnectionString); + MongoDatabase database = client.GetDatabase("TestDatabase"); + MongoCollection collection = database.GetCollection("TestCollection"); + + /* happy coding! */ + } + + protected void Application_End() + { + _runner.Dispose(); + } +} +``` + +**Example: Transactions (New feature since v2.2.8)** + +
+ Full integration test with transaction handling (click to show) + + +```c# + public class when_transaction_completes : MongoTransactionTest + { + private static TestDocument mainDocument; + private static TestDocument dependentDocument; + Establish context = () => + + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true); + client = new MongoClient(_runner.ConnectionString); + database = client.GetDatabase(_databaseName); + _mainCollection = database.GetCollection(_mainCollectionName); + _dependentCollection = database.GetCollection(_dependentCollectionName); + _mainCollection.InsertOne(TestDocument.DummyData2()); + _dependentCollection.InsertOne(TestDocument.DummyData2()); + }; + + private Because of = () => + { + var filter = Builders.Filter.Where(x => x.IntTest == 23); + var update = Builders.Update.Inc(i => i.IntTest, 10); + + using (var sessionHandle = client.StartSession()) + { + try + { + var i = 0; + while (i < 10) + { + try + { + i++; + sessionHandle.StartTransaction(new TransactionOptions( + readConcern: ReadConcern.Local, + writeConcern: WriteConcern.W1)); + try + { + var first = _mainCollection.UpdateOne(sessionHandle, filter, update); + var second = _dependentCollection.UpdateOne(sessionHandle, filter, update); + } + catch (Exception e) + { + sessionHandle.AbortTransaction(); + throw; + } + + var j = 0; + while (j < 10) + { + try + { + j++; + sessionHandle.CommitTransaction(); + break; + } + catch (MongoException e) + { + if (e.HasErrorLabel("UnknownTransactionCommitResult")) + continue; + throw; + } + } + break; + } + catch (MongoException e) + { + if (e.HasErrorLabel("TransientTransactionError")) + continue; + throw; + } + } + } + catch (Exception e) + { + //failed after multiple attempts so log and do what is appropriate in your case + } + } + + mainDocument = _mainCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + dependentDocument = _dependentCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + }; + + It main_should_be_33 = () => mainDocument.IntTest.Should().Be(33); + It dependent_should_be_33 = () => dependentDocument.IntTest.Should().Be(33); + Cleanup cleanup = () => _runner.Dispose(); + } + +``` +
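+
+For comparison, the manual retry loops above can also be expressed with the driver's `WithTransaction` helper (a feature of recent MongoDB.Driver versions, not of Mongo2Go itself), which retries internally on `TransientTransactionError` and `UnknownTransactionCommitResult`. A minimal sketch, with database and collection names purely illustrative:
+
+```c#
+using Mongo2Go;
+using MongoDB.Bson;
+using MongoDB.Driver;
+
+public static class TransactionSketch
+{
+    public static void Run()
+    {
+        using (var runner = MongoDbRunner.Start(singleNodeReplSet: true))
+        {
+            var client = new MongoClient(runner.ConnectionString);
+            var collection = client.GetDatabase("TransactionTest")
+                                   .GetCollection<BsonDocument>("Items");
+
+            using (var session = client.StartSession())
+            {
+                // WithTransaction starts the transaction, runs the callback, commits it,
+                // and retries automatically on transient transaction errors.
+                session.WithTransaction((s, ct) =>
+                {
+                    collection.InsertOne(s, new BsonDocument("value", 1));
+                    collection.InsertOne(s, new BsonDocument("value", 2));
+                    return true;
+                });
+            }
+        }
+    }
+}
+```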
+ +**Example: Logging with `ILogger`** +
+  Wire mongod's logs at info and above levels to a custom `ILogger` (click to show)
+
+```c#
+public class MongoIntegrationTest
+{
+    internal static MongoDbRunner _runner;
+
+    internal static void CreateConnection()
+    {
+        // Create a custom logger.
+        // Replace this code with your own configuration of an ILogger.
+        var provider = new ServiceCollection()
+            .AddLogging(config =>
+            {
+                // Log to a simple console and to event logs.
+                config.AddSimpleConsole();
+                config.AddEventLog();
+            })
+            .BuildServiceProvider();
+        var logger = provider.GetService<ILoggerFactory>().CreateLogger("Mongo2Go");
+
+        _runner = MongoDbRunner.Start(logger: logger);
+    }
+}
+```
+
+ +
+  Wire mongod's logs at debug levels to a custom `ILogger` (click to show)
+
+```c#
+public class MongoIntegrationTest
+{
+    internal static MongoDbRunner _runner;
+
+    internal static void CreateConnection()
+    {
+        // Create a custom logger.
+        // Replace this code with your own configuration of an ILogger.
+        var provider = new ServiceCollection()
+            .AddLogging(config =>
+            {
+                // Mongod's D1-D2 levels are logged with Debug level.
+                // D3-D5 levels are logged with Trace level.
+                config.SetMinimumLevel(LogLevel.Trace);
+
+                // Log to System.Diagnostics.Debug and to the event source.
+                config.AddDebug();
+                config.AddEventSourceLogger();
+            })
+            .BuildServiceProvider();
+        var logger = provider.GetService<ILoggerFactory>().CreateLogger("Mongo2Go");
+
+        _runner = MongoDbRunner.Start(
+            additionalMongodArguments: "vvvvv", // Tell mongod to output its D5 level logs
+            logger: logger);
+    }
+}
+```
+
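+
+Note that the integration-test example near the top of this README still shows the legacy `MongoCollection`/`FindOneAs` API. With the MongoDB.Driver 3.x versions referenced by current Mongo2Go releases (see the changelog below), the same pattern looks roughly like this minimal sketch (database and collection names are illustrative):
+
+```c#
+using Mongo2Go;
+using MongoDB.Bson;
+using MongoDB.Driver;
+
+public static class CurrentDriverSketch
+{
+    public static void Run()
+    {
+        using (MongoDbRunner runner = MongoDbRunner.Start())
+        {
+            var client = new MongoClient(runner.ConnectionString);
+            IMongoDatabase database = client.GetDatabase("IntegrationTest");
+            IMongoCollection<BsonDocument> collection = database.GetCollection<BsonDocument>("TestCollection");
+
+            // Insert a document and read it back through the current driver API.
+            collection.InsertOne(new BsonDocument("name", "value"));
+            BsonDocument found = collection.Find(FilterDefinition<BsonDocument>.Empty).FirstOrDefault();
+        }
+    }
+}
+```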
+ +Changelog +------------------------------------- + +### Mongo2Go 4.1.0, January 30 2025 + +- Updated **MongoDB.Driver** to version **3.1.0**, ensuring compatibility with the latest MongoDB client features (PR [#156](https://github.com/Mongo2Go/Mongo2Go/pull/156), fixes [#154](https://github.com/Mongo2Go/Mongo2Go/issues/154) - many thanks to [Teneko](https://github.com/teneko)) +- Please note that the bundled version of MongoDB included with this package remains **v4.4.4**. +- **Note for Ubuntu users**: MongoDB 4.4.4 requires **libcrypto.so.1.1**, which is no longer included in Ubuntu 22.04 and newer. If you encounter an error like: + ``` + error while loading shared libraries: libcrypto.so.1.1: cannot open shared object file: No such file or directory + ``` + You can fix this by installing OpenSSL 1.1 manually: + ```bash + echo "deb http://security.ubuntu.com/ubuntu focal-security main" | sudo tee /etc/apt/sources.list.d/focal-security.list + sudo apt update + sudo apt install -y libssl1.1 + ``` + This restores `libcrypto.so.1.1` and allows Mongo2Go/MongoDB to run properly. + + +### Mongo2Go 4.0.0, November 19 2024 + +- A big thank you to [DrewM-Hax0r](https://github.com/DrewM-Hax0r) for championing this release! (PR [#153](https://github.com/Mongo2Go/Mongo2Go/pull/153), fixes [#152](https://github.com/Mongo2Go/Mongo2Go/issues/152)) +- This is a new major version for Mongo2Go (4.x), driven by: + - Dropping support for old .NET Framework versions earlier than 4.7.2 due to updated framework targets. + - MongoDB driver switching to strong-named assemblies (see [.NET Driver Version 2.28.0 Release Notes](https://www.mongodb.com/community/forums/t/net-driver-2-28-0-released/289745)). +- Updated **MongoDB driver to version 3** and re-targeted the project to meet the requirements of the new driver version. +- Fixed an issue with the single-node replica set option, caused by outdated connection strings that were incompatible with the latest MongoDB driver. +- Replaced deprecated dependent packages with updated, supported versions, and patched vulnerabilities by upgrading vulnerable dependencies. +- Please note that the bundled version of MongoDB included with this package has not changed and is still **v4.4.4**. This version of MongoDB is still compatible with the latest version of the driver, so there was no need to update at this time. +- **Bugfix:** Corrected binary search path on Linux when `NUGET_PACKAGES` is specified (PR [#140](https://github.com/Mongo2Go/Mongo2Go/pull/140), fixes [#134](https://github.com/Mongo2Go/Mongo2Go/issues/134) - many thanks to [Ove Andersen](https://github.com/azzlack)) +- **Bugfix**: Stops extra empty temporary data being generated (PR [#138](https://github.com/Mongo2Go/Mongo2Go/pull/138), fixes [#136](https://github.com/Mongo2Go/Mongo2Go/issues/136) - many thanks to [Alex Wardle](https://github.com/awardle)) + +
+ Changelog v3.0.0 to 3.1.3 (click to show) + +### Mongo2Go 3.1.3, April 30 2021 + +* targeting .NET Standard 2.0 instead of 2.1, this makes Mongo2Go compatible with .NET Framework (version 4.7.1 and later) (PR [#118](https://github.com/Mongo2Go/Mongo2Go/pull/118) - many thanks to [Cédric Luthi](https://github.com/0xced)) +* fixes handling of the path search for the NUGET_PACKAGE environment variable (PR [#119](https://github.com/Mongo2Go/Mongo2Go/pull/119) - many thanks to [Timm Hoffmeister](https://github.com/vader1986)) +* internal: `dotnet pack` is now used to create the nupkg file for a release (PR [#121](https://github.com/Mongo2Go/Mongo2Go/pull/121) - many thanks to [Cédric Luthi](https://github.com/0xced)) + +### Mongo2Go 3.1.1, April 08 2021 + +* internal: Better algorithm for determining a free port. This allows parallel execution of tests and increases compatibility with Raider and other test runners. (PR [#116](https://github.com/Mongo2Go/Mongo2Go/pull/116), fixes [#115](https://github.com/Mongo2Go/Mongo2Go/issues/115) and [#106](https://github.com/Mongo2Go/Mongo2Go/issues/106) - many thanks to [liangshiwei](https://github.com/realLiangshiwei)) + +### Mongo2Go 3.1.0, April 07 2021 + +* **NEW: Configurable logging!** adds the option to inject a `Microsoft.Extensions.Logging.ILogger` to `MongoDbRunner.Start(logger)` arguments. Now you can adjust or disable the console output to avoid noise in CI environments. Please note the two examples shown above. (PR [#113](https://github.com/Mongo2Go/Mongo2Go/pull/113), fixes [#94](https://github.com/Mongo2Go/Mongo2Go/issues/94), [#95](https://github.com/Mongo2Go/Mongo2Go/issues/95) and [#113](https://github.com/Mongo2Go/Mongo2Go/issues/113) - many thanks to [Corentin Altepe](https://github.com/corentinaltepe)) +* internal: replaces `--sslMode disabled` (deprecated) with `--tlsMode disabled` in command line arguments to mongod. + +### Mongo2Go 3.0.0, March 26 2021 + +* includes MongoDB binaries of **version 4.4.4** with support for Windows, Linux and macOS +* targets **.NET Standard 2.1** (can be used with .NET Core 3.0 and .NET 5.0) + +* adds new MongoDownloader tool (PR [#109](https://github.com/Mongo2Go/Mongo2Go/pull/109), fixes [#82](https://github.com/Mongo2Go/Mongo2Go/issues/82) and [#112](https://github.com/Mongo2Go/Mongo2Go/issues/112) - many thanks to [Cédric Luthi](https://github.com/0xced)) +* adds support for `NUGET_PACKAGES` environment variable (PR [#110](https://github.com/Mongo2Go/Mongo2Go/pull/110) - many thanks to [Bastian Eicher](https://github.com/bastianeicher)) + +
+ +
+ Changelog v2.0.0-alpha1 to v2.2.16 (click to show) + +### Mongo2Go 2.2.16, December 13 2020 + +* fix for non existing starting path for binary search (PR [#107](https://github.com/Mongo2Go/Mongo2Go/pull/107), fixes [#105](https://github.com/Mongo2Go/Mongo2Go/issues/105) - many thanks to [Gurov Yury](https://github.com/kenoma)) + +### Mongo2Go 2.2.15, December 12 2020 + +* throw exception if cluster is not ready for transactions after `singleNodeReplSetWaitTimeout` (PR [#103](https://github.com/Mongo2Go/Mongo2Go/pull/103) - many thanks for the continued support by [José Mira](https://github.com/zmira)) +s +### Mongo2Go 2.2.14, October 17 2020 + +* fixes a bug with pulling mongo binaries from wrong version (PR [#87](https://github.com/Mongo2Go/Mongo2Go/pull/87), fixes [#86](https://github.com/Mongo2Go/Mongo2Go/issues/86) - many thanks to [mihevc](https://github.com/mihevc)) +* ensures transaction is ready (solves error message: `System.NotSupportedException : StartTransaction cannot determine if transactions are supported because there are no connected servers.`) (PR [#101](https://github.com/Mongo2Go/Mongo2Go/pull/101), fixes [#89](https://github.com/Mongo2Go/Mongo2Go/issues/89), [#91](https://github.com/Mongo2Go/Mongo2Go/issues/91) and [#100](https://github.com/Mongo2Go/Mongo2Go/issues/100) - many thanks to [liangshiwei](https://github.com/realLiangshiwei)) + +### Mongo2Go 2.2.12, September 07 2019 +* performance: waits for replica set ready log message, or throws if timeout expires, instead of using `Thread.Sleep(5000)` (PR [#83](https://github.com/Mongo2Go/Mongo2Go/pull/83), fixes [#80](https://github.com/Mongo2Go/Mongo2Go/issues/80) - many thanks again to [José Mira](https://github.com/zmira)) + +### Mongo2Go 2.2.11, May 10 2019 +* allows additional custom MongoDB arguments (PR [#69](https://github.com/Mongo2Go/Mongo2Go/pull/69), fixes [#68](https://github.com/Mongo2Go/Mongo2Go/issues/68) - many thanks to [José Mira](https://github.com/zmira)) +* adds option to set port for `StartForDebugging()` (PR [#72](https://github.com/Mongo2Go/Mongo2Go/pull/72), fixes [#71](https://github.com/Mongo2Go/Mongo2Go/issues/71) - many thanks to [Danny Bies](https://github.com/dannyBies)) + +### Mongo2Go 2.2.9, February 04 2019 +* fixes a file path issue on Linux if you run on an SDK version beyond .NET Standard 1.6 (PR [#63](https://github.com/Mongo2Go/Mongo2Go/pull/63), fixes [#62](https://github.com/Mongo2Go/Mongo2Go/issues/62) and [#61](https://github.com/Mongo2Go/Mongo2Go/issues/61)) - many thanks to [Jeroen Vannevel](https://github.com/Vannevelj)) +* continuous integration runs on Linux (Travis CI) and Windows (AppVeyor) now + +### Mongo2Go 2.2.8, October 12 2018 +* updated MongoDB binaries to 4.0.2 to support tests leveraging transaction across different collections and databases +* updated MongoDB C# driver to 2.7.0 to be compatible with MongoDB 4.0 +* adds `singleNodeReplSet` paramter to `MongoDbRunner.Start` which allows mongod instance to be started as a replica set to enable transaction support (PR [#57](https://github.com/Mongo2Go/Mongo2Go/pull/57) - many thanks to [Mahi Satyanarayana](https://github.com/gbackmania)) +* fixes port lookup for UnixPortWatcher (PR [#58](https://github.com/Mongo2Go/Mongo2Go/pull/58) - many thanks to [Viktor Kolybaba](https://github.com/VikKol)) + +### Mongo2Go 2.2.7, August 13 2018 +* updates the `MongoBinaryLocator` to look for binaries in the nuget cache if they are not found in the project directory. 
+ * this will make Mongo2Go compatible with projects using the nuget `PackageReference` option. (PR [#56](https://github.com/Mongo2Go/Mongo2Go/pull/56), fixes [#39](https://github.com/Mongo2Go/Mongo2Go/issues/39) and [#55](https://github.com/Mongo2Go/Mongo2Go/issues/55)) +* adds the `binariesSearchDirectory` parameter to `MongoDbRunner.Start` which allows an additional binaries search directory to be provided. + * this will make the db runner more flexible if someone decides to use it in some unpredictable way. +* many thanks to [Nicholas Markkula](https://github.com/nickmkk) + +### Mongo2Go 2.2.6, July 20 2018 +* fixes broken linux support (fixes [#47](https://github.com/Mongo2Go/Mongo2Go/issues/47)) + +### Mongo2Go 2.2.5, July 19 2018 +* fixes unresponsive process issue (PR [#52](https://github.com/Mongo2Go/Mongo2Go/pull/52), fixes [#49](https://github.com/Mongo2Go/Mongo2Go/issues/49)) +* many thanks to [narendrachava](https://github.com/narendrachava) + +### Mongo2Go 2.2.4, June 06 2018 +* better support for TeamCity: removed MaxLevelOfRecursion limitation when searching for MongoDb binaries (PR [#50](https://github.com/Mongo2Go/Mongo2Go/pull/50), fixes [#39](https://github.com/Mongo2Go/Mongo2Go/issues/39)) +* many thanks to [Stanko Culaja](https://github.com/culaja) + +### Mongo2Go 2.2.2, June 05 2018 +* includes mongod, mongoimport and mongoexport v3.6.1 for Windows, Linux and macOS via PR [#46](https://github.com/Mongo2Go/Mongo2Go/pull/46), which fixes [#45](https://github.com/Mongo2Go/Mongo2Go/issues/45) +* many thanks to [Joe Chan](https://github.com/joehmchan) + +### Mongo2Go 2.2.1, November 23 2017 +* no MongoDB binaries changed, still .NET Standard 1.6 +* feature: uses temporary directory instead of good-old windows style `C:\data\db` by default (PR [#42](https://github.com/Mongo2Go/Mongo2Go/pull/42)) - `MongoDbRunner.Start()` and `MongoDbRunner.StartForDebugging()` will now work without any extra parameters for Linux/macOS +* bugfix: runs again on Linux/macOS, by making the binaries executable (PR [#42](https://github.com/Mongo2Go/Mongo2Go/pull/42), which fixes [#37](https://github.com/Mongo2Go/Mongo2Go/issues/37) and might also fix [#43](https://github.com/Mongo2Go/Mongo2Go/issues/43)) +* internal: Unit Tests are running again (PR [#44](https://github.com/Mongo2Go/Mongo2Go/pull/44), which fixes [#31](https://github.com/Mongo2Go/Mongo2Go/issues/31), [#40](https://github.com/Mongo2Go/Mongo2Go/issues/40)) +* internal: No hardcoded path passed to MongoDbRunner constructor (fixes [41](https://github.com/Mongo2Go/Mongo2Go/issues/41)) +* many thanks to [Per Liedman](https://github.com/perliedman) + +### Mongo2Go 2.2.0, August 17 2017 +* includes mongod, mongoimport and mongoexport v3.4.7 for Windows, Linux and macOS +* targets .NET Standard 1.6 (can be used with .NET Core 1.0 / 1.1 / 2.0) +* many thanks to [Aviram Fireberger](https://github.com/avrum) + +### Mongo2Go 2.1.0, March 10 2017 +* skips v2.0 to have same numbers as v1.x. 
+* no MongoDB binaries changed since 2.0.0-alpha1 (still MongoDB v3.2.7 for Windows, Linux and macOS) +* targets .NET Standard 1.6 (can be used with .NET Core 1.0 / 1.1) +* bugfix: prevent windows firewall popup (PR [#30](https://github.com/Mongo2Go/Mongo2Go/pull/30), which fixes [#21](https://github.com/Mongo2Go/Mongo2Go/pull/21)) +* many thanks to [kubal5003](https://github.com/kubal5003) + +### Mongo2Go 1.1.0, March 10 2017 _(legacy branch!)_ +* no MongoDB binaries changed since v1.0 (still MongoDB v3.2.7 for Windows, Linux and macOS) +* targets .NET 4.6.1 +* bugfix: prevent windows firewall popup (PR [#29](https://github.com/Mongo2Go/Mongo2Go/pull/29), which fixes [#21](https://github.com/Mongo2Go/Mongo2Go/pull/21)) +* many thanks to [kubal5003](https://github.com/kubal5003) + + +### Mongo2Go 2.0.0-alpha1, December 19 2016 +* this version has no support for .NET Framework 4.6, please continue to use the stable package v.1.0.0 +* NEW: first support of .NET Standard 1.6 ([#25](https://github.com/Mongo2Go/Mongo2Go/pull/25)) + * many thanks to [Hassaan Ahmed](https://github.com/bannerflow-hassaan) + * see the [Wiki](https://github.com/Mongo2Go/Mongo2Go/wiki/NetStandard) for more information about .NET Core 1.0 / .NET Standard 1.6 + +
+ +
+ Changelog v0.1.0 to v1.0.0 (click to show) + +### Mongo2Go 1.0.0, November 14 2016 +* v1.0 finally marked as stable +* no changes to 1.0.0-beta4 +* changes since last stable version (0.2): + * includes mongod, mongoimport and mongoexport v3.2.7 for Windows, Linux and macOS + * support for Windows, Linux and macOS + * uses MongoDB.Driver 2.3.0 + * **requires .NET 4.6** + * various small bugfixes and improvements + +### Mongo2Go 1.0.0-beta4, October 24 2016 +* update to MongoDB.Driver 2.3.0 ([#23](https://github.com/Mongo2Go/Mongo2Go/pull/23)) +* upgraded to __.NET 4.6__ +* internal change: update MSpec as well and add MSTest Adapter for MSpec (ReSharper console runner doesn't support 4.6) +* many thanks to [Alexander Zeitler](https://github.com/AlexZeitler) +* please report any kind of [issues here on github](https://github.com/Mongo2Go/Mongo2Go/issues) so that we can mark 1.0.0 as stable! + +### Mongo2Go 1.0.0-beta3, August 22 2016 +* feature: process windows are hidden now ([#20](https://github.com/Mongo2Go/Mongo2Go/pull/20)) +* bugfix: random folders are used for storing databases ([#18](https://github.com/Mongo2Go/Mongo2Go/pull/18)) +* many thanks to [Matt Kocaj](https://github.com/cottsak) +* please report any kind of [issues here on github](https://github.com/Mongo2Go/Mongo2Go/issues) so that we can mark 1.0.0 as stable! + +### Mongo2Go 1.0.0-beta2, July 29 2016 +* fixes for bugs that were introduced by the big rewrite for cross-platform support +* changes from pull request [#14](https://github.com/Mongo2Go/Mongo2Go/pull/14), which fixes [#12](https://github.com/Mongo2Go/Mongo2Go/issues/12), [#13](https://github.com/Mongo2Go/Mongo2Go/issues/13) and [#15](https://github.com/Mongo2Go/Mongo2Go/issues/15), many thanks to [Mitch Ferrer](https://github.com/G3N7) +* please report any kind of [issues here on github](https://github.com/Mongo2Go/Mongo2Go/issues) so that we can mark 1.0.0 as stable! + + +### Mongo2Go 1.0.0-beta, July 24 2016 +* **:tada: NEW: support for Linux and macOS :tada:** +* many thanks to [Kristofer Linnestjerna](https://github.com/krippz) from [netclean.com](http://www.netclean.com/) for the new cross-platform support +* includes mongod, mongoimport and mongoexport v3.2.7 for Windows, Linux and macOS +* changes from pull request [#8](https://github.com/Mongo2Go/Mongo2Go/pull/8), [#10](https://github.com/Mongo2Go/Mongo2Go/pull/10), [#11](https://github.com/Mongo2Go/Mongo2Go/pull/11) which fixes [#9](https://github.com/Mongo2Go/Mongo2Go/issues/9) +* please report any kind of [issues here on github](https://github.com/Mongo2Go/Mongo2Go/issues) so that we can mark 1.0.0 as stable! 
+ +### Mongo2Go 0.2, May 30 2016 +* includes mongod, mongoimport and mongoexport v3.2.6, + (**64bit** from [win32/mongodb-win32-x86_64-2008plus-3.2.6.zip](http://downloads.mongodb.org/win32/mongodb-win32-x86_64-2008plus-3.2.6.zip?_ga=1.190428203.1815541971.1457905247) since 32bit builds are deprecated now) +* removes outmoded Strong-Name signing from assemblies (please open an issue if you really need this, see also [mspec#190](https://github.com/machine/machine.specifications/issues/190)) +* changes from pull request [#7](https://github.com/Mongo2Go/Mongo2Go/pull/7), thanks to [Mitch Ferrer](https://github.com/G3N7) + +### Mongo2Go 0.1.8, March 13 2016 +* includes mongod, mongoimport and mongoexport v3.0.10 (32bit) +* changes from pull request [#5](https://github.com/Mongo2Go/Mongo2Go/pull/5), thanks to [Aristarkh Zagorodnikov](https://github.com/onyxmaster) + +### Mongo2Go 0.1.6, July 21 2015 +* includes mongod, mongoimport and mongoexport v3.0.4 (32bit) +* bug fix [#4](https://github.com/Mongo2Go/Mongo2Go/issues/4): +Sometimes the runner tries to delete the database directory before the mongod process has been stopped, this throws an IOException. +Now the runner waits until the mongod process has been stopped before the database directory will be deleted. +* Thanks [Sergey Zwezdin](https://github.com/sergun) + +### Mongo2Go 0.1.5, July 08 2015 +* includes mongod, mongoimport and mongoexport v2.6.6 (32bit) +* changes from pull request [#3](https://github.com/Mongo2Go/Mongo2Go/pull/3) +* new: `Start` and `StartForDebugging` methods accept an optional parameter to specify a different data directory (default is "C:\data\db") +* many thanks to [Marc](https://github.com/Silv3rcircl3) + +### Mongo2Go 0.1.4, January 26 2015 +* includes mongod, mongoimport and mongoexport v2.6.6 (32bit) +* changes from pull request [#2](https://github.com/Mongo2Go/Mongo2Go/pull/2) +* internal updates for testing the package (not part of the release) + * updated MSpec package so that it would work with the latest VS and R# test runner + * updated Mongo C# Driver, Fluent Assertions, and Moq packages to latest versions + * fixed date handling for mongoimport and mongoexport to pass tests +* many thanks to [Jesse Sweetland](https://github.com/sweetlandj) + +### Mongo2Go 0.1.3, September 20 2012 +* includes mongod, mongoimport and mongoexport v2.2.0 (32bit) + +### Mongo2Go 0.1.2, August 20 2012 +* stable version +* includes mongod, mongoimport and mongoexport v2.2.0-rc1 (32bit) + +### Mongo2Go 0.1.1, August 16 2012 +* second alpha version +* includes mongod, mongoimport and mongoexport v2.2.0-rc1 (32bit) + + +### Mongo2Go 0.1.0, August 15 2012 +* first alpha version +* includes mongod, mongoimport and mongoexport v2.2.0-rc1 (32bit) + +
+ +How to contribute +------------------------------------- + +Just fork the project, make your changes send us a PR. + +In the root folder, just run: +``` +dotnet restore +dotnet build +dotnet test src/Mongo2GoTests +``` diff --git a/Mongo2Go-4.1.0/README_INTERNAL.md b/Mongo2Go-4.1.0/README_INTERNAL.md new file mode 100644 index 00000000..c146550f --- /dev/null +++ b/Mongo2Go-4.1.0/README_INTERNAL.md @@ -0,0 +1,66 @@ +# Mongo2Go - Knowledge for Maintainers + +## Creating a Release + +Mongo2Go uses [MinVer](https://github.com/adamralph/minver) for versioning. +Releases are fully automated via GitHub Actions and triggered by tagging a commit with the desired semantic version number. +This process involves two steps to ensure reliable deployments. + +### Steps to Create a Release + +1. **Push Your Changes** + - Commit and push your changes to the main branch. This will trigger a CI build to validate the changes. + ```bash + git commit -m "Your commit message" + git push + ``` + +2. **Wait for the CI Build** + - Ensure that the GitHub Actions workflow completes successfully. This confirms your changes are valid. + +3. **Tag the Commit** + - Once the CI build passes, create a lightweight tag with the desired version number + - Use an **annotated tag** to ensure the release is properly versioned and auditable (`-a` flag): + ```bash + git tag -a v4.0.0 + ``` + - Push the tag to trigger the deployment workflow: + ```bash + git push --tags + ``` + +4. **Draft Release Created** + - The workflow will: + 1. Create a multi-target NuGet package. + 2. Publish the package to nuget.org. + 3. Create a **draft release** on GitHub with a placeholder note. + +5. **Review and Finalize the Release** + - Visit the [Releases page](https://github.com/Mongo2Go/Mongo2Go/releases). + - Open the draft release, update the release notes with details about the changes (e.g., changelog, features, fixes), and publish the release manually. + + +## Workflow Details + +- **Two-Step Process**: + 1. The first push (commit) triggers a CI build to validate the changes. + 2. The second push (tag) triggers the deployment workflow. + +- **Triggers**: + - Commits are validated for all branches. + - Tags starting with `v` trigger deployment. + +- **Draft Releases**: + - Releases are created as drafts, allowing maintainers to review and add release notes before publishing. + +- **Automation**: + - The workflow automates building, testing, publishing to nuget.org, and creating a draft GitHub release. + + +## Best Practices for Maintainers + +- **Semantic Versioning**: Ensure that tags follow the [semantic versioning](https://semver.org/) format (`vMAJOR.MINOR.PATCH`). +- **Pre-Releases**: Use pre-release tags for non-final versions (e.g., `v4.0.0-rc.1`). +- **Detailed Release Notes**: Always add detailed information to the GitHub release, highlighting major changes, fixes, and improvements. +- **Final Review**: Review the draft release to ensure all details are correct before publishing. 
+ diff --git a/Mongo2Go-4.1.0/global.json b/Mongo2Go-4.1.0/global.json new file mode 100644 index 00000000..1316d775 --- /dev/null +++ b/Mongo2Go-4.1.0/global.json @@ -0,0 +1,8 @@ +{ + "$schema": "https://json.schemastore.org/global", + "sdk": { + "allowPrerelease": false, + "rollForward": "latestMinor", + "version": "8.0.110" + } +} diff --git a/Mongo2Go-4.1.0/package_create.sh b/Mongo2Go-4.1.0/package_create.sh new file mode 100644 index 00000000..cf21db77 --- /dev/null +++ b/Mongo2Go-4.1.0/package_create.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# just to be sure +#git clean -fdx + +echo +echo "*** Your dotnet version:" +dotnet --version + +echo +echo "*** Creating package:" +dotnet pack --configuration Release src/Mongo2Go/Mongo2Go.csproj -p:ContinuousIntegrationBuild=true + +echo +echo "*** Package content:" +zipinfo src/Mongo2Go/bin/Release/Mongo2Go.*.nupkg \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FileSystem.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FileSystem.cs new file mode 100644 index 00000000..02000724 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FileSystem.cs @@ -0,0 +1,44 @@ +using System.Diagnostics; +using System.IO; + +namespace Mongo2Go.Helper +{ + public class FileSystem : IFileSystem + { + public void CreateFolder(string path) + { + if (!Directory.Exists(path)) + { + Directory.CreateDirectory(path); + } + } + + public void DeleteFolder(string path) + { + if (Directory.Exists(path)) + { + Directory.Delete(path, true); + } + } + + public void DeleteFile(string fullFileName) + { + if (File.Exists(fullFileName)) + { + File.Delete(fullFileName); + } + } + + public void MakeFileExecutable (string path) + { + //when on linux or osx we must set the executeble flag on mongo binarys + var p = Process.Start("chmod", $"+x {path}"); + p.WaitForExit(); + + if (p.ExitCode != 0) + { + throw new IOException($"Could not set executable bit for {path}"); + } + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FolderSearch.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FolderSearch.cs new file mode 100644 index 00000000..3db17c81 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/FolderSearch.cs @@ -0,0 +1,112 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; + +namespace Mongo2Go.Helper +{ + public static class FolderSearch + { + private static readonly char[] _separators = { Path.DirectorySeparatorChar }; + + public static string CurrentExecutingDirectory() + { + string filePath = new Uri(typeof(FolderSearch).GetTypeInfo().Assembly.CodeBase).LocalPath; + return Path.GetDirectoryName(filePath); + } + + public static string FindFolder(this string startPath, string searchPattern) + { + if (startPath == null || searchPattern == null) + { + return null; + } + + string currentPath = startPath; + + foreach (var part in searchPattern.Split(_separators, StringSplitOptions.None)) + { + if (!Directory.Exists(currentPath)) + { + return null; + } + + string[] matchesDirectory = Directory.GetDirectories(currentPath, part); + if (!matchesDirectory.Any()) + { + return null; + } + + if (matchesDirectory.Length > 1) + { + currentPath = MatchVersionToAssemblyVersion(matchesDirectory) + ?? 
matchesDirectory.OrderBy(x => x).Last(); + } + else + { + currentPath = matchesDirectory.First(); + } + } + + return currentPath; + } + + public static string FindFolderUpwards(this string startPath, string searchPattern) + { + if (string.IsNullOrEmpty(startPath)) + { + return null; + } + + string matchingFolder = startPath.FindFolder(searchPattern); + return matchingFolder ?? startPath.RemoveLastPart().FindFolderUpwards(searchPattern); + } + + internal static string RemoveLastPart(this string path) + { + if (!path.Contains(Path.DirectorySeparatorChar)) + { + return null; + } + + List parts = path.Split(new[] { Path.DirectorySeparatorChar }, StringSplitOptions.None).ToList(); + parts.RemoveAt(parts.Count() - 1); + return string.Join(Path.DirectorySeparatorChar.ToString(), parts.ToArray()); + } + + /// + /// Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + /// + public static string FinalizePath(string fileName) + { + string finalPath; + + if (Path.IsPathRooted(fileName)) + { + finalPath = fileName; + } + else + { + finalPath = Path.Combine(CurrentExecutingDirectory(), fileName); + finalPath = Path.GetFullPath(finalPath); + } + + return finalPath; + } + + private static string MatchVersionToAssemblyVersion(string[] folders) + { + var version = typeof(FolderSearch).GetTypeInfo().Assembly.GetCustomAttribute().InformationalVersion; + + foreach (var folder in folders) + { + var lastFolder = new DirectoryInfo(folder).Name; + if (lastFolder == version) + return folder; + } + + return null; + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IFileSystem.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IFileSystem.cs new file mode 100644 index 00000000..9d478e23 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IFileSystem.cs @@ -0,0 +1,10 @@ +namespace Mongo2Go.Helper +{ + public interface IFileSystem + { + void CreateFolder(string path); + void DeleteFolder(string path); + void DeleteFile(string fullFileName); + void MakeFileExecutable (string path ); + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoBinaryLocator.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoBinaryLocator.cs new file mode 100644 index 00000000..31e2c591 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoBinaryLocator.cs @@ -0,0 +1,7 @@ +namespace Mongo2Go.Helper +{ + public interface IMongoBinaryLocator + { + string Directory { get; } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcess.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcess.cs new file mode 100644 index 00000000..d7b951be --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcess.cs @@ -0,0 +1,11 @@ +using System; +using System.Collections.Generic; + +namespace Mongo2Go.Helper +{ + public interface IMongoDbProcess : IDisposable + { + IEnumerable StandardOutput { get; } + IEnumerable ErrorOutput { get; } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcessStarter.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcessStarter.cs new file mode 100644 index 00000000..95f09c1b --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IMongoDbProcessStarter.cs @@ -0,0 +1,11 @@ +using Microsoft.Extensions.Logging; + +namespace Mongo2Go.Helper +{ + public interface IMongoDbProcessStarter + { + IMongoDbProcess Start(string binariesDirectory, string dataDirectory, int port, bool singleNodeReplSet, string additionalMongodArguments, ushort 
singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null); + + IMongoDbProcess Start(string binariesDirectory, string dataDirectory, int port, bool doNotKill, bool singleNodeReplSet, string additionalMongodArguments, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null); + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortPool.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortPool.cs new file mode 100644 index 00000000..39372db1 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortPool.cs @@ -0,0 +1,10 @@ +namespace Mongo2Go.Helper +{ + public interface IPortPool + { + /// + /// Returns and reserves a new port + /// + int GetNextOpenPort(); + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortWatcher.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortWatcher.cs new file mode 100644 index 00000000..28811e2d --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IPortWatcher.cs @@ -0,0 +1,8 @@ +namespace Mongo2Go.Helper +{ + public interface IPortWatcher + { + int FindOpenPort(); + bool IsPortAvailable(int portNumber); + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IProcessWatcher.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IProcessWatcher.cs new file mode 100644 index 00000000..6a465eb8 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/IProcessWatcher.cs @@ -0,0 +1,7 @@ +namespace Mongo2Go.Helper +{ + public interface IProcessWatcher + { + bool IsProcessRunning(string processName); + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoBinaryLocator.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoBinaryLocator.cs new file mode 100644 index 00000000..1b82706a --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoBinaryLocator.cs @@ -0,0 +1,101 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; + +namespace Mongo2Go.Helper +{ + + public class MongoBinaryLocator : IMongoBinaryLocator + { + private readonly string _nugetPrefix = Path.Combine("packages", "Mongo2Go*"); + private readonly string _nugetCachePrefix = Path.Combine("packages", "mongo2go", "*"); + private readonly string _nugetCacheBasePrefix = Path.Combine("mongo2go", "*"); + public const string DefaultWindowsSearchPattern = @"tools\mongodb-windows*\bin"; + public const string DefaultLinuxSearchPattern = "tools/mongodb-linux*/bin"; + public const string DefaultOsxSearchPattern = "tools/mongodb-macos*/bin"; + public const string WindowsNugetCacheLocation = @"%USERPROFILE%\.nuget\packages"; + public static readonly string OsxAndLinuxNugetCacheLocation = Environment.GetEnvironmentVariable("HOME") + "/.nuget/packages"; + private string _binFolder = string.Empty; + private readonly string _searchPattern; + private readonly string _nugetCacheDirectory; + private readonly string _additionalSearchDirectory; + + public MongoBinaryLocator(string searchPatternOverride, string additionalSearchDirectory) + { + _additionalSearchDirectory = additionalSearchDirectory; + _nugetCacheDirectory = Environment.GetEnvironmentVariable("NUGET_PACKAGES"); + + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + _searchPattern = DefaultOsxSearchPattern; + _nugetCacheDirectory = _nugetCacheDirectory ?? 
OsxAndLinuxNugetCacheLocation; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + _searchPattern = DefaultLinuxSearchPattern; + _nugetCacheDirectory = _nugetCacheDirectory ?? OsxAndLinuxNugetCacheLocation; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + _searchPattern = DefaultWindowsSearchPattern; + _nugetCacheDirectory = _nugetCacheDirectory ?? Environment.ExpandEnvironmentVariables(WindowsNugetCacheLocation); + } + else + { + throw new MonogDbBinariesNotFoundException($"Unknown OS: {RuntimeInformation.OSDescription}"); + } + + if (!string.IsNullOrEmpty(searchPatternOverride)) + { + _searchPattern = searchPatternOverride; + } + } + + public string Directory { + get { + if (string.IsNullOrEmpty(_binFolder)){ + return _binFolder = ResolveBinariesDirectory (); + } else { + return _binFolder; + } + } + } + + private string ResolveBinariesDirectory() + { + var searchDirectories = new[] + { + // First search from the additional search directory, if provided + _additionalSearchDirectory, + // Then search from the project directory + FolderSearch.CurrentExecutingDirectory(), + // Finally search from the nuget cache directory + _nugetCacheDirectory + }; + return FindBinariesDirectory(searchDirectories.Where(x => !string.IsNullOrWhiteSpace(x)).ToList()); + } + + private string FindBinariesDirectory(IList searchDirectories) + { + foreach (var directory in searchDirectories) + { + var binaryFolder = + // First try just the search pattern + directory.FindFolderUpwards(_searchPattern) ?? + // Next try the search pattern with nuget installation prefix + directory.FindFolderUpwards(Path.Combine(_nugetPrefix, _searchPattern)) ?? + // Finally try the search pattern with the nuget cache prefix + directory.FindFolderUpwards(Path.Combine(_nugetCachePrefix, _searchPattern)) ?? + // Finally try the search pattern with the basic nuget cache prefix + directory.FindFolderUpwards(Path.Combine(_nugetCacheBasePrefix, _searchPattern)); + if (binaryFolder != null) return binaryFolder; + } + throw new MonogDbBinariesNotFoundException( + $"Could not find Mongo binaries using the search patterns \"{_searchPattern}\", \"{Path.Combine(_nugetPrefix, _searchPattern)}\", \"{Path.Combine(_nugetCachePrefix, _searchPattern)}\", and \"{Path.Combine(_nugetCacheBasePrefix, _searchPattern)}\". " + + $"You can override the search pattern and directory when calling MongoDbRunner.Start. 
We have detected the OS as {RuntimeInformation.OSDescription}.\n" + + $"We walked up to root directory from the following locations.\n {string.Join("\n", searchDirectories)}"); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.IDisposable.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.IDisposable.cs new file mode 100644 index 00000000..8e2c7fdd --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.IDisposable.cs @@ -0,0 +1,55 @@ +using System; + +namespace Mongo2Go.Helper +{ + // IDisposable and friends + public partial class MongoDbProcess + { + ~MongoDbProcess() + { + Dispose(false); + } + + public bool Disposed { get; private set; } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (Disposed) + { + return; + } + + if (disposing) + { + // we have no "managed resources" - but we leave this switch to avoid an FxCop CA1801 warnig + } + + if (_process == null) + { + return; + } + + if (_process.DoNotKill) + { + return; + } + + if (!_process.HasExited) + { + _process.Kill(); + _process.WaitForExit(); + } + + _process.Dispose(); + _process = null; + + Disposed = true; + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.cs new file mode 100644 index 00000000..e8bed971 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcess.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; + +namespace Mongo2Go.Helper +{ + public partial class MongoDbProcess : IMongoDbProcess + { + + private WrappedProcess _process; + + public IEnumerable ErrorOutput { get; set; } + public IEnumerable StandardOutput { get; set; } + + internal MongoDbProcess(WrappedProcess process) + { + _process = process; + } + + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcessStarter.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcessStarter.cs new file mode 100644 index 00000000..b29237f5 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoDbProcessStarter.cs @@ -0,0 +1,92 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Bson; +using MongoDB.Driver; +using MongoDB.Driver.Core.Servers; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using System.Threading; + +namespace Mongo2Go.Helper +{ + public class MongoDbProcessStarter : IMongoDbProcessStarter + { + private const string ProcessReadyIdentifier = "waiting for connections"; + private const string Space = " "; + private const string ReplicaSetName = "singleNodeReplSet"; + private const string ReplicaSetReadyIdentifier = "transition to primary complete; database writes are now permitted"; + + /// + /// Starts a new process. Process can be killed + /// + public IMongoDbProcess Start(string binariesDirectory, string dataDirectory, int port, bool singleNodeReplSet, string additionalMongodArguments, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null) + { + return Start(binariesDirectory, dataDirectory, port, false, singleNodeReplSet, additionalMongodArguments, singleNodeReplSetWaitTimeout, logger); + } + + /// + /// Starts a new process. 
+ /// + public IMongoDbProcess Start(string binariesDirectory, string dataDirectory, int port, bool doNotKill, bool singleNodeReplSet, string additionalMongodArguments, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null) + { + string fileName = @"{0}{1}{2}".Formatted(binariesDirectory, System.IO.Path.DirectorySeparatorChar.ToString(), MongoDbDefaults.MongodExecutable); + + string arguments = (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) ? + @"--dbpath ""{0}"" --port {1} --bind_ip 127.0.0.1".Formatted(dataDirectory, port) : + @"--tlsMode disabled --dbpath ""{0}"" --port {1} --bind_ip 127.0.0.1".Formatted(dataDirectory, port); + + arguments = singleNodeReplSet ? arguments + Space + "--replSet" + Space + ReplicaSetName : arguments; + arguments += MongodArguments.GetValidAdditionalArguments(arguments, additionalMongodArguments); + + WrappedProcess wrappedProcess = ProcessControl.ProcessFactory(fileName, arguments); + wrappedProcess.DoNotKill = doNotKill; + + ProcessOutput output = ProcessControl.StartAndWaitForReady(wrappedProcess, 5, ProcessReadyIdentifier, logger); + if (singleNodeReplSet) + { + var replicaSetReady = false; + + // subscribe to output from mongod process and check for replica set ready message + wrappedProcess.OutputDataReceived += (_, args) => replicaSetReady |= !string.IsNullOrWhiteSpace(args.Data) && args.Data.IndexOf(ReplicaSetReadyIdentifier, StringComparison.OrdinalIgnoreCase) >= 0; + + MongoClient client = new MongoClient("mongodb://127.0.0.1:{0}/?directConnection=true&replicaSet={1}".Formatted(port, ReplicaSetName)); + var admin = client.GetDatabase("admin"); + var replConfig = new BsonDocument(new List() + { + new BsonElement("_id", ReplicaSetName), + new BsonElement("members", + new BsonArray {new BsonDocument {{"_id", 0}, {"host", "127.0.0.1:{0}".Formatted(port)}}}) + }); + var command = new BsonDocument("replSetInitiate", replConfig); + admin.RunCommand(command); + + // wait until replica set is ready or until the timeout is reached + SpinWait.SpinUntil(() => replicaSetReady, TimeSpan.FromSeconds(singleNodeReplSetWaitTimeout)); + + if (!replicaSetReady) + { + throw new TimeoutException($"Replica set initialization took longer than the specified timeout of {singleNodeReplSetWaitTimeout} seconds. Please consider increasing the value of {nameof(singleNodeReplSetWaitTimeout)}."); + } + + // wait until transaction is ready or until the timeout is reached + SpinWait.SpinUntil(() => + client.Cluster.Description.Servers.Any(s => s.State == ServerState.Connected && s.IsDataBearing), + TimeSpan.FromSeconds(singleNodeReplSetWaitTimeout)); + + if (!client.Cluster.Description.Servers.Any(s => s.State == ServerState.Connected && s.IsDataBearing)) + { + throw new TimeoutException($"Cluster readiness for transactions took longer than the specified timeout of {singleNodeReplSetWaitTimeout} seconds. 
Please consider increasing the value of {nameof(singleNodeReplSetWaitTimeout)}."); + } + } + + MongoDbProcess mongoDbProcess = new MongoDbProcess(wrappedProcess) + { + ErrorOutput = output.ErrorOutput, + StandardOutput = output.StandardOutput + }; + + return mongoDbProcess; + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoImportExport.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoImportExport.cs new file mode 100644 index 00000000..35e243c6 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoImportExport.cs @@ -0,0 +1,46 @@ +using System.Diagnostics; +using System.IO; + +namespace Mongo2Go.Helper +{ + public static class MongoImportExport + { + /// + /// Input File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + /// + public static ProcessOutput Import(string binariesDirectory, int port, string database, string collection, string inputFile, bool drop, string additionalMongodArguments = null) + { + string finalPath = FolderSearch.FinalizePath(inputFile); + + if (!File.Exists(finalPath)) + { + throw new FileNotFoundException("File not found", finalPath); + } + + string fileName = Path.Combine("{0}", "{1}").Formatted(binariesDirectory, MongoDbDefaults.MongoImportExecutable); + string arguments = @"--host localhost --port {0} --db {1} --collection {2} --file ""{3}""".Formatted(port, database, collection, finalPath); + if (drop) { arguments += " --drop"; } + arguments += MongodArguments.GetValidAdditionalArguments(arguments, additionalMongodArguments); + + Process process = ProcessControl.ProcessFactory(fileName, arguments); + + return ProcessControl.StartAndWaitForExit(process); + } + + /// + /// Output File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + /// + public static ProcessOutput Export(string binariesDirectory, int port, string database, string collection, string outputFile, string additionalMongodArguments = null) + { + string finalPath = FolderSearch.FinalizePath(outputFile); + + string fileName = Path.Combine("{0}", "{1}").Formatted(binariesDirectory, MongoDbDefaults.MongoExportExecutable); + string arguments = @"--host localhost --port {0} --db {1} --collection {2} --out ""{3}""".Formatted(port, database, collection, finalPath); + arguments += MongodArguments.GetValidAdditionalArguments(arguments, additionalMongodArguments); + + Process process = ProcessControl.ProcessFactory(fileName, arguments); + + return ProcessControl.StartAndWaitForExit(process); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoLogStatement.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoLogStatement.cs new file mode 100644 index 00000000..fedf9627 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongoLogStatement.cs @@ -0,0 +1,77 @@ +using Microsoft.Extensions.Logging; +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Mongo2Go.Helper +{ + /// + /// Structure of a log generated by mongod. Used to deserialize the logs + /// and pass them to an ILogger. + /// See: https://docs.mongodb.com/manual/reference/log-messages/#json-log-output-format + /// Note: "truncated" and "size" are not parsed as we're unsure how to + /// properly parse and use them. + /// + class MongoLogStatement + { + [JsonPropertyName("t")] + public MongoDate MongoDate { get; set; } + + /// + /// Severity of the logs as defined by MongoDB. 
Mapped to LogLevel + /// as defined by Microsoft. + /// D1-D2 mapped to Debug level. D3-D5 mapped Trace level. + /// + [JsonPropertyName("s")] + public string Severity { get; set; } + + public LogLevel Level + { + get + { + if (string.IsNullOrEmpty(Severity)) + return LogLevel.None; + switch (Severity) + { + case "F": return LogLevel.Critical; + case "E": return LogLevel.Error; + case "W": return LogLevel.Warning; + case "I": return LogLevel.Information; + case "D": + case "D1": + case "D2": + return LogLevel.Debug; + case "D3": + case "D4": + case "D5": + default: + return LogLevel.Trace; + } + } + } + + [JsonPropertyName("c")] + public string Component { get; set; } + + [JsonPropertyName("ctx")] + public string Context { get; set; } + + [JsonPropertyName("id")] + public int? Id { get; set; } + + [JsonPropertyName("msg")] + public string Message { get; set; } + + [JsonPropertyName("tags")] + public IEnumerable Tags { get; set; } + + [JsonPropertyName("attr")] + public IDictionary Attributes { get; set; } + } + class MongoDate + { + [JsonPropertyName("$date")] + public DateTime DateTime { get; set; } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongodArguments.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongodArguments.cs new file mode 100644 index 00000000..3e551bb5 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/MongodArguments.cs @@ -0,0 +1,74 @@ +using System; +using System.Collections.Generic; + +namespace Mongo2Go.Helper +{ + public static class MongodArguments + { + private const string ArgumentSeparator = "--"; + private const string Space = " "; + + /// + /// Returns the if it is verified that it does not contain any mongod argument already defined by Mongo2Go. + /// + /// mongod arguments defined by Mongo2Go + /// Additional mongod arguments + /// contains at least one mongod argument already defined by Mongo2Go + /// A string with the additional mongod arguments + public static string GetValidAdditionalArguments(string existingMongodArguments, string additionalMongodArguments) + { + if (string.IsNullOrWhiteSpace(additionalMongodArguments)) + { + return string.Empty; + } + + var existingMongodArgumentArray = existingMongodArguments.Trim().Split(new[] { ArgumentSeparator }, StringSplitOptions.RemoveEmptyEntries); + + var existingMongodArgumentOptions = new List(); + for (var i = 0; i < existingMongodArgumentArray.Length; i++) + { + var argumentOptionSplit = existingMongodArgumentArray[i].Split(' '); + + if (argumentOptionSplit.Length == 0 + || string.IsNullOrWhiteSpace(argumentOptionSplit[0].Trim())) + { + continue; + } + + existingMongodArgumentOptions.Add(argumentOptionSplit[0].Trim()); + } + + var additionalMongodArgumentArray = additionalMongodArguments.Trim().Split(new[] { ArgumentSeparator }, StringSplitOptions.RemoveEmptyEntries); + + var validAdditionalMongodArguments = new List(); + var duplicateMongodArguments = new List(); + for (var i = 0; i < additionalMongodArgumentArray.Length; i++) + { + var additionalArgument = additionalMongodArgumentArray[i].Trim(); + var argumentOptionSplit = additionalArgument.Split(' '); + + if (argumentOptionSplit.Length == 0 + || string.IsNullOrWhiteSpace(argumentOptionSplit[0].Trim())) + { + continue; + } + + if (existingMongodArgumentOptions.Contains(argumentOptionSplit[0].Trim())) + { + duplicateMongodArguments.Add(argumentOptionSplit[0].Trim()); + } + + validAdditionalMongodArguments.Add(ArgumentSeparator + additionalArgument); + } + + if (duplicateMongodArguments.Count != 0) + { + throw new ArgumentException($"mongod 
arguments defined by Mongo2Go ({string.Join(", ", existingMongodArgumentOptions)}) cannot be overriden. Please remove the following additional argument(s): {string.Join(", ", duplicateMongodArguments)}."); + } + + return validAdditionalMongodArguments.Count == 0 + ? string.Empty + : Space + string.Join(" ", validAdditionalMongodArguments); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NetStandard21Compatibility.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NetStandard21Compatibility.cs new file mode 100644 index 00000000..835f3b71 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NetStandard21Compatibility.cs @@ -0,0 +1,24 @@ +#if NETSTANDARD2_0 +using System; + +namespace Mongo2Go.Helper +{ + public static class NetStandard21Compatibility + { + /// + /// Returns a value indicating whether a specified string occurs within this , using the specified comparison rules. + /// + /// The string to operate on. + /// The string to seek. + /// One of the enumeration values that specifies the rules to use in the comparison. + /// if the parameter occurs within this string, or if is the empty string (""); otherwise, . + /// is + public static bool Contains(this string @string, string value, StringComparison comparisonType) + { + if (@string == null) throw new ArgumentNullException(nameof(@string)); + + return @string.IndexOf(value, comparisonType) >= 0; + } + } +} +#endif diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NoFreePortFoundException.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NoFreePortFoundException.cs new file mode 100644 index 00000000..a317e97a --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/NoFreePortFoundException.cs @@ -0,0 +1,11 @@ +using System; + +namespace Mongo2Go.Helper +{ + public class NoFreePortFoundException : Exception + { + public NoFreePortFoundException() { } + public NoFreePortFoundException(string message) : base(message) { } + public NoFreePortFoundException(string message, Exception inner) : base(message, inner) { } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortPool.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortPool.cs new file mode 100644 index 00000000..24c6a336 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortPool.cs @@ -0,0 +1,37 @@ +using System; + +namespace Mongo2Go.Helper +{ + /// + /// Intention: port numbers won't be assigned twice to avoid connection problems with integration tests + /// + public sealed class PortPool : IPortPool + { + private static readonly PortPool Instance = new PortPool(); + + // Explicit static constructor to tell C# compiler + // not to mark type as beforefieldinit + static PortPool() + { + } + + // Singleton + private PortPool() + { + } + + public static PortPool GetInstance + { + get { return Instance; } + } + + /// + /// Returns and reserves a new port + /// + public int GetNextOpenPort() + { + IPortWatcher portWatcher = PortWatcherFactory.CreatePortWatcher(); + return portWatcher.FindOpenPort(); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcher.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcher.cs new file mode 100644 index 00000000..14dc82bf --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcher.cs @@ -0,0 +1,38 @@ +using System.Linq; +using System.Net; +using System.Net.NetworkInformation; +using System.Net.Sockets; + +namespace Mongo2Go.Helper +{ + public class PortWatcher : IPortWatcher + { + public int FindOpenPort() + { + // Locate a free port on the local machine by binding a socket to + // an IPEndPoint using IPAddress.Any 
and port 0. The socket will + // select a free port. + int listeningPort = 0; + Socket portSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); + try + { + IPEndPoint socketEndPoint = new IPEndPoint(IPAddress.Any, 0); + portSocket.Bind(socketEndPoint); + socketEndPoint = (IPEndPoint)portSocket.LocalEndPoint; + listeningPort = socketEndPoint.Port; + } + finally + { + portSocket.Close(); + } + + return listeningPort; + } + + public bool IsPortAvailable(int portNumber) + { + IPEndPoint[] tcpConnInfoArray = IPGlobalProperties.GetIPGlobalProperties().GetActiveTcpListeners(); + return tcpConnInfoArray.All(endpoint => endpoint.Port != portNumber); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcherFactory.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcherFactory.cs new file mode 100644 index 00000000..b52c90f8 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/PortWatcherFactory.cs @@ -0,0 +1,14 @@ +using System.Runtime.InteropServices; + +namespace Mongo2Go.Helper +{ + public class PortWatcherFactory + { + public static IPortWatcher CreatePortWatcher() + { + return RuntimeInformation.IsOSPlatform(OSPlatform.Linux) + ? (IPortWatcher) new UnixPortWatcher() + : new PortWatcher(); + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessControl.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessControl.cs new file mode 100644 index 00000000..d3d8b723 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessControl.cs @@ -0,0 +1,163 @@ +using Microsoft.Extensions.Logging; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text.Json; +using System.Threading; + +namespace Mongo2Go.Helper +{ + public static class ProcessControl + { + public static WrappedProcess ProcessFactory(string fileName, string arguments) + { + ProcessStartInfo startInfo = new ProcessStartInfo + { + FileName = fileName, + Arguments = arguments, + CreateNoWindow = true, + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true + }; + + WrappedProcess process = new WrappedProcess { StartInfo = startInfo }; + return process; + } + + public static ProcessOutput StartAndWaitForExit(Process process) + { + List errorOutput = new List(); + List standardOutput = new List(); + + process.ErrorDataReceived += (sender, args) => errorOutput.Add(args.Data); + process.OutputDataReceived += (sender, args) => standardOutput.Add(args.Data); + + process.Start(); + + process.BeginErrorReadLine(); + process.BeginOutputReadLine(); + + process.WaitForExit(); + + process.CancelErrorRead(); + process.CancelOutputRead(); + + return new ProcessOutput(errorOutput, standardOutput); + } + + /// + /// Reads from Output stream to determine if process is ready + /// + public static ProcessOutput StartAndWaitForReady(Process process, int timeoutInSeconds, string processReadyIdentifier, ILogger logger = null) + { + if (timeoutInSeconds < 1 || + timeoutInSeconds > 10) + { + throw new ArgumentOutOfRangeException("timeoutInSeconds", "The amount in seconds should have a value between 1 and 10."); + } + + // Determine when the process is ready, and store the error and standard outputs + // to eventually return them. 
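+ // Readiness is detected by scanning each output line for the processReadyIdentifier (for mongod: "waiting for connections"). + // The loop below polls in 100 ms steps for up to timeoutInSeconds; if the marker never appears, we fall through and assume the process is usable anyway.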
+ List<string> errorOutput = new List<string>(); + List<string> standardOutput = new List<string>(); + bool processReady = false; + + void OnProcessOnErrorDataReceived(object sender, DataReceivedEventArgs args) => errorOutput.Add(args.Data); + void OnProcessOnOutputDataReceived(object sender, DataReceivedEventArgs args) + { + standardOutput.Add(args.Data); + + if (!string.IsNullOrEmpty(args.Data) && args.Data.IndexOf(processReadyIdentifier, StringComparison.OrdinalIgnoreCase) >= 0) + { + processReady = true; + } + } + + process.ErrorDataReceived += OnProcessOnErrorDataReceived; + process.OutputDataReceived += OnProcessOnOutputDataReceived; + + if (logger == null) + WireLogsToConsoleAndDebugOutput(process); + else + WireLogsToLogger(process, logger); + + process.Start(); + + process.BeginErrorReadLine(); + process.BeginOutputReadLine(); + + int lastResortCounter = 0; + int timeOut = timeoutInSeconds * 10; + while (!processReady) + { + Thread.Sleep(100); + if (++lastResortCounter > timeOut) + { + // we waited X seconds. + // if for some reason the detection did not work, e.g. the identifier changed, + // let's assume everything is still ok + break; + } + } + + // unsubscribe from the output events so the lists stop growing (prevents unbounded memory use). + process.ErrorDataReceived -= OnProcessOnErrorDataReceived; + process.OutputDataReceived -= OnProcessOnOutputDataReceived; + + return new ProcessOutput(errorOutput, standardOutput); + } + + /// + /// Send the mongod process logs to .NET's console and debug outputs. + /// + /// + private static void WireLogsToConsoleAndDebugOutput(Process process) + { + void DebugOutputHandler(object sender, DataReceivedEventArgs args) => Debug.WriteLine(args.Data); + void ConsoleOutputHandler(object sender, DataReceivedEventArgs args) => Console.WriteLine(args.Data); + + // Writing to debug trace & console to enable test runners to capture the output + process.ErrorDataReceived += DebugOutputHandler; + process.ErrorDataReceived += ConsoleOutputHandler; + process.OutputDataReceived += DebugOutputHandler; + process.OutputDataReceived += ConsoleOutputHandler; + } + + /// + /// Parses and redirects mongod logs to ILogger. + /// + /// + /// + private static void WireLogsToLogger(Process process, ILogger logger) + { + // Parse the structured log and wire it to logger + void OnReceivingLogFromMongod(object sender, DataReceivedEventArgs args) + { + if (string.IsNullOrWhiteSpace(args.Data)) + return; + try + { + var log = JsonSerializer.Deserialize<MongoLogStatement>(args.Data); + logger.Log(log.Level, + "{message} - {attributes} - {date} - {component} - {context} - {id} - {tags}", + log.Message, log.Attributes, log.MongoDate.DateTime, log.Component, log.Context, log.Id, log.Tags); + } + catch (Exception ex) when (ex is JsonException || ex is NotSupportedException) + { + logger.LogWarning(ex, + "Failed parsing the mongod logs {log}. It could be that the format has changed. " + + "See: https://docs.mongodb.com/manual/reference/log-messages/#std-label-log-message-json-output-format", + args.Data); + } + catch (Exception) + { + // Nothing else to do. Swallow the exception and do not wire the logs.
+ } + }; + process.ErrorDataReceived += OnReceivingLogFromMongod; + process.OutputDataReceived += OnReceivingLogFromMongod; + } + + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessOutput.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessOutput.cs new file mode 100644 index 00000000..b2d9d8a6 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessOutput.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; + +namespace Mongo2Go.Helper +{ + public class ProcessOutput + { + public ProcessOutput(IEnumerable errorOutput, IEnumerable standardOutput) + { + StandardOutput = standardOutput; + ErrorOutput = errorOutput; + } + + public IEnumerable StandardOutput { get; private set; } + public IEnumerable ErrorOutput { get; private set; } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessWatcher.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessWatcher.cs new file mode 100644 index 00000000..946f8dcc --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/ProcessWatcher.cs @@ -0,0 +1,13 @@ +using System.Diagnostics; +using System.Linq; + +namespace Mongo2Go.Helper +{ + public class ProcessWatcher : IProcessWatcher + { + public bool IsProcessRunning(string processName) + { + return Process.GetProcessesByName(processName).Any(); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/StringFormatExtension.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/StringFormatExtension.cs new file mode 100644 index 00000000..87240f3d --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/StringFormatExtension.cs @@ -0,0 +1,27 @@ +using System; +using System.Globalization; + +namespace Mongo2Go.Helper +{ + /// + /// saves about 40 keystrokes + /// + public static class StringFormatExtension + { + /// + /// Populates the template using the provided arguments and the invariant culture + /// + public static string Formatted(this string template, params object[] args) + { + return template.Formatted(CultureInfo.InvariantCulture, args); + } + + /// + /// Populates the template using the provided arguments using the provided formatter + /// + public static string Formatted(this string template, IFormatProvider formatter, params object[] args) + { + return string.IsNullOrEmpty(template) ? string.Empty : string.Format(formatter, template, args); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/UnixPortWatcher.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/UnixPortWatcher.cs new file mode 100644 index 00000000..83b8ba4f --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/UnixPortWatcher.cs @@ -0,0 +1,46 @@ +using System.Net; +using System.Net.Sockets; + +namespace Mongo2Go.Helper +{ + + public class UnixPortWatcher : IPortWatcher + { + public int FindOpenPort () + { + // Locate a free port on the local machine by binding a socket to + // an IPEndPoint using IPAddress.Any and port 0. The socket will + // select a free port. 
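+ // The kernel hands out an ephemeral port for the bind; it is released again when the socket is closed in the finally block, leaving it free for mongod to claim.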
+ int listeningPort = 0; + Socket portSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); + try + { + IPEndPoint socketEndPoint = new IPEndPoint(IPAddress.Any, 0); + portSocket.Bind(socketEndPoint); + socketEndPoint = (IPEndPoint)portSocket.LocalEndPoint; + listeningPort = socketEndPoint.Port; + } + finally + { + portSocket.Close(); + } + + return listeningPort; + } + + public bool IsPortAvailable (int portNumber) + { + TcpListener tcpListener = new TcpListener (IPAddress.Loopback, portNumber); + try { + tcpListener.Start (); + return true; + } + catch (SocketException) { + return false; + } finally + { + tcpListener.Stop (); + } + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Helper/WrappedProcess.cs b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/WrappedProcess.cs new file mode 100644 index 00000000..cd90543f --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Helper/WrappedProcess.cs @@ -0,0 +1,9 @@ +using System.Diagnostics; + +namespace Mongo2Go.Helper +{ + public class WrappedProcess : Process + { + public bool DoNotKill { get; set; } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj b/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj new file mode 100644 index 00000000..e4a19148 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj @@ -0,0 +1,92 @@ + + + + net472;netstandard2.1 + Johannes Hoppe and many contributors + Mongo2Go is a managed wrapper around MongoDB binaries. It targets .NET Framework 4.7.2 and .NET Standard 2.1. +This Nuget package contains the executables of mongod, mongoimport and mongoexport v4.4.4 for Windows, Linux and macOS. + + +Mongo2Go has two use cases: + +1. Providing multiple, temporary and isolated MongoDB databases for integration tests +2. Providing a quick to set up MongoDB database for a local developer environment + HAUS HOPPE - ITS + Copyright © 2012-2025 Johannes Hoppe and many ❤️ contributors + true + icon.png + MIT + https://github.com/Mongo2Go/Mongo2Go + https://github.com/Mongo2Go/Mongo2Go/releases + MongoDB Mongo unit test integration runner + https://github.com/Mongo2Go/Mongo2Go + git + Mongo2Go + Mongo2Go is a managed wrapper around MongoDB binaries. 
+ + + + 4 + 1701;1702;1591;1573 + + + + 4 + 1701;1702;1591;1573 + + + + 1701;1702;1591;1573 + + + + 1701;1702;1591;1573 + + + + true + true + true + + + + embedded + true + true + + + + v + + + + + + true + icon.png + + + true + tools + + + + + + + + + + + + + + + + + + + + + + diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbDefaults.cs b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbDefaults.cs new file mode 100644 index 00000000..71015bbd --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbDefaults.cs @@ -0,0 +1,22 @@ +namespace Mongo2Go +{ + public static class MongoDbDefaults + { + public const string ProcessName = "mongod"; + + public const string MongodExecutable = "mongod"; + + public const string MongoExportExecutable = "mongoexport"; + + public const string MongoImportExecutable = "mongoimport"; + + public const int DefaultPort = 27017; + + // but we don't want to get in trouble with productive systems + public const int TestStartPort = 27018; + + public const string Lockfile = "mongod.lock"; + + public const int SingleNodeReplicaSetWaitTimeout = 10; + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbPortAlreadyTakenException.cs b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbPortAlreadyTakenException.cs new file mode 100644 index 00000000..b84e122f --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbPortAlreadyTakenException.cs @@ -0,0 +1,11 @@ +using System; + +namespace Mongo2Go +{ + public class MongoDbPortAlreadyTakenException : Exception + { + public MongoDbPortAlreadyTakenException() { } + public MongoDbPortAlreadyTakenException(string message) : base(message) { } + public MongoDbPortAlreadyTakenException(string message, Exception inner) : base(message, inner) { } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.IDisposable.cs b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.IDisposable.cs new file mode 100644 index 00000000..639ab0ae --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.IDisposable.cs @@ -0,0 +1,54 @@ +using System; + +namespace Mongo2Go +{ + // IDisposable and friends + public partial class MongoDbRunner + { + ~MongoDbRunner() + { + Dispose(false); + } + + public bool Disposed { get; private set; } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (Disposed) + { + return; + } + + if (State != State.Running) + { + return; + } + + if (disposing) + { + // we have no "managed resources" - but we leave this switch to avoid an FxCop CA1801 warnig + } + + if (_mongoDbProcess != null) + { + _mongoDbProcess.Dispose(); + } + + // will be null if we are working in debugging mode (single instance) + if (_dataDirectoryWithPort != null) + { + // finally clean up the data directory we created previously + _fileSystem.DeleteFolder(_dataDirectoryWithPort); + } + + Disposed = true; + State = State.Stopped; + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.cs b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.cs new file mode 100644 index 00000000..8834d182 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/MongoDbRunner.cs @@ -0,0 +1,221 @@ +using Microsoft.Extensions.Logging; +using Mongo2Go.Helper; +using System; +using System.IO; +using System.Runtime.InteropServices; + +namespace Mongo2Go +{ + /// + /// Mongo2Go main entry point + /// + public partial class MongoDbRunner : IDisposable + { + private readonly IMongoDbProcess _mongoDbProcess; + private readonly IFileSystem _fileSystem; + private readonly string _dataDirectoryWithPort; + private readonly 
int _port; + private readonly IMongoBinaryLocator _mongoBin; + + /// + /// State of the current MongoDB instance + /// + public State State { get; private set; } + + /// + /// Connections string that should be used to establish a connection the MongoDB instance + /// + public string ConnectionString { get; private set; } + + /// + /// Starts Multiple MongoDB instances with each call + /// On dispose: kills them and deletes their data directory + /// + /// (Optional) If null, mongod logs are wired to .NET's Console and Debug output (provided you haven't added the --quiet additional argument). + /// If not null, mongod logs are parsed and wired to the provided logger. + /// Should be used for integration tests + public static MongoDbRunner Start(string dataDirectory = null, string binariesSearchPatternOverride = null, string binariesSearchDirectory = null, bool singleNodeReplSet = false, string additionalMongodArguments = null, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null) + { + if (dataDirectory == null) { + dataDirectory = GetTemporaryDataDirectory(); + } + + // this is required to support multiple instances to run in parallel + dataDirectory += Guid.NewGuid().ToString().Replace("-", "").Substring(0, 20); + + return new MongoDbRunner( + PortPool.GetInstance, + new FileSystem(), + new MongoDbProcessStarter(), + new MongoBinaryLocator(binariesSearchPatternOverride, binariesSearchDirectory), + dataDirectory, + singleNodeReplSet, + additionalMongodArguments, + singleNodeReplSetWaitTimeout, + logger); + } + + /// + /// !!! + /// This method is only used for an internal unit test. Use MongoDbRunner.Start() instead. + /// But if you find it to be useful (eg. to change every aspect on your own) feel free to implement the interfaces on your own! + /// + /// see https://github.com/Mongo2Go/Mongo2Go/issues/41 + [Obsolete("Use MongoDbRunner.Start() if possible.")] + public static MongoDbRunner StartUnitTest( + IPortPool portPool, + IFileSystem fileSystem, + IMongoDbProcessStarter processStarter, + IMongoBinaryLocator mongoBin, + string dataDirectory = null, + string additionalMongodArguments = null) + { + return new MongoDbRunner( + portPool, + fileSystem, + processStarter, + mongoBin, + dataDirectory, + additionalMongodArguments: additionalMongodArguments); + } + + /// + /// Only starts one single MongoDB instance (even on multiple calls), does not kill it, does not delete data + /// + /// + /// Should be used for local debugging only + /// WARNING: one single instance on one single machine is not a suitable setup for productive environments!!! + /// + public static MongoDbRunner StartForDebugging(string dataDirectory = null, string binariesSearchPatternOverride = null, string binariesSearchDirectory = null, bool singleNodeReplSet = false, int port = MongoDbDefaults.DefaultPort, string additionalMongodArguments = null, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout) + { + return new MongoDbRunner( + new ProcessWatcher(), + new PortWatcher(), + new FileSystem(), + new MongoDbProcessStarter(), + new MongoBinaryLocator(binariesSearchPatternOverride, binariesSearchDirectory), port, dataDirectory, singleNodeReplSet, additionalMongodArguments, singleNodeReplSetWaitTimeout); + } + + /// + /// !!! + /// This method is only used for an internal unit test. Use MongoDbRunner.StartForDebugging() instead. + /// But if you find it to be useful (eg. 
to change every aspect on your own) feel free to implement the interfaces on your own! + /// + /// see https://github.com/Mongo2Go/Mongo2Go/issues/41 + [Obsolete("Use MongoDbRunner.StartForDebugging() if possible.")] + public static MongoDbRunner StartForDebuggingUnitTest( + IProcessWatcher processWatcher, + IPortWatcher portWatcher, + IFileSystem fileSystem, + IMongoDbProcessStarter processStarter, + IMongoBinaryLocator mongoBin, + string dataDirectory = null, + string additionalMongodArguments = null) + { + return new MongoDbRunner( + processWatcher, + portWatcher, + fileSystem, + processStarter, + mongoBin, + MongoDbDefaults.DefaultPort, + dataDirectory, + additionalMongodArguments: additionalMongodArguments); + } + + /// + /// Executes Mongoimport on the associated MongoDB Instace + /// + public void Import(string database, string collection, string inputFile, bool drop, string additionalMongodArguments = null) + { + MongoImportExport.Import(_mongoBin.Directory, _port, database, collection, inputFile, drop, additionalMongodArguments); + } + + /// + /// Executes Mongoexport on the associated MongoDB Instace + /// + public void Export(string database, string collection, string outputFile, string additionalMongodArguments = null) + { + MongoImportExport.Export(_mongoBin.Directory, _port, database, collection, outputFile, additionalMongodArguments); + } + + /// + /// usage: local debugging + /// + private MongoDbRunner(IProcessWatcher processWatcher, IPortWatcher portWatcher, IFileSystem fileSystem, IMongoDbProcessStarter processStarter, IMongoBinaryLocator mongoBin, int port, string dataDirectory = null, bool singleNodeReplSet = false, string additionalMongodArguments = null, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout) + { + _fileSystem = fileSystem; + _mongoBin = mongoBin; + _port = port; + + MakeMongoBinarysExecutable(); + + ConnectionString = singleNodeReplSet + ? "mongodb://127.0.0.1:{0}/?directConnection=true&replicaSet=singleNodeReplSet&readPreference=primary".Formatted(_port) + : "mongodb://127.0.0.1:{0}/".Formatted(_port); + + if (processWatcher.IsProcessRunning(MongoDbDefaults.ProcessName) && !portWatcher.IsPortAvailable(_port)) + { + State = State.AlreadyRunning; + return; + } + + if (!portWatcher.IsPortAvailable(_port)) + { + throw new MongoDbPortAlreadyTakenException("MongoDB can't be started. 
The TCP port {0} is already taken.".Formatted(_port)); + } + + if (dataDirectory == null) { + dataDirectory = GetTemporaryDataDirectory(); + } + + _fileSystem.CreateFolder(dataDirectory); + _fileSystem.DeleteFile("{0}{1}{2}".Formatted(dataDirectory, Path.DirectorySeparatorChar.ToString(), MongoDbDefaults.Lockfile)); + _mongoDbProcess = processStarter.Start(_mongoBin.Directory, dataDirectory, _port, true, singleNodeReplSet, additionalMongodArguments, singleNodeReplSetWaitTimeout); + + State = State.Running; + } + + /// + /// usage: integration tests + /// + private MongoDbRunner(IPortPool portPool, IFileSystem fileSystem, IMongoDbProcessStarter processStarter, IMongoBinaryLocator mongoBin, string dataDirectory = null, bool singleNodeReplSet = false, string additionalMongodArguments = null, ushort singleNodeReplSetWaitTimeout = MongoDbDefaults.SingleNodeReplicaSetWaitTimeout, ILogger logger = null) + { + _fileSystem = fileSystem; + _port = portPool.GetNextOpenPort(); + _mongoBin = mongoBin; + + if (dataDirectory == null) { + dataDirectory = GetTemporaryDataDirectory(); + } + + MakeMongoBinarysExecutable(); + + ConnectionString = singleNodeReplSet + ? "mongodb://127.0.0.1:{0}/?directConnection=true&replicaSet=singleNodeReplSet&readPreference=primary".Formatted(_port) + : "mongodb://127.0.0.1:{0}/".Formatted(_port); + + _dataDirectoryWithPort = "{0}_{1}".Formatted(dataDirectory, _port); + _fileSystem.CreateFolder(_dataDirectoryWithPort); + _fileSystem.DeleteFile("{0}{1}{2}".Formatted(_dataDirectoryWithPort, Path.DirectorySeparatorChar.ToString(), MongoDbDefaults.Lockfile)); + + _mongoDbProcess = processStarter.Start(_mongoBin.Directory, _dataDirectoryWithPort, _port, singleNodeReplSet, additionalMongodArguments, singleNodeReplSetWaitTimeout, logger); + + State = State.Running; + } + + private void MakeMongoBinarysExecutable() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) || + RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + _fileSystem.MakeFileExecutable(Path.Combine(_mongoBin.Directory, MongoDbDefaults.MongodExecutable)); + _fileSystem.MakeFileExecutable(Path.Combine(_mongoBin.Directory, MongoDbDefaults.MongoExportExecutable)); + _fileSystem.MakeFileExecutable(Path.Combine(_mongoBin.Directory, MongoDbDefaults.MongoImportExecutable)); + } + } + + + private static string GetTemporaryDataDirectory() => Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/MonogDbBinariesNotFoundException.cs b/Mongo2Go-4.1.0/src/Mongo2Go/MonogDbBinariesNotFoundException.cs new file mode 100644 index 00000000..22ecbc80 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/MonogDbBinariesNotFoundException.cs @@ -0,0 +1,11 @@ +using System; + +namespace Mongo2Go +{ + public class MonogDbBinariesNotFoundException : Exception + { + public MonogDbBinariesNotFoundException() { } + public MonogDbBinariesNotFoundException(string message) : base(message) { } + public MonogDbBinariesNotFoundException(string message, Exception inner) : base(message, inner) { } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/State.cs b/Mongo2Go-4.1.0/src/Mongo2Go/State.cs new file mode 100644 index 00000000..254750a8 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/State.cs @@ -0,0 +1,9 @@ +namespace Mongo2Go +{ + public enum State + { + Stopped, + Running, + AlreadyRunning + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/packages.lock.json b/Mongo2Go-4.1.0/src/Mongo2Go/packages.lock.json new file mode 100644 index 
00000000..668ae79d --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2Go/packages.lock.json @@ -0,0 +1,478 @@ +{ + "version": 1, + "dependencies": { + ".NETFramework,Version=v4.7.2": { + "Microsoft.Extensions.Logging.Abstractions": { + "type": "Direct", + "requested": "[6.0.0, )", + "resolved": "6.0.0", + "contentHash": "/HggWBbTwy8TgebGSX5DBZ24ndhzi93sHUBDvP1IxbZD7FDokYzdAr6+vbWGjw2XAfR2EJ1sfKUotpjHnFWPxA==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4" + } + }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net472": "1.0.3" + } + }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[1.0.0, )", + "resolved": "1.0.0", + "contentHash": "aZyGyGg2nFSxix+xMkPmlmZSsnGQ3w+mIG23LTxJZHN+GPwTQ5FpPgDo7RMOq+Kcf5D4hFWfXkGhoGstawX13Q==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "1.0.0", + "Microsoft.SourceLink.Common": "1.0.0" + } + }, + "MinVer": { + "type": "Direct", + "requested": "[2.5.0, )", + "resolved": "2.5.0", + "contentHash": "+vgY+COxnu93nZEVYScloRuboNRIYkElokxTdtKLt6isr/f6GllPt0oLfrHj7fzxgj7SC5xMZg5c2qvd6qyHDQ==" + }, + "MongoDB.Driver": { + "type": "Direct", + "requested": "[3.1.0, )", + "resolved": "3.1.0", + "contentHash": "+O7lKaIl7VUHptE0hqTd7UY1G5KDp/o8S4upG7YL4uChMNKD/U6tz9i17nMGHaD/L2AiPLgaJcaDe2XACsegGA==", + "dependencies": { + "DnsClient": "1.6.1", + "Microsoft.Extensions.Logging.Abstractions": "2.0.0", + "MongoDB.Bson": "3.1.0", + "SharpCompress": "0.30.1", + "Snappier": "1.0.0", + "System.Buffers": "4.5.1", + "System.Net.Http": "4.3.4", + "System.Runtime.InteropServices.RuntimeInformation": "4.3.0", + "ZstdSharp.Port": "0.7.3" + } + }, + "System.Text.Json": { + "type": "Direct", + "requested": "[6.0.10, )", + "resolved": "6.0.10", + "contentHash": "NSB0kDipxn2ychp88NXWfFRFlmi1bst/xynOutbnpEfRCT9JZkZ7KOmF/I/hNKo2dILiMGnqblm+j1sggdLB9g==", + "dependencies": { + "Microsoft.Bcl.AsyncInterfaces": "6.0.0", + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Numerics.Vectors": "4.5.0", + "System.Runtime.CompilerServices.Unsafe": "6.0.0", + "System.Text.Encodings.Web": "6.0.0", + "System.Threading.Tasks.Extensions": "4.5.4", + "System.ValueTuple": "4.5.0" + } + }, + "DnsClient": { + "type": "Transitive", + "resolved": "1.6.1", + "contentHash": "4H/f2uYJOZ+YObZjpY9ABrKZI+JNw3uizp6oMzTXwDw6F+2qIPhpRl/1t68O/6e98+vqNiYGu+lswmwdYUy3gg==", + "dependencies": { + "Microsoft.Win32.Registry": "5.0.0", + "System.Buffers": "4.5.1" + } + }, + "Microsoft.Bcl.AsyncInterfaces": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "UcSjPsst+DfAdJGVDsu346FX0ci0ah+lw3WRtn18NUwEqRt70HaOQ7lI72vy3+1LxtqI3T5GWwV39rQSrCzAeg==", + "dependencies": { + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": "z2fpmmt+1Jfl+ZnBki9nSP08S1/tbEOxFdsK1rSR+LBehIJz1Xv9/6qOOoGNqlwnAGGVGis1Oj6S8Kt9COEYlQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies.net472": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "0E7evZXHXaDYYiLRfpyXvCh+yzM2rNTyuZDI+ZO7UUqSc6GfjePiXTdqJGtgIKUwdI81tzQKmaWprnUiPj9hAw==" + }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": 
"G8DuQY8/DK5NN+3jm5wcMcd9QYD90UV7MiLmdljSJixi3U/vNaeBKmmXUqI4DJCOeWizIUEh4ALhSt58mR+5eg==" + }, + "Microsoft.Win32.Registry": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "dDoKi0PnDz31yAyETfRntsLArTlVAVzUzCIvvEDsDsucrl33Dl8pIJG06ePTJTI3tGpeyHS9Cq7Foc/s4EeKcg==", + "dependencies": { + "System.Security.AccessControl": "5.0.0", + "System.Security.Principal.Windows": "5.0.0" + } + }, + "MongoDB.Bson": { + "type": "Transitive", + "resolved": "3.1.0", + "contentHash": "3dhaZhz18B5vUoEP13o2j8A6zQfkHdZhwBvLZEjDJum4BTLLv1/Z8bt25UQEtpqvYwLgde4R6ekWZ7XAYUMxuw==", + "dependencies": { + "System.Memory": "4.5.5", + "System.Runtime.CompilerServices.Unsafe": "5.0.0" + } + }, + "SharpCompress": { + "type": "Transitive", + "resolved": "0.30.1", + "contentHash": "XqD4TpfyYGa7QTPzaGlMVbcecKnXy4YmYLDWrU+JIj7IuRNl7DH2END+Ll7ekWIY8o3dAMWLFDE1xdhfIWD1nw==", + "dependencies": { + "System.Memory": "4.5.4", + "System.Text.Encoding.CodePages": "5.0.0" + } + }, + "Snappier": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": "rFtK2KEI9hIe8gtx3a0YDXdHOpedIf9wYCEYtBEmtlyiWVX3XlCNV03JrmmAi/Cdfn7dxK+k0sjjcLv4fpHnqA==", + "dependencies": { + "System.Memory": "4.5.4", + "System.Runtime.CompilerServices.Unsafe": "4.7.1", + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "System.Buffers": { + "type": "Transitive", + "resolved": "4.5.1", + "contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg==" + }, + "System.IO": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "3qjaHvxQPDpSOYICjUoTsmoq5u6QJAFRUITgeT/4gqkF1bajbSmb1kwSxEA8AHlofqgcKJcM8udgieRNhaJ5Cg==" + }, + "System.Memory": { + "type": "Transitive", + "resolved": "4.5.5", + "contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Numerics.Vectors": "4.5.0", + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "System.Net.Http": { + "type": "Transitive", + "resolved": "4.3.4", + "contentHash": "aOa2d51SEbmM+H+Csw7yJOuNZoHkrP2XnAurye5HWYgGVVU54YZDvsLUYRv6h18X3sPnjNCANmN7ZhIPiqMcjA==", + "dependencies": { + "System.Security.Cryptography.X509Certificates": "4.3.0" + } + }, + "System.Numerics.Vectors": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ==" + }, + "System.Runtime": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "JufQi0vPQ0xGnAczR13AUFglDyVYt4Kqnz1AZaiKZ5+GICq0/1MH/mO/eAJHt/mHW1zjKBJd7kV26SrxddAhiw==" + }, + "System.Runtime.CompilerServices.Unsafe": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg==" + }, + "System.Runtime.InteropServices.RuntimeInformation": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "cbz4YJMqRDR7oLeMRbdYv7mYzc++17lNhScCX0goO2XpGWdvAt60CGN+FHdePUEHCe/Jy9jUlvNAiNdM+7jsOw==" + }, + "System.Security.AccessControl": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "dagJ1mHZO3Ani8GH0PHpPEe/oYO+rVdbQjvjJkBRNQkX4t0r1iaeGn8+/ybkSLEan3/slM0t59SVdHzuHf2jmw==", + "dependencies": { + "System.Security.Principal.Windows": "5.0.0" + } + }, + "System.Security.Cryptography.Algorithms": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "W1kd2Y8mYSCgc3ULTAZ0hOP2dSdG5YauTb1089T0/kRcN2MpSAW1izOFROrJgxSlMn3ArsgHXagigyi+ibhevg==", + "dependencies": { + "System.IO": 
"4.3.0", + "System.Runtime": "4.3.0", + "System.Security.Cryptography.Encoding": "4.3.0", + "System.Security.Cryptography.Primitives": "4.3.0" + } + }, + "System.Security.Cryptography.Encoding": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "1DEWjZZly9ae9C79vFwqaO5kaOlI5q+3/55ohmq/7dpDyDfc8lYe7YVxJUZ5MF/NtbkRjwFRo14yM4OEo9EmDw==" + }, + "System.Security.Cryptography.Primitives": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "7bDIyVFNL/xKeFHjhobUAQqSpJq9YTOpbEs6mR233Et01STBMXNAc/V+BM6dwYGc95gVh/Zf+iVXWzj3mE8DWg==" + }, + "System.Security.Cryptography.X509Certificates": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "t2Tmu6Y2NtJ2um0RtcuhP7ZdNNxXEgUm2JeoA/0NvlMjAhKCnM1NX07TDl3244mVp3QU6LPEhT3HTtH1uF7IYw==", + "dependencies": { + "System.Security.Cryptography.Algorithms": "4.3.0", + "System.Security.Cryptography.Encoding": "4.3.0" + } + }, + "System.Security.Principal.Windows": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "t0MGLukB5WAVU9bO3MGzvlGnyJPgUlcwerXn1kzBRjwLKixT96XV0Uza41W49gVd8zEMFu9vQEFlv0IOrytICA==" + }, + "System.Text.Encoding.CodePages": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "NyscU59xX6Uo91qvhOs2Ccho3AR2TnZPomo1Z0K6YpyztBPM/A5VbkzOO19sy3A3i1TtEnTxA7bCe3Us+r5MWg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "5.0.0" + } + }, + "System.Text.Encodings.Web": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "Vg8eB5Tawm1IFqj4TVK1czJX89rhFxJo9ELqc/Eiq0eXy13RK00eubyU6TJE6y+GQXjyV5gSfiewDUZjQgSE0w==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Runtime.CompilerServices.Unsafe": "6.0.0" + } + }, + "System.Threading.Tasks.Extensions": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "System.ValueTuple": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "okurQJO6NRE/apDIP23ajJ0hpiNmJ+f0BwOlB/cSqTLQlw5upkf+5+96+iG2Jw40G1fCVCyPz/FhIABUjMR+RQ==" + }, + "ZstdSharp.Port": { + "type": "Transitive", + "resolved": "0.7.3", + "contentHash": "U9Ix4l4cl58Kzz1rJzj5hoVTjmbx1qGMwzAcbv1j/d3NzrFaESIurQyg+ow4mivCgkE3S413y+U9k4WdnEIkRA==", + "dependencies": { + "Microsoft.Bcl.AsyncInterfaces": "5.0.0", + "System.Memory": "4.5.5" + } + } + }, + ".NETStandard,Version=v2.1": { + "Microsoft.Extensions.Logging.Abstractions": { + "type": "Direct", + "requested": "[6.0.0, )", + "resolved": "6.0.0", + "contentHash": "/HggWBbTwy8TgebGSX5DBZ24ndhzi93sHUBDvP1IxbZD7FDokYzdAr6+vbWGjw2XAfR2EJ1sfKUotpjHnFWPxA==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4" + } + }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" + } + }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[1.0.0, )", + "resolved": "1.0.0", + "contentHash": "aZyGyGg2nFSxix+xMkPmlmZSsnGQ3w+mIG23LTxJZHN+GPwTQ5FpPgDo7RMOq+Kcf5D4hFWfXkGhoGstawX13Q==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "1.0.0", + "Microsoft.SourceLink.Common": "1.0.0" + } + }, + "MinVer": { + "type": "Direct", + "requested": "[2.5.0, )", + "resolved": "2.5.0", + "contentHash": 
"+vgY+COxnu93nZEVYScloRuboNRIYkElokxTdtKLt6isr/f6GllPt0oLfrHj7fzxgj7SC5xMZg5c2qvd6qyHDQ==" + }, + "MongoDB.Driver": { + "type": "Direct", + "requested": "[3.1.0, )", + "resolved": "3.1.0", + "contentHash": "+O7lKaIl7VUHptE0hqTd7UY1G5KDp/o8S4upG7YL4uChMNKD/U6tz9i17nMGHaD/L2AiPLgaJcaDe2XACsegGA==", + "dependencies": { + "DnsClient": "1.6.1", + "Microsoft.Extensions.Logging.Abstractions": "2.0.0", + "MongoDB.Bson": "3.1.0", + "SharpCompress": "0.30.1", + "Snappier": "1.0.0", + "System.Buffers": "4.5.1", + "ZstdSharp.Port": "0.7.3" + } + }, + "System.Text.Json": { + "type": "Direct", + "requested": "[6.0.10, )", + "resolved": "6.0.10", + "contentHash": "NSB0kDipxn2ychp88NXWfFRFlmi1bst/xynOutbnpEfRCT9JZkZ7KOmF/I/hNKo2dILiMGnqblm+j1sggdLB9g==", + "dependencies": { + "Microsoft.Bcl.AsyncInterfaces": "6.0.0", + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Numerics.Vectors": "4.5.0", + "System.Runtime.CompilerServices.Unsafe": "6.0.0", + "System.Text.Encodings.Web": "6.0.0", + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "DnsClient": { + "type": "Transitive", + "resolved": "1.6.1", + "contentHash": "4H/f2uYJOZ+YObZjpY9ABrKZI+JNw3uizp6oMzTXwDw6F+2qIPhpRl/1t68O/6e98+vqNiYGu+lswmwdYUy3gg==", + "dependencies": { + "Microsoft.Win32.Registry": "5.0.0" + } + }, + "Microsoft.Bcl.AsyncInterfaces": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "UcSjPsst+DfAdJGVDsu346FX0ci0ah+lw3WRtn18NUwEqRt70HaOQ7lI72vy3+1LxtqI3T5GWwV39rQSrCzAeg==" + }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": "z2fpmmt+1Jfl+ZnBki9nSP08S1/tbEOxFdsK1rSR+LBehIJz1Xv9/6qOOoGNqlwnAGGVGis1Oj6S8Kt9COEYlQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies.net461": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" + }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": "G8DuQY8/DK5NN+3jm5wcMcd9QYD90UV7MiLmdljSJixi3U/vNaeBKmmXUqI4DJCOeWizIUEh4ALhSt58mR+5eg==" + }, + "Microsoft.Win32.Registry": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "dDoKi0PnDz31yAyETfRntsLArTlVAVzUzCIvvEDsDsucrl33Dl8pIJG06ePTJTI3tGpeyHS9Cq7Foc/s4EeKcg==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Security.AccessControl": "5.0.0", + "System.Security.Principal.Windows": "5.0.0" + } + }, + "MongoDB.Bson": { + "type": "Transitive", + "resolved": "3.1.0", + "contentHash": "3dhaZhz18B5vUoEP13o2j8A6zQfkHdZhwBvLZEjDJum4BTLLv1/Z8bt25UQEtpqvYwLgde4R6ekWZ7XAYUMxuw==", + "dependencies": { + "System.Memory": "4.5.5", + "System.Runtime.CompilerServices.Unsafe": "5.0.0" + } + }, + "SharpCompress": { + "type": "Transitive", + "resolved": "0.30.1", + "contentHash": "XqD4TpfyYGa7QTPzaGlMVbcecKnXy4YmYLDWrU+JIj7IuRNl7DH2END+Ll7ekWIY8o3dAMWLFDE1xdhfIWD1nw==", + "dependencies": { + "System.Text.Encoding.CodePages": "5.0.0" + } + }, + "Snappier": { + "type": "Transitive", + "resolved": "1.0.0", + "contentHash": "rFtK2KEI9hIe8gtx3a0YDXdHOpedIf9wYCEYtBEmtlyiWVX3XlCNV03JrmmAi/Cdfn7dxK+k0sjjcLv4fpHnqA==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "4.7.1" + } + }, + "System.Buffers": { + "type": "Transitive", + "resolved": "4.5.1", + "contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg==" + }, + "System.Memory": { + "type": "Transitive", + "resolved": "4.5.5", + "contentHash": 
"XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Numerics.Vectors": "4.4.0", + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "System.Numerics.Vectors": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ==" + }, + "System.Runtime.CompilerServices.Unsafe": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg==" + }, + "System.Security.AccessControl": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "dagJ1mHZO3Ani8GH0PHpPEe/oYO+rVdbQjvjJkBRNQkX4t0r1iaeGn8+/ybkSLEan3/slM0t59SVdHzuHf2jmw==", + "dependencies": { + "System.Security.Principal.Windows": "5.0.0" + } + }, + "System.Security.Principal.Windows": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "t0MGLukB5WAVU9bO3MGzvlGnyJPgUlcwerXn1kzBRjwLKixT96XV0Uza41W49gVd8zEMFu9vQEFlv0IOrytICA==" + }, + "System.Text.Encoding.CodePages": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "NyscU59xX6Uo91qvhOs2Ccho3AR2TnZPomo1Z0K6YpyztBPM/A5VbkzOO19sy3A3i1TtEnTxA7bCe3Us+r5MWg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "5.0.0" + } + }, + "System.Text.Encodings.Web": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "Vg8eB5Tawm1IFqj4TVK1czJX89rhFxJo9ELqc/Eiq0eXy13RK00eubyU6TJE6y+GQXjyV5gSfiewDUZjQgSE0w==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Runtime.CompilerServices.Unsafe": "6.0.0" + } + }, + "System.Threading.Tasks.Extensions": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "ZstdSharp.Port": { + "type": "Transitive", + "resolved": "0.7.3", + "contentHash": "U9Ix4l4cl58Kzz1rJzj5hoVTjmbx1qGMwzAcbv1j/d3NzrFaESIurQyg+ow4mivCgkE3S413y+U9k4WdnEIkRA==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "6.0.0" + } + } + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/FolderSearchTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/FolderSearchTests.cs new file mode 100644 index 00000000..c2b7168a --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/FolderSearchTests.cs @@ -0,0 +1,146 @@ +using System; +using System.IO; +using System.Reflection; +using FluentAssertions; +using Machine.Specifications; +using Mongo2Go.Helper; + +// ReSharper disable InconsistentNaming +// ReSharper disable UnusedMember.Local +namespace Mongo2GoTests +{ + [Subject("FolderSearch")] + public class when_requesting_current_executing_directory + { + public static string directory; + + Because of = () => directory = FolderSearch.CurrentExecutingDirectory(); + It should_contain_correct_path = () => directory.Should().Contain(Path.Combine("Mongo2GoTests", "bin")); + } + + [Subject("FolderSearch")] + public class when_searching_for_folder : FolderSearchSpec + { + static string startDirectory = Path.Combine(BaseDir, "test1", "test2"); + static string searchPattern = Path.Combine("packages", "Mongo2Go*", "tools", "mongodb-win32-i386*", "bin"); + static string directory; + + Because of = () => directory = startDirectory.FindFolder(searchPattern); + It should_find_the_path_with_the_highest_version_number = () 
=> directory.Should().Be(MongoBinaries); + } + + + [Subject("FolderSearch")] + public class when_searching_for_not_existing_folder : FolderSearchSpec + { + static string startDirectory = Path.Combine(BaseDir, "test1", "test2"); + static string searchPattern = Path.Combine("packages", "Mongo2Go*", "XXX", "mongodb-win32-i386*", "bin"); + static string directory; + + Because of = () => directory = startDirectory.FindFolder(searchPattern); + It should_return_null = () => directory.Should().BeNull(); + } + + [Subject("FolderSearch")] + public class when_searching_for_not_existing_start_dir : FolderSearchSpec + { + static string startDirectory = Path.Combine(Path.GetRandomFileName()); + static string searchPattern = Path.Combine("packages", "Mongo2Go*", "XXX", "mongodb-win32-i386*", "bin"); + static string directory; + + Because of = () => directory = startDirectory.FindFolder(searchPattern); + It should_return_null = () => directory.Should().BeNull(); + } + + [Subject("FolderSearch")] + public class when_searching_for_folder_upwards : FolderSearchSpec + { + static string searchPattern = Path.Combine("packages", "Mongo2Go*", "tools", "mongodb-win32-i386*", "bin"); + static string directory; + + Because of = () => directory = LocationOfAssembly.FindFolderUpwards(searchPattern); + It should_find_the_path_with_the_highest_version_number = () => directory.Should().Be(MongoBinaries); + } + + [Subject("FolderSearch")] + public class when_searching_for_not_existing_folder_upwards : FolderSearchSpec + { + static string searchPattern = Path.Combine("packages", "Mongo2Go*", "XXX", "mongodb-win32-i386*", "bin"); + static string directory; + + Because of = () => directory = LocationOfAssembly.FindFolderUpwards(searchPattern); + It should_return_null = () => directory.Should().BeNull(); + } + + [Subject("FolderSearch")] + public class when_remove_last_part_of_path + { + static string directory; + + Because of = () => directory = Path.Combine("test1", "test2", "test3").RemoveLastPart(); + It should_remove_the_element = () => directory.Should().Be(Path.Combine("test1", "test2")); + } + + [Subject("FolderSearch")] + public class when_remove_last_part_of_single_element_path + { + static string directory; + + Because of = () => directory = "test1".RemoveLastPart(); + It should_return_null = () => directory.Should().BeNull(); + } + + [Subject("FolderSearch")] + public class when_directory_contains_multiple_versions_mongo2go + { + private readonly string[] directories; + + private static string getAssemblyVersion() + { + // ReSharper disable once PossibleNullReferenceException + return typeof(FolderSearch).GetTypeInfo().Assembly.GetCustomAttribute().InformationalVersion; + } + + public when_directory_contains_multiple_versions_mongo2go() + { + + // setup some directories + directories = new[] + { + Path.Combine(AppDomain.CurrentDomain.BaseDirectory, getAssemblyVersion() + "a"), + Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "2.2.9"), + Path.Combine(AppDomain.CurrentDomain.BaseDirectory, getAssemblyVersion()) + }; + + foreach (var d in directories) + Directory.CreateDirectory(d); + } + + private static string path; + + private Because of = () => path = FolderSearch.FindFolder(AppDomain.CurrentDomain.BaseDirectory, "*"); + + private It should_return_the_one_that_matches_our_own_assembly_version = + () => path.Should().Be(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, getAssemblyVersion())); + } + + public class FolderSearchSpec + { + public static string BaseDir = Path.Combine(Path.GetTempPath(), 
Path.GetRandomFileName()); + public static string MongoBinaries = Path.Combine(BaseDir, "test1", "test2", "packages", "Mongo2Go.1.2.3", "tools", "mongodb-win32-i386-2.0.7-rc0", "bin"); + public static string MongoOlderBinaries = Path.Combine(BaseDir, "test1", "test2", "packages", "Mongo2Go.1.1.1", "tools", "mongodb-win32-i386-2.0.7-rc0", "bin"); + public static string LocationOfAssembly = Path.Combine(BaseDir, "test1", "test2", "Project", "bin"); + + Establish context = () => + { + if (!Directory.Exists(BaseDir)) { Directory.CreateDirectory(BaseDir); } + if (!Directory.Exists(MongoBinaries)) { Directory.CreateDirectory(MongoBinaries); } + if (!Directory.Exists(MongoOlderBinaries)) { Directory.CreateDirectory(MongoOlderBinaries); } + if (!Directory.Exists(LocationOfAssembly)) { Directory.CreateDirectory(LocationOfAssembly); } + }; + + Cleanup stuff = () => { if (Directory.Exists(BaseDir)) { Directory.Delete(BaseDir, true); }}; + } +} +// ReSharper restore UnusedMember.Local +// ReSharper restore InconsistentNaming \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj b/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj new file mode 100644 index 00000000..34f3034a --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj @@ -0,0 +1,21 @@ + + + net8.0 + false + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/MongoDbAdditionalArgumentsTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/MongoDbAdditionalArgumentsTests.cs new file mode 100644 index 00000000..09a79097 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/MongoDbAdditionalArgumentsTests.cs @@ -0,0 +1,76 @@ +using FluentAssertions; +using Machine.Specifications; +using Mongo2Go.Helper; +using System; + +// ReSharper disable InconsistentNaming +// ReSharper disable UnusedMember.Local +namespace Mongo2GoTests +{ + [Subject(typeof(MongodArguments))] + public class when_null_additional_arguments_return_empty_string + { + private static string validAdditionalArguments; + + Because of = () => validAdditionalArguments = MongodArguments.GetValidAdditionalArguments(string.Empty, null); + It should_be_empty_string = () => validAdditionalArguments.Should().BeEmpty(); + } + + [Subject(typeof(MongodArguments))] + public class when_no_additional_arguments_return_empty_string + { + private static string validAdditionalArguments; + + Because of = () => validAdditionalArguments = MongodArguments.GetValidAdditionalArguments(string.Empty, string.Empty); + It should_be_empty_string = () => validAdditionalArguments.Should().BeEmpty(); + } + + [Subject(typeof(MongodArguments))] + public class when_additional_arguments_start_with_argument_separator_return_additional_arguments + { + private static string validAdditionalArguments; + private const string additionalArgumentsUnderTest = " --argument_1 under_test --argument_2 under test"; + private const string expectedAdditionalArguments = " --argument_1 under_test --argument_2 under test"; + + Because of = () => validAdditionalArguments = MongodArguments.GetValidAdditionalArguments(string.Empty, additionalArgumentsUnderTest); + It should_be_expected_additional_arguments = () => validAdditionalArguments.Should().Be(expectedAdditionalArguments); + } + + [Subject(typeof(MongodArguments))] + public class when_additional_arguments_does_not_start_with_argument_separator_return_additional_arguments + { + private static string validAdditionalArguments; + private const string additionalArgumentsUnderTest = 
"argument_1 under_test --argument_2 under test"; + private const string expectedAdditionalArguments = " --argument_1 under_test --argument_2 under test"; + + Because of = () => validAdditionalArguments = MongodArguments.GetValidAdditionalArguments(string.Empty, additionalArgumentsUnderTest); + It should_be_expected_additional_arguments = () => validAdditionalArguments.Should().Be(expectedAdditionalArguments); + } + + [Subject(typeof(MongodArguments))] + public class when_existing_arguments_and_additional_arguments_do_not_have_shared_options_return_additional_arguments + { + private static string validAdditionalArguments; + private const string existingArguments = "--existing_argument1 --existing_argument2"; + private const string additionalArgumentsUnderTest = " --argument_1 under_test --argument_2 under test"; + private const string expectedAdditionalArguments = " --argument_1 under_test --argument_2 under test"; + + Because of = () => validAdditionalArguments = MongodArguments.GetValidAdditionalArguments(existingArguments, additionalArgumentsUnderTest); + It should_be_expected_additional_arguments = () => validAdditionalArguments.Should().Be(expectedAdditionalArguments); + } + + [Subject(typeof(MongodArguments))] + public class when_existing_arguments_and_additional_arguments_have_shared_options_throw_argument_exception + { + private static Exception exception; + private const string duplicateArgument = "existing_argument2"; + private static readonly string existingArguments = $"--existing_argument1 --{duplicateArgument}"; + private static readonly string additionalArgumentsUnderTest = $" --argument_1 under_test --{duplicateArgument} argument2_new_value --argument_2 under test"; + + Because of = () => exception = Catch.Exception(() => MongodArguments.GetValidAdditionalArguments(existingArguments, additionalArgumentsUnderTest)); + It should_throw_argument_exception = () => exception.Should().BeOfType(); + It should_contain_more_than_instance_of_the_duplicate_argument = () => exception.Message.IndexOf(duplicateArgument, StringComparison.InvariantCultureIgnoreCase).Should().NotBe(exception.Message.LastIndexOf(duplicateArgument, StringComparison.InvariantCultureIgnoreCase)); + } +} +// ReSharper restore UnusedMember.Local +// ReSharper restore InconsistentNaming \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoDebuggingTest.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoDebuggingTest.cs new file mode 100644 index 00000000..0bd2b8c3 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoDebuggingTest.cs @@ -0,0 +1,34 @@ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using Mongo2Go; +using MongoDB.Bson; +using MongoDB.Bson.Serialization; +using MongoDB.Driver; + +namespace Mongo2GoTests.Runner +{ + public class MongoDebuggingTest + { + internal static MongoDbRunner _runner; + internal static IMongoCollection _collection; + internal static string _databaseName = "IntegrationTest"; + internal static string _collectionName = "TestCollection"; + internal static IMongoDatabase _database; + + internal static void CreateConnection() + { + _runner = MongoDbRunner.StartForDebugging(singleNodeReplSet: false); + + MongoClient client = new MongoClient(_runner.ConnectionString); + _database = client.GetDatabase(_databaseName); + _collection = _database.GetCollection(_collectionName); + } + + public static IList ReadBsonFile(string fileName) + { + string[] content = File.ReadAllLines(fileName); + return content.Select(s => 
BsonSerializer.Deserialize(s)).ToList(); + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoIntegrationTest.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoIntegrationTest.cs new file mode 100644 index 00000000..e5a67eb1 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoIntegrationTest.cs @@ -0,0 +1,47 @@ +using Microsoft.Extensions.Logging; +using Mongo2Go; +using MongoDB.Driver; +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace Mongo2GoTests.Runner +{ + public class MongoIntegrationTest + { + internal static MongoDbRunner _runner; + internal static IMongoCollection _collection; + internal static string _databaseName = "IntegrationTest"; + internal static string _collectionName = "TestCollection"; + + internal static void CreateConnection(ILogger logger = null) + { + _runner = MongoDbRunner.Start(singleNodeReplSet: false, logger: logger); + + MongoClient client = new MongoClient(_runner.ConnectionString); + IMongoDatabase database = client.GetDatabase(_databaseName); + _collection = database.GetCollection(_collectionName); + } + } + + public static class TaskExtensions + { + public static async Task WithTimeout(this Task task, TimeSpan timeout) + { + using (var cancellationTokenSource = new CancellationTokenSource()) + { + + var completedTask = await Task.WhenAny(task, Task.Delay(timeout, cancellationTokenSource.Token)); + if (completedTask == task) + { + cancellationTokenSource.Cancel(); + await task; + } + else + { + throw new TimeoutException("The operation has timed out."); + } + } + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoTransactionTest.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoTransactionTest.cs new file mode 100644 index 00000000..ac2c86c2 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/MongoTransactionTest.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.Text; +using Mongo2Go; +using MongoDB.Driver; + +namespace Mongo2GoTests.Runner +{ + public class MongoTransactionTest + { + internal static MongoDbRunner _runner; + internal static IMongoCollection _mainCollection; + internal static IMongoCollection _dependentCollection; + internal static string _databaseName = "TransactionTest"; + internal static string _mainCollectionName = "MainCollection"; + internal static string _dependentCollectionName = "DependentCollection"; + internal static IMongoDatabase database; + internal static IMongoClient client; + internal static void CreateConnection(ushort? 
singleNodeReplSetWaitTimeout = null) + { + if (singleNodeReplSetWaitTimeout.HasValue) + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true, singleNodeReplSetWaitTimeout: singleNodeReplSetWaitTimeout.Value); + } + else + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true); + } + + client = new MongoClient(_runner.ConnectionString); + database = client.GetDatabase(_databaseName); + _mainCollection = database.GetCollection(_mainCollectionName); + _dependentCollection = database.GetCollection(_dependentCollectionName); + } + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerImportExportTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerImportExportTests.cs new file mode 100644 index 00000000..68549b24 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerImportExportTests.cs @@ -0,0 +1,89 @@ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using FluentAssertions; +using Machine.Specifications; +using Mongo2Go.Helper; +using MongoDB.Driver; +using MongoDB.Driver.Linq; +using It = Machine.Specifications.It; + +// ReSharper disable InconsistentNaming +// ReSharper disable UnusedMember.Local +namespace Mongo2GoTests.Runner +{ + [Subject("Runner Integration Test")] + public class when_using_monogoexport : MongoDebuggingTest + { + static readonly string _testFile = Path.GetTempPath() + "testExport.json"; + static IList parsedContent; + + Establish context = () => + + { + CreateConnection(); + _database.DropCollection(_collectionName); + + _collection.InsertOne(TestDocument.DummyData1()); + _collection.InsertOne(TestDocument.DummyData2()); + _collection.InsertOne(TestDocument.DummyData3()); + }; + + Because of = () => + { + _runner.Export(_databaseName, _collectionName, _testFile); + Thread.Sleep(500); + parsedContent = ReadBsonFile(_testFile); + }; + + It should_preserve_all_values1 = () => parsedContent[0].Should().BeEquivalentTo(TestDocument.DummyData1(), cfg => cfg.Excluding(d => d.Id)); + It should_preserve_all_values2 = () => parsedContent[1].Should().BeEquivalentTo(TestDocument.DummyData2(), cfg => cfg.Excluding(d => d.Id)); + It should_preserve_all_values3 = () => parsedContent[2].Should().BeEquivalentTo(TestDocument.DummyData3(), cfg => cfg.Excluding(d => d.Id)); + + Cleanup stuff = () => + { + new FileSystem().DeleteFile(_testFile); + _runner.Dispose(); + }; + } + + [Subject("Runner Integration Test")] + public class when_using_monogoimport : MongoDebuggingTest + { + static IQueryable query; + static readonly string _testFile = Path.GetTempPath() + "testImport.json"; + + const string _filecontent = + @"{ ""_id"" : { ""$oid"" : ""50227b375dff9218248eadc4"" }, ""StringTest"" : ""Hello World"", ""IntTest"" : 42, ""DateTest"" : { ""$date"" : ""1984-09-30T06:06:06.171Z"" }, ""ListTest"" : [ ""I"", ""am"", ""a"", ""list"", ""of"", ""strings"" ] }" + "\r\n" + + @"{ ""_id"" : { ""$oid"" : ""50227b375dff9218248eadc5"" }, ""StringTest"" : ""Foo"", ""IntTest"" : 23, ""DateTest"" : null, ""ListTest"" : null }" + "\r\n" + + @"{ ""_id"" : { ""$oid"" : ""50227b375dff9218248eadc6"" }, ""StringTest"" : ""Bar"", ""IntTest"" : 77, ""DateTest"" : null, ""ListTest"" : null }" + "\r\n"; + + Establish context = () => + { + CreateConnection(); + _database.DropCollection(_collectionName); + File.WriteAllText(_testFile, _filecontent); + }; + + Because of = () => + { + _runner.Import(_databaseName, _collectionName, _testFile, true); + Thread.Sleep(500); + query = _collection.AsQueryable().Select(c => c).OrderBy(c => c.Id); 
; + + }; + + It should_return_document1 = () => query.ToList().ElementAt(0).Should().BeEquivalentTo(TestDocument.DummyData1(), cfg => cfg.Excluding(d => d.Id)); + It should_return_document2 = () => query.ToList().ElementAt(1).Should().BeEquivalentTo(TestDocument.DummyData2(), cfg => cfg.Excluding(d => d.Id)); + It should_return_document3 = () => query.ToList().ElementAt(2).Should().BeEquivalentTo(TestDocument.DummyData3(), cfg => cfg.Excluding(d => d.Id)); + + Cleanup stuff = () => + { + new FileSystem().DeleteFile(_testFile); + _runner.Dispose(); + }; + } +} +// ReSharper restore UnusedMember.Local +// ReSharper restore InconsistentNaming \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerIntegrationTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerIntegrationTests.cs new file mode 100644 index 00000000..a4c9380d --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerIntegrationTests.cs @@ -0,0 +1,118 @@ +using FluentAssertions; +using Machine.Specifications; +using MELT; +using MongoDB.Driver; +using MongoDB.Driver.Linq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using It = Machine.Specifications.It; + +// ReSharper disable InconsistentNaming +// ReSharper disable UnusedMember.Local +namespace Mongo2GoTests.Runner +{ + [Subject("Runner Integration Test")] + public class when_using_the_inbuild_serialization : MongoIntegrationTest + { + static TestDocument findResult; + + + Establish context = () => + { + CreateConnection(); + _collection.InsertOne(TestDocument.DummyData1()); + }; + + Because of = () => findResult = _collection.FindSync(_ => true).First(); + + It should_return_a_result = () => findResult.Should().NotBeNull(); + It should_hava_expected_data = () => findResult.Should().BeEquivalentTo(TestDocument.DummyData1(), cfg => cfg.Excluding(d => d.Id)); + + Cleanup stuff = () => _runner.Dispose(); + } + + [Subject("Runner Integration Test")] + public class when_using_the_new_linq_support : MongoIntegrationTest + { + static List queryResult; + + Establish context = () => + { + CreateConnection(); + _collection.InsertOne(TestDocument.DummyData1()); + _collection.InsertOne(TestDocument.DummyData2()); + _collection.InsertOne(TestDocument.DummyData3()); + }; + + Because of = () => + { + queryResult = (from c in _collection.AsQueryable() + where c.StringTest == TestDocument.DummyData2().StringTest || c.StringTest == TestDocument.DummyData3().StringTest + select c).ToList(); + }; + + It should_return_two_documents = () => queryResult.Count().Should().Be(2); + It should_return_document2 = () => queryResult.ElementAt(0).IntTest = TestDocument.DummyData2().IntTest; + It should_return_document3 = () => queryResult.ElementAt(1).IntTest = TestDocument.DummyData3().IntTest; + + Cleanup stuff = () => _runner.Dispose(); + } + + [Subject("Runner Integration Test")] + public class when_using_commands_that_create_console_output : MongoIntegrationTest + { + static List taskList = new List(); + + private Establish context = () => + { + CreateConnection(); + }; + + private Because of = () => + { + var createIndexModel = new CreateIndexModel(Builders.IndexKeys.Ascending(x => x.IntTest)); + taskList.Add(_collection.Indexes.CreateOneAsync(createIndexModel).WithTimeout(TimeSpan.FromMilliseconds(5000))); + taskList.Add(_collection.Indexes.DropAllAsync().WithTimeout(TimeSpan.FromMilliseconds(5000))); + }; + + It should_not_timeout = () => Task.WaitAll(taskList.ToArray()); + + Cleanup stuff = () => 
_runner.Dispose(); + } + + + [Subject("Runner Integration Test")] + public class when_using_microsoft_ilogger : MongoIntegrationTest + { + static List taskList = new List(); + static ITestLoggerFactory loggerFactory; + + private Establish context = () => + { + loggerFactory = TestLoggerFactory.Create(); + var logger = loggerFactory.CreateLogger("MyTestLogger"); + CreateConnection(logger); + }; + + private Because of = () => + { + var createIndexModel = new CreateIndexModel(Builders.IndexKeys.Ascending(x => x.IntTest)); + taskList.Add(_collection.Indexes.CreateOneAsync(createIndexModel).WithTimeout(TimeSpan.FromMilliseconds(5000))); + taskList.Add(_collection.Indexes.DropAllAsync().WithTimeout(TimeSpan.FromMilliseconds(5000))); + }; + + It should_not_timeout = () => Task.WaitAll(taskList.ToArray()); + It should_have_received_many_logs = () => + loggerFactory.Sink.LogEntries.Count(l => l.LogLevel == Microsoft.Extensions.Logging.LogLevel.Information) + .Should().BeGreaterThan(10); + It should_have_created_collection_statement = () => loggerFactory.Sink.LogEntries + .Count(l => l.Properties.Any(p => p.Key == "message" && (string)p.Value == "createCollection")) + .Should().BeGreaterOrEqualTo(1); + + Cleanup stuff = () => _runner.Dispose(); + } +} +// ReSharper restore UnusedMember.Local +// ReSharper restore InconsistentNaming \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTests.cs new file mode 100644 index 00000000..0b531782 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTests.cs @@ -0,0 +1,108 @@ +using FluentAssertions; +using Machine.Specifications; +using Mongo2Go; +using Mongo2Go.Helper; +using Moq; +using System.IO; +using It = Machine.Specifications.It; + +#pragma warning disable CS0618 // Type or member is obsolete + +// ReSharper disable InconsistentNaming +// ReSharper disable UnusedMember.Local + +namespace Mongo2GoTests.Runner +{ + [Subject("Runner")] + public class when_instantiating_the_runner_for_integration_test + { + static MongoDbRunner runner; + static Mock portPoolMock; + static Mock fileSystemMock; + static Mock processStarterMock; + static Mock binaryLocatorMock; + + static string exptectedDataDirectory; + static string exptectedLogfile; + static readonly string exptectedConnectString = "mongodb://127.0.0.1:{0}/".Formatted(MongoDbDefaults.TestStartPort + 1); + + Establish context = () => + { + portPoolMock = new Mock(); + portPoolMock.Setup(m => m.GetNextOpenPort()).Returns(MongoDbDefaults.TestStartPort + 1); + + fileSystemMock = new Mock(); + fileSystemMock.Setup(m => m.CreateFolder(Moq.It.IsAny())).Callback(s => + { + exptectedDataDirectory = s; + exptectedLogfile = Path.Combine(exptectedDataDirectory, MongoDbDefaults.Lockfile); + }); + + var processMock = new Mock(); + + processStarterMock = new Mock(); + processStarterMock.Setup(m => m.Start(Moq.It.IsAny(), Moq.It.IsAny(), Moq.It.IsAny(), false, Moq.It.IsAny(), Moq.It.IsAny(), null)).Returns(processMock.Object); + + binaryLocatorMock = new Mock (); + binaryLocatorMock.Setup(m => m.Directory).Returns(string.Empty); + }; + + Because of = () => runner = MongoDbRunner.StartUnitTest(portPoolMock.Object, fileSystemMock.Object, processStarterMock.Object, binaryLocatorMock.Object); + + It should_create_the_data_directory = () => fileSystemMock.Verify(x => x.CreateFolder(Moq.It.Is(s => s.StartsWith(Path.GetTempPath()))), Times.Exactly(1)); + It should_delete_old_lock_file = () => fileSystemMock.Verify(x => 
x.DeleteFile(exptectedLogfile), Times.Exactly(1)); + + It should_start_the_process = () => processStarterMock.Verify(x => x.Start(Moq.It.IsAny(), Moq.It.IsAny(), Moq.It.IsAny(), false, Moq.It.IsAny(), Moq.It.IsAny(), null), Times.Exactly(1)); + + It should_have_expected_connection_string = () => runner.ConnectionString.Should().Be(exptectedConnectString); + It should_return_an_instance_with_state_running = () => runner.State.Should().Be(State.Running); + } + + [Subject("Runner")] + public class when_instantiating_the_runner_for_local_debugging + { + static MongoDbRunner runner; + static Mock portWatcherMock; + static Mock processWatcherMock; + static Mock fileSystemMock; + static Mock processStarterMock; + static Mock binaryLocatorMock; + + static string exptectedDataDirectory; + static string exptectedLogfile; + + Establish context = () => + { + processWatcherMock = new Mock(); + processWatcherMock.Setup(m => m.IsProcessRunning(Moq.It.IsAny())).Returns(false); + + portWatcherMock = new Mock(); + portWatcherMock.Setup(m => m.IsPortAvailable(Moq.It.IsAny())).Returns(true); + + fileSystemMock = new Mock(); + fileSystemMock.Setup(m => m.CreateFolder(Moq.It.IsAny())).Callback(s => + { + exptectedDataDirectory = s; + exptectedLogfile = Path.Combine(exptectedDataDirectory, MongoDbDefaults.Lockfile); + }); + + var processMock = new Mock(); + processStarterMock = new Mock(); + processStarterMock.Setup(m => m.Start(Moq.It.IsAny(), exptectedDataDirectory, MongoDbDefaults.DefaultPort, true, false, Moq.It.IsAny(), Moq.It.IsAny(), null)).Returns(processMock.Object); + + binaryLocatorMock = new Mock (); + binaryLocatorMock.Setup(m => m.Directory).Returns(string.Empty); + }; + + Because of = () => runner = MongoDbRunner.StartForDebuggingUnitTest(processWatcherMock.Object, portWatcherMock.Object, fileSystemMock.Object, processStarterMock.Object, binaryLocatorMock.Object); + + It should_check_for_already_running_process = () => processWatcherMock.Verify(x => x.IsProcessRunning(MongoDbDefaults.ProcessName), Times.Exactly(1)); + It should_check_the_default_port = () => portWatcherMock.Verify(x => x.IsPortAvailable(MongoDbDefaults.DefaultPort), Times.Exactly(1)); + It should_create_the_data_directory = () => fileSystemMock.Verify(x => x.CreateFolder(Moq.It.Is(s => s.StartsWith(Path.GetTempPath()))), Times.Exactly(1)); + It should_delete_old_lock_file = () => fileSystemMock.Verify(x => x.DeleteFile(exptectedLogfile), Times.Exactly(1)); + It should_return_an_instance_with_state_running = () => runner.State.Should().Be(State.Running); + It should_start_the_process_without_kill = () => processStarterMock.Verify(x => x.Start(Moq.It.IsAny(), exptectedDataDirectory, MongoDbDefaults.DefaultPort, true, false, Moq.It.IsAny(), Moq.It.IsAny(), null), Times.Exactly(1)); + } +} +// ReSharper restore UnusedMember.Local +// ReSharper restore InconsistentNaming \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTransactionTests.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTransactionTests.cs new file mode 100644 index 00000000..5e52b28a --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/RunnerTransactionTests.cs @@ -0,0 +1,190 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using FluentAssertions; +using Machine.Specifications; +using MongoDB.Driver; + +namespace Mongo2GoTests.Runner +{ + [Subject("Runner Transaction Test")] + public class when_transaction_completes : MongoTransactionTest + { + private static TestDocument 
mainDocument; + private static TestDocument dependentDocument; + + Establish context = () => + + { + CreateConnection(); + database.DropCollection(_mainCollectionName); + database.DropCollection(_dependentCollectionName); + _mainCollection.InsertOne(TestDocument.DummyData2()); + _dependentCollection.InsertOne(TestDocument.DummyData2()); + }; + + private Because of = () => + { + var filter = Builders.Filter.Where(x => x.IntTest == 23); + var update = Builders.Update.Inc(i => i.IntTest, 10); + + using (var sessionHandle = client.StartSession()) + { + try + { + var i = 0; + while (i < 10) + { + try + { + i++; + sessionHandle.StartTransaction(new TransactionOptions( + readConcern: ReadConcern.Local, + writeConcern: WriteConcern.W1)); + try + { + var first = _mainCollection.UpdateOne(sessionHandle, filter, update); + var second = _dependentCollection.UpdateOne(sessionHandle, filter, update); + } + catch (Exception) + { + sessionHandle.AbortTransaction(); + throw; + } + + var j = 0; + while (j < 10) + { + try + { + j++; + sessionHandle.CommitTransaction(); + break; + } + catch (MongoException e) + { + if (e.HasErrorLabel("UnknownTransactionCommitResult")) + continue; + throw; + } + } + break; + } + catch (MongoException e) + { + if (e.HasErrorLabel("TransientTransactionError")) + continue; + throw; + } + } + } + catch (Exception) + { + + } + } + + mainDocument = _mainCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + dependentDocument = _dependentCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + }; + + It main_should_be_33 = () => mainDocument.IntTest.Should().Be(33); + It dependent_should_be_33 = () => dependentDocument.IntTest.Should().Be(33); + Cleanup cleanup = () => _runner.Dispose(); + } + + + [Subject("Runner Transaction Test")] + public class when_transaction_is_aborted_before_commit : MongoTransactionTest + { + private static TestDocument mainDocument; + private static TestDocument dependentDocument; + private static TestDocument mainDocument_before_commit; + private static TestDocument dependentDocument_before_commit; + Establish context = () => + + { + CreateConnection(); + database.DropCollection(_mainCollectionName); + database.DropCollection(_dependentCollectionName); + _mainCollection.InsertOne(TestDocument.DummyData2()); + _dependentCollection.InsertOne(TestDocument.DummyData2()); + + }; + + private Because of = () => + { + var filter = Builders.Filter.Where(x => x.IntTest == 23); + var update = Builders.Update.Inc(i => i.IntTest, 10); + + using (var sessionHandle = client.StartSession()) + { + try + { + var i = 0; + while (i < 2) + { + try + { + i++; + sessionHandle.StartTransaction(new TransactionOptions( + readConcern: ReadConcern.Local, + writeConcern: WriteConcern.W1)); + try + { + var first = _mainCollection.UpdateOne(sessionHandle, filter, update); + var second = _dependentCollection.UpdateOne(sessionHandle, filter, update); + mainDocument_before_commit = _mainCollection.FindSync(sessionHandle, Builders.Filter.Empty).ToList().FirstOrDefault(); + dependentDocument_before_commit = _dependentCollection.FindSync(sessionHandle, Builders.Filter.Empty).ToList().FirstOrDefault(); + } + catch (Exception) + { + sessionHandle.AbortTransaction(); + throw; + } + + //Throw exception and do not commit + throw new ApplicationException(); + } + catch (MongoException e) + { + if (e.HasErrorLabel("TransientTransactionError")) + continue; + throw; + } + + } + } + catch (Exception) + { + + } + } + + mainDocument = 
_mainCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + dependentDocument = _dependentCollection.FindSync(Builders.Filter.Empty).FirstOrDefault(); + }; + + It main_should_be_still_23_after_aborting = () => mainDocument.IntTest.Should().Be(23); + It dependent_should_be_still_23_after_aborting = () => dependentDocument.IntTest.Should().Be(23); + It main_should_be_33_before_aborting = () => mainDocument_before_commit.IntTest.Should().Be(33); + It dependent_should_be_33_before_aborting = () => dependentDocument_before_commit.IntTest.Should().Be(33); + Cleanup cleanup = () => _runner.Dispose(); + } + + [Subject("Runner Transaction Test")] + public class when_replica_set_not_ready_before_timeout_expires : MongoTransactionTest + { + private static Exception exception; + + Because of = () => exception = Catch.Exception(() => CreateConnection(0)); + + // this passes on Windows (TimeoutException as expected) + // but breaks on my Mac (MongoDB.Driver.MongoCommandException: Command replSetInitiate failed: already initialized.) + It should_throw_timeout_exception = () => { + Console.WriteLine(exception.ToString()); + exception.Should().BeOfType(); + }; + } +} diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/TestDocument.cs b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/TestDocument.cs new file mode 100644 index 00000000..a777bdf1 --- /dev/null +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Runner/TestDocument.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace Mongo2GoTests.Runner +{ + public class TestDocument + { + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string Id { get; set; } + + public string StringTest { get; set; } + + public int IntTest { get; set; } + + [BsonDateTimeOptions(Kind = DateTimeKind.Local)] + public DateTime? DateTest { get; set; } + + public List ListTest { get; set; } + + public static TestDocument DummyData1() + { + return new TestDocument + { + StringTest = "Hello World", + IntTest = 42, + DateTest = new DateTime(1984, 09, 30, 6, 6, 6, 171, DateTimeKind.Utc).ToLocalTime(), + ListTest = new List {"I", "am", "a", "list", "of", "strings"} + }; + } + + public static TestDocument DummyData2() + { + return new TestDocument + { + StringTest = "Foo", + IntTest = 23, + }; + } + + public static TestDocument DummyData3() + { + return new TestDocument + { + StringTest = "Bar", + IntTest = 77, + }; + } + + } +} diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveExtractor.cs b/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveExtractor.cs new file mode 100644 index 00000000..6fe0bb94 --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveExtractor.cs @@ -0,0 +1,152 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using ByteSizeLib; +using Espresso3389.HttpStream; +using HttpProgress; +using ICSharpCode.SharpZipLib.GZip; +using ICSharpCode.SharpZipLib.Tar; +using ICSharpCode.SharpZipLib.Zip; + +namespace MongoDownloader +{ + internal class ArchiveExtractor + { + private static readonly int CachePageSize = Convert.ToInt32(ByteSize.FromMebiBytes(4).Bytes); + + private readonly Options _options; + private readonly BinaryStripper? _binaryStripper; + + public ArchiveExtractor(Options options, BinaryStripper? binaryStripper) + { + _options = options ?? 
throw new ArgumentNullException(nameof(options)); + _binaryStripper = binaryStripper; + } + + public async Task>> DownloadExtractZipArchiveAsync(Download download, DirectoryInfo extractDirectory, ArchiveProgress progress, CancellationToken cancellationToken) + { + var bytesTransferred = 0L; + using var headResponse = await _options.HttpClient.SendAsync(new HttpRequestMessage(HttpMethod.Head, download.Archive.Url), cancellationToken); + var contentLength = headResponse.Content.Headers.ContentLength ?? 0; + var cacheFile = new FileInfo(Path.Combine(_options.CacheDirectory.FullName, download.Archive.Url.Segments.Last())); + await using var cacheStream = new FileStream(cacheFile.FullName, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None); + var stopwatch = Stopwatch.StartNew(); + await using var httpStream = new HttpStream(download.Archive.Url, cacheStream, ownStream: false, CachePageSize, cached: null); + httpStream.RangeDownloaded += (_, args) => + { + bytesTransferred += args.Length; + progress.Report(new CopyProgress(stopwatch.Elapsed, 0, bytesTransferred, contentLength)); + }; + using var zipFile = new ZipFile(httpStream); + var binaryRegex = _options.Binaries[(download.Product, download.Platform)]; + var licenseRegex = _options.Licenses[(download.Product, download.Platform)]; + var stripTasks = new List>(); + foreach (var entry in zipFile.Cast().Where(e => e.IsFile)) + { + var nameParts = entry.Name.Split('\\', '/').Skip(1).ToList(); + var zipEntryPath = string.Join('/', nameParts); + var isBinaryFile = binaryRegex.IsMatch(zipEntryPath); + var isLicenseFile = licenseRegex.IsMatch(zipEntryPath); + if (isBinaryFile || isLicenseFile) + { + var destinationPathParts = isLicenseFile ? nameParts.Prepend(ProductDirectoryName(download.Product)) : nameParts; + var destinationFile = new FileInfo(Path.Combine(destinationPathParts.Prepend(extractDirectory.FullName).ToArray())); + destinationFile.Directory?.Create(); + await using var destinationStream = destinationFile.OpenWrite(); + await using var inputStream = zipFile.GetInputStream(entry); + await inputStream.CopyToAsync(destinationStream, cancellationToken); + if (isBinaryFile && _binaryStripper is not null) + { + stripTasks.Add(_binaryStripper.StripAsync(destinationFile, cancellationToken)); + } + } + } + progress.Report(new CopyProgress(stopwatch.Elapsed, 0, bytesTransferred, bytesTransferred)); + return stripTasks; + } + + public IEnumerable> ExtractArchive(Download download, FileInfo archive, DirectoryInfo extractDirectory, CancellationToken cancellationToken) + { + switch (Path.GetExtension(archive.Name)) + { + case ".tgz": + return ExtractTarGzipArchive(download, archive, extractDirectory, cancellationToken); + default: + throw new NotSupportedException($"Only .tgz archives are currently supported. 
\"{archive.FullName}\" can not be extracted."); + } + } + + private IEnumerable> ExtractTarGzipArchive(Download download, FileInfo archive, DirectoryInfo extractDirectory, CancellationToken cancellationToken) + { + // See https://github.com/icsharpcode/SharpZipLib/wiki/GZip-and-Tar-Samples#-simple-full-extract-from-a-tgz-targz + using var archiveStream = archive.OpenRead(); + using var gzipStream = new GZipInputStream(archiveStream); + using var tarArchive = TarArchive.CreateInputTarArchive(gzipStream, Encoding.UTF8); + var extractedFileNames = new List(); + tarArchive.ProgressMessageEvent += (_, entry, _) => + { + cancellationToken.ThrowIfCancellationRequested(); + extractedFileNames.Add(entry.Name); + }; + tarArchive.ExtractContents(extractDirectory.FullName); + return CleanupExtractedFiles(download, extractDirectory, extractedFileNames); + } + + private IEnumerable> CleanupExtractedFiles(Download download, DirectoryInfo extractDirectory, IEnumerable extractedFileNames) + { + var rootDirectoryToDelete = new HashSet(); + var binaryRegex = _options.Binaries[(download.Product, download.Platform)]; + var licenseRegex = _options.Licenses[(download.Product, download.Platform)]; + var stripTasks = new List>(); + foreach (var extractedFileName in extractedFileNames.Select(e => e.Replace('\\', Path.DirectorySeparatorChar).Replace('/', Path.DirectorySeparatorChar))) + { + var extractedFile = new FileInfo(Path.Combine(extractDirectory.FullName, extractedFileName)); + var parts = extractedFileName.Split(Path.DirectorySeparatorChar); + var entryFileName = string.Join("/", parts.Skip(1)); + rootDirectoryToDelete.Add(parts[0]); + var isBinaryFile = binaryRegex.IsMatch(entryFileName); + var isLicenseFile = licenseRegex.IsMatch(entryFileName); + if (!(isBinaryFile || isLicenseFile)) + { + extractedFile.Delete(); + } + else + { + var destinationPathParts = parts.Skip(1); + if (isLicenseFile) + { + destinationPathParts = destinationPathParts.Prepend(ProductDirectoryName(download.Product)); + } + var destinationFile = new FileInfo(Path.Combine(destinationPathParts.Prepend(extractDirectory.FullName).ToArray())); + destinationFile.Directory?.Create(); + extractedFile.MoveTo(destinationFile.FullName); + if (isBinaryFile && _binaryStripper is not null) + { + stripTasks.Add(_binaryStripper.StripAsync(destinationFile)); + } + } + } + var rootArchiveDirectory = new DirectoryInfo(Path.Combine(extractDirectory.FullName, rootDirectoryToDelete.Single())); + var binDirectory = new DirectoryInfo(Path.Combine(rootArchiveDirectory.FullName, "bin")); + binDirectory.Delete(recursive: false); + rootArchiveDirectory.Delete(recursive: false); + return stripTasks; + } + + private static string ProductDirectoryName(Product product) + { + return product switch + { + Product.CommunityServer => "community-server", + Product.DatabaseTools => "database-tools", + _ => throw new ArgumentOutOfRangeException(nameof(product), product, null) + }; + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveProgress.cs b/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveProgress.cs new file mode 100644 index 00000000..7b4cea4c --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/ArchiveProgress.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using ByteSizeLib; +using HttpProgress; +using Spectre.Console; + +namespace MongoDownloader +{ + public class ArchiveProgress : IProgress + { + private readonly ProgressTask _archiveProgress; + private readonly ProgressTask 
_globalProgress; + private readonly IEnumerable _allArchiveProgresses; + private readonly Download _download; + private readonly string _completedDescription; + + public ArchiveProgress(ProgressTask archiveProgress, ProgressTask globalProgress, IEnumerable allArchiveProgresses, Download download, string completedDescription) + { + _archiveProgress = archiveProgress ?? throw new ArgumentNullException(nameof(archiveProgress)); + _globalProgress = globalProgress ?? throw new ArgumentNullException(nameof(globalProgress)); + _allArchiveProgresses = allArchiveProgresses ?? throw new ArgumentNullException(nameof(allArchiveProgresses)); + _download = download ?? throw new ArgumentNullException(nameof(download)); + _completedDescription = completedDescription ?? throw new ArgumentNullException(nameof(completedDescription)); + } + + public void Report(ICopyProgress progress) + { + _archiveProgress.Value = progress.BytesTransferred; + _archiveProgress.MaxValue = progress.ExpectedBytes; + + string text; + bool isIndeterminate; + if (progress.BytesTransferred < progress.ExpectedBytes) + { + var speed = ByteSize.FromBytes(progress.BytesTransferred / progress.TransferTime.TotalSeconds); + text = $"Downloading {_download} from {_download.Archive.Url} at {speed:0.0}/s"; + isIndeterminate = false; + } + else + { + text = $"Downloaded {_download}"; + isIndeterminate = true; + // Cheat by subtracting 1 so that the progress stays at 99% in indeterminate mode for + // remaining tasks (stripping) to complete with an indeterminate progress bar + _archiveProgress.Value = progress.BytesTransferred - 1; + } + Report(text, isIndeterminate); + + lock (_globalProgress) + { + _globalProgress.Value = _allArchiveProgresses.Sum(e => e.Value); + _globalProgress.MaxValue = _allArchiveProgresses.Sum(e => e.MaxValue); + } + } + + public void Report(string action) + { + Report(action, isIndeterminate: true); + } + + public void ReportCompleted(ByteSize strippedSize) + { + _archiveProgress.Value = _archiveProgress.MaxValue; + + lock (_globalProgress) + { + if (_allArchiveProgresses.All(e => e.IsFinished)) + { + _globalProgress.Description = _completedDescription; + _globalProgress.Value = _globalProgress.MaxValue; + } + } + + var saved = strippedSize.Bytes > 0 ? $" (saved {strippedSize:#.#} by stripping)" : ""; + Report($"Extracted {_download}{saved}", isIndeterminate: false); + } + + private void Report(string description, bool isIndeterminate) + { + _archiveProgress.Description = description; + _archiveProgress.IsIndeterminate = isIndeterminate; + lock (_globalProgress) + { + _globalProgress.IsIndeterminate = _allArchiveProgresses.All(e => e.IsFinished || e.IsIndeterminate); + } + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/BinaryStripper.cs b/Mongo2Go-4.1.0/src/MongoDownloader/BinaryStripper.cs new file mode 100644 index 00000000..7b2c0ee9 --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/BinaryStripper.cs @@ -0,0 +1,92 @@ +using System; +using System.ComponentModel; +using System.IO; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; +using ByteSizeLib; +using CliWrap; + +namespace MongoDownloader +{ + public class BinaryStripper + { + private const string LlvmStripToolName = "llvm-strip"; + + private readonly string _llvmStripPath; + + private BinaryStripper(string llvmStripPath) + { + _llvmStripPath = llvmStripPath ?? 
throw new ArgumentNullException(nameof(llvmStripPath)); + } + + public static async Task CreateAsync(CancellationToken cancellationToken) + { + var llvmStripPath = await GetLlvmStripPathAsync(cancellationToken); + return new BinaryStripper(llvmStripPath); + } + + public async Task StripAsync(FileInfo executable, CancellationToken cancellationToken = default) + { + var sizeBefore = ByteSize.FromBytes(executable.Length); + await Cli.Wrap(_llvmStripPath).WithArguments(executable.FullName).ExecuteAsync(cancellationToken); + executable.Refresh(); + var sizeAfter = ByteSize.FromBytes(executable.Length); + return sizeBefore - sizeAfter; + } + + private static async Task GetLlvmStripPathAsync(CancellationToken cancellationToken) + { + try + { + await Cli.Wrap(LlvmStripToolName).WithArguments("--version").ExecuteAsync(cancellationToken); + // llvm-strip is on the PATH + return LlvmStripToolName; + } + catch (Win32Exception exception) when (exception.NativeErrorCode == 2) + { + // llvm-strip is NOT in the PATH, let's search with homebrew + var llvmStripToolPath = await TryGetLlvmStripPathWithHomebrew(); + + if (llvmStripToolPath != null) + { + return llvmStripToolPath; + } + + throw new FileNotFoundException($"The \"{LlvmStripToolName}\" tool was not found."); + } + } + + private static async Task TryGetLlvmStripPathWithHomebrew() + { + if (!RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return null; + } + + string? llvmStripToolPath = null; + try + { + await Cli.Wrap("brew") + // don't validate exit code, if `brew list llvm` fails it's because the llvm formula is not installed + .WithValidation(CommandResultValidation.None) + .WithArguments(new[] {"list", "llvm"}) + .WithStandardOutputPipe(PipeTarget.ToDelegate(line => + { + if (llvmStripToolPath == null && line.EndsWith(LlvmStripToolName)) + { + llvmStripToolPath = line; + } + })) + .ExecuteAsync(); + } + catch (Win32Exception exception) when (exception.NativeErrorCode == 2) + { + // brew is not installed + return null; + } + + return llvmStripToolPath; + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/DataModel.cs b/Mongo2Go-4.1.0/src/MongoDownloader/DataModel.cs new file mode 100644 index 00000000..9d26e69e --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/DataModel.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using System.Text.Json.Serialization; + +// ReSharper disable AutoPropertyCanBeMadeGetOnly.Global +// ReSharper disable ClassNeverInstantiated.Global +// ReSharper disable CollectionNeverUpdated.Global + +namespace MongoDownloader +{ + public enum Platform + { + Linux, + // ReSharper disable once InconsistentNaming + macOS, + Windows, + } + + public enum Product + { + CommunityServer, + DatabaseTools, + } + + /// + /// The root object of the JSON describing the available releases. 
+ /// + public class Release + { + [JsonPropertyName("versions")] + public List Versions { get; set; } = new(); + } + + public class Version + { + [JsonPropertyName("version")] + public string Number { get; set; } = ""; + + [JsonPropertyName("production_release")] + public bool Production { get; set; } = false; + + [JsonPropertyName("downloads")] + public List Downloads { get; set; } = new(); + } + + public class Download + { + /// + /// Used to identify the platform for the Community Server archives + /// + [JsonPropertyName("target")] + public string Target { get; set; } = ""; + + /// + /// Used to identify the platform for the Database Tools archives + /// + [JsonPropertyName("name")] + public string Name { get; set; } = ""; + + [JsonPropertyName("arch")] + public string Arch { get; set; } = ""; + + [JsonPropertyName("edition")] + public string Edition { get; set; } = ""; + + [JsonPropertyName("archive")] + public Archive Archive { get; set; } = new(); + + public Product Product { get; set; } + + public Platform Platform { get; set; } + + public Architecture Architecture { get; set; } + + public override string ToString() => $"{Product} for {Platform}/{Architecture.ToString().ToLowerInvariant()}"; + } + + public class Archive + { + [JsonPropertyName("url")] + public Uri Url { get; set; } = default!; + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/MongoDbDownloader.cs b/Mongo2Go-4.1.0/src/MongoDownloader/MongoDbDownloader.cs new file mode 100644 index 00000000..4a7e81c8 --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/MongoDbDownloader.cs @@ -0,0 +1,162 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http.Json; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using ByteSizeLib; +using HttpProgress; +using Spectre.Console; + +namespace MongoDownloader +{ + internal class MongoDbDownloader + { + private readonly ArchiveExtractor _extractor; + private readonly Options _options; + + public MongoDbDownloader(ArchiveExtractor extractor, Options options) + { + _extractor = extractor ?? throw new ArgumentNullException(nameof(extractor)); + _options = options ?? 
throw new ArgumentNullException(nameof(options)); + } + + public async Task RunAsync(DirectoryInfo toolsDirectory, CancellationToken cancellationToken) + { + var strippedSize = await AnsiConsole + .Progress() + .Columns( + new ProgressBarColumn(), + new PercentageColumn(), + new RemainingTimeColumn(), + new DownloadedColumn(), + new TaskDescriptionColumn { Alignment = Justify.Left } + ) + .StartAsync(async context => await RunAsync(context, toolsDirectory, cancellationToken)); + + return strippedSize; + } + + private async Task RunAsync(ProgressContext context, DirectoryInfo toolsDirectory, CancellationToken cancellationToken) + { + const double initialMaxValue = double.Epsilon; + var globalProgress = context.AddTask("Downloading MongoDB", maxValue: initialMaxValue); + + var (communityServerVersion, communityServerDownloads) = await GetCommunityServerDownloadsAsync(cancellationToken); + globalProgress.Description = $"Downloading MongoDB Community Server {communityServerVersion.Number}"; + + var (databaseToolsVersion, databaseToolsDownloads) = await GetDatabaseToolsDownloadsAsync(cancellationToken); + globalProgress.Description = $"Downloading MongoDB Community Server {communityServerVersion.Number} and Database Tools {databaseToolsVersion.Number}"; + + var tasks = new List>(); + var allArchiveProgresses = new List(); + foreach (var download in communityServerDownloads.Concat(databaseToolsDownloads)) + { + var archiveProgress = context.AddTask($"Downloading {download} from {download.Archive.Url}", maxValue: initialMaxValue); + var directoryName = $"mongodb-{download.Platform.ToString().ToLowerInvariant()}-{download.Architecture.ToString().ToLowerInvariant()}-{communityServerVersion.Number}-database-tools-{databaseToolsVersion.Number}"; + var extractDirectory = new DirectoryInfo(Path.Combine(toolsDirectory.FullName, directoryName)); + allArchiveProgresses.Add(archiveProgress); + var progress = new ArchiveProgress(archiveProgress, globalProgress, allArchiveProgresses, download, $"✅ Downloaded and extracted MongoDB Community Server {communityServerVersion.Number} and Database Tools {databaseToolsVersion.Number} into {new Uri(toolsDirectory.FullName).AbsoluteUri}"); + tasks.Add(ProcessArchiveAsync(download, extractDirectory, progress, cancellationToken)); + } + var strippedSizes = await Task.WhenAll(tasks); + return strippedSizes.Aggregate(new ByteSize(0), (current, strippedSize) => current + strippedSize); + } + + private async Task ProcessArchiveAsync(Download download, DirectoryInfo extractDirectory, ArchiveProgress progress, CancellationToken cancellationToken) + { + IEnumerable> stripTasks; + var archiveExtension = Path.GetExtension(download.Archive.Url.AbsolutePath); + if (archiveExtension == ".zip") + { + stripTasks = await _extractor.DownloadExtractZipArchiveAsync(download, extractDirectory, progress, cancellationToken); + } + else + { + var archiveFileInfo = await DownloadArchiveAsync(download.Archive, progress, cancellationToken); + stripTasks = _extractor.ExtractArchive(download, archiveFileInfo, extractDirectory, cancellationToken); + } + progress.Report("Stripping binaries"); + var completedStripTasks = await Task.WhenAll(stripTasks); + var totalStrippedSize = completedStripTasks.Aggregate(new ByteSize(0), (current, strippedSize) => current + strippedSize); + progress.ReportCompleted(totalStrippedSize); + return totalStrippedSize; + } + + private async Task DownloadArchiveAsync(Archive archive, IProgress progress, CancellationToken cancellationToken) + { + 
_options.CacheDirectory.Create(); + var destinationFile = new FileInfo(Path.Combine(_options.CacheDirectory.FullName, archive.Url.Segments.Last())); + var useCache = bool.TryParse(Environment.GetEnvironmentVariable("MONGO2GO_DOWNLOADER_USE_CACHED_FILE") ?? "", out var useCachedFile) && useCachedFile; + if (useCache && destinationFile.Exists) + { + progress.Report(new CopyProgress(TimeSpan.Zero, 0, 1, 1)); + return destinationFile; + } + await using var destinationStream = destinationFile.OpenWrite(); + await _options.HttpClient.GetAsync(archive.Url.AbsoluteUri, destinationStream, progress, cancellationToken); + return destinationFile; + } + + private async Task<(Version version, IEnumerable downloads)> GetCommunityServerDownloadsAsync(CancellationToken cancellationToken) + { + var release = await _options.HttpClient.GetFromJsonAsync(_options.CommunityServerUrl, cancellationToken) ?? throw new InvalidOperationException($"Failed to deserialize {nameof(Release)}"); + var version = release.Versions.FirstOrDefault(e => e.Production) ?? throw new InvalidOperationException("No Community Server production version was found"); + var downloads = Enum.GetValues().SelectMany(platform => GetDownloads(platform, Product.CommunityServer, version, _options, _options.Edition)); + return (version, downloads); + } + + private async Task<(Version version, IEnumerable downloads)> GetDatabaseToolsDownloadsAsync(CancellationToken cancellationToken) + { + var release = await _options.HttpClient.GetFromJsonAsync(_options.DatabaseToolsUrl, cancellationToken) ?? throw new InvalidOperationException($"Failed to deserialize {nameof(Release)}"); + var version = release.Versions.FirstOrDefault() ?? throw new InvalidOperationException("No Database Tools version was found"); + var downloads = Enum.GetValues().SelectMany(platform => GetDownloads(platform, Product.DatabaseTools, version, _options)); + return (version, downloads); + } + + private static IEnumerable GetDownloads(Platform platform, Product product, Version version, Options options, Regex? editionRegex = null) + { + var platformRegex = options.PlatformIdentifiers[platform]; + Func platformPredicate = product switch + { + Product.CommunityServer => download => platformRegex.IsMatch(download.Target), + Product.DatabaseTools => download => platformRegex.IsMatch(download.Name), + _ => throw new ArgumentOutOfRangeException(nameof(product), product, $"The value of argument '{nameof(product)}' ({product}) is invalid for enum type '{nameof(Product)}'.") + }; + + foreach (var architecture in options.Architectures[platform]) + { + var architectureRegex = options.ArchitectureIdentifiers[architecture]; + var matchingDownloads = version.Downloads + .Where(platformPredicate) + .Where(e => architectureRegex.IsMatch(e.Arch)) + .Where(e => editionRegex?.IsMatch(e.Edition) ?? 
true) + .ToList(); + + if (matchingDownloads.Count == 0) + { + var downloads = version.Downloads.OrderBy(e => e.Target).ThenBy(e => e.Arch); + var messages = Enumerable.Empty() + .Append($"Download not found for {platform}/{architecture}.") + .Append($" Available downloads for {product} {version.Number}:") + .Concat(downloads.Select(e => $" - {e.Target}/{e.Arch} ({e.Edition})")); + throw new InvalidOperationException(string.Join(Environment.NewLine, messages)); + } + + if (matchingDownloads.Count > 1) + { + throw new InvalidOperationException($"Found {matchingDownloads.Count} downloads for {platform}/{architecture} but expected to find only one."); + } + + var download = matchingDownloads[0]; + download.Platform = platform; + download.Architecture = architecture; + download.Product = product; + + yield return download; + } + } + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/MongoDownloader.csproj b/Mongo2Go-4.1.0/src/MongoDownloader/MongoDownloader.csproj new file mode 100644 index 00000000..b3752c93 --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/MongoDownloader.csproj @@ -0,0 +1,21 @@ + + + + Exe + net8.0 + enable + false + + + + + + + + + + + + + + diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/Options.cs b/Mongo2Go-4.1.0/src/MongoDownloader/Options.cs new file mode 100644 index 00000000..90d1e8e9 --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/Options.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Net.Http; +using System.Runtime.InteropServices; +using System.Text.RegularExpressions; + +namespace MongoDownloader +{ + internal class Options + { + /// + /// The instance used to fetch data over HTTP. + /// + public HttpClient HttpClient { get; init; } = new(); + + /// + /// The URL of the MongoDB Community Server download information JSON. + /// + public string CommunityServerUrl { get; init; } = "https://s3.amazonaws.com/downloads.mongodb.org/current.json"; + + /// + /// The URL of the MongoDB Database Tools download information JSON. + /// + public string DatabaseToolsUrl { get; init; } = "https://s3.amazonaws.com/downloads.mongodb.org/tools/db/release.json"; + + /// + /// The directory to store the downloaded archive files. + /// + public DirectoryInfo CacheDirectory { get; init; } = new(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.InternetCache), nameof(MongoDownloader))); + + /// + /// The architectures to download for a given platform. + /// + public IReadOnlyDictionary> Architectures { get; init; } = new Dictionary> + { + [Platform.Linux] = new[] { Architecture.Arm64, Architecture.X64 }, + [Platform.macOS] = new[] { Architecture.X64 }, + [Platform.Windows] = new[] { Architecture.X64 }, + }; + + /// + /// The edition of the archive to download. + /// + /// macOS and Windows use base and Linux uses targeted for the community edition + public Regex Edition { get; init; } = new(@"base|targeted"); + + /// + /// The regular expressions used to identify platform-specific archives to download. + /// + public IReadOnlyDictionary PlatformIdentifiers { get; init; } = new Dictionary + { + [Platform.Linux] = new(@"ubuntu2004", RegexOptions.IgnoreCase), + [Platform.macOS] = new(@"macOS", RegexOptions.IgnoreCase), + [Platform.Windows] = new(@"windows", RegexOptions.IgnoreCase), + }; + + /// + /// The regular expressions used to identify architectures to download. 
+ /// + public IReadOnlyDictionary ArchitectureIdentifiers { get; init; } = new Dictionary + { + [Architecture.Arm64] = new("arm64|aarch64", RegexOptions.IgnoreCase), + [Architecture.X64] = new("x86_64", RegexOptions.IgnoreCase), + }; + + /// + /// A dictionary describing how to match MongoDB binaries inside the zip archives. + /// + /// The key is a tuple with the / and the + /// value is a regular expressions to match against the zip file name entry. + /// + public IReadOnlyDictionary<(Product, Platform), Regex> Binaries { get; init; } = new Dictionary<(Product, Platform), Regex> + { + [(Product.CommunityServer, Platform.Linux)] = new(@"bin/mongod"), + [(Product.CommunityServer, Platform.macOS)] = new(@"bin/mongod"), + [(Product.CommunityServer, Platform.Windows)] = new(@"bin/mongod\.exe"), + [(Product.DatabaseTools, Platform.Linux)] = new(@"bin/(mongoexport|mongoimport)"), + [(Product.DatabaseTools, Platform.macOS)] = new(@"bin/(mongoexport|mongoimport)"), + [(Product.DatabaseTools, Platform.Windows)] = new(@"bin/(mongoexport|mongoimport)\.exe"), + }; + + /// + /// A dictionary describing how to match licence files inside the zip archives. + /// + /// The key is a tuple with the / and the + /// value is a regular expressions to match against the zip file name entry. + /// + public IReadOnlyDictionary<(Product, Platform), Regex> Licenses { get; init; } = new Dictionary<(Product, Platform), Regex> + { + // The regular expression matches anything at the zip top level, i.e. does not contain any slash (/) character + [(Product.CommunityServer, Platform.Linux)] = new(@"^[^/]+$"), + [(Product.CommunityServer, Platform.macOS)] = new(@"^[^/]+$"), + [(Product.CommunityServer, Platform.Windows)] = new(@"^[^/]+$"), + [(Product.DatabaseTools, Platform.Linux)] = new(@"^[^/]+$"), + [(Product.DatabaseTools, Platform.macOS)] = new(@"^[^/]+$"), + [(Product.DatabaseTools, Platform.Windows)] = new(@"^[^/]+$"), + }; + } +} \ No newline at end of file diff --git a/Mongo2Go-4.1.0/src/MongoDownloader/Program.cs b/Mongo2Go-4.1.0/src/MongoDownloader/Program.cs new file mode 100644 index 00000000..ef595f1a --- /dev/null +++ b/Mongo2Go-4.1.0/src/MongoDownloader/Program.cs @@ -0,0 +1,86 @@ +using System; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; +using Spectre.Console; + +namespace MongoDownloader +{ + internal static class Program + { + private static async Task Main(string[] args) + { + try + { + var toolsDirectory = GetToolsDirectory(); + + foreach (DirectoryInfo dir in toolsDirectory.EnumerateDirectories()) + { + dir.Delete(true); + } + + var cancellationTokenSource = new CancellationTokenSource(); + Console.CancelKeyPress += (_, eventArgs) => + { + // Try to cancel gracefully the first time, then abort the process the second time Ctrl+C is pressed + eventArgs.Cancel = !cancellationTokenSource.IsCancellationRequested; + cancellationTokenSource.Cancel(); + }; + var options = new Options(); + var performStrip = args.All(e => e != "--no-strip"); + var binaryStripper = performStrip ? 
await GetBinaryStripperAsync(cancellationTokenSource.Token) : null; + var archiveExtractor = new ArchiveExtractor(options, binaryStripper); + var downloader = new MongoDbDownloader(archiveExtractor, options); + var strippedSize = await downloader.RunAsync(toolsDirectory, cancellationTokenSource.Token); + if (performStrip) + { + AnsiConsole.WriteLine($"Saved {strippedSize:#.#} by stripping executables"); + } + return 0; + } + catch (Exception exception) + { + if (exception is not OperationCanceledException) + { + AnsiConsole.WriteException(exception, ExceptionFormats.ShortenPaths); + } + return 1; + } + } + + private static DirectoryInfo GetToolsDirectory() + { + for (var directory = new DirectoryInfo("."); directory != null; directory = directory.Parent) + { + var toolsDirectory = directory.GetDirectories("tools", SearchOption.TopDirectoryOnly).SingleOrDefault(); + if (toolsDirectory?.Exists ?? false) + { + return toolsDirectory; + } + } + throw new InvalidOperationException("The tools directory was not found"); + } + + private static async Task GetBinaryStripperAsync(CancellationToken cancellationToken) + { + try + { + return await BinaryStripper.CreateAsync(cancellationToken); + } + catch (FileNotFoundException exception) + { + string installCommand; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + installCommand = "brew install llvm"; + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + installCommand = "scoop install llvm"; + else + installCommand = "apt-get install llvm"; + + throw new Exception($"{exception.Message} Either install llvm with `{installCommand}` or run MongoDownloader with the --no-strip option to skip binary stripping.", exception); + } + } + } +} diff --git a/Mongo2Go-4.1.0/src/mongo2go_200_200.png b/Mongo2Go-4.1.0/src/mongo2go_200_200.png new file mode 100644 index 00000000..23750b60 Binary files /dev/null and b/Mongo2Go-4.1.0/src/mongo2go_200_200.png differ diff --git a/Mongo2Go-4.1.0/src/mongo2go_big.png b/Mongo2Go-4.1.0/src/mongo2go_big.png new file mode 100644 index 00000000..368d6452 Binary files /dev/null and b/Mongo2Go-4.1.0/src/mongo2go_big.png differ diff --git a/Mongo2Go-4.1.0/tools/README.md b/Mongo2Go-4.1.0/tools/README.md new file mode 100644 index 00000000..a6b5bcac --- /dev/null +++ b/Mongo2Go-4.1.0/tools/README.md @@ -0,0 +1,11 @@ +The binaries in this directory are automatically downloaded with the `MongoDownloader` tool. + +In order to download the latest binary: + +1. Go into the `Mongo2Go/src/MongoDownloader` directory +2. Run the downloader with `dotnet run` + +* The _MongoDB Community Server_ binaries are fetched from [https://s3.amazonaws.com/downloads.mongodb.org/current.json](https://s3.amazonaws.com/downloads.mongodb.org/current.json) + The latest production version is downloaded and extracted. +* The _MongoDB Database Tools_ archives are fetched from [https://s3.amazonaws.com/downloads.mongodb.org/tools/db/release.json](https://s3.amazonaws.com/downloads.mongodb.org/tools/db/release.json) + The latest version is downloaded and extracted. 
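For example, starting from the repository root (a usage sketch based on `Program.cs` and `MongoDbDownloader.cs` in this change, not official documentation):

    cd src/MongoDownloader
    dotnet run                # download, extract and strip the binaries (stripping requires llvm-strip)
    dotnet run -- --no-strip  # skip stripping when llvm is not installed

Setting the environment variable `MONGO2GO_DOWNLOADER_USE_CACHED_FILE=true` lets the downloader reuse archives already present in its cache directory instead of fetching them again.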
\ No newline at end of file diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt new file mode 100644 index 00000000..4e1383df --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. 
A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. 
+ + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. 
+ + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 new file mode 100644 index 00000000..14e2f777 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README new file mode 100644 index 00000000..fe759d19 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README @@ -0,0 +1,87 @@ +MongoDB README + +Welcome to MongoDB! + +COMPONENTS + + mongod - The database server. + mongos - Sharding router. + mongo - The database shell (uses interactive javascript). + +UTILITIES + + install_compass - Installs MongoDB Compass for your platform. + +BUILDING + + See docs/building.md. + +RUNNING + + For command line options invoke: + + $ ./mongod --help + + To run a single server database: + + $ sudo mkdir -p /data/db + $ ./mongod + $ + $ # The mongo javascript shell connects to localhost and test database by default: + $ ./mongo + > help + +INSTALLING COMPASS + + You can install compass using the install_compass script packaged with MongoDB: + + $ ./install_compass + + This will download the appropriate MongoDB Compass package for your platform + and install it. + +DRIVERS + + Client drivers for most programming languages are available at + https://docs.mongodb.com/manual/applications/drivers/. Use the shell + ("mongo") for administrative tasks. + +BUG REPORTS + + See https://github.com/mongodb/mongo/wiki/Submit-Bug-Reports. + +PACKAGING + + Packages are created dynamically by the package.py script located in the + buildscripts directory. This will generate RPM and Debian packages. + +DOCUMENTATION + + https://docs.mongodb.com/manual/ + +CLOUD HOSTED MONGODB + + https://www.mongodb.com/cloud/atlas + +FORUMS + + https://community.mongodb.com + + A forum for technical questions about using MongoDB. + + https://community.mongodb.com/c/server-dev + + A forum for technical questions about building and developing MongoDB. + +LEARN MONGODB + + https://university.mongodb.com/ + +LICENSE + + MongoDB is free and open-source. Versions released prior to October 16, + 2018 are published under the AGPL. All versions released after October + 16, 2018, including patch fixes for prior versions, are published under + the Server Side Public License (SSPL) v1. See individual files for + details. + diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..34fb8230 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES @@ -0,0 +1,1568 @@ +MongoDB uses third-party libraries or other resources that may +be distributed under licenses different than the MongoDB software. 
+ +In the event that we accidentally failed to list a required notice, +please bring it to our attention through any of the ways detailed here : + + mongodb-dev@googlegroups.com + +The attached notices are provided for information only. + +For any licenses that require disclosure of source, sources are available at +https://github.com/mongodb/mongo. + + +1) License Notice for Boost +--------------------------- + +http://www.boost.org/LICENSE_1_0.txt + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +3) License Notice for PCRE +-------------------------- + +http://www.pcre.org/licence.txt + +PCRE LICENCE +------------ + +PCRE is a library of functions to support regular expressions whose syntax +and semantics are as close as possible to those of the Perl 5 language. + +Release 7 of PCRE is distributed under the terms of the "BSD" licence, as +specified below. The documentation for PCRE, supplied in the "doc" +directory, is distributed under the same terms as the software itself. + +The basic library functions are written in C and are freestanding. Also +included in the distribution is a set of C++ wrapper functions. + + +THE BASIC LIBRARY FUNCTIONS +--------------------------- + +Written by: Philip Hazel +Email local part: ph10 +Email domain: cam.ac.uk + +University of Cambridge Computing Service, +Cambridge, England. + +Copyright (c) 1997-2008 University of Cambridge +All rights reserved. + + +THE C++ WRAPPER FUNCTIONS +------------------------- + +Contributed by: Google Inc. + +Copyright (c) 2007-2008, Google Inc. +All rights reserved. + + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. 
nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +4) License notice for Aladdin MD5 +--------------------------------- + +Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +L. Peter Deutsch +ghost@aladdin.com + +5) License notice for Snappy - http://code.google.com/p/snappy/ +--------------------------------- + Copyright 2005 and onwards Google Inc. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + A light-weight compression algorithm. It is designed for speed of + compression and decompression, rather than for the utmost in space + savings. + + For getting better compression ratios when you are compressing data + with long repeated sequences or compressing data that is similar to + other data, while still compressing fast, you might look at first + using BMDiff and then compressing the output of BMDiff with + Snappy. + +6) License notice for Google Perftools (TCMalloc utility) +--------------------------------- +New BSD License + +Copyright (c) 1998-2006, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +7) License notice for Linenoise +------------------------------- + + Copyright (c) 2010, Salvatore Sanfilippo + Copyright (c) 2010, Pieter Noordhuis + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + +8) License notice for S2 Geometry Library +----------------------------------------- + Copyright 2005 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) License notice for MurmurHash +-------------------------------- + + Copyright (c) 2010-2012 Austin Appleby + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +10) License notice for Snowball + Copyright (c) 2001, Dr Martin Porter + All rights reserved. + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +11) License notice for yaml-cpp +------------------------------- + +Copyright (c) 2008 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +12) License notice for zlib +--------------------------- + +http://www.zlib.net/zlib_license.html + +zlib.h -- interface of the 'zlib' general purpose compression library +version 1.2.8, April 28th, 2013 + +Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +Jean-loup Gailly Mark Adler +jloup@gzip.org madler@alumni.caltech.edu + + +13) License notice for 3rd party software included in the WiredTiger library +---------------------------------------------------------------------------- + +http://source.wiredtiger.com/license.html + +WiredTiger Distribution Files | Copyright Holder | License +----------------------------- | ----------------------------------- | ---------------------- +src/include/bitstring.i | University of California, Berkeley | BSD-3-Clause License +src/include/queue.h | University of California, Berkeley | BSD-3-Clause License +src/os_posix/os_getopt.c | University of California, Berkeley | BSD-3-Clause License +src/support/hash_city.c | Google, Inc. 
| The MIT License +src/support/hash_fnv.c | Authors | Public Domain + + +Other optional 3rd party software included in the WiredTiger distribution is removed by MongoDB. + + +BSD-3-CLAUSE LICENSE +-------------------- + +http://www.opensource.org/licenses/BSD-3-Clause + +Copyright (c) 1987, 1989, 1991, 1993, 1994 + The Regents of the University of California. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +4. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + +THE MIT LICENSE +--------------- + +http://www.opensource.org/licenses/MIT + +Copyright (c) 2011 Google, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +14) License Notice for SpiderMonkey +----------------------------------- + +|------------------------------------------------|------------------|---------------| +| SpiderMonkey Distribution Files | Copyright Holder | License | +|------------------------------------------------|------------------|---------------| +| js/src/jit/shared/AssemblerBuffer-x86-shared.h | Apple, Inc | BSD-2-Clause | +| js/src/jit/shared/BaseAssembler-x86-shared.h | | | +|------------------------------------------------|------------------|---------------| +| js/src/builtin/ | Google, Inc | BSD-3-Clause | +| js/src/irregexp/ | | | +| js/src/jit/arm/ | | | +| js/src/jit/mips/ | | | +| mfbt/double-conversion/ | | | +|------------------------------------------------|------------------|---------------| +| intl/icu/source/common/unicode/ | IBM, Inc | ICU | +|------------------------------------------------|------------------|---------------| +| js/src/asmjs/ | Mozilla, Inc | Apache2 | +|------------------------------------------------|------------------|---------------| +| js/public/ | Mozilla, Inc | MPL2 | +| js/src/ | | | +| mfbt | | | +|------------------------------------------------|------------------|---------------| +| js/src/vm/Unicode.cpp | None | Public Domain | +|------------------------------------------------|------------------|---------------| +| mfbt/lz4.c | Yann Collet | BSD-2-Clause | +| mfbt/lz4.h | | | +|------------------------------------------------|------------------|---------------| + +Other optional 3rd party software included in the SpiderMonkey distribution is removed by MongoDB. + + +Apple, Inc: BSD-2-Clause +------------------------ + +Copyright (C) 2008 Apple Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Google, Inc: BSD-3-Clause +------------------------- + +Copyright 2012 the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +ICU License - ICU 1.8.1 and later +--------------------------------- + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and +others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, provided that the above copyright notice(s) and this +permission notice appear in all copies of the Software and that both the +above copyright notice(s) and this permission notice appear in supporting +documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property +of their respective owners. + + +Mozilla, Inc: Apache 2 +---------------------- + +Copyright 2014 Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Mozilla, Inc: MPL 2 +------------------- + +Copyright 2014 Mozilla Foundation + +This Source Code Form is subject to the terms of the Mozilla Public +License, v. 2.0. 
If a copy of the MPL was not distributed with this +file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +Public Domain +------------- + +Any copyright is dedicated to the Public Domain. +http://creativecommons.org/licenses/publicdomain/ + + +LZ4: BSD-2-Clause +----------------- + +Copyright (C) 2011-2014, Yann Collet. +BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- LZ4 source repository : http://code.google.com/p/lz4/ +- LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c + +15) License Notice for Intel DFP Math Library +--------------------------------------------- + +Copyright (c) 2011, Intel Corp. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + his list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +16) License Notice for Unicode Data +----------------------------------- + +Copyright © 1991-2015 Unicode, Inc. 
All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +17 ) License Notice for Valgrind.h +---------------------------------- + +---------------------------------------------------------------- + +Notice that the following BSD-style license applies to this one +file (valgrind.h) only. The rest of Valgrind is licensed under the +terms of the GNU General Public License, version 2, unless +otherwise indicated. See the COPYING file in the source +distribution for details. + +---------------------------------------------------------------- + +This file is part of Valgrind, a dynamic binary instrumentation +framework. + +Copyright (C) 2000-2015 Julian Seward. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------- + +Notice that the above BSD-style license applies to this one file +(valgrind.h) only. The entire rest of Valgrind is licensed under +the terms of the GNU General Public License, version 2. See the +COPYING file in the source distribution for details. + +---------------------------------------------------------------- + +18) License notice for ICU4C +---------------------------- + +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2016 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, provided that the above +copyright notice(s) and this permission notice appear in all copies of +the Software and that both the above copyright notice(s) and this +permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY +SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, use +or other dealings in this Software without prior written authorization +of the copyright holder. + + +All trademarks and registered trademarks mentioned herein are the +property of their respective owners. + +--------------------- + +Third-Party Software Licenses + +This section contains third-party software notices and/or additional +terms for licensed third-party software components included within ICU +libraries. + +1. Unicode Data Files and Software + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2016 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) + + # The Google Chrome software developed by Google is licensed under + # the BSD license. Other software included in this distribution is + # provided under other licenses, as set forth below. + # + # The BSD License + # http://opensource.org/licenses/bsd-license.php + # Copyright (C) 2006-2008, Google Inc. + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + # + # Redistributions of source code must retain the above copyright notice, + # this list of conditions and the following disclaimer. + # Redistributions in binary form must reproduce the above + # copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided with + # the distribution. + # Neither the name of Google Inc. nor the names of its + # contributors may be used to endorse or promote products derived from + # this software without specific prior written permission. + # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # + # The word list in cjdict.txt are generated by combining three word lists + # listed below with further processing for compound word breaking. The + # frequency is generated with an iterative training against Google web + # corpora. + # + # * Libtabe (Chinese) + # - https://sourceforge.net/project/?group_id=1519 + # - Its license terms and conditions are shown below. + # + # * IPADIC (Japanese) + # - http://chasen.aist-nara.ac.jp/chasen/distribution.html + # - Its license terms and conditions are shown below. + # + # ---------COPYING.libtabe ---- BEGIN-------------------- + # + # /* + # * Copyrighy (c) 1999 TaBE Project. + # * Copyright (c) 1999 Pai-Hsiang Hsiao. + # * All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . Neither the name of the TaBE Project nor the names of its + # * contributors may be used to endorse or promote products derived + # * from this software without specific prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # /* + # * Copyright (c) 1999 Computer Systems and Communication Lab, + # * Institute of Information Science, Academia + # * Sinica. All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . 
Neither the name of the Computer Systems and Communication Lab + # * nor the names of its contributors may be used to endorse or + # * promote products derived from this software without specific + # * prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, + # University of Illinois + # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 + # + # ---------------COPYING.libtabe-----END-------------------------------- + # + # + # ---------------COPYING.ipadic-----BEGIN------------------------------- + # + # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science + # and Technology. All Rights Reserved. + # + # Use, reproduction, and distribution of this software is permitted. + # Any copy of this software, whether in its original form or modified, + # must include both the above copyright notice and the following + # paragraphs. + # + # Nara Institute of Science and Technology (NAIST), + # the copyright holders, disclaims all warranties with regard to this + # software, including all implied warranties of merchantability and + # fitness, in no event shall NAIST be liable for + # any special, indirect or consequential damages or any damages + # whatsoever resulting from loss of use, data or profits, whether in an + # action of contract, negligence or other tortuous action, arising out + # of or in connection with the use or performance of this software. + # + # A large portion of the dictionary entries + # originate from ICOT Free Software. The following conditions for ICOT + # Free Software applies to the current dictionary as well. + # + # Each User may also freely distribute the Program, whether in its + # original form or modified, to any third party or parties, PROVIDED + # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear + # on, or be attached to, the Program, which is distributed substantially + # in the same form as set out herein and that such intended + # distribution, if actually made, will neither violate or otherwise + # contravene any of the laws and regulations of the countries having + # jurisdiction over the User or the intended distribution itself. + # + # NO WARRANTY + # + # The program was produced on an experimental basis in the course of the + # research and development conducted during the project and is provided + # to users as so produced on an experimental basis. Accordingly, the + # program is provided without any warranty whatsoever, whether express, + # implied, statutory or otherwise. 
The term "warranty" used herein + # includes, but is not limited to, any warranty of the quality, + # performance, merchantability and fitness for a particular purpose of + # the program and the nonexistence of any infringement or violation of + # any right of any third party. + # + # Each user of the program will agree and understand, and be deemed to + # have agreed and understood, that there is no warranty whatsoever for + # the program and, accordingly, the entire risk arising from or + # otherwise connected with the program is assumed by the user. + # + # Therefore, neither ICOT, the copyright holder, or any other + # organization that participated in or was otherwise related to the + # development of the program and their respective officials, directors, + # officers and other employees shall be held liable for any and all + # damages, including, without limitation, general, special, incidental + # and consequential damages, arising out of or otherwise in connection + # with the use or inability to use the program or any product, material + # or result produced or otherwise obtained by using the program, + # regardless of whether they have been advised of, or otherwise had + # knowledge of, the possibility of such damages at any time during the + # project or thereafter. Each user will be deemed to have agreed to the + # foregoing by his or her commencement of use of the program. The term + # "use" as used herein includes, but is not limited to, the use, + # modification, copying and distribution of the program and the + # production of secondary products from the program. + # + # In the case where the program, whether in its original form or + # modified, was distributed or delivered to or received by a user from + # any person, organization or entity other than ICOT, unless it makes or + # grants independently of ICOT any specific warranty to the user in + # writing, such person, organization or entity, will also be exempted + # from and not be held liable to the user for any such damages as noted + # above as far as the program is concerned. + # + # ---------------COPYING.ipadic-----END---------------------------------- + +3. Lao Word Break Dictionary Data (laodict.txt) + + # Copyright (c) 2013 International Business Machines Corporation + # and others. All Rights Reserved. + # + # Project: http://code.google.com/p/lao-dictionary/ + # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt + # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt + # (copied below) + # + # This file is derived from the above dictionary, with slight + # modifications. + # ---------------------------------------------------------------------- + # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, + # are permitted provided that the following conditions are met: + # + # + # Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. Redistributions in + # binary form must reproduce the above copyright notice, this list of + # conditions and the following disclaimer in the documentation and/or + # other materials provided with the distribution. 
+ # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + # -------------------------------------------------------------------------- + +4. Burmese Word Break Dictionary Data (burmesedict.txt) + + # Copyright (c) 2014 International Business Machines Corporation + # and others. All Rights Reserved. + # + # This list is part of a project hosted at: + # github.com/kanyawtech/myanmar-karen-word-lists + # + # -------------------------------------------------------------------------- + # Copyright (c) 2013, LeRoy Benjamin Sharon + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: Redistributions of source code must retain the above + # copyright notice, this list of conditions and the following + # disclaimer. Redistributions in binary form must reproduce the + # above copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided + # with the distribution. + # + # Neither the name Myanmar Karen Word Lists, nor the names of its + # contributors may be used to endorse or promote products derived + # from this software without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS + # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + # SUCH DAMAGE. + # -------------------------------------------------------------------------- + +5. Time Zone Database + + ICU uses the public domain data and code derived from Time Zone +Database for its time zone support. The ownership of the TZ database +is explained in BCP 175: Procedure for Maintaining the Time Zone +Database section 7. + + # 7. Database Ownership + # + # The TZ database itself is not an IETF Contribution or an IETF + # document. Rather it is a pre-existing and regularly updated work + # that is in the public domain, and is intended to remain in the + # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do + # not apply to the TZ Database or contributions that individuals make + # to it. 
Should any claims be made and substantiated against the TZ + # Database, the organization that is providing the IANA + # Considerations defined in this RFC, under the memorandum of + # understanding with the IETF, currently ICANN, may act in accordance + # with all competent court orders. No ownership claims will be made + # by ICANN or the IETF Trust on the database or the code. Any person + # making a contribution to the database or code waives all rights to + # future claims in that contribution or in the TZ Database. + +19) License notice for timelib +------------------------------ + +The MIT License (MIT) + +Copyright (c) 2015-2017 Derick Rethans +Copyright (c) 2017 MongoDB, Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +20) License notice for windows dirent implementation +---------------------------------------------------- + + * Dirent interface for Microsoft Visual Studio + * Version 1.21 + * + * Copyright (C) 2006-2012 Toni Ronkko + * This file is part of dirent. Dirent may be freely distributed + * under the MIT license. For all details and documentation, see + * https://github.com/tronkko/dirent + + + 21) License notice for abseil-cpp +---------------------------- + + Copyright (c) Google Inc. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + 22) License notice for Zstandard +---------------------------- + + BSD License + + For Zstandard software + + Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, + are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 23) License notice for ASIO +---------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 24) License notice for MPark.Variant +------------------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 25) License notice for fmt +--------------------------- + +Copyright (c) 2012 - present, Victor Zverovich +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or other + materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 26) License notice for SafeInt +--------------------------- + +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the MIT License. + +MIT License + +Copyright (c) 2018 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + 27) License Notice for Raft TLA+ Specification +----------------------------------------------- + +https://github.com/ongardie/dissertation/blob/master/LICENSE + +Copyright 2014 Diego Ongaro. + +Some of our TLA+ specifications are based on the Raft TLA+ specification by Diego Ongaro. 
+ +End diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md new file mode 100644 index 00000000..01b6a37e --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md @@ -0,0 +1,13 @@ +Copyright 2014 MongoDB, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md new file mode 100644 index 00000000..20f3ffe8 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md @@ -0,0 +1,72 @@ +MongoDB Tools +=================================== + + - **bsondump** - _display BSON files in a human-readable format_ + - **mongoimport** - _Convert data from JSON, TSV or CSV and insert them into a collection_ + - **mongoexport** - _Write an existing collection to CSV or JSON format_ + - **mongodump/mongorestore** - _Dump MongoDB backups to disk in .BSON format, or restore them to a live database_ + - **mongostat** - _Monitor live MongoDB servers, replica sets, or sharded clusters_ + - **mongofiles** - _Read, write, delete, or update files in [GridFS](http://docs.mongodb.org/manual/core/gridfs/)_ + - **mongotop** - _Monitor read/write activity on a mongo server_ + + +Report any bugs, improvements, or new feature requests at https://jira.mongodb.org/browse/TOOLS + +Building Tools +--------------- + +We currently build the tools with Go version 1.15. Other Go versions may work but they are untested. + +Using `go get` to directly build the tools will not work. To build them, it's recommended to first clone this repository: + +``` +git clone https://github.com/mongodb/mongo-tools +cd mongo-tools +``` + +Then run `./make build` to build all the tools, placing them in the `bin` directory inside the repository. + +You can also build a subset of the tools using the `-tools` option. For example, `./make build -tools=mongodump,mongorestore` builds only `mongodump` and `mongorestore`. + +To use the build/test scripts in this repository, you **_must_** set GOROOT to your Go root directory. This may depend on how you installed Go. + +``` +export GOROOT=/usr/local/go +``` + +Updating Dependencies +--------------- +Starting with version 100.3.1, the tools use `go mod` to manage dependencies. All dependencies are listed in the `go.mod` file and are directly vendored in the `vendor` directory. + +In order to make changes to dependencies, you first need to change the `go.mod` file. 
You can manually edit that file to add/update/remove entries, or you can run the following in the repository directory: + +``` +go mod edit -require=<dependency>@<version> # for adding or updating a dependency +go mod edit -droprequire=<dependency> # for removing a dependency +``` + +Then run `go mod vendor -v` to reconstruct the `vendor` directory to match the changed `go.mod` file. + +Optionally, run `go mod tidy -v` to ensure that the `go.mod` file matches the `mongo-tools` source code. + +Contributing +--------------- +See our [Contributor's Guide](CONTRIBUTING.md). + +Documentation +--------------- +See the MongoDB packages [documentation](https://docs.mongodb.org/database-tools/). + +For documentation on older versions of MongoDB, reference that version of the [MongoDB Server Manual](docs.mongodb.com/manual): + +- [MongoDB 4.2 Tools](https://docs.mongodb.org/v4.2/reference/program) +- [MongoDB 4.0 Tools](https://docs.mongodb.org/v4.0/reference/program) +- [MongoDB 3.6 Tools](https://docs.mongodb.org/v3.6/reference/program) + +Adding New Platforms Support +--------------- +See our [Adding New Platform Support Guide](PLATFORMSUPPORT.md). + +Vendoring the Change into Server Repo +--------------- +See our [Vendor the Change into Server Repo](SERVERVENDORING.md). diff --git a/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..c747d0b8 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES @@ -0,0 +1,3319 @@ +--------------------------------------------------------------------- +License notice for hashicorp/go-rootcerts +--------------------------------------------------------------------- + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a.
any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. 
Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/10gen/escaper +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Lucas Morales + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo/bson +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/openssl +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. 
Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/3rf/mongo-lint +---------------------------------------------------------------------- + +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/go-stack/stack +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 Chris Hines + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/gopacket +---------------------------------------------------------------------- + +Copyright (c) 2012 Google, Inc. All rights reserved. +Copyright (c) 2009-2011 Andreas Krennmair. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Andreas Krennmair, Google, nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/gopherjs/gopherjs +---------------------------------------------------------------------- + +Copyright (c) 2013 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/howeyc/gopass +---------------------------------------------------------------------- + +Copyright (c) 2012 Chris Howey + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/jessevdk/go-flags +---------------------------------------------------------------------- + +Copyright (c) 2012 Jesse van den Kieboom. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/jtolds/gls +---------------------------------------------------------------------- + +Copyright (c) 2013, Space Monkey, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mattn/go-runewidth +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mongodb/mongo-go-driver +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/nsf/termbox-go +---------------------------------------------------------------------- + +Copyright (C) 2012 termbox-go authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/patrickmn/go-cache +---------------------------------------------------------------------- + +Copyright (c) 2012-2015 Patrick Mylund Nielsen and the go-cache contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions +---------------------------------------------------------------------- + +Copyright (c) 2015 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/go-render +---------------------------------------------------------------------- + +// Copyright (c) 2015 The Chromium Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglematchers +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglemock +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/ogletest +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/reqtrace +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey +---------------------------------------------------------------------- + +Copyright (c) 2014 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/spacemonkeygo/spacelog +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/xdg/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/tomb.v2 +---------------------------------------------------------------------- + +tomb - support for clean goroutine termination in Go. + +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt new file mode 100644 index 00000000..4e1383df --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. 
A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. 
+ + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. 
+ + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 new file mode 100644 index 00000000..14e2f777 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README new file mode 100644 index 00000000..fe759d19 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README @@ -0,0 +1,87 @@ +MongoDB README + +Welcome to MongoDB! + +COMPONENTS + + mongod - The database server. + mongos - Sharding router. + mongo - The database shell (uses interactive javascript). + +UTILITIES + + install_compass - Installs MongoDB Compass for your platform. + +BUILDING + + See docs/building.md. + +RUNNING + + For command line options invoke: + + $ ./mongod --help + + To run a single server database: + + $ sudo mkdir -p /data/db + $ ./mongod + $ + $ # The mongo javascript shell connects to localhost and test database by default: + $ ./mongo + > help + +INSTALLING COMPASS + + You can install compass using the install_compass script packaged with MongoDB: + + $ ./install_compass + + This will download the appropriate MongoDB Compass package for your platform + and install it. + +DRIVERS + + Client drivers for most programming languages are available at + https://docs.mongodb.com/manual/applications/drivers/. Use the shell + ("mongo") for administrative tasks. + +BUG REPORTS + + See https://github.com/mongodb/mongo/wiki/Submit-Bug-Reports. + +PACKAGING + + Packages are created dynamically by the package.py script located in the + buildscripts directory. This will generate RPM and Debian packages. + +DOCUMENTATION + + https://docs.mongodb.com/manual/ + +CLOUD HOSTED MONGODB + + https://www.mongodb.com/cloud/atlas + +FORUMS + + https://community.mongodb.com + + A forum for technical questions about using MongoDB. + + https://community.mongodb.com/c/server-dev + + A forum for technical questions about building and developing MongoDB. + +LEARN MONGODB + + https://university.mongodb.com/ + +LICENSE + + MongoDB is free and open-source. Versions released prior to October 16, + 2018 are published under the AGPL. All versions released after October + 16, 2018, including patch fixes for prior versions, are published under + the Server Side Public License (SSPL) v1. See individual files for + details. + diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..34fb8230 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES @@ -0,0 +1,1568 @@ +MongoDB uses third-party libraries or other resources that may +be distributed under licenses different than the MongoDB software. 
+ +In the event that we accidentally failed to list a required notice, +please bring it to our attention through any of the ways detailed here : + + mongodb-dev@googlegroups.com + +The attached notices are provided for information only. + +For any licenses that require disclosure of source, sources are available at +https://github.com/mongodb/mongo. + + +1) License Notice for Boost +--------------------------- + +http://www.boost.org/LICENSE_1_0.txt + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +3) License Notice for PCRE +-------------------------- + +http://www.pcre.org/licence.txt + +PCRE LICENCE +------------ + +PCRE is a library of functions to support regular expressions whose syntax +and semantics are as close as possible to those of the Perl 5 language. + +Release 7 of PCRE is distributed under the terms of the "BSD" licence, as +specified below. The documentation for PCRE, supplied in the "doc" +directory, is distributed under the same terms as the software itself. + +The basic library functions are written in C and are freestanding. Also +included in the distribution is a set of C++ wrapper functions. + + +THE BASIC LIBRARY FUNCTIONS +--------------------------- + +Written by: Philip Hazel +Email local part: ph10 +Email domain: cam.ac.uk + +University of Cambridge Computing Service, +Cambridge, England. + +Copyright (c) 1997-2008 University of Cambridge +All rights reserved. + + +THE C++ WRAPPER FUNCTIONS +------------------------- + +Contributed by: Google Inc. + +Copyright (c) 2007-2008, Google Inc. +All rights reserved. + + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. 
nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +4) License notice for Aladdin MD5 +--------------------------------- + +Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +L. Peter Deutsch +ghost@aladdin.com + +5) License notice for Snappy - http://code.google.com/p/snappy/ +--------------------------------- + Copyright 2005 and onwards Google Inc. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + A light-weight compression algorithm. It is designed for speed of + compression and decompression, rather than for the utmost in space + savings. + + For getting better compression ratios when you are compressing data + with long repeated sequences or compressing data that is similar to + other data, while still compressing fast, you might look at first + using BMDiff and then compressing the output of BMDiff with + Snappy. + +6) License notice for Google Perftools (TCMalloc utility) +--------------------------------- +New BSD License + +Copyright (c) 1998-2006, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +7) License notice for Linenoise +------------------------------- + + Copyright (c) 2010, Salvatore Sanfilippo + Copyright (c) 2010, Pieter Noordhuis + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + +8) License notice for S2 Geometry Library +----------------------------------------- + Copyright 2005 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) License notice for MurmurHash +-------------------------------- + + Copyright (c) 2010-2012 Austin Appleby + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +10) License notice for Snowball + Copyright (c) 2001, Dr Martin Porter + All rights reserved. + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +11) License notice for yaml-cpp +------------------------------- + +Copyright (c) 2008 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +12) License notice for zlib +--------------------------- + +http://www.zlib.net/zlib_license.html + +zlib.h -- interface of the 'zlib' general purpose compression library +version 1.2.8, April 28th, 2013 + +Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +Jean-loup Gailly Mark Adler +jloup@gzip.org madler@alumni.caltech.edu + + +13) License notice for 3rd party software included in the WiredTiger library +---------------------------------------------------------------------------- + +http://source.wiredtiger.com/license.html + +WiredTiger Distribution Files | Copyright Holder | License +----------------------------- | ----------------------------------- | ---------------------- +src/include/bitstring.i | University of California, Berkeley | BSD-3-Clause License +src/include/queue.h | University of California, Berkeley | BSD-3-Clause License +src/os_posix/os_getopt.c | University of California, Berkeley | BSD-3-Clause License +src/support/hash_city.c | Google, Inc. 
| The MIT License +src/support/hash_fnv.c | Authors | Public Domain + + +Other optional 3rd party software included in the WiredTiger distribution is removed by MongoDB. + + +BSD-3-CLAUSE LICENSE +-------------------- + +http://www.opensource.org/licenses/BSD-3-Clause + +Copyright (c) 1987, 1989, 1991, 1993, 1994 + The Regents of the University of California. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +4. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + +THE MIT LICENSE +--------------- + +http://www.opensource.org/licenses/MIT + +Copyright (c) 2011 Google, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +14) License Notice for SpiderMonkey +----------------------------------- + +|------------------------------------------------|------------------|---------------| +| SpiderMonkey Distribution Files | Copyright Holder | License | +|------------------------------------------------|------------------|---------------| +| js/src/jit/shared/AssemblerBuffer-x86-shared.h | Apple, Inc | BSD-2-Clause | +| js/src/jit/shared/BaseAssembler-x86-shared.h | | | +|------------------------------------------------|------------------|---------------| +| js/src/builtin/ | Google, Inc | BSD-3-Clause | +| js/src/irregexp/ | | | +| js/src/jit/arm/ | | | +| js/src/jit/mips/ | | | +| mfbt/double-conversion/ | | | +|------------------------------------------------|------------------|---------------| +| intl/icu/source/common/unicode/ | IBM, Inc | ICU | +|------------------------------------------------|------------------|---------------| +| js/src/asmjs/ | Mozilla, Inc | Apache2 | +|------------------------------------------------|------------------|---------------| +| js/public/ | Mozilla, Inc | MPL2 | +| js/src/ | | | +| mfbt | | | +|------------------------------------------------|------------------|---------------| +| js/src/vm/Unicode.cpp | None | Public Domain | +|------------------------------------------------|------------------|---------------| +| mfbt/lz4.c | Yann Collet | BSD-2-Clause | +| mfbt/lz4.h | | | +|------------------------------------------------|------------------|---------------| + +Other optional 3rd party software included in the SpiderMonkey distribution is removed by MongoDB. + + +Apple, Inc: BSD-2-Clause +------------------------ + +Copyright (C) 2008 Apple Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Google, Inc: BSD-3-Clause +------------------------- + +Copyright 2012 the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +ICU License - ICU 1.8.1 and later +--------------------------------- + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and +others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, provided that the above copyright notice(s) and this +permission notice appear in all copies of the Software and that both the +above copyright notice(s) and this permission notice appear in supporting +documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property +of their respective owners. + + +Mozilla, Inc: Apache 2 +---------------------- + +Copyright 2014 Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Mozilla, Inc: MPL 2 +------------------- + +Copyright 2014 Mozilla Foundation + +This Source Code Form is subject to the terms of the Mozilla Public +License, v. 2.0. 
If a copy of the MPL was not distributed with this +file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +Public Domain +------------- + +Any copyright is dedicated to the Public Domain. +http://creativecommons.org/licenses/publicdomain/ + + +LZ4: BSD-2-Clause +----------------- + +Copyright (C) 2011-2014, Yann Collet. +BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- LZ4 source repository : http://code.google.com/p/lz4/ +- LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c + +15) License Notice for Intel DFP Math Library +--------------------------------------------- + +Copyright (c) 2011, Intel Corp. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + his list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +16) License Notice for Unicode Data +----------------------------------- + +Copyright © 1991-2015 Unicode, Inc. 
All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +17 ) License Notice for Valgrind.h +---------------------------------- + +---------------------------------------------------------------- + +Notice that the following BSD-style license applies to this one +file (valgrind.h) only. The rest of Valgrind is licensed under the +terms of the GNU General Public License, version 2, unless +otherwise indicated. See the COPYING file in the source +distribution for details. + +---------------------------------------------------------------- + +This file is part of Valgrind, a dynamic binary instrumentation +framework. + +Copyright (C) 2000-2015 Julian Seward. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------- + +Notice that the above BSD-style license applies to this one file +(valgrind.h) only. The entire rest of Valgrind is licensed under +the terms of the GNU General Public License, version 2. See the +COPYING file in the source distribution for details. + +---------------------------------------------------------------- + +18) License notice for ICU4C +---------------------------- + +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2016 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, provided that the above +copyright notice(s) and this permission notice appear in all copies of +the Software and that both the above copyright notice(s) and this +permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY +SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, use +or other dealings in this Software without prior written authorization +of the copyright holder. + + +All trademarks and registered trademarks mentioned herein are the +property of their respective owners. + +--------------------- + +Third-Party Software Licenses + +This section contains third-party software notices and/or additional +terms for licensed third-party software components included within ICU +libraries. + +1. Unicode Data Files and Software + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2016 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) + + # The Google Chrome software developed by Google is licensed under + # the BSD license. Other software included in this distribution is + # provided under other licenses, as set forth below. + # + # The BSD License + # http://opensource.org/licenses/bsd-license.php + # Copyright (C) 2006-2008, Google Inc. + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + # + # Redistributions of source code must retain the above copyright notice, + # this list of conditions and the following disclaimer. + # Redistributions in binary form must reproduce the above + # copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided with + # the distribution. + # Neither the name of Google Inc. nor the names of its + # contributors may be used to endorse or promote products derived from + # this software without specific prior written permission. + # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # + # The word list in cjdict.txt are generated by combining three word lists + # listed below with further processing for compound word breaking. The + # frequency is generated with an iterative training against Google web + # corpora. + # + # * Libtabe (Chinese) + # - https://sourceforge.net/project/?group_id=1519 + # - Its license terms and conditions are shown below. + # + # * IPADIC (Japanese) + # - http://chasen.aist-nara.ac.jp/chasen/distribution.html + # - Its license terms and conditions are shown below. + # + # ---------COPYING.libtabe ---- BEGIN-------------------- + # + # /* + # * Copyrighy (c) 1999 TaBE Project. + # * Copyright (c) 1999 Pai-Hsiang Hsiao. + # * All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . Neither the name of the TaBE Project nor the names of its + # * contributors may be used to endorse or promote products derived + # * from this software without specific prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # /* + # * Copyright (c) 1999 Computer Systems and Communication Lab, + # * Institute of Information Science, Academia + # * Sinica. All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . 
Neither the name of the Computer Systems and Communication Lab + # * nor the names of its contributors may be used to endorse or + # * promote products derived from this software without specific + # * prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, + # University of Illinois + # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 + # + # ---------------COPYING.libtabe-----END-------------------------------- + # + # + # ---------------COPYING.ipadic-----BEGIN------------------------------- + # + # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science + # and Technology. All Rights Reserved. + # + # Use, reproduction, and distribution of this software is permitted. + # Any copy of this software, whether in its original form or modified, + # must include both the above copyright notice and the following + # paragraphs. + # + # Nara Institute of Science and Technology (NAIST), + # the copyright holders, disclaims all warranties with regard to this + # software, including all implied warranties of merchantability and + # fitness, in no event shall NAIST be liable for + # any special, indirect or consequential damages or any damages + # whatsoever resulting from loss of use, data or profits, whether in an + # action of contract, negligence or other tortuous action, arising out + # of or in connection with the use or performance of this software. + # + # A large portion of the dictionary entries + # originate from ICOT Free Software. The following conditions for ICOT + # Free Software applies to the current dictionary as well. + # + # Each User may also freely distribute the Program, whether in its + # original form or modified, to any third party or parties, PROVIDED + # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear + # on, or be attached to, the Program, which is distributed substantially + # in the same form as set out herein and that such intended + # distribution, if actually made, will neither violate or otherwise + # contravene any of the laws and regulations of the countries having + # jurisdiction over the User or the intended distribution itself. + # + # NO WARRANTY + # + # The program was produced on an experimental basis in the course of the + # research and development conducted during the project and is provided + # to users as so produced on an experimental basis. Accordingly, the + # program is provided without any warranty whatsoever, whether express, + # implied, statutory or otherwise. 
The term "warranty" used herein + # includes, but is not limited to, any warranty of the quality, + # performance, merchantability and fitness for a particular purpose of + # the program and the nonexistence of any infringement or violation of + # any right of any third party. + # + # Each user of the program will agree and understand, and be deemed to + # have agreed and understood, that there is no warranty whatsoever for + # the program and, accordingly, the entire risk arising from or + # otherwise connected with the program is assumed by the user. + # + # Therefore, neither ICOT, the copyright holder, or any other + # organization that participated in or was otherwise related to the + # development of the program and their respective officials, directors, + # officers and other employees shall be held liable for any and all + # damages, including, without limitation, general, special, incidental + # and consequential damages, arising out of or otherwise in connection + # with the use or inability to use the program or any product, material + # or result produced or otherwise obtained by using the program, + # regardless of whether they have been advised of, or otherwise had + # knowledge of, the possibility of such damages at any time during the + # project or thereafter. Each user will be deemed to have agreed to the + # foregoing by his or her commencement of use of the program. The term + # "use" as used herein includes, but is not limited to, the use, + # modification, copying and distribution of the program and the + # production of secondary products from the program. + # + # In the case where the program, whether in its original form or + # modified, was distributed or delivered to or received by a user from + # any person, organization or entity other than ICOT, unless it makes or + # grants independently of ICOT any specific warranty to the user in + # writing, such person, organization or entity, will also be exempted + # from and not be held liable to the user for any such damages as noted + # above as far as the program is concerned. + # + # ---------------COPYING.ipadic-----END---------------------------------- + +3. Lao Word Break Dictionary Data (laodict.txt) + + # Copyright (c) 2013 International Business Machines Corporation + # and others. All Rights Reserved. + # + # Project: http://code.google.com/p/lao-dictionary/ + # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt + # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt + # (copied below) + # + # This file is derived from the above dictionary, with slight + # modifications. + # ---------------------------------------------------------------------- + # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, + # are permitted provided that the following conditions are met: + # + # + # Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. Redistributions in + # binary form must reproduce the above copyright notice, this list of + # conditions and the following disclaimer in the documentation and/or + # other materials provided with the distribution. 
+ # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + # -------------------------------------------------------------------------- + +4. Burmese Word Break Dictionary Data (burmesedict.txt) + + # Copyright (c) 2014 International Business Machines Corporation + # and others. All Rights Reserved. + # + # This list is part of a project hosted at: + # github.com/kanyawtech/myanmar-karen-word-lists + # + # -------------------------------------------------------------------------- + # Copyright (c) 2013, LeRoy Benjamin Sharon + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: Redistributions of source code must retain the above + # copyright notice, this list of conditions and the following + # disclaimer. Redistributions in binary form must reproduce the + # above copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided + # with the distribution. + # + # Neither the name Myanmar Karen Word Lists, nor the names of its + # contributors may be used to endorse or promote products derived + # from this software without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS + # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + # SUCH DAMAGE. + # -------------------------------------------------------------------------- + +5. Time Zone Database + + ICU uses the public domain data and code derived from Time Zone +Database for its time zone support. The ownership of the TZ database +is explained in BCP 175: Procedure for Maintaining the Time Zone +Database section 7. + + # 7. Database Ownership + # + # The TZ database itself is not an IETF Contribution or an IETF + # document. Rather it is a pre-existing and regularly updated work + # that is in the public domain, and is intended to remain in the + # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do + # not apply to the TZ Database or contributions that individuals make + # to it. 
Should any claims be made and substantiated against the TZ + # Database, the organization that is providing the IANA + # Considerations defined in this RFC, under the memorandum of + # understanding with the IETF, currently ICANN, may act in accordance + # with all competent court orders. No ownership claims will be made + # by ICANN or the IETF Trust on the database or the code. Any person + # making a contribution to the database or code waives all rights to + # future claims in that contribution or in the TZ Database. + +19) License notice for timelib +------------------------------ + +The MIT License (MIT) + +Copyright (c) 2015-2017 Derick Rethans +Copyright (c) 2017 MongoDB, Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +20) License notice for windows dirent implementation +---------------------------------------------------- + + * Dirent interface for Microsoft Visual Studio + * Version 1.21 + * + * Copyright (C) 2006-2012 Toni Ronkko + * This file is part of dirent. Dirent may be freely distributed + * under the MIT license. For all details and documentation, see + * https://github.com/tronkko/dirent + + + 21) License notice for abseil-cpp +---------------------------- + + Copyright (c) Google Inc. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + 22) License notice for Zstandard +---------------------------- + + BSD License + + For Zstandard software + + Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, + are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 23) License notice for ASIO +---------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 24) License notice for MPark.Variant +------------------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 25) License notice for fmt +--------------------------- + +Copyright (c) 2012 - present, Victor Zverovich +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or other + materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 26) License notice for SafeInt +--------------------------- + +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the MIT License. + +MIT License + +Copyright (c) 2018 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + 27) License Notice for Raft TLA+ Specification +----------------------------------------------- + +https://github.com/ongardie/dissertation/blob/master/LICENSE + +Copyright 2014 Diego Ongaro. + +Some of our TLA+ specifications are based on the Raft TLA+ specification by Diego Ongaro. 
+ +End diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md new file mode 100644 index 00000000..01b6a37e --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md @@ -0,0 +1,13 @@ +Copyright 2014 MongoDB, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md new file mode 100644 index 00000000..20f3ffe8 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md @@ -0,0 +1,72 @@ +MongoDB Tools +=================================== + + - **bsondump** - _display BSON files in a human-readable format_ + - **mongoimport** - _Convert data from JSON, TSV or CSV and insert them into a collection_ + - **mongoexport** - _Write an existing collection to CSV or JSON format_ + - **mongodump/mongorestore** - _Dump MongoDB backups to disk in .BSON format, or restore them to a live database_ + - **mongostat** - _Monitor live MongoDB servers, replica sets, or sharded clusters_ + - **mongofiles** - _Read, write, delete, or update files in [GridFS](http://docs.mongodb.org/manual/core/gridfs/)_ + - **mongotop** - _Monitor read/write activity on a mongo server_ + + +Report any bugs, improvements, or new feature requests at https://jira.mongodb.org/browse/TOOLS + +Building Tools +--------------- + +We currently build the tools with Go version 1.15. Other Go versions may work but they are untested. + +Using `go get` to directly build the tools will not work. To build them, it's recommended to first clone this repository: + +``` +git clone https://github.com/mongodb/mongo-tools +cd mongo-tools +``` + +Then run `./make build` to build all the tools, placing them in the `bin` directory inside the repository. + +You can also build a subset of the tools using the `-tools` option. For example, `./make build -tools=mongodump,mongorestore` builds only `mongodump` and `mongorestore`. + +To use the build/test scripts in this repository, you **_must_** set GOROOT to your Go root directory. This may depend on how you installed Go. + +``` +export GOROOT=/usr/local/go +``` + +Updating Dependencies +--------------- +Starting with version 100.3.1, the tools use `go mod` to manage dependencies. All dependencies are listed in the `go.mod` file and are directly vendored in the `vendor` directory. + +In order to make changes to dependencies, you first need to change the `go.mod` file. 
You can manually edit that file to add/update/remove entries, or you can run the following in the repository directory: + +``` +go mod edit -require=<dependency>@<version> # for adding or updating a dependency +go mod edit -droprequire=<dependency> # for removing a dependency +``` + +Then run `go mod vendor -v` to reconstruct the `vendor` directory to match the changed `go.mod` file. + +Optionally, run `go mod tidy -v` to ensure that the `go.mod` file matches the `mongo-tools` source code. + +Contributing +--------------- +See our [Contributor's Guide](CONTRIBUTING.md). + +Documentation +--------------- +See the MongoDB packages [documentation](https://docs.mongodb.org/database-tools/). + +For documentation on older versions of MongoDB, reference that version of the [MongoDB Server Manual](docs.mongodb.com/manual): + +- [MongoDB 4.2 Tools](https://docs.mongodb.org/v4.2/reference/program) +- [MongoDB 4.0 Tools](https://docs.mongodb.org/v4.0/reference/program) +- [MongoDB 3.6 Tools](https://docs.mongodb.org/v3.6/reference/program) + +Adding New Platforms Support +--------------- +See our [Adding New Platform Support Guide](PLATFORMSUPPORT.md). + +Vendoring the Change into Server Repo +--------------- +See our [Vendor the Change into Server Repo](SERVERVENDORING.md). diff --git a/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..c747d0b8 --- /dev/null +++ b/Mongo2Go-4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES @@ -0,0 +1,3319 @@ +--------------------------------------------------------------------- +License notice for hashicorp/go-rootcerts +--------------------------------------------------------------------- + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a.
any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. 
Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/10gen/escaper +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Lucas Morales + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo/bson +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/openssl +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. 
Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/3rf/mongo-lint +---------------------------------------------------------------------- + +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/go-stack/stack +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 Chris Hines + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/gopacket +---------------------------------------------------------------------- + +Copyright (c) 2012 Google, Inc. All rights reserved. +Copyright (c) 2009-2011 Andreas Krennmair. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Andreas Krennmair, Google, nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/gopherjs/gopherjs +---------------------------------------------------------------------- + +Copyright (c) 2013 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/howeyc/gopass +---------------------------------------------------------------------- + +Copyright (c) 2012 Chris Howey + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/jessevdk/go-flags +---------------------------------------------------------------------- + +Copyright (c) 2012 Jesse van den Kieboom. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/jtolds/gls +---------------------------------------------------------------------- + +Copyright (c) 2013, Space Monkey, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mattn/go-runewidth +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mongodb/mongo-go-driver +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/nsf/termbox-go +---------------------------------------------------------------------- + +Copyright (C) 2012 termbox-go authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/patrickmn/go-cache +---------------------------------------------------------------------- + +Copyright (c) 2012-2015 Patrick Mylund Nielsen and the go-cache contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions +---------------------------------------------------------------------- + +Copyright (c) 2015 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/go-render +---------------------------------------------------------------------- + +// Copyright (c) 2015 The Chromium Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglematchers +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglemock +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/ogletest +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/reqtrace +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey +---------------------------------------------------------------------- + +Copyright (c) 2014 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/spacemonkeygo/spacelog +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/xdg/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/tomb.v2 +---------------------------------------------------------------------- + +tomb - support for clean goroutine termination in Go. + +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Mongo2Go.4.1.0.nupkg b/Mongo2Go.4.1.0.nupkg new file mode 100644 index 00000000..a9378128 Binary files /dev/null and b/Mongo2Go.4.1.0.nupkg differ diff --git a/Mongo2Go.4.1.0/.signature.p7s b/Mongo2Go.4.1.0/.signature.p7s new file mode 100644 index 00000000..39597809 Binary files /dev/null and b/Mongo2Go.4.1.0/.signature.p7s differ diff --git a/Mongo2Go.4.1.0/Mongo2Go.nuspec b/Mongo2Go.4.1.0/Mongo2Go.nuspec new file mode 100644 index 00000000..d7128cba --- /dev/null +++ b/Mongo2Go.4.1.0/Mongo2Go.nuspec @@ -0,0 +1,46 @@ + + + + Mongo2Go + 4.1.0 + Johannes Hoppe and many contributors + MIT + https://licenses.nuget.org/MIT + icon.png + https://github.com/Mongo2Go/Mongo2Go + Mongo2Go is a managed wrapper around MongoDB binaries. It targets .NET Framework 4.7.2 and .NET Standard 2.1. +This Nuget package contains the executables of mongod, mongoimport and mongoexport v4.4.4 for Windows, Linux and macOS. + + +Mongo2Go has two use cases: + +1. Providing multiple, temporary and isolated MongoDB databases for integration tests +2. Providing a quick to set up MongoDB database for a local developer environment + https://github.com/Mongo2Go/Mongo2Go/releases + Copyright © 2012-2025 Johannes Hoppe and many ❤️ contributors + MongoDB Mongo unit test integration runner + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Mongo2Go.4.1.0/[Content_Types].xml b/Mongo2Go.4.1.0/[Content_Types].xml new file mode 100644 index 00000000..1169a9a7 --- /dev/null +++ b/Mongo2Go.4.1.0/[Content_Types].xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Mongo2Go.4.1.0/_rels/.rels b/Mongo2Go.4.1.0/_rels/.rels new file mode 100644 index 00000000..5c173848 --- /dev/null +++ b/Mongo2Go.4.1.0/_rels/.rels @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/Mongo2Go.4.1.0/icon.png b/Mongo2Go.4.1.0/icon.png new file mode 100644 index 00000000..23750b60 Binary files /dev/null and b/Mongo2Go.4.1.0/icon.png differ diff --git a/Mongo2Go.4.1.0/lib/net472/Mongo2Go.xml b/Mongo2Go.4.1.0/lib/net472/Mongo2Go.xml new file mode 100644 index 00000000..9def389d --- /dev/null +++ b/Mongo2Go.4.1.0/lib/net472/Mongo2Go.xml @@ -0,0 +1,175 @@ + + + + Mongo2Go + + + + + Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Returns and reserves a new port + + + + + Returns the if it is verified that it does not contain any mongod argument already defined by Mongo2Go. + + mongod arguments defined by Mongo2Go + Additional mongod arguments + contains at least one mongod argument already defined by Mongo2Go + A string with the additional mongod arguments + + + + Starts a new process. Process can be killed + + + + + Starts a new process. + + + + + Input File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Output File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Structure of a log generated by mongod. Used to deserialize the logs + and pass them to an ILogger. + See: https://docs.mongodb.com/manual/reference/log-messages/#json-log-output-format + Note: "truncated" and "size" are not parsed as we're unsure how to + properly parse and use them. + + + + + Severity of the logs as defined by MongoDB. Mapped to LogLevel + as defined by Microsoft. + D1-D2 mapped to Debug level. 
D3-D5 mapped Trace level. + + + + + Intention: port numbers won't be assigned twice to avoid connection problems with integration tests + + + + + Returns and reserves a new port + + + + + Reads from Output stream to determine if process is ready + + + + + Send the mongod process logs to .NET's console and debug outputs. + + + + + + Parses and redirects mongod logs to ILogger. + + + + + + + saves about 40 keystrokes + + + + + Populates the template using the provided arguments and the invariant culture + + + + + Populates the template using the provided arguments using the provided formatter + + + + + Mongo2Go main entry point + + + + + State of the current MongoDB instance + + + + + Connections string that should be used to establish a connection the MongoDB instance + + + + + Starts Multiple MongoDB instances with each call + On dispose: kills them and deletes their data directory + + (Optional) If null, mongod logs are wired to .NET's Console and Debug output (provided you haven't added the --quiet additional argument). + If not null, mongod logs are parsed and wired to the provided logger. + Should be used for integration tests + + + + !!! + This method is only used for an internal unit test. Use MongoDbRunner.Start() instead. + But if you find it to be useful (eg. to change every aspect on your own) feel free to implement the interfaces on your own! + + see https://github.com/Mongo2Go/Mongo2Go/issues/41 + + + + Only starts one single MongoDB instance (even on multiple calls), does not kill it, does not delete data + + + Should be used for local debugging only + WARNING: one single instance on one single machine is not a suitable setup for productive environments!!! + + + + + !!! + This method is only used for an internal unit test. Use MongoDbRunner.StartForDebugging() instead. + But if you find it to be useful (eg. to change every aspect on your own) feel free to implement the interfaces on your own! + + see https://github.com/Mongo2Go/Mongo2Go/issues/41 + + + + Executes Mongoimport on the associated MongoDB Instace + + + + + Executes Mongoexport on the associated MongoDB Instace + + + + + usage: local debugging + + + + + usage: integration tests + + + + diff --git a/Mongo2Go.4.1.0/lib/netstandard2.1/Mongo2Go.xml b/Mongo2Go.4.1.0/lib/netstandard2.1/Mongo2Go.xml new file mode 100644 index 00000000..9def389d --- /dev/null +++ b/Mongo2Go.4.1.0/lib/netstandard2.1/Mongo2Go.xml @@ -0,0 +1,175 @@ + + + + Mongo2Go + + + + + Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Returns and reserves a new port + + + + + Returns the if it is verified that it does not contain any mongod argument already defined by Mongo2Go. + + mongod arguments defined by Mongo2Go + Additional mongod arguments + contains at least one mongod argument already defined by Mongo2Go + A string with the additional mongod arguments + + + + Starts a new process. Process can be killed + + + + + Starts a new process. + + + + + Input File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Output File: Absolute path stays unchanged, relative path will be relative to current executing directory (usually the /bin folder) + + + + + Structure of a log generated by mongod. Used to deserialize the logs + and pass them to an ILogger. 
+ See: https://docs.mongodb.com/manual/reference/log-messages/#json-log-output-format + Note: "truncated" and "size" are not parsed as we're unsure how to + properly parse and use them. + + + + + Severity of the logs as defined by MongoDB. Mapped to LogLevel + as defined by Microsoft. + D1-D2 mapped to Debug level. D3-D5 mapped Trace level. + + + + + Intention: port numbers won't be assigned twice to avoid connection problems with integration tests + + + + + Returns and reserves a new port + + + + + Reads from Output stream to determine if process is ready + + + + + Send the mongod process logs to .NET's console and debug outputs. + + + + + + Parses and redirects mongod logs to ILogger. + + + + + + + saves about 40 keystrokes + + + + + Populates the template using the provided arguments and the invariant culture + + + + + Populates the template using the provided arguments using the provided formatter + + + + + Mongo2Go main entry point + + + + + State of the current MongoDB instance + + + + + Connections string that should be used to establish a connection the MongoDB instance + + + + + Starts Multiple MongoDB instances with each call + On dispose: kills them and deletes their data directory + + (Optional) If null, mongod logs are wired to .NET's Console and Debug output (provided you haven't added the --quiet additional argument). + If not null, mongod logs are parsed and wired to the provided logger. + Should be used for integration tests + + + + !!! + This method is only used for an internal unit test. Use MongoDbRunner.Start() instead. + But if you find it to be useful (eg. to change every aspect on your own) feel free to implement the interfaces on your own! + + see https://github.com/Mongo2Go/Mongo2Go/issues/41 + + + + Only starts one single MongoDB instance (even on multiple calls), does not kill it, does not delete data + + + Should be used for local debugging only + WARNING: one single instance on one single machine is not a suitable setup for productive environments!!! + + + + + !!! + This method is only used for an internal unit test. Use MongoDbRunner.StartForDebugging() instead. + But if you find it to be useful (eg. to change every aspect on your own) feel free to implement the interfaces on your own! + + see https://github.com/Mongo2Go/Mongo2Go/issues/41 + + + + Executes Mongoimport on the associated MongoDB Instace + + + + + Executes Mongoexport on the associated MongoDB Instace + + + + + usage: local debugging + + + + + usage: integration tests + + + + diff --git a/Mongo2Go.4.1.0/package/services/metadata/core-properties/002438390f9b42fb9cd0a8f5b12ea55f.psmdcp b/Mongo2Go.4.1.0/package/services/metadata/core-properties/002438390f9b42fb9cd0a8f5b12ea55f.psmdcp new file mode 100644 index 00000000..c23173ff --- /dev/null +++ b/Mongo2Go.4.1.0/package/services/metadata/core-properties/002438390f9b42fb9cd0a8f5b12ea55f.psmdcp @@ -0,0 +1,16 @@ + + + Johannes Hoppe and many contributors + Mongo2Go is a managed wrapper around MongoDB binaries. It targets .NET Framework 4.7.2 and .NET Standard 2.1. +This Nuget package contains the executables of mongod, mongoimport and mongoexport v4.4.4 for Windows, Linux and macOS. + + +Mongo2Go has two use cases: + +1. Providing multiple, temporary and isolated MongoDB databases for integration tests +2. 
Providing a quick to set up MongoDB database for a local developer environment + Mongo2Go + 4.1.0 + MongoDB Mongo unit test integration runner + NuGet.Build.Tasks.Pack, Version=6.11.1.2, Culture=neutral, PublicKeyToken=31bf3856ad364e35;.NET Standard 2.0 + \ No newline at end of file diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt new file mode 100644 index 00000000..4e1383df --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. 
The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. 
+ + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. 
+ + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 new file mode 100644 index 00000000..14e2f777 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/MPL-2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README new file mode 100644 index 00000000..fe759d19 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/README @@ -0,0 +1,87 @@ +MongoDB README + +Welcome to MongoDB! + +COMPONENTS + + mongod - The database server. + mongos - Sharding router. + mongo - The database shell (uses interactive javascript). + +UTILITIES + + install_compass - Installs MongoDB Compass for your platform. + +BUILDING + + See docs/building.md. + +RUNNING + + For command line options invoke: + + $ ./mongod --help + + To run a single server database: + + $ sudo mkdir -p /data/db + $ ./mongod + $ + $ # The mongo javascript shell connects to localhost and test database by default: + $ ./mongo + > help + +INSTALLING COMPASS + + You can install compass using the install_compass script packaged with MongoDB: + + $ ./install_compass + + This will download the appropriate MongoDB Compass package for your platform + and install it. + +DRIVERS + + Client drivers for most programming languages are available at + https://docs.mongodb.com/manual/applications/drivers/. Use the shell + ("mongo") for administrative tasks. + +BUG REPORTS + + See https://github.com/mongodb/mongo/wiki/Submit-Bug-Reports. + +PACKAGING + + Packages are created dynamically by the package.py script located in the + buildscripts directory. This will generate RPM and Debian packages. + +DOCUMENTATION + + https://docs.mongodb.com/manual/ + +CLOUD HOSTED MONGODB + + https://www.mongodb.com/cloud/atlas + +FORUMS + + https://community.mongodb.com + + A forum for technical questions about using MongoDB. + + https://community.mongodb.com/c/server-dev + + A forum for technical questions about building and developing MongoDB. + +LEARN MONGODB + + https://university.mongodb.com/ + +LICENSE + + MongoDB is free and open-source. Versions released prior to October 16, + 2018 are published under the AGPL. All versions released after October + 16, 2018, including patch fixes for prior versions, are published under + the Server Side Public License (SSPL) v1. See individual files for + details. + diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..34fb8230 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES @@ -0,0 +1,1568 @@ +MongoDB uses third-party libraries or other resources that may +be distributed under licenses different than the MongoDB software. 
+ +In the event that we accidentally failed to list a required notice, +please bring it to our attention through any of the ways detailed here : + + mongodb-dev@googlegroups.com + +The attached notices are provided for information only. + +For any licenses that require disclosure of source, sources are available at +https://github.com/mongodb/mongo. + + +1) License Notice for Boost +--------------------------- + +http://www.boost.org/LICENSE_1_0.txt + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +3) License Notice for PCRE +-------------------------- + +http://www.pcre.org/licence.txt + +PCRE LICENCE +------------ + +PCRE is a library of functions to support regular expressions whose syntax +and semantics are as close as possible to those of the Perl 5 language. + +Release 7 of PCRE is distributed under the terms of the "BSD" licence, as +specified below. The documentation for PCRE, supplied in the "doc" +directory, is distributed under the same terms as the software itself. + +The basic library functions are written in C and are freestanding. Also +included in the distribution is a set of C++ wrapper functions. + + +THE BASIC LIBRARY FUNCTIONS +--------------------------- + +Written by: Philip Hazel +Email local part: ph10 +Email domain: cam.ac.uk + +University of Cambridge Computing Service, +Cambridge, England. + +Copyright (c) 1997-2008 University of Cambridge +All rights reserved. + + +THE C++ WRAPPER FUNCTIONS +------------------------- + +Contributed by: Google Inc. + +Copyright (c) 2007-2008, Google Inc. +All rights reserved. + + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. 
nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +4) License notice for Aladdin MD5 +--------------------------------- + +Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +L. Peter Deutsch +ghost@aladdin.com + +5) License notice for Snappy - http://code.google.com/p/snappy/ +--------------------------------- + Copyright 2005 and onwards Google Inc. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + A light-weight compression algorithm. It is designed for speed of + compression and decompression, rather than for the utmost in space + savings. + + For getting better compression ratios when you are compressing data + with long repeated sequences or compressing data that is similar to + other data, while still compressing fast, you might look at first + using BMDiff and then compressing the output of BMDiff with + Snappy. + +6) License notice for Google Perftools (TCMalloc utility) +--------------------------------- +New BSD License + +Copyright (c) 1998-2006, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +7) License notice for Linenoise +------------------------------- + + Copyright (c) 2010, Salvatore Sanfilippo + Copyright (c) 2010, Pieter Noordhuis + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + +8) License notice for S2 Geometry Library +----------------------------------------- + Copyright 2005 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) License notice for MurmurHash +-------------------------------- + + Copyright (c) 2010-2012 Austin Appleby + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +10) License notice for Snowball + Copyright (c) 2001, Dr Martin Porter + All rights reserved. + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +11) License notice for yaml-cpp +------------------------------- + +Copyright (c) 2008 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +12) License notice for zlib +--------------------------- + +http://www.zlib.net/zlib_license.html + +zlib.h -- interface of the 'zlib' general purpose compression library +version 1.2.8, April 28th, 2013 + +Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +Jean-loup Gailly Mark Adler +jloup@gzip.org madler@alumni.caltech.edu + + +13) License notice for 3rd party software included in the WiredTiger library +---------------------------------------------------------------------------- + +http://source.wiredtiger.com/license.html + +WiredTiger Distribution Files | Copyright Holder | License +----------------------------- | ----------------------------------- | ---------------------- +src/include/bitstring.i | University of California, Berkeley | BSD-3-Clause License +src/include/queue.h | University of California, Berkeley | BSD-3-Clause License +src/os_posix/os_getopt.c | University of California, Berkeley | BSD-3-Clause License +src/support/hash_city.c | Google, Inc. 
| The MIT License +src/support/hash_fnv.c | Authors | Public Domain + + +Other optional 3rd party software included in the WiredTiger distribution is removed by MongoDB. + + +BSD-3-CLAUSE LICENSE +-------------------- + +http://www.opensource.org/licenses/BSD-3-Clause + +Copyright (c) 1987, 1989, 1991, 1993, 1994 + The Regents of the University of California. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +4. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + +THE MIT LICENSE +--------------- + +http://www.opensource.org/licenses/MIT + +Copyright (c) 2011 Google, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +14) License Notice for SpiderMonkey +----------------------------------- + +|------------------------------------------------|------------------|---------------| +| SpiderMonkey Distribution Files | Copyright Holder | License | +|------------------------------------------------|------------------|---------------| +| js/src/jit/shared/AssemblerBuffer-x86-shared.h | Apple, Inc | BSD-2-Clause | +| js/src/jit/shared/BaseAssembler-x86-shared.h | | | +|------------------------------------------------|------------------|---------------| +| js/src/builtin/ | Google, Inc | BSD-3-Clause | +| js/src/irregexp/ | | | +| js/src/jit/arm/ | | | +| js/src/jit/mips/ | | | +| mfbt/double-conversion/ | | | +|------------------------------------------------|------------------|---------------| +| intl/icu/source/common/unicode/ | IBM, Inc | ICU | +|------------------------------------------------|------------------|---------------| +| js/src/asmjs/ | Mozilla, Inc | Apache2 | +|------------------------------------------------|------------------|---------------| +| js/public/ | Mozilla, Inc | MPL2 | +| js/src/ | | | +| mfbt | | | +|------------------------------------------------|------------------|---------------| +| js/src/vm/Unicode.cpp | None | Public Domain | +|------------------------------------------------|------------------|---------------| +| mfbt/lz4.c | Yann Collet | BSD-2-Clause | +| mfbt/lz4.h | | | +|------------------------------------------------|------------------|---------------| + +Other optional 3rd party software included in the SpiderMonkey distribution is removed by MongoDB. + + +Apple, Inc: BSD-2-Clause +------------------------ + +Copyright (C) 2008 Apple Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Google, Inc: BSD-3-Clause +------------------------- + +Copyright 2012 the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +ICU License - ICU 1.8.1 and later +--------------------------------- + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and +others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, provided that the above copyright notice(s) and this +permission notice appear in all copies of the Software and that both the +above copyright notice(s) and this permission notice appear in supporting +documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property +of their respective owners. + + +Mozilla, Inc: Apache 2 +---------------------- + +Copyright 2014 Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Mozilla, Inc: MPL 2 +------------------- + +Copyright 2014 Mozilla Foundation + +This Source Code Form is subject to the terms of the Mozilla Public +License, v. 2.0. 
If a copy of the MPL was not distributed with this +file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +Public Domain +------------- + +Any copyright is dedicated to the Public Domain. +http://creativecommons.org/licenses/publicdomain/ + + +LZ4: BSD-2-Clause +----------------- + +Copyright (C) 2011-2014, Yann Collet. +BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- LZ4 source repository : http://code.google.com/p/lz4/ +- LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c + +15) License Notice for Intel DFP Math Library +--------------------------------------------- + +Copyright (c) 2011, Intel Corp. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + his list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +16) License Notice for Unicode Data +----------------------------------- + +Copyright © 1991-2015 Unicode, Inc. 
All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +17 ) License Notice for Valgrind.h +---------------------------------- + +---------------------------------------------------------------- + +Notice that the following BSD-style license applies to this one +file (valgrind.h) only. The rest of Valgrind is licensed under the +terms of the GNU General Public License, version 2, unless +otherwise indicated. See the COPYING file in the source +distribution for details. + +---------------------------------------------------------------- + +This file is part of Valgrind, a dynamic binary instrumentation +framework. + +Copyright (C) 2000-2015 Julian Seward. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------- + +Notice that the above BSD-style license applies to this one file +(valgrind.h) only. The entire rest of Valgrind is licensed under +the terms of the GNU General Public License, version 2. See the +COPYING file in the source distribution for details. + +---------------------------------------------------------------- + +18) License notice for ICU4C +---------------------------- + +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2016 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, provided that the above +copyright notice(s) and this permission notice appear in all copies of +the Software and that both the above copyright notice(s) and this +permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY +SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, use +or other dealings in this Software without prior written authorization +of the copyright holder. + + +All trademarks and registered trademarks mentioned herein are the +property of their respective owners. + +--------------------- + +Third-Party Software Licenses + +This section contains third-party software notices and/or additional +terms for licensed third-party software components included within ICU +libraries. + +1. Unicode Data Files and Software + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2016 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) + + # The Google Chrome software developed by Google is licensed under + # the BSD license. Other software included in this distribution is + # provided under other licenses, as set forth below. + # + # The BSD License + # http://opensource.org/licenses/bsd-license.php + # Copyright (C) 2006-2008, Google Inc. + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + # + # Redistributions of source code must retain the above copyright notice, + # this list of conditions and the following disclaimer. + # Redistributions in binary form must reproduce the above + # copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided with + # the distribution. + # Neither the name of Google Inc. nor the names of its + # contributors may be used to endorse or promote products derived from + # this software without specific prior written permission. + # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # + # The word list in cjdict.txt are generated by combining three word lists + # listed below with further processing for compound word breaking. The + # frequency is generated with an iterative training against Google web + # corpora. + # + # * Libtabe (Chinese) + # - https://sourceforge.net/project/?group_id=1519 + # - Its license terms and conditions are shown below. + # + # * IPADIC (Japanese) + # - http://chasen.aist-nara.ac.jp/chasen/distribution.html + # - Its license terms and conditions are shown below. + # + # ---------COPYING.libtabe ---- BEGIN-------------------- + # + # /* + # * Copyrighy (c) 1999 TaBE Project. + # * Copyright (c) 1999 Pai-Hsiang Hsiao. + # * All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . Neither the name of the TaBE Project nor the names of its + # * contributors may be used to endorse or promote products derived + # * from this software without specific prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # /* + # * Copyright (c) 1999 Computer Systems and Communication Lab, + # * Institute of Information Science, Academia + # * Sinica. All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . 
Neither the name of the Computer Systems and Communication Lab + # * nor the names of its contributors may be used to endorse or + # * promote products derived from this software without specific + # * prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, + # University of Illinois + # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 + # + # ---------------COPYING.libtabe-----END-------------------------------- + # + # + # ---------------COPYING.ipadic-----BEGIN------------------------------- + # + # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science + # and Technology. All Rights Reserved. + # + # Use, reproduction, and distribution of this software is permitted. + # Any copy of this software, whether in its original form or modified, + # must include both the above copyright notice and the following + # paragraphs. + # + # Nara Institute of Science and Technology (NAIST), + # the copyright holders, disclaims all warranties with regard to this + # software, including all implied warranties of merchantability and + # fitness, in no event shall NAIST be liable for + # any special, indirect or consequential damages or any damages + # whatsoever resulting from loss of use, data or profits, whether in an + # action of contract, negligence or other tortuous action, arising out + # of or in connection with the use or performance of this software. + # + # A large portion of the dictionary entries + # originate from ICOT Free Software. The following conditions for ICOT + # Free Software applies to the current dictionary as well. + # + # Each User may also freely distribute the Program, whether in its + # original form or modified, to any third party or parties, PROVIDED + # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear + # on, or be attached to, the Program, which is distributed substantially + # in the same form as set out herein and that such intended + # distribution, if actually made, will neither violate or otherwise + # contravene any of the laws and regulations of the countries having + # jurisdiction over the User or the intended distribution itself. + # + # NO WARRANTY + # + # The program was produced on an experimental basis in the course of the + # research and development conducted during the project and is provided + # to users as so produced on an experimental basis. Accordingly, the + # program is provided without any warranty whatsoever, whether express, + # implied, statutory or otherwise. 
The term "warranty" used herein + # includes, but is not limited to, any warranty of the quality, + # performance, merchantability and fitness for a particular purpose of + # the program and the nonexistence of any infringement or violation of + # any right of any third party. + # + # Each user of the program will agree and understand, and be deemed to + # have agreed and understood, that there is no warranty whatsoever for + # the program and, accordingly, the entire risk arising from or + # otherwise connected with the program is assumed by the user. + # + # Therefore, neither ICOT, the copyright holder, or any other + # organization that participated in or was otherwise related to the + # development of the program and their respective officials, directors, + # officers and other employees shall be held liable for any and all + # damages, including, without limitation, general, special, incidental + # and consequential damages, arising out of or otherwise in connection + # with the use or inability to use the program or any product, material + # or result produced or otherwise obtained by using the program, + # regardless of whether they have been advised of, or otherwise had + # knowledge of, the possibility of such damages at any time during the + # project or thereafter. Each user will be deemed to have agreed to the + # foregoing by his or her commencement of use of the program. The term + # "use" as used herein includes, but is not limited to, the use, + # modification, copying and distribution of the program and the + # production of secondary products from the program. + # + # In the case where the program, whether in its original form or + # modified, was distributed or delivered to or received by a user from + # any person, organization or entity other than ICOT, unless it makes or + # grants independently of ICOT any specific warranty to the user in + # writing, such person, organization or entity, will also be exempted + # from and not be held liable to the user for any such damages as noted + # above as far as the program is concerned. + # + # ---------------COPYING.ipadic-----END---------------------------------- + +3. Lao Word Break Dictionary Data (laodict.txt) + + # Copyright (c) 2013 International Business Machines Corporation + # and others. All Rights Reserved. + # + # Project: http://code.google.com/p/lao-dictionary/ + # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt + # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt + # (copied below) + # + # This file is derived from the above dictionary, with slight + # modifications. + # ---------------------------------------------------------------------- + # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, + # are permitted provided that the following conditions are met: + # + # + # Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. Redistributions in + # binary form must reproduce the above copyright notice, this list of + # conditions and the following disclaimer in the documentation and/or + # other materials provided with the distribution. 
+ # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + # -------------------------------------------------------------------------- + +4. Burmese Word Break Dictionary Data (burmesedict.txt) + + # Copyright (c) 2014 International Business Machines Corporation + # and others. All Rights Reserved. + # + # This list is part of a project hosted at: + # github.com/kanyawtech/myanmar-karen-word-lists + # + # -------------------------------------------------------------------------- + # Copyright (c) 2013, LeRoy Benjamin Sharon + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: Redistributions of source code must retain the above + # copyright notice, this list of conditions and the following + # disclaimer. Redistributions in binary form must reproduce the + # above copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided + # with the distribution. + # + # Neither the name Myanmar Karen Word Lists, nor the names of its + # contributors may be used to endorse or promote products derived + # from this software without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS + # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + # SUCH DAMAGE. + # -------------------------------------------------------------------------- + +5. Time Zone Database + + ICU uses the public domain data and code derived from Time Zone +Database for its time zone support. The ownership of the TZ database +is explained in BCP 175: Procedure for Maintaining the Time Zone +Database section 7. + + # 7. Database Ownership + # + # The TZ database itself is not an IETF Contribution or an IETF + # document. Rather it is a pre-existing and regularly updated work + # that is in the public domain, and is intended to remain in the + # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do + # not apply to the TZ Database or contributions that individuals make + # to it. 
Should any claims be made and substantiated against the TZ + # Database, the organization that is providing the IANA + # Considerations defined in this RFC, under the memorandum of + # understanding with the IETF, currently ICANN, may act in accordance + # with all competent court orders. No ownership claims will be made + # by ICANN or the IETF Trust on the database or the code. Any person + # making a contribution to the database or code waives all rights to + # future claims in that contribution or in the TZ Database. + +19) License notice for timelib +------------------------------ + +The MIT License (MIT) + +Copyright (c) 2015-2017 Derick Rethans +Copyright (c) 2017 MongoDB, Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +20) License notice for windows dirent implementation +---------------------------------------------------- + + * Dirent interface for Microsoft Visual Studio + * Version 1.21 + * + * Copyright (C) 2006-2012 Toni Ronkko + * This file is part of dirent. Dirent may be freely distributed + * under the MIT license. For all details and documentation, see + * https://github.com/tronkko/dirent + + + 21) License notice for abseil-cpp +---------------------------- + + Copyright (c) Google Inc. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + 22) License notice for Zstandard +---------------------------- + + BSD License + + For Zstandard software + + Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, + are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 23) License notice for ASIO +---------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 24) License notice for MPark.Variant +------------------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 25) License notice for fmt +--------------------------- + +Copyright (c) 2012 - present, Victor Zverovich +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or other + materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 26) License notice for SafeInt +--------------------------- + +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the MIT License. + +MIT License + +Copyright (c) 2018 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + 27) License Notice for Raft TLA+ Specification +----------------------------------------------- + +https://github.com/ongardie/dissertation/blob/master/LICENSE + +Copyright 2014 Diego Ongaro. + +Some of our TLA+ specifications are based on the Raft TLA+ specification by Diego Ongaro. 
+ +End diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md new file mode 100644 index 00000000..01b6a37e --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md @@ -0,0 +1,13 @@ +Copyright 2014 MongoDB, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md new file mode 100644 index 00000000..20f3ffe8 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/README.md @@ -0,0 +1,72 @@ +MongoDB Tools +=================================== + + - **bsondump** - _display BSON files in a human-readable format_ + - **mongoimport** - _Convert data from JSON, TSV or CSV and insert them into a collection_ + - **mongoexport** - _Write an existing collection to CSV or JSON format_ + - **mongodump/mongorestore** - _Dump MongoDB backups to disk in .BSON format, or restore them to a live database_ + - **mongostat** - _Monitor live MongoDB servers, replica sets, or sharded clusters_ + - **mongofiles** - _Read, write, delete, or update files in [GridFS](http://docs.mongodb.org/manual/core/gridfs/)_ + - **mongotop** - _Monitor read/write activity on a mongo server_ + + +Report any bugs, improvements, or new feature requests at https://jira.mongodb.org/browse/TOOLS + +Building Tools +--------------- + +We currently build the tools with Go version 1.15. Other Go versions may work but they are untested. + +Using `go get` to directly build the tools will not work. To build them, it's recommended to first clone this repository: + +``` +git clone https://github.com/mongodb/mongo-tools +cd mongo-tools +``` + +Then run `./make build` to build all the tools, placing them in the `bin` directory inside the repository. + +You can also build a subset of the tools using the `-tools` option. For example, `./make build -tools=mongodump,mongorestore` builds only `mongodump` and `mongorestore`. + +To use the build/test scripts in this repository, you **_must_** set GOROOT to your Go root directory. This may depend on how you installed Go. + +``` +export GOROOT=/usr/local/go +``` + +Updating Dependencies +--------------- +Starting with version 100.3.1, the tools use `go mod` to manage dependencies. All dependencies are listed in the `go.mod` file and are directly vendored in the `vendor` directory. + +In order to make changes to dependencies, you first need to change the `go.mod` file. 
You can manually edit that file to add/update/remove entries, or you can run the following in the repository directory: + +``` +go mod edit -require=@ # for adding or updating a dependency +go mod edit -droprequire= # for removing a dependency +``` + +Then run `go mod vendor -v` to reconstruct the `vendor` directory to match the changed `go.mod` file. + +Optionally, run `go mod tidy -v` to ensure that the `go.mod` file matches the `mongo-tools` source code. + +Contributing +--------------- +See our [Contributor's Guide](CONTRIBUTING.md). + +Documentation +--------------- +See the MongoDB packages [documentation](https://docs.mongodb.org/database-tools/). + +For documentation on older versions of the MongoDB, reference that version of the [MongoDB Server Manual](docs.mongodb.com/manual): + +- [MongoDB 4.2 Tools](https://docs.mongodb.org/v4.2/reference/program) +- [MongoDB 4.0 Tools](https://docs.mongodb.org/v4.0/reference/program) +- [MongoDB 3.6 Tools](https://docs.mongodb.org/v3.6/reference/program) + +Adding New Platforms Support +--------------- +See our [Adding New Platform Support Guide](PLATFORMSUPPORT.md). + +Vendoring the Change into Server Repo +--------------- +See our [Vendor the Change into Server Repo](SERVERVENDORING.md). diff --git a/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..c747d0b8 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-linux-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES @@ -0,0 +1,3319 @@ +--------------------------------------------------------------------- +License notice for hashicorp/go-rootcerts +--------------------------------------------------------------------- + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. 
any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. 
Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/10gen/escaper +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Lucas Morales + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo/bson +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/openssl +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. 
Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/3rf/mongo-lint +---------------------------------------------------------------------- + +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/go-stack/stack +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 Chris Hines + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/gopacket +---------------------------------------------------------------------- + +Copyright (c) 2012 Google, Inc. All rights reserved. +Copyright (c) 2009-2011 Andreas Krennmair. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Andreas Krennmair, Google, nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/gopherjs/gopherjs +---------------------------------------------------------------------- + +Copyright (c) 2013 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/howeyc/gopass +---------------------------------------------------------------------- + +Copyright (c) 2012 Chris Howey + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/jessevdk/go-flags +---------------------------------------------------------------------- + +Copyright (c) 2012 Jesse van den Kieboom. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/jtolds/gls +---------------------------------------------------------------------- + +Copyright (c) 2013, Space Monkey, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mattn/go-runewidth +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mongodb/mongo-go-driver +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/nsf/termbox-go +---------------------------------------------------------------------- + +Copyright (C) 2012 termbox-go authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/patrickmn/go-cache +---------------------------------------------------------------------- + +Copyright (c) 2012-2015 Patrick Mylund Nielsen and the go-cache contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions +---------------------------------------------------------------------- + +Copyright (c) 2015 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/go-render +---------------------------------------------------------------------- + +// Copyright (c) 2015 The Chromium Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglematchers +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglemock +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/ogletest +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/reqtrace +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey +---------------------------------------------------------------------- + +Copyright (c) 2014 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/spacemonkeygo/spacelog +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/xdg/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/tomb.v2 +---------------------------------------------------------------------- + +tomb - support for clean goroutine termination in Go. + +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt new file mode 100644 index 00000000..4e1383df --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. 
A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. 
+ + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. 
+ + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 new file mode 100644 index 00000000..14e2f777 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/MPL-2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README new file mode 100644 index 00000000..fe759d19 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/README @@ -0,0 +1,87 @@ +MongoDB README + +Welcome to MongoDB! + +COMPONENTS + + mongod - The database server. + mongos - Sharding router. + mongo - The database shell (uses interactive javascript). + +UTILITIES + + install_compass - Installs MongoDB Compass for your platform. + +BUILDING + + See docs/building.md. + +RUNNING + + For command line options invoke: + + $ ./mongod --help + + To run a single server database: + + $ sudo mkdir -p /data/db + $ ./mongod + $ + $ # The mongo javascript shell connects to localhost and test database by default: + $ ./mongo + > help + +INSTALLING COMPASS + + You can install compass using the install_compass script packaged with MongoDB: + + $ ./install_compass + + This will download the appropriate MongoDB Compass package for your platform + and install it. + +DRIVERS + + Client drivers for most programming languages are available at + https://docs.mongodb.com/manual/applications/drivers/. Use the shell + ("mongo") for administrative tasks. + +BUG REPORTS + + See https://github.com/mongodb/mongo/wiki/Submit-Bug-Reports. + +PACKAGING + + Packages are created dynamically by the package.py script located in the + buildscripts directory. This will generate RPM and Debian packages. + +DOCUMENTATION + + https://docs.mongodb.com/manual/ + +CLOUD HOSTED MONGODB + + https://www.mongodb.com/cloud/atlas + +FORUMS + + https://community.mongodb.com + + A forum for technical questions about using MongoDB. + + https://community.mongodb.com/c/server-dev + + A forum for technical questions about building and developing MongoDB. + +LEARN MONGODB + + https://university.mongodb.com/ + +LICENSE + + MongoDB is free and open-source. Versions released prior to October 16, + 2018 are published under the AGPL. All versions released after October + 16, 2018, including patch fixes for prior versions, are published under + the Server Side Public License (SSPL) v1. See individual files for + details. + diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..34fb8230 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES @@ -0,0 +1,1568 @@ +MongoDB uses third-party libraries or other resources that may +be distributed under licenses different than the MongoDB software. 
+ +In the event that we accidentally failed to list a required notice, +please bring it to our attention through any of the ways detailed here : + + mongodb-dev@googlegroups.com + +The attached notices are provided for information only. + +For any licenses that require disclosure of source, sources are available at +https://github.com/mongodb/mongo. + + +1) License Notice for Boost +--------------------------- + +http://www.boost.org/LICENSE_1_0.txt + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +3) License Notice for PCRE +-------------------------- + +http://www.pcre.org/licence.txt + +PCRE LICENCE +------------ + +PCRE is a library of functions to support regular expressions whose syntax +and semantics are as close as possible to those of the Perl 5 language. + +Release 7 of PCRE is distributed under the terms of the "BSD" licence, as +specified below. The documentation for PCRE, supplied in the "doc" +directory, is distributed under the same terms as the software itself. + +The basic library functions are written in C and are freestanding. Also +included in the distribution is a set of C++ wrapper functions. + + +THE BASIC LIBRARY FUNCTIONS +--------------------------- + +Written by: Philip Hazel +Email local part: ph10 +Email domain: cam.ac.uk + +University of Cambridge Computing Service, +Cambridge, England. + +Copyright (c) 1997-2008 University of Cambridge +All rights reserved. + + +THE C++ WRAPPER FUNCTIONS +------------------------- + +Contributed by: Google Inc. + +Copyright (c) 2007-2008, Google Inc. +All rights reserved. + + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. 
nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +4) License notice for Aladdin MD5 +--------------------------------- + +Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +L. Peter Deutsch +ghost@aladdin.com + +5) License notice for Snappy - http://code.google.com/p/snappy/ +--------------------------------- + Copyright 2005 and onwards Google Inc. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + A light-weight compression algorithm. It is designed for speed of + compression and decompression, rather than for the utmost in space + savings. + + For getting better compression ratios when you are compressing data + with long repeated sequences or compressing data that is similar to + other data, while still compressing fast, you might look at first + using BMDiff and then compressing the output of BMDiff with + Snappy. + +6) License notice for Google Perftools (TCMalloc utility) +--------------------------------- +New BSD License + +Copyright (c) 1998-2006, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +7) License notice for Linenoise +------------------------------- + + Copyright (c) 2010, Salvatore Sanfilippo + Copyright (c) 2010, Pieter Noordhuis + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + +8) License notice for S2 Geometry Library +----------------------------------------- + Copyright 2005 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) License notice for MurmurHash +-------------------------------- + + Copyright (c) 2010-2012 Austin Appleby + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +10) License notice for Snowball + Copyright (c) 2001, Dr Martin Porter + All rights reserved. + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +11) License notice for yaml-cpp +------------------------------- + +Copyright (c) 2008 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +12) License notice for zlib +--------------------------- + +http://www.zlib.net/zlib_license.html + +zlib.h -- interface of the 'zlib' general purpose compression library +version 1.2.8, April 28th, 2013 + +Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +Jean-loup Gailly Mark Adler +jloup@gzip.org madler@alumni.caltech.edu + + +13) License notice for 3rd party software included in the WiredTiger library +---------------------------------------------------------------------------- + +http://source.wiredtiger.com/license.html + +WiredTiger Distribution Files | Copyright Holder | License +----------------------------- | ----------------------------------- | ---------------------- +src/include/bitstring.i | University of California, Berkeley | BSD-3-Clause License +src/include/queue.h | University of California, Berkeley | BSD-3-Clause License +src/os_posix/os_getopt.c | University of California, Berkeley | BSD-3-Clause License +src/support/hash_city.c | Google, Inc. 
| The MIT License +src/support/hash_fnv.c | Authors | Public Domain + + +Other optional 3rd party software included in the WiredTiger distribution is removed by MongoDB. + + +BSD-3-CLAUSE LICENSE +-------------------- + +http://www.opensource.org/licenses/BSD-3-Clause + +Copyright (c) 1987, 1989, 1991, 1993, 1994 + The Regents of the University of California. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +4. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + +THE MIT LICENSE +--------------- + +http://www.opensource.org/licenses/MIT + +Copyright (c) 2011 Google, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +14) License Notice for SpiderMonkey +----------------------------------- + +|------------------------------------------------|------------------|---------------| +| SpiderMonkey Distribution Files | Copyright Holder | License | +|------------------------------------------------|------------------|---------------| +| js/src/jit/shared/AssemblerBuffer-x86-shared.h | Apple, Inc | BSD-2-Clause | +| js/src/jit/shared/BaseAssembler-x86-shared.h | | | +|------------------------------------------------|------------------|---------------| +| js/src/builtin/ | Google, Inc | BSD-3-Clause | +| js/src/irregexp/ | | | +| js/src/jit/arm/ | | | +| js/src/jit/mips/ | | | +| mfbt/double-conversion/ | | | +|------------------------------------------------|------------------|---------------| +| intl/icu/source/common/unicode/ | IBM, Inc | ICU | +|------------------------------------------------|------------------|---------------| +| js/src/asmjs/ | Mozilla, Inc | Apache2 | +|------------------------------------------------|------------------|---------------| +| js/public/ | Mozilla, Inc | MPL2 | +| js/src/ | | | +| mfbt | | | +|------------------------------------------------|------------------|---------------| +| js/src/vm/Unicode.cpp | None | Public Domain | +|------------------------------------------------|------------------|---------------| +| mfbt/lz4.c | Yann Collet | BSD-2-Clause | +| mfbt/lz4.h | | | +|------------------------------------------------|------------------|---------------| + +Other optional 3rd party software included in the SpiderMonkey distribution is removed by MongoDB. + + +Apple, Inc: BSD-2-Clause +------------------------ + +Copyright (C) 2008 Apple Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Google, Inc: BSD-3-Clause +------------------------- + +Copyright 2012 the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +ICU License - ICU 1.8.1 and later +--------------------------------- + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and +others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, provided that the above copyright notice(s) and this +permission notice appear in all copies of the Software and that both the +above copyright notice(s) and this permission notice appear in supporting +documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property +of their respective owners. + + +Mozilla, Inc: Apache 2 +---------------------- + +Copyright 2014 Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Mozilla, Inc: MPL 2 +------------------- + +Copyright 2014 Mozilla Foundation + +This Source Code Form is subject to the terms of the Mozilla Public +License, v. 2.0. 
If a copy of the MPL was not distributed with this +file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +Public Domain +------------- + +Any copyright is dedicated to the Public Domain. +http://creativecommons.org/licenses/publicdomain/ + + +LZ4: BSD-2-Clause +----------------- + +Copyright (C) 2011-2014, Yann Collet. +BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- LZ4 source repository : http://code.google.com/p/lz4/ +- LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c + +15) License Notice for Intel DFP Math Library +--------------------------------------------- + +Copyright (c) 2011, Intel Corp. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + his list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +16) License Notice for Unicode Data +----------------------------------- + +Copyright © 1991-2015 Unicode, Inc. 
All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +17 ) License Notice for Valgrind.h +---------------------------------- + +---------------------------------------------------------------- + +Notice that the following BSD-style license applies to this one +file (valgrind.h) only. The rest of Valgrind is licensed under the +terms of the GNU General Public License, version 2, unless +otherwise indicated. See the COPYING file in the source +distribution for details. + +---------------------------------------------------------------- + +This file is part of Valgrind, a dynamic binary instrumentation +framework. + +Copyright (C) 2000-2015 Julian Seward. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------- + +Notice that the above BSD-style license applies to this one file +(valgrind.h) only. The entire rest of Valgrind is licensed under +the terms of the GNU General Public License, version 2. See the +COPYING file in the source distribution for details. + +---------------------------------------------------------------- + +18) License notice for ICU4C +---------------------------- + +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2016 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, provided that the above +copyright notice(s) and this permission notice appear in all copies of +the Software and that both the above copyright notice(s) and this +permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY +SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, use +or other dealings in this Software without prior written authorization +of the copyright holder. + + +All trademarks and registered trademarks mentioned herein are the +property of their respective owners. + +--------------------- + +Third-Party Software Licenses + +This section contains third-party software notices and/or additional +terms for licensed third-party software components included within ICU +libraries. + +1. Unicode Data Files and Software + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2016 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) + + # The Google Chrome software developed by Google is licensed under + # the BSD license. Other software included in this distribution is + # provided under other licenses, as set forth below. + # + # The BSD License + # http://opensource.org/licenses/bsd-license.php + # Copyright (C) 2006-2008, Google Inc. + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + # + # Redistributions of source code must retain the above copyright notice, + # this list of conditions and the following disclaimer. + # Redistributions in binary form must reproduce the above + # copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided with + # the distribution. + # Neither the name of Google Inc. nor the names of its + # contributors may be used to endorse or promote products derived from + # this software without specific prior written permission. + # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # + # The word list in cjdict.txt are generated by combining three word lists + # listed below with further processing for compound word breaking. The + # frequency is generated with an iterative training against Google web + # corpora. + # + # * Libtabe (Chinese) + # - https://sourceforge.net/project/?group_id=1519 + # - Its license terms and conditions are shown below. + # + # * IPADIC (Japanese) + # - http://chasen.aist-nara.ac.jp/chasen/distribution.html + # - Its license terms and conditions are shown below. + # + # ---------COPYING.libtabe ---- BEGIN-------------------- + # + # /* + # * Copyrighy (c) 1999 TaBE Project. + # * Copyright (c) 1999 Pai-Hsiang Hsiao. + # * All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . Neither the name of the TaBE Project nor the names of its + # * contributors may be used to endorse or promote products derived + # * from this software without specific prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # /* + # * Copyright (c) 1999 Computer Systems and Communication Lab, + # * Institute of Information Science, Academia + # * Sinica. All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . 
Neither the name of the Computer Systems and Communication Lab + # * nor the names of its contributors may be used to endorse or + # * promote products derived from this software without specific + # * prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, + # University of Illinois + # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 + # + # ---------------COPYING.libtabe-----END-------------------------------- + # + # + # ---------------COPYING.ipadic-----BEGIN------------------------------- + # + # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science + # and Technology. All Rights Reserved. + # + # Use, reproduction, and distribution of this software is permitted. + # Any copy of this software, whether in its original form or modified, + # must include both the above copyright notice and the following + # paragraphs. + # + # Nara Institute of Science and Technology (NAIST), + # the copyright holders, disclaims all warranties with regard to this + # software, including all implied warranties of merchantability and + # fitness, in no event shall NAIST be liable for + # any special, indirect or consequential damages or any damages + # whatsoever resulting from loss of use, data or profits, whether in an + # action of contract, negligence or other tortuous action, arising out + # of or in connection with the use or performance of this software. + # + # A large portion of the dictionary entries + # originate from ICOT Free Software. The following conditions for ICOT + # Free Software applies to the current dictionary as well. + # + # Each User may also freely distribute the Program, whether in its + # original form or modified, to any third party or parties, PROVIDED + # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear + # on, or be attached to, the Program, which is distributed substantially + # in the same form as set out herein and that such intended + # distribution, if actually made, will neither violate or otherwise + # contravene any of the laws and regulations of the countries having + # jurisdiction over the User or the intended distribution itself. + # + # NO WARRANTY + # + # The program was produced on an experimental basis in the course of the + # research and development conducted during the project and is provided + # to users as so produced on an experimental basis. Accordingly, the + # program is provided without any warranty whatsoever, whether express, + # implied, statutory or otherwise. 
The term "warranty" used herein + # includes, but is not limited to, any warranty of the quality, + # performance, merchantability and fitness for a particular purpose of + # the program and the nonexistence of any infringement or violation of + # any right of any third party. + # + # Each user of the program will agree and understand, and be deemed to + # have agreed and understood, that there is no warranty whatsoever for + # the program and, accordingly, the entire risk arising from or + # otherwise connected with the program is assumed by the user. + # + # Therefore, neither ICOT, the copyright holder, or any other + # organization that participated in or was otherwise related to the + # development of the program and their respective officials, directors, + # officers and other employees shall be held liable for any and all + # damages, including, without limitation, general, special, incidental + # and consequential damages, arising out of or otherwise in connection + # with the use or inability to use the program or any product, material + # or result produced or otherwise obtained by using the program, + # regardless of whether they have been advised of, or otherwise had + # knowledge of, the possibility of such damages at any time during the + # project or thereafter. Each user will be deemed to have agreed to the + # foregoing by his or her commencement of use of the program. The term + # "use" as used herein includes, but is not limited to, the use, + # modification, copying and distribution of the program and the + # production of secondary products from the program. + # + # In the case where the program, whether in its original form or + # modified, was distributed or delivered to or received by a user from + # any person, organization or entity other than ICOT, unless it makes or + # grants independently of ICOT any specific warranty to the user in + # writing, such person, organization or entity, will also be exempted + # from and not be held liable to the user for any such damages as noted + # above as far as the program is concerned. + # + # ---------------COPYING.ipadic-----END---------------------------------- + +3. Lao Word Break Dictionary Data (laodict.txt) + + # Copyright (c) 2013 International Business Machines Corporation + # and others. All Rights Reserved. + # + # Project: http://code.google.com/p/lao-dictionary/ + # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt + # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt + # (copied below) + # + # This file is derived from the above dictionary, with slight + # modifications. + # ---------------------------------------------------------------------- + # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, + # are permitted provided that the following conditions are met: + # + # + # Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. Redistributions in + # binary form must reproduce the above copyright notice, this list of + # conditions and the following disclaimer in the documentation and/or + # other materials provided with the distribution. 
+ # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + # -------------------------------------------------------------------------- + +4. Burmese Word Break Dictionary Data (burmesedict.txt) + + # Copyright (c) 2014 International Business Machines Corporation + # and others. All Rights Reserved. + # + # This list is part of a project hosted at: + # github.com/kanyawtech/myanmar-karen-word-lists + # + # -------------------------------------------------------------------------- + # Copyright (c) 2013, LeRoy Benjamin Sharon + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: Redistributions of source code must retain the above + # copyright notice, this list of conditions and the following + # disclaimer. Redistributions in binary form must reproduce the + # above copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided + # with the distribution. + # + # Neither the name Myanmar Karen Word Lists, nor the names of its + # contributors may be used to endorse or promote products derived + # from this software without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS + # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + # SUCH DAMAGE. + # -------------------------------------------------------------------------- + +5. Time Zone Database + + ICU uses the public domain data and code derived from Time Zone +Database for its time zone support. The ownership of the TZ database +is explained in BCP 175: Procedure for Maintaining the Time Zone +Database section 7. + + # 7. Database Ownership + # + # The TZ database itself is not an IETF Contribution or an IETF + # document. Rather it is a pre-existing and regularly updated work + # that is in the public domain, and is intended to remain in the + # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do + # not apply to the TZ Database or contributions that individuals make + # to it. 
Should any claims be made and substantiated against the TZ + # Database, the organization that is providing the IANA + # Considerations defined in this RFC, under the memorandum of + # understanding with the IETF, currently ICANN, may act in accordance + # with all competent court orders. No ownership claims will be made + # by ICANN or the IETF Trust on the database or the code. Any person + # making a contribution to the database or code waives all rights to + # future claims in that contribution or in the TZ Database. + +19) License notice for timelib +------------------------------ + +The MIT License (MIT) + +Copyright (c) 2015-2017 Derick Rethans +Copyright (c) 2017 MongoDB, Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +20) License notice for windows dirent implementation +---------------------------------------------------- + + * Dirent interface for Microsoft Visual Studio + * Version 1.21 + * + * Copyright (C) 2006-2012 Toni Ronkko + * This file is part of dirent. Dirent may be freely distributed + * under the MIT license. For all details and documentation, see + * https://github.com/tronkko/dirent + + + 21) License notice for abseil-cpp +---------------------------- + + Copyright (c) Google Inc. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + 22) License notice for Zstandard +---------------------------- + + BSD License + + For Zstandard software + + Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, + are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 23) License notice for ASIO +---------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 24) License notice for MPark.Variant +------------------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 25) License notice for fmt +--------------------------- + +Copyright (c) 2012 - present, Victor Zverovich +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or other + materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 26) License notice for SafeInt +--------------------------- + +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the MIT License. + +MIT License + +Copyright (c) 2018 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + 27) License Notice for Raft TLA+ Specification +----------------------------------------------- + +https://github.com/ongardie/dissertation/blob/master/LICENSE + +Copyright 2014 Diego Ongaro. + +Some of our TLA+ specifications are based on the Raft TLA+ specification by Diego Ongaro. 
+
+End
diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md
new file mode 100644
index 00000000..01b6a37e
--- /dev/null
+++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md
@@ -0,0 +1,13 @@
+Copyright 2014 MongoDB, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md
new file mode 100644
index 00000000..20f3ffe8
--- /dev/null
+++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/README.md
@@ -0,0 +1,72 @@
+MongoDB Tools
+===================================
+
+ - **bsondump** - _display BSON files in a human-readable format_
+ - **mongoimport** - _Convert data from JSON, TSV or CSV and insert them into a collection_
+ - **mongoexport** - _Write an existing collection to CSV or JSON format_
+ - **mongodump/mongorestore** - _Dump MongoDB backups to disk in .BSON format, or restore them to a live database_
+ - **mongostat** - _Monitor live MongoDB servers, replica sets, or sharded clusters_
+ - **mongofiles** - _Read, write, delete, or update files in [GridFS](http://docs.mongodb.org/manual/core/gridfs/)_
+ - **mongotop** - _Monitor read/write activity on a mongo server_
+
+
+Report any bugs, improvements, or new feature requests at https://jira.mongodb.org/browse/TOOLS
+
+Building Tools
+---------------
+
+We currently build the tools with Go version 1.15. Other Go versions may work but they are untested.
+
+Using `go get` to directly build the tools will not work. To build them, it's recommended to first clone this repository:
+
+```
+git clone https://github.com/mongodb/mongo-tools
+cd mongo-tools
+```
+
+Then run `./make build` to build all the tools, placing them in the `bin` directory inside the repository.
+
+You can also build a subset of the tools using the `-tools` option. For example, `./make build -tools=mongodump,mongorestore` builds only `mongodump` and `mongorestore`.
+
+To use the build/test scripts in this repository, you **_must_** set GOROOT to your Go root directory. This may depend on how you installed Go.
+
+```
+export GOROOT=/usr/local/go
+```
+
+Updating Dependencies
+---------------
+Starting with version 100.3.1, the tools use `go mod` to manage dependencies. All dependencies are listed in the `go.mod` file and are directly vendored in the `vendor` directory.
+
+In order to make changes to dependencies, you first need to change the `go.mod` file. You can manually edit that file to add/update/remove entries, or you can run the following in the repository directory:
+
+```
+go mod edit -require=<dependency>@<version> # for adding or updating a dependency
+go mod edit -droprequire=<dependency> # for removing a dependency
+```
+
+Then run `go mod vendor -v` to reconstruct the `vendor` directory to match the changed `go.mod` file.
+
+Optionally, run `go mod tidy -v` to ensure that the `go.mod` file matches the `mongo-tools` source code.
+
+Contributing
+---------------
+See our [Contributor's Guide](CONTRIBUTING.md).
+
+Documentation
+---------------
+See the MongoDB packages [documentation](https://docs.mongodb.org/database-tools/).
+
+For documentation on older versions of the MongoDB, reference that version of the [MongoDB Server Manual](docs.mongodb.com/manual):
+
+- [MongoDB 4.2 Tools](https://docs.mongodb.org/v4.2/reference/program)
+- [MongoDB 4.0 Tools](https://docs.mongodb.org/v4.0/reference/program)
+- [MongoDB 3.6 Tools](https://docs.mongodb.org/v3.6/reference/program)
+
+Adding New Platforms Support
+---------------
+See our [Adding New Platform Support Guide](PLATFORMSUPPORT.md).
+
+Vendoring the Change into Server Repo
+---------------
+See our [Vendor the Change into Server Repo](SERVERVENDORING.md).
diff --git a/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..c747d0b8 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-macos-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES @@ -0,0 +1,3319 @@ +--------------------------------------------------------------------- +License notice for hashicorp/go-rootcerts +--------------------------------------------------------------------- + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a.
any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. 
Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/10gen/escaper +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Lucas Morales + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo/bson +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/openssl +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. 
Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/3rf/mongo-lint +---------------------------------------------------------------------- + +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/go-stack/stack +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 Chris Hines + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/gopacket +---------------------------------------------------------------------- + +Copyright (c) 2012 Google, Inc. All rights reserved. +Copyright (c) 2009-2011 Andreas Krennmair. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Andreas Krennmair, Google, nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/gopherjs/gopherjs +---------------------------------------------------------------------- + +Copyright (c) 2013 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/howeyc/gopass +---------------------------------------------------------------------- + +Copyright (c) 2012 Chris Howey + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/jessevdk/go-flags +---------------------------------------------------------------------- + +Copyright (c) 2012 Jesse van den Kieboom. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/jtolds/gls +---------------------------------------------------------------------- + +Copyright (c) 2013, Space Monkey, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mattn/go-runewidth +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mongodb/mongo-go-driver +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/nsf/termbox-go +---------------------------------------------------------------------- + +Copyright (C) 2012 termbox-go authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/patrickmn/go-cache +---------------------------------------------------------------------- + +Copyright (c) 2012-2015 Patrick Mylund Nielsen and the go-cache contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions +---------------------------------------------------------------------- + +Copyright (c) 2015 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/go-render +---------------------------------------------------------------------- + +// Copyright (c) 2015 The Chromium Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglematchers +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglemock +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/ogletest +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/reqtrace +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey +---------------------------------------------------------------------- + +Copyright (c) 2014 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/spacemonkeygo/spacelog +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/xdg/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/tomb.v2 +---------------------------------------------------------------------- + +tomb - support for clean goroutine termination in Go. + +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt new file mode 100644 index 00000000..4e1383df --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/LICENSE-Community.txt @@ -0,0 +1,557 @@ + Server Side Public License + VERSION 1, OCTOBER 16, 2018 + + Copyright © 2018 MongoDB, Inc. + + Everyone is permitted to copy and distribute verbatim copies of this + license document, but changing it is not allowed. + + TERMS AND CONDITIONS + + 0. Definitions. + + “This License” refers to Server Side Public License. + + “Copyright” also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + “The Program” refers to any copyrightable work licensed under this + License. Each licensee is addressed as “you”. “Licensees” and + “recipients” may be individuals or organizations. + + To “modify” a work means to copy from or adapt all or part of the work in + a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a “modified version” of the + earlier work or a work “based on” the earlier work. + + A “covered work” means either the unmodified Program or a work based on + the Program. + + To “propagate” a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To “convey” a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through a + computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays “Appropriate Legal Notices” to the + extent that it includes a convenient and prominently visible feature that + (1) displays an appropriate copyright notice, and (2) tells the user that + there is no warranty for the work (except to the extent that warranties + are provided), that licensees may convey the work under this License, and + how to view a copy of this License. If the interface presents a list of + user commands or options, such as a menu, a prominent item in the list + meets this criterion. + + 1. Source Code. + + The “source code” for a work means the preferred form of the work for + making modifications to it. “Object code” means any non-source form of a + work. + + A “Standard Interface” means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that is + widely used among developers working in that language. The “System + Libraries” of an executable work include anything, other than the work as + a whole, that (a) is included in the normal form of packaging a Major + Component, but which is not part of that Major Component, and (b) serves + only to enable use of the work with that Major Component, or to implement + a Standard Interface for which an implementation is available to the + public in source code form. 
A “Major Component”, in this context, means a + major essential component (kernel, window system, and so on) of the + specific operating system (if any) on which the executable work runs, or + a compiler used to produce the work, or an object code interpreter used + to run it. + + The “Corresponding Source” for a work in object code form means all the + source code needed to generate, install, and (for an executable work) run + the object code and to modify the work, including scripts to control + those activities. However, it does not include the work's System + Libraries, or general-purpose tools or generally available free programs + which are used unmodified in performing those activities but which are + not part of the work. For example, Corresponding Source includes + interface definition files associated with source files for the work, and + the source code for shared libraries and dynamically linked subprograms + that the work is specifically designed to require, such as by intimate + data communication or control flow between those subprograms and other + parts of the work. + + The Corresponding Source need not include anything that users can + regenerate automatically from other parts of the Corresponding Source. + + The Corresponding Source for a work in source code form is that same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program, subject to section 13. The + output from running a covered work is covered by this License only if the + output, given its content, constitutes a covered work. This License + acknowledges your rights of fair use or other equivalent, as provided by + copyright law. Subject to section 13, you may make, run and propagate + covered works that you do not convey, without conditions so long as your + license otherwise remains in force. You may convey covered works to + others for the sole purpose of having them make modifications exclusively + for you, or provide you with facilities for running those works, provided + that you comply with the terms of this License in conveying all + material for which you do not control copyright. Those thus making or + running the covered works for you must do so exclusively on your + behalf, under your direction and control, on terms that prohibit them + from making any copies of your copyrighted material outside their + relationship with you. + + Conveying under any other circumstances is permitted solely under the + conditions stated below. Sublicensing is not allowed; section 10 makes it + unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article 11 + of the WIPO copyright treaty adopted on 20 December 1996, or similar laws + prohibiting or restricting circumvention of such measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention is + effected by exercising rights under this License with respect to the + covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's users, + your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; keep + intact all notices stating that this License and any non-permissive terms + added in accord with section 7 apply to the code; keep intact all notices + of the absence of any warranty; and give all recipients a copy of this + License along with the Program. You may charge any price or no price for + each copy that you convey, and you may offer support or warranty + protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the terms + of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, + and giving a relevant date. + + b) The work must carry prominent notices stating that it is released + under this License and any conditions added under section 7. This + requirement modifies the requirement in section 4 to “keep intact all + notices”. + + c) You must license the entire work, as a whole, under this License to + anyone who comes into possession of a copy. This License will therefore + apply, along with any applicable section 7 additional terms, to the + whole of the work, and all its parts, regardless of how they are + packaged. This License gives no permission to license the work in any + other way, but it does not invalidate such permission if you have + separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your work + need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, and + which are not combined with it such as to form a larger program, in or on + a volume of a storage or distribution medium, is called an “aggregate” if + the compilation and its resulting copyright are not used to limit the + access or legal rights of the compilation's users beyond what the + individual works permit. Inclusion of a covered work in an aggregate does + not cause this License to apply to the other parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms of + sections 4 and 5, provided that you also convey the machine-readable + Corresponding Source under the terms of this License, in one of these + ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium customarily + used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a written + offer, valid for at least three years and valid for as long as you + offer spare parts or customer support for that product model, to give + anyone who possesses the object code either (1) a copy of the + Corresponding Source for all the software in the product that is + covered by this License, on a durable physical medium customarily used + for software interchange, for a price no more than your reasonable cost + of physically performing this conveying of source, or (2) access to + copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This alternative is + allowed only occasionally and noncommercially, and only if you received + the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place + (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to copy + the object code is a network server, the Corresponding Source may be on + a different server (operated by you or a third party) that supports + equivalent copying facilities, provided you maintain clear directions + next to the object code saying where to find the Corresponding Source. + Regardless of what server hosts the Corresponding Source, you remain + obligated to ensure that it is available for as long as needed to + satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you + inform other peers where the object code and Corresponding Source of + the work are being offered to the general public at no charge under + subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be included + in conveying the object code work. + + A “User Product” is either (1) a “consumer product”, which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, “normally used” refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + “Installation Information” for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as part + of a transaction in which the right of possession and use of the User + Product is transferred to the recipient in perpetuity or for a fixed term + (regardless of how the transaction is characterized), the Corresponding + Source conveyed under this section must be accompanied by the + Installation Information. But this requirement does not apply if neither + you nor any third party retains the ability to install modified object + code on the User Product (for example, the work has been installed in + ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access + to a network may be denied when the modification itself materially + and adversely affects the operation of the network or violates the + rules and protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, in + accord with this section must be in a format that is publicly documented + (and with an implementation available to the public in source code form), + and must require no special password or key for unpacking, reading or + copying. + + 7. Additional Terms. + + “Additional permissions” are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall be + treated as though they were included in this License, to the extent that + they are valid under applicable law. If additional permissions apply only + to part of the Program, that part may be used separately under those + permissions, but the entire Program remains governed by this License + without regard to the additional permissions. When you convey a copy of + a covered work, you may at your option remove any additional permissions + from that copy, or from any part of it. (Additional permissions may be + written to require their own removal in certain cases when you modify the + work.) You may place additional permissions on material, added by you to + a covered work, for which you have or can give appropriate copyright + permission. 
+ + Notwithstanding any other provision of this License, for material you add + to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade + names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material + by anyone who conveys the material (or modified versions of it) with + contractual assumptions of liability to the recipient, for any + liability that these contractual assumptions directly impose on those + licensors and authors. + + All other non-permissive additional terms are considered “further + restrictions” within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further restriction, + you may remove that term. If a license document contains a further + restriction but permits relicensing or conveying under this License, you + may add to a covered work material governed by the terms of that license + document, provided that the further restriction does not survive such + relicensing or conveying. + + If you add terms to a covered work in accord with this section, you must + place, in the relevant source files, a statement of the additional terms + that apply to those files, or a notice indicating where to find the + applicable terms. Additional terms, permissive or non-permissive, may be + stated in the form of a separately written license, or stated as + exceptions; the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or modify + it is void, and will automatically terminate your rights under this + License (including any patent licenses granted under the third paragraph + of section 11). + + However, if you cease all violation of this License, then your license + from a particular copyright holder is reinstated (a) provisionally, + unless and until the copyright holder explicitly and finally terminates + your license, and (b) permanently, if the copyright holder fails to + notify you of the violation by some reasonable means prior to 60 days + after the cessation. + + Moreover, your license from a particular copyright holder is reinstated + permanently if the copyright holder notifies you of the violation by some + reasonable means, this is the first time you have received notice of + violation of this License (for any work) from that copyright holder, and + you cure the violation prior to 30 days after your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or run a + copy of the Program. Ancillary propagation of a covered work occurring + solely as a consequence of using peer-to-peer transmission to receive a + copy likewise does not require acceptance. However, nothing other than + this License grants you permission to propagate or modify any covered + work. These actions infringe copyright if you do not accept this License. + Therefore, by modifying or propagating a covered work, you indicate your + acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically receives + a license from the original licensors, to run, modify and propagate that + work, subject to this License. You are not responsible for enforcing + compliance by third parties with this License. + + An “entity transaction” is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered work + results from an entity transaction, each party to that transaction who + receives a copy of the work also receives whatever licenses to the work + the party's predecessor in interest had or could give under the previous + paragraph, plus a right to possession of the Corresponding Source of the + work from the predecessor in interest, if the predecessor has it or can + get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the rights + granted or affirmed under this License. For example, you may not impose a + license fee, royalty, or other charge for exercise of rights granted + under this License, and you may not initiate litigation (including a + cross-claim or counterclaim in a lawsuit) alleging that any patent claim + is infringed by making, using, selling, offering for sale, or importing + the Program or any portion of it. + + 11. Patents. + + A “contributor” is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The work + thus licensed is called the contributor's “contributor version”. + + A contributor's “essential patent claims” are all patent claims owned or + controlled by the contributor, whether already acquired or hereafter + acquired, that would be infringed by some manner, permitted by this + License, of making, using, or selling its contributor version, but do not + include claims that would be infringed only as a consequence of further + modification of the contributor version. For purposes of this definition, + “control” includes the right to grant patent sublicenses in a manner + consistent with the requirements of this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to make, + use, sell, offer for sale, import and otherwise run, modify and propagate + the contents of its contributor version. + + In the following three paragraphs, a “patent license” is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To “grant” such a patent license to a party + means to make such an agreement or commitment not to enforce a patent + against the party. + + If you convey a covered work, knowingly relying on a patent license, and + the Corresponding Source of the work is not available for anyone to copy, + free of charge and under the terms of this License, through a publicly + available network server or other readily accessible means, then you must + either (1) cause the Corresponding Source to be so available, or (2) + arrange to deprive yourself of the benefit of the patent license for this + particular work, or (3) arrange, in a manner consistent with the + requirements of this License, to extend the patent license to downstream + recipients. “Knowingly relying” means you have actual knowledge that, but + for the patent license, your conveying the covered work in a country, or + your recipient's use of the covered work in a country, would infringe + one or more identifiable patents in that country that you have reason + to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties receiving + the covered work authorizing them to use, propagate, modify or convey a + specific copy of the covered work, then the patent license you grant is + automatically extended to all recipients of the covered work and works + based on it. + + A patent license is “discriminatory” if it does not include within the + scope of its coverage, prohibits the exercise of, or is conditioned on + the non-exercise of one or more of the rights that are specifically + granted under this License. You may not convey a covered work if you are + a party to an arrangement with a third party that is in the business of + distributing software, under which you make payment to the third party + based on the extent of your activity of conveying the work, and under + which the third party grants, to any of the parties who would receive the + covered work from you, a discriminatory patent license (a) in connection + with copies of the covered work conveyed by you (or copies made from + those copies), or (b) primarily for and in connection with specific + products or compilations that contain the covered work, unless you + entered into that arrangement, or that patent license was granted, prior + to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting any + implied license or other defenses to infringement that may otherwise be + available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot use, + propagate or convey a covered work so as to satisfy simultaneously your + obligations under this License and any other pertinent obligations, then + as a consequence you may not use, propagate or convey it at all. For + example, if you agree to terms that obligate you to collect a royalty for + further conveying from those to whom you convey the Program, the only way + you could satisfy both those terms and this License would be to refrain + entirely from conveying the Program. + + 13. Offering the Program as a Service. 
+ + If you make the functionality of the Program or a modified version + available to third parties as a service, you must make the Service Source + Code available via network download to everyone at no charge, under the + terms of this License. Making the functionality of the Program or + modified version available to third parties as a service includes, + without limitation, enabling third parties to interact with the + functionality of the Program or modified version remotely through a + computer network, offering a service the value of which entirely or + primarily derives from the value of the Program or modified version, or + offering a service that accomplishes for users the primary purpose of the + Program or modified version. + + “Service Source Code” means the Corresponding Source for the Program or + the modified version, and the Corresponding Source for all programs that + you use to make the Program or modified version available as a service, + including, without limitation, management software, user interfaces, + application program interfaces, automation software, monitoring software, + backup software, storage software and hosting software, all such that a + user could run an instance of the service using the Service Source Code + you make available. + + 14. Revised Versions of this License. + + MongoDB, Inc. may publish revised and/or new versions of the Server Side + Public License from time to time. Such new versions will be similar in + spirit to the present version, but may differ in detail to address new + problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies that a certain numbered version of the Server Side Public + License “or any later version” applies to it, you have the option of + following the terms and conditions either of that numbered version or of + any later version published by MongoDB, Inc. If the Program does not + specify a version number of the Server Side Public License, you may + choose any version ever published by MongoDB, Inc. + + If the Program specifies that a proxy can decide which future versions of + the Server Side Public License can be used, that proxy's public statement + of acceptance of a version permanently authorizes you to choose that + version for the Program. + + Later license versions may give you additional or different permissions. + However, no additional obligations are imposed on any author or copyright + holder as a result of your choosing to follow a later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING + ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF + THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO + LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU + OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided above + cannot be given local legal effect according to their terms, reviewing + courts shall apply local law that most closely approximates an absolute + waiver of all civil liability in connection with the Program, unless a + warranty or assumption of liability accompanies a copy of the Program in + return for a fee. + + END OF TERMS AND CONDITIONS diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/MPL-2 b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/MPL-2 new file mode 100644 index 00000000..14e2f777 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/MPL-2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/README b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/README new file mode 100644 index 00000000..fe759d19 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/README @@ -0,0 +1,87 @@ +MongoDB README + +Welcome to MongoDB! + +COMPONENTS + + mongod - The database server. + mongos - Sharding router. + mongo - The database shell (uses interactive javascript). + +UTILITIES + + install_compass - Installs MongoDB Compass for your platform. + +BUILDING + + See docs/building.md. + +RUNNING + + For command line options invoke: + + $ ./mongod --help + + To run a single server database: + + $ sudo mkdir -p /data/db + $ ./mongod + $ + $ # The mongo javascript shell connects to localhost and test database by default: + $ ./mongo + > help + +INSTALLING COMPASS + + You can install compass using the install_compass script packaged with MongoDB: + + $ ./install_compass + + This will download the appropriate MongoDB Compass package for your platform + and install it. + +DRIVERS + + Client drivers for most programming languages are available at + https://docs.mongodb.com/manual/applications/drivers/. Use the shell + ("mongo") for administrative tasks. + +BUG REPORTS + + See https://github.com/mongodb/mongo/wiki/Submit-Bug-Reports. + +PACKAGING + + Packages are created dynamically by the package.py script located in the + buildscripts directory. This will generate RPM and Debian packages. + +DOCUMENTATION + + https://docs.mongodb.com/manual/ + +CLOUD HOSTED MONGODB + + https://www.mongodb.com/cloud/atlas + +FORUMS + + https://community.mongodb.com + + A forum for technical questions about using MongoDB. + + https://community.mongodb.com/c/server-dev + + A forum for technical questions about building and developing MongoDB. + +LEARN MONGODB + + https://university.mongodb.com/ + +LICENSE + + MongoDB is free and open-source. Versions released prior to October 16, + 2018 are published under the AGPL. All versions released after October + 16, 2018, including patch fixes for prior versions, are published under + the Server Side Public License (SSPL) v1. See individual files for + details. + diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..34fb8230 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/community-server/THIRD-PARTY-NOTICES @@ -0,0 +1,1568 @@ +MongoDB uses third-party libraries or other resources that may +be distributed under licenses different than the MongoDB software. 
+ +In the event that we accidentally failed to list a required notice, +please bring it to our attention through any of the ways detailed here : + + mongodb-dev@googlegroups.com + +The attached notices are provided for information only. + +For any licenses that require disclosure of source, sources are available at +https://github.com/mongodb/mongo. + + +1) License Notice for Boost +--------------------------- + +http://www.boost.org/LICENSE_1_0.txt + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +3) License Notice for PCRE +-------------------------- + +http://www.pcre.org/licence.txt + +PCRE LICENCE +------------ + +PCRE is a library of functions to support regular expressions whose syntax +and semantics are as close as possible to those of the Perl 5 language. + +Release 7 of PCRE is distributed under the terms of the "BSD" licence, as +specified below. The documentation for PCRE, supplied in the "doc" +directory, is distributed under the same terms as the software itself. + +The basic library functions are written in C and are freestanding. Also +included in the distribution is a set of C++ wrapper functions. + + +THE BASIC LIBRARY FUNCTIONS +--------------------------- + +Written by: Philip Hazel +Email local part: ph10 +Email domain: cam.ac.uk + +University of Cambridge Computing Service, +Cambridge, England. + +Copyright (c) 1997-2008 University of Cambridge +All rights reserved. + + +THE C++ WRAPPER FUNCTIONS +------------------------- + +Contributed by: Google Inc. + +Copyright (c) 2007-2008, Google Inc. +All rights reserved. + + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. 
nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +4) License notice for Aladdin MD5 +--------------------------------- + +Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +L. Peter Deutsch +ghost@aladdin.com + +5) License notice for Snappy - http://code.google.com/p/snappy/ +--------------------------------- + Copyright 2005 and onwards Google Inc. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + A light-weight compression algorithm. It is designed for speed of + compression and decompression, rather than for the utmost in space + savings. + + For getting better compression ratios when you are compressing data + with long repeated sequences or compressing data that is similar to + other data, while still compressing fast, you might look at first + using BMDiff and then compressing the output of BMDiff with + Snappy. + +6) License notice for Google Perftools (TCMalloc utility) +--------------------------------- +New BSD License + +Copyright (c) 1998-2006, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or +without modification, are permitted provided that the following +conditions are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +7) License notice for Linenoise +------------------------------- + + Copyright (c) 2010, Salvatore Sanfilippo + Copyright (c) 2010, Pieter Noordhuis + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + +8) License notice for S2 Geometry Library +----------------------------------------- + Copyright 2005 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +9) License notice for MurmurHash +-------------------------------- + + Copyright (c) 2010-2012 Austin Appleby + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +10) License notice for Snowball + Copyright (c) 2001, Dr Martin Porter + All rights reserved. + +THE "BSD" LICENCE +----------------- + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +11) License notice for yaml-cpp +------------------------------- + +Copyright (c) 2008 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +12) License notice for zlib +--------------------------- + +http://www.zlib.net/zlib_license.html + +zlib.h -- interface of the 'zlib' general purpose compression library +version 1.2.8, April 28th, 2013 + +Copyright (C) 1995-2013 Jean-loup Gailly and Mark Adler + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +Jean-loup Gailly Mark Adler +jloup@gzip.org madler@alumni.caltech.edu + + +13) License notice for 3rd party software included in the WiredTiger library +---------------------------------------------------------------------------- + +http://source.wiredtiger.com/license.html + +WiredTiger Distribution Files | Copyright Holder | License +----------------------------- | ----------------------------------- | ---------------------- +src/include/bitstring.i | University of California, Berkeley | BSD-3-Clause License +src/include/queue.h | University of California, Berkeley | BSD-3-Clause License +src/os_posix/os_getopt.c | University of California, Berkeley | BSD-3-Clause License +src/support/hash_city.c | Google, Inc. 
| The MIT License +src/support/hash_fnv.c | Authors | Public Domain + + +Other optional 3rd party software included in the WiredTiger distribution is removed by MongoDB. + + +BSD-3-CLAUSE LICENSE +-------------------- + +http://www.opensource.org/licenses/BSD-3-Clause + +Copyright (c) 1987, 1989, 1991, 1993, 1994 + The Regents of the University of California. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +4. Neither the name of the University nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + +THE MIT LICENSE +--------------- + +http://www.opensource.org/licenses/MIT + +Copyright (c) 2011 Google, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +14) License Notice for SpiderMonkey +----------------------------------- + +|------------------------------------------------|------------------|---------------| +| SpiderMonkey Distribution Files | Copyright Holder | License | +|------------------------------------------------|------------------|---------------| +| js/src/jit/shared/AssemblerBuffer-x86-shared.h | Apple, Inc | BSD-2-Clause | +| js/src/jit/shared/BaseAssembler-x86-shared.h | | | +|------------------------------------------------|------------------|---------------| +| js/src/builtin/ | Google, Inc | BSD-3-Clause | +| js/src/irregexp/ | | | +| js/src/jit/arm/ | | | +| js/src/jit/mips/ | | | +| mfbt/double-conversion/ | | | +|------------------------------------------------|------------------|---------------| +| intl/icu/source/common/unicode/ | IBM, Inc | ICU | +|------------------------------------------------|------------------|---------------| +| js/src/asmjs/ | Mozilla, Inc | Apache2 | +|------------------------------------------------|------------------|---------------| +| js/public/ | Mozilla, Inc | MPL2 | +| js/src/ | | | +| mfbt | | | +|------------------------------------------------|------------------|---------------| +| js/src/vm/Unicode.cpp | None | Public Domain | +|------------------------------------------------|------------------|---------------| +| mfbt/lz4.c | Yann Collet | BSD-2-Clause | +| mfbt/lz4.h | | | +|------------------------------------------------|------------------|---------------| + +Other optional 3rd party software included in the SpiderMonkey distribution is removed by MongoDB. + + +Apple, Inc: BSD-2-Clause +------------------------ + +Copyright (C) 2008 Apple Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Google, Inc: BSD-3-Clause +------------------------- + +Copyright 2012 the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. 
+ * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +ICU License - ICU 1.8.1 and later +--------------------------------- + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and +others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, provided that the above copyright notice(s) and this +permission notice appear in all copies of the Software and that both the +above copyright notice(s) and this permission notice appear in supporting +documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property +of their respective owners. + + +Mozilla, Inc: Apache 2 +---------------------- + +Copyright 2014 Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Mozilla, Inc: MPL 2 +------------------- + +Copyright 2014 Mozilla Foundation + +This Source Code Form is subject to the terms of the Mozilla Public +License, v. 2.0. 
If a copy of the MPL was not distributed with this +file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +Public Domain +------------- + +Any copyright is dedicated to the Public Domain. +http://creativecommons.org/licenses/publicdomain/ + + +LZ4: BSD-2-Clause +----------------- + +Copyright (C) 2011-2014, Yann Collet. +BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- LZ4 source repository : http://code.google.com/p/lz4/ +- LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c + +15) License Notice for Intel DFP Math Library +--------------------------------------------- + +Copyright (c) 2011, Intel Corp. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + his list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. +IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +16) License Notice for Unicode Data +----------------------------------- + +Copyright © 1991-2015 Unicode, Inc. 
All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +17 ) License Notice for Valgrind.h +---------------------------------- + +---------------------------------------------------------------- + +Notice that the following BSD-style license applies to this one +file (valgrind.h) only. The rest of Valgrind is licensed under the +terms of the GNU General Public License, version 2, unless +otherwise indicated. See the COPYING file in the source +distribution for details. + +---------------------------------------------------------------- + +This file is part of Valgrind, a dynamic binary instrumentation +framework. + +Copyright (C) 2000-2015 Julian Seward. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------- + +Notice that the above BSD-style license applies to this one file +(valgrind.h) only. The entire rest of Valgrind is licensed under +the terms of the GNU General Public License, version 2. See the +COPYING file in the source distribution for details. + +---------------------------------------------------------------- + +18) License notice for ICU4C +---------------------------- + +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2016 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, provided that the above +copyright notice(s) and this permission notice appear in all copies of +the Software and that both the above copyright notice(s) and this +permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY +SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER +RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, use +or other dealings in this Software without prior written authorization +of the copyright holder. + + +All trademarks and registered trademarks mentioned herein are the +property of their respective owners. + +--------------------- + +Third-Party Software Licenses + +This section contains third-party software notices and/or additional +terms for licensed third-party software components included within ICU +libraries. + +1. Unicode Data Files and Software + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2016 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in +http://www.unicode.org/copyright.html. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, +(b) this copyright and permission notice appear in associated +documentation, and +(c) there is clear notice in each modified Data File or in the Software +as well as in the documentation associated with the Data File(s) or +Software that the data or software has been modified. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. + +2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) + + # The Google Chrome software developed by Google is licensed under + # the BSD license. Other software included in this distribution is + # provided under other licenses, as set forth below. + # + # The BSD License + # http://opensource.org/licenses/bsd-license.php + # Copyright (C) 2006-2008, Google Inc. + # + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions are met: + # + # Redistributions of source code must retain the above copyright notice, + # this list of conditions and the following disclaimer. + # Redistributions in binary form must reproduce the above + # copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided with + # the distribution. + # Neither the name of Google Inc. nor the names of its + # contributors may be used to endorse or promote products derived from + # this software without specific prior written permission. + # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + # + # + # The word list in cjdict.txt are generated by combining three word lists + # listed below with further processing for compound word breaking. The + # frequency is generated with an iterative training against Google web + # corpora. + # + # * Libtabe (Chinese) + # - https://sourceforge.net/project/?group_id=1519 + # - Its license terms and conditions are shown below. + # + # * IPADIC (Japanese) + # - http://chasen.aist-nara.ac.jp/chasen/distribution.html + # - Its license terms and conditions are shown below. + # + # ---------COPYING.libtabe ---- BEGIN-------------------- + # + # /* + # * Copyrighy (c) 1999 TaBE Project. + # * Copyright (c) 1999 Pai-Hsiang Hsiao. + # * All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . Neither the name of the TaBE Project nor the names of its + # * contributors may be used to endorse or promote products derived + # * from this software without specific prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # /* + # * Copyright (c) 1999 Computer Systems and Communication Lab, + # * Institute of Information Science, Academia + # * Sinica. All rights reserved. + # * + # * Redistribution and use in source and binary forms, with or without + # * modification, are permitted provided that the following conditions + # * are met: + # * + # * . Redistributions of source code must retain the above copyright + # * notice, this list of conditions and the following disclaimer. + # * . Redistributions in binary form must reproduce the above copyright + # * notice, this list of conditions and the following disclaimer in + # * the documentation and/or other materials provided with the + # * distribution. + # * . 
Neither the name of the Computer Systems and Communication Lab + # * nor the names of its contributors may be used to endorse or + # * promote products derived from this software without specific + # * prior written permission. + # * + # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # * OF THE POSSIBILITY OF SUCH DAMAGE. + # */ + # + # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, + # University of Illinois + # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 + # + # ---------------COPYING.libtabe-----END-------------------------------- + # + # + # ---------------COPYING.ipadic-----BEGIN------------------------------- + # + # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science + # and Technology. All Rights Reserved. + # + # Use, reproduction, and distribution of this software is permitted. + # Any copy of this software, whether in its original form or modified, + # must include both the above copyright notice and the following + # paragraphs. + # + # Nara Institute of Science and Technology (NAIST), + # the copyright holders, disclaims all warranties with regard to this + # software, including all implied warranties of merchantability and + # fitness, in no event shall NAIST be liable for + # any special, indirect or consequential damages or any damages + # whatsoever resulting from loss of use, data or profits, whether in an + # action of contract, negligence or other tortuous action, arising out + # of or in connection with the use or performance of this software. + # + # A large portion of the dictionary entries + # originate from ICOT Free Software. The following conditions for ICOT + # Free Software applies to the current dictionary as well. + # + # Each User may also freely distribute the Program, whether in its + # original form or modified, to any third party or parties, PROVIDED + # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear + # on, or be attached to, the Program, which is distributed substantially + # in the same form as set out herein and that such intended + # distribution, if actually made, will neither violate or otherwise + # contravene any of the laws and regulations of the countries having + # jurisdiction over the User or the intended distribution itself. + # + # NO WARRANTY + # + # The program was produced on an experimental basis in the course of the + # research and development conducted during the project and is provided + # to users as so produced on an experimental basis. Accordingly, the + # program is provided without any warranty whatsoever, whether express, + # implied, statutory or otherwise. 
The term "warranty" used herein + # includes, but is not limited to, any warranty of the quality, + # performance, merchantability and fitness for a particular purpose of + # the program and the nonexistence of any infringement or violation of + # any right of any third party. + # + # Each user of the program will agree and understand, and be deemed to + # have agreed and understood, that there is no warranty whatsoever for + # the program and, accordingly, the entire risk arising from or + # otherwise connected with the program is assumed by the user. + # + # Therefore, neither ICOT, the copyright holder, or any other + # organization that participated in or was otherwise related to the + # development of the program and their respective officials, directors, + # officers and other employees shall be held liable for any and all + # damages, including, without limitation, general, special, incidental + # and consequential damages, arising out of or otherwise in connection + # with the use or inability to use the program or any product, material + # or result produced or otherwise obtained by using the program, + # regardless of whether they have been advised of, or otherwise had + # knowledge of, the possibility of such damages at any time during the + # project or thereafter. Each user will be deemed to have agreed to the + # foregoing by his or her commencement of use of the program. The term + # "use" as used herein includes, but is not limited to, the use, + # modification, copying and distribution of the program and the + # production of secondary products from the program. + # + # In the case where the program, whether in its original form or + # modified, was distributed or delivered to or received by a user from + # any person, organization or entity other than ICOT, unless it makes or + # grants independently of ICOT any specific warranty to the user in + # writing, such person, organization or entity, will also be exempted + # from and not be held liable to the user for any such damages as noted + # above as far as the program is concerned. + # + # ---------------COPYING.ipadic-----END---------------------------------- + +3. Lao Word Break Dictionary Data (laodict.txt) + + # Copyright (c) 2013 International Business Machines Corporation + # and others. All Rights Reserved. + # + # Project: http://code.google.com/p/lao-dictionary/ + # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt + # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt + # (copied below) + # + # This file is derived from the above dictionary, with slight + # modifications. + # ---------------------------------------------------------------------- + # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, + # are permitted provided that the following conditions are met: + # + # + # Redistributions of source code must retain the above copyright notice, this + # list of conditions and the following disclaimer. Redistributions in + # binary form must reproduce the above copyright notice, this list of + # conditions and the following disclaimer in the documentation and/or + # other materials provided with the distribution. 
+ # + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. + # -------------------------------------------------------------------------- + +4. Burmese Word Break Dictionary Data (burmesedict.txt) + + # Copyright (c) 2014 International Business Machines Corporation + # and others. All Rights Reserved. + # + # This list is part of a project hosted at: + # github.com/kanyawtech/myanmar-karen-word-lists + # + # -------------------------------------------------------------------------- + # Copyright (c) 2013, LeRoy Benjamin Sharon + # All rights reserved. + # + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: Redistributions of source code must retain the above + # copyright notice, this list of conditions and the following + # disclaimer. Redistributions in binary form must reproduce the + # above copyright notice, this list of conditions and the following + # disclaimer in the documentation and/or other materials provided + # with the distribution. + # + # Neither the name Myanmar Karen Word Lists, nor the names of its + # contributors may be used to endorse or promote products derived + # from this software without specific prior written permission. + # + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND + # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, + # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS + # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + # SUCH DAMAGE. + # -------------------------------------------------------------------------- + +5. Time Zone Database + + ICU uses the public domain data and code derived from Time Zone +Database for its time zone support. The ownership of the TZ database +is explained in BCP 175: Procedure for Maintaining the Time Zone +Database section 7. + + # 7. Database Ownership + # + # The TZ database itself is not an IETF Contribution or an IETF + # document. Rather it is a pre-existing and regularly updated work + # that is in the public domain, and is intended to remain in the + # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do + # not apply to the TZ Database or contributions that individuals make + # to it. 
Should any claims be made and substantiated against the TZ + # Database, the organization that is providing the IANA + # Considerations defined in this RFC, under the memorandum of + # understanding with the IETF, currently ICANN, may act in accordance + # with all competent court orders. No ownership claims will be made + # by ICANN or the IETF Trust on the database or the code. Any person + # making a contribution to the database or code waives all rights to + # future claims in that contribution or in the TZ Database. + +19) License notice for timelib +------------------------------ + +The MIT License (MIT) + +Copyright (c) 2015-2017 Derick Rethans +Copyright (c) 2017 MongoDB, Inc + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +20) License notice for windows dirent implementation +---------------------------------------------------- + + * Dirent interface for Microsoft Visual Studio + * Version 1.21 + * + * Copyright (C) 2006-2012 Toni Ronkko + * This file is part of dirent. Dirent may be freely distributed + * under the MIT license. For all details and documentation, see + * https://github.com/tronkko/dirent + + + 21) License notice for abseil-cpp +---------------------------- + + Copyright (c) Google Inc. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + 22) License notice for Zstandard +---------------------------- + + BSD License + + For Zstandard software + + Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, + are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 23) License notice for ASIO +---------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 24) License notice for MPark.Variant +------------------------------------- +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + 25) License notice for fmt +--------------------------- + +Copyright (c) 2012 - present, Victor Zverovich +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this list of + conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or other + materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 26) License notice for SafeInt +--------------------------- + +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the MIT License. + +MIT License + +Copyright (c) 2018 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + 27) License Notice for Raft TLA+ Specification +----------------------------------------------- + +https://github.com/ongardie/dissertation/blob/master/LICENSE + +Copyright 2014 Diego Ongaro. + +Some of our TLA+ specifications are based on the Raft TLA+ specification by Diego Ongaro. 
+ +End diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md new file mode 100644 index 00000000..01b6a37e --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/LICENSE.md @@ -0,0 +1,13 @@ +Copyright 2014 MongoDB, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/README.md b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/README.md new file mode 100644 index 00000000..20f3ffe8 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/README.md @@ -0,0 +1,72 @@ +MongoDB Tools +=================================== + + - **bsondump** - _display BSON files in a human-readable format_ + - **mongoimport** - _Convert data from JSON, TSV or CSV and insert them into a collection_ + - **mongoexport** - _Write an existing collection to CSV or JSON format_ + - **mongodump/mongorestore** - _Dump MongoDB backups to disk in .BSON format, or restore them to a live database_ + - **mongostat** - _Monitor live MongoDB servers, replica sets, or sharded clusters_ + - **mongofiles** - _Read, write, delete, or update files in [GridFS](http://docs.mongodb.org/manual/core/gridfs/)_ + - **mongotop** - _Monitor read/write activity on a mongo server_ + + +Report any bugs, improvements, or new feature requests at https://jira.mongodb.org/browse/TOOLS + +Building Tools +--------------- + +We currently build the tools with Go version 1.15. Other Go versions may work but they are untested. + +Using `go get` to directly build the tools will not work. To build them, it's recommended to first clone this repository: + +``` +git clone https://github.com/mongodb/mongo-tools +cd mongo-tools +``` + +Then run `./make build` to build all the tools, placing them in the `bin` directory inside the repository. + +You can also build a subset of the tools using the `-tools` option. For example, `./make build -tools=mongodump,mongorestore` builds only `mongodump` and `mongorestore`. + +To use the build/test scripts in this repository, you **_must_** set GOROOT to your Go root directory. This may depend on how you installed Go. + +``` +export GOROOT=/usr/local/go +``` + +Updating Dependencies +--------------- +Starting with version 100.3.1, the tools use `go mod` to manage dependencies. All dependencies are listed in the `go.mod` file and are directly vendored in the `vendor` directory. + +In order to make changes to dependencies, you first need to change the `go.mod` file. 
You can manually edit that file to add/update/remove entries, or you can run the following in the repository directory: + +``` +go mod edit -require=<package>@<version> # for adding or updating a dependency +go mod edit -droprequire=<package> # for removing a dependency +``` + +Then run `go mod vendor -v` to reconstruct the `vendor` directory to match the changed `go.mod` file. + +Optionally, run `go mod tidy -v` to ensure that the `go.mod` file matches the `mongo-tools` source code. + +Contributing +--------------- +See our [Contributor's Guide](CONTRIBUTING.md). + +Documentation +--------------- +See the MongoDB packages [documentation](https://docs.mongodb.org/database-tools/). + +For documentation on older versions of the MongoDB, reference that version of the [MongoDB Server Manual](docs.mongodb.com/manual): + +- [MongoDB 4.2 Tools](https://docs.mongodb.org/v4.2/reference/program) +- [MongoDB 4.0 Tools](https://docs.mongodb.org/v4.0/reference/program) +- [MongoDB 3.6 Tools](https://docs.mongodb.org/v3.6/reference/program) + +Adding New Platforms Support +--------------- +See our [Adding New Platform Support Guide](PLATFORMSUPPORT.md). + +Vendoring the Change into Server Repo +--------------- +See our [Vendor the Change into Server Repo](SERVERVENDORING.md). diff --git a/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES new file mode 100644 index 00000000..c747d0b8 --- /dev/null +++ b/Mongo2Go.4.1.0/tools/mongodb-windows-4.4.4-database-tools-100.3.1/database-tools/THIRD-PARTY-NOTICES @@ -0,0 +1,3319 @@ +--------------------------------------------------------------------- +License notice for hashicorp/go-rootcerts +--------------------------------------------------------------------- + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a.
any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. 
Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/10gen/escaper +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Lucas Morales + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/llmgo/bson +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/10gen/openssl +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. 
Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/3rf/mongo-lint +---------------------------------------------------------------------- + +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/go-stack/stack +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 Chris Hines + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/gopacket +---------------------------------------------------------------------- + +Copyright (c) 2012 Google, Inc. All rights reserved. +Copyright (c) 2009-2011 Andreas Krennmair. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Andreas Krennmair, Google, nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/gopherjs/gopherjs +---------------------------------------------------------------------- + +Copyright (c) 2013 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/howeyc/gopass +---------------------------------------------------------------------- + +Copyright (c) 2012 Chris Howey + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/jessevdk/go-flags +---------------------------------------------------------------------- + +Copyright (c) 2012 Jesse van den Kieboom. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/jtolds/gls +---------------------------------------------------------------------- + +Copyright (c) 2013, Space Monkey, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mattn/go-runewidth +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/mongodb/mongo-go-driver +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/nsf/termbox-go +---------------------------------------------------------------------- + +Copyright (C) 2012 termbox-go authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/patrickmn/go-cache +---------------------------------------------------------------------- + +Copyright (c) 2012-2015 Patrick Mylund Nielsen and the go-cache contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions +---------------------------------------------------------------------- + +Copyright (c) 2015 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/go-render +---------------------------------------------------------------------- + +// Copyright (c) 2015 The Chromium Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglematchers +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/oglemock +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/ogletest +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/assertions/internal/reqtrace +---------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey +---------------------------------------------------------------------- + +Copyright (c) 2014 SmartyStreets, LLC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +NOTE: Various optional and subordinate components carry their own licensing +requirements and restrictions. Use of those components is subject to the terms +and conditions outlined the respective license of each component. + +---------------------------------------------------------------------- +License notice for github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/spacemonkeygo/spacelog +---------------------------------------------------------------------- + +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License notice for github.com/xdg/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/tomb.v2 +---------------------------------------------------------------------- + +tomb - support for clean goroutine termination in Go. + +Copyright (c) 2010-2011 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/SPRINTS.md b/SPRINTS.md index 87bb31c5..30e89075 100644 --- a/SPRINTS.md +++ b/SPRINTS.md @@ -310,7 +310,7 @@ This file describe implementation of Stella Ops (docs/README.md). 
Implementation | Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-503 | Package Slack connector as restart-time plug-in (manifest + host registration). | | Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-603 | Package Teams connector as restart-time plug-in (manifest + host registration). | | Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Email/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-703 | Package Email connector as restart-time plug-in (manifest + host registration). | -| Sprint 15 | Notify Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-EVENTS-15-201 | Emit `scanner.report.ready` + `scanner.scan.completed` events. | +| Sprint 15 | Notify Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DOING (2025-10-19) | Scanner WebService Guild | SCANNER-EVENTS-15-201 | Emit `scanner.report.ready` + `scanner.scan.completed` events. | | Sprint 15 | Benchmarks | bench/TASKS.md | TODO | Bench Guild, Notify Team | BENCH-NOTIFY-15-001 | Notify dispatch throughput bench with results CSV. | | Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Webhook/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-803 | Package Webhook connector as restart-time plug-in (manifest + host registration). | | Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | TODO | Scheduler Models Guild | SCHED-MODELS-16-101 | Define Scheduler DTOs & validation. | diff --git a/SPRINTS.updated.tmp b/SPRINTS.updated.tmp new file mode 100644 index 00000000..7c385fa1 --- /dev/null +++ b/SPRINTS.updated.tmp @@ -0,0 +1,426 @@ +This file describes the implementation of Stella Ops (docs/README.md). The implementation must respect the rules from AGENTS.md (read it if you have not). + +| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | +| --- | --- | --- | --- | --- | --- | --- | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-001 | SemVer primitive range-style metadata
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Models/AGENTS.md. This task lays the groundwork—complete the SemVer helper updates before teammates pick up FEEDMODELS-SCHEMA-01-002/003 and FEEDMODELS-SCHEMA-02-900. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md for the target rule structure. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-002 | Provenance decision rationale field
Instructions to work:
AdvisoryProvenance now carries `decisionReason` and docs/tests were updated. Connectors and merge tasks should populate the field when applying precedence/freshness/tie-breaker logic; see src/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md for usage guidance. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-003 | Normalized version rules collection
Instructions to work:
`AffectedPackage.NormalizedVersions` and supporting comparer/docs/tests shipped. Connector owners must emit rule arrays per ./src/FASTER_MODELING_AND_NORMALIZATION.md and report progress via FEEDMERGE-COORD-02-900 so merge/storage backfills can proceed. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-02-900 | Range primitives for SemVer/EVR/NEVRA metadata
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Models/AGENTS.md before resuming this stalled effort. Confirm helpers align with the new `NormalizedVersions` representation so connectors finishing in Sprint 2 can emit consistent metadata. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
Shared `SemVerRangeRuleBuilder` now outputs primitives + normalized rules per `FASTER_MODELING_AND_NORMALIZATION.md`; CVE/GHSA connectors consuming the API have verified fixtures. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
AdvisoryStore dual-writes flattened `normalizedVersions` when `concelier.storage.enableSemVerStyle` is set; migration `20251011-semver-style-backfill` updates historical records and docs outline the rollout. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence
Storage now persists `provenance.decisionReason` for advisories and merge events; tests cover round-trips. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Bootstrapper seeds compound/sparse indexes for flattened normalized rules and `docs/dev/mongo_indices.md` documents query guidance. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Updated constructors/tests keep storage suites passing with the new feature flag defaults. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options
WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Install/runbooks document connected vs air-gapped resilience profiles and monitoring hooks. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Operator guides now call out `route/status/subject/clientId/scopes/bypass/remote` audit fields and SIEM triggers. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding
Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through
`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHCORE-BUILD-OPENIDDICT / AUTHCORE-STORAGE-DEVICE-TOKENS / AUTHCORE-BOOTSTRAP-INVITES | Address remaining Authority compile blockers (OpenIddict transaction shim, token device document, bootstrap invite cleanup) so `dotnet build src/StellaOps.Authority.sln` returns success. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish
Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DOING (2025-10-14) | Team WebService & Authority | SEC2.PLG | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`; Serilog enrichment complete, storage durability tests in flight. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DOING (2025-10-14) | Team WebService & Authority | SEC3.PLG | Ensure lockout responses carry rate-limit metadata through plugin logs/events; retry-after propagation and limiter tests underway. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DOING (2025-10-14) | Team WebService & Authority | SEC5.PLG | Address plugin-specific mitigations in threat model backlog; mitigation items tracked, docs updates pending. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | BLOCKED (2025-10-12) | Team WebService & Authority | PLG4-6.CAPABILITIES | Finalise capability metadata exposure and docs once Authority rate-limiter stream (CORE8/SEC3) is stable; awaiting dependency unblock. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | TODO | Team WebService & Authority | PLG6.DIAGRAM | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | REVIEW (2025-10-13) | Team WebService & Authority | PLG7.RFC | Socialize LDAP plugin RFC and capture guild feedback; awaiting final review sign-off and follow-up issue tracking. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-001 | Fetch pipeline & state tracking
Summary planner now drives monthly/yearly VINCE fetches, persists pending summaries/notes, and hydrates VINCE detail queue with telemetry.
Team instructions: Read ./AGENTS.md and src/StellaOps.Concelier.Connector.CertCc/AGENTS.md. Coordinate daily with Models/Merge leads so new normalizedVersions output and provenance tags stay aligned with ./src/FASTER_MODELING_AND_NORMALIZATION.md. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher
Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-003 | DTO & parser implementation
Added VINCE DTO aggregate, Markdown→text sanitizer, vendor/status/vulnerability parsers, and parser regression fixture. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-004 | Canonical mapping & range primitives
VINCE DTO aggregate flows through `CertCcMapper`, emitting vendor range primitives + normalized version rules that persist via `_advisoryStore`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests
Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation
`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Concelier.Connector.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation
Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff
Fixtures regenerated with normalized ranges + provenance fields on 2025-10-11; QA handoff notes published and merge backfill unblocked. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up
Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan
Staged reintegration plan published in `src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation
Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md | DOING (2025-10-10) | Team Connector Resumption – CERT/RedHat | FEEDCONN-REDHAT-02-001 | Fixture validation sweep
Instructions to work:
Regenerating RHSA fixtures awaits remaining range provenance patches; review snapshot diffs and update docs once upstream helpers land. Conflict resolver deltas logged in src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-001 | Canonical mapping & range primitives
Mapper emits SemVer rules (`scheme=apple:*`); fixtures regenerated with trimmed references + new RSR coverage, update tooling finalized. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-002 | Deterministic fixtures/tests
Sanitized live fixtures + regression snapshots wired into tests; normalized rule coverage asserted. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-003 | Telemetry & documentation
Apple meter metrics wired into Concelier WebService OpenTelemetry configuration; README and fixtures document normalizedVersions coverage. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-004 | Live HTML regression sweep
Sanitised HT125326/HT125328/HT106355/HT214108/HT215500 fixtures recorded and regression tests green on 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-005 | Fixture regeneration tooling
`UPDATE_APPLE_FIXTURES=1` flow fetches & rewrites fixtures; README documents usage.
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md. Resume stalled tasks, ensuring normalizedVersions output and fixtures align with ./src/FASTER_MODELING_AND_NORMALIZATION.md before handing data to the conflict sprint. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance
Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/StellaOps.Concelier.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-CVE-02-003 | CVE normalized versions uplift | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-KEV-02-003 | KEV normalized versions propagation | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-04-003 | OSV parity fixture refresh | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DOING (2025-10-10) | Team WebService & Authority | FEEDWEB-DOCS-01-001 | Document authority toggle & scope requirements
Quickstart updates are staged; awaiting Docs guild review before publishing operator guide refresh. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options
WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Operator docs now outline connected vs air-gapped resilience profiles and monitoring cues. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Audit logging guidance highlights `route/status/subject/clientId/scopes/bypass/remote` fields and SIEM alerts. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and ties audit signals to rollout checks. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path
Repoint build outputs to `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`, update PluginHost defaults, Offline Kit packaging, and operator docs. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | BLOCKED (2025-10-10) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption
Roll out retry/offline knobs to deployment docs and align CLI parity once LIB5 resilience options land; unblock when library release is available and docs review completes. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCRYPTO-ENGINE-01-001 | SEC3.A — Shared metadata resolver confirmed via host test run; SEC3.B now unblocked for tuning guidance (outline captured in `docs/dev/authority-rate-limit-tuning-outline.md`). | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-13) | Team Authority Platform & Security Guild | AUTHSEC-DOCS-01-002 | SEC3.B — Published `docs/security/rate-limits.md` with tuning matrix, alert thresholds, and lockout interplay guidance; Docs guild can lift copy into plugin guide. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHSEC-CRYPTO-02-001 | SEC5.B1 — Introduce libsodium signing provider and parity tests to unblock CLI verification enhancements. | +| Sprint 9 | Sovereign Crypto Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-19) | Security Guild | SEC6.A | BouncyCastle-backed Ed25519 signing plug-in wired via `ICryptoProviderRegistry`; Scanner WebService now resolves signing through the registry; AGENTS updated to enforce plug-in rule. | +| Sprint 1 | Bootstrap & Replay Hardening | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Security Guild | AUTHSEC-CRYPTO-02-004 | SEC5.D/E — Finish bootstrap invite lifecycle (API/store/cleanup) and token device heuristics; build currently red due to pending handler integration. | +| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | DONE (2025-10-15) | DevEx/CLI | AUTHCLI-DIAG-01-001 | Surface password policy diagnostics in CLI startup/output so operators see weakened overrides immediately.
CLI now loads Authority plug-ins at startup, logs weakened password policies (length/complexity), and regression coverage lives in `StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests`. | +| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | TODO – Display export metadata (sha256, size, Rekor link), support optional artifact download path, and handle cache hits gracefully. | DevEx/CLI | EXCITITOR-CLI-01-002 | EXCITITOR-CLI-01-002 – Export download & attestation UX | +| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | TODO – Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow. | Docs/CLI | EXCITITOR-CLI-01-003 | EXCITITOR-CLI-01-003 – CLI docs & examples for Excititor | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Developer guide copy + diagrams merged 2025-10-11; limiter guidance incorporated and handed to Docs guild for asset export. | +| Sprint 1 | Backlog | src/StellaOps.Web/TASKS.md | TODO | UX Specialist, Angular Eng | WEB1.TRIVY-SETTINGS | Implement Trivy DB exporter settings panel with `publishFull`, `publishDelta`, `includeFull`, `includeDelta` toggles and “Run export now” action using future `/exporters/trivy-db/settings` API. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-12) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
`SemVerRangeRuleBuilder` shipped 2025-10-12 with comparator/` | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Indexes seeded + docs updated 2025-10-11 to cover flattened normalized rules for connector adoption. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDMERGE-ENGINE-02-002 | Normalized versions union & dedupe
Affected package resolver unions/dedupes normalized rules, stamps merge provenance with `decisionReason`, and tests cover the rollout. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Merge/TASKS.md | DOING (2025-10-12) | Team Merge & QA Enforcement | FEEDMERGE-COORD-02-900 | Range primitives rollout coordination
Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical range primitives with provenance tags; fixtures tracked in `RANGE_PRIMITIVES_COORDINATION.md`. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-005 | NVD merge/export parity regression | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow
Resolved 2025-10-12: OSV mapper now derives canonical PURLs for Go + scoped npm packages when raw payloads omit `purl`; conflict fixtures unchanged for invalid npm names. Verified via `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests`, `src/StellaOps.Concelier.Connector.Ghsa.Tests`, `src/StellaOps.Concelier.Connector.Nvd.Tests`, and backbone normalization/storage suites. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Acsc/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch→parse→map pipeline, fixtures, diagnostics, and README finished 2025-10-12; downstream export parity captured via FEEDEXPORT-JSON-04-001 / FEEDEXPORT-TRIVY-04-001 (completed). | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Acsc/TASKS.md | **DONE (2025-10-11)** – Reproduced Akamai resets, drafted downgrade plan (two-stage HTTP/2 retry + relay fallback), and filed `FEEDCONN-SHARED-HTTP2-001`; module README TODO will host the per-environment knob matrix. | BE-Conn-ACSC | FEEDCONN-ACSC-02-008 | FEEDCONN-ACSC-02-008 HTTP client compatibility plan | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Cccs/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-008 | Observability meter, historical harvest plan, and DOM sanitizer refinements wrapped; ops notes live under `docs/ops/concelier-cccs-operations.md` with fixtures validating EN/FR list handling. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.CertBund/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-008 | Telemetry/docs (02-006) and history/locale sweep (02-007) completed alongside pipeline; runbook `docs/ops/concelier-certbund-operations.md` captures locale guidance and offline packaging. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Kisa/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | Connector, tests, and telemetry/docs (02-006) finalized; localisation notes in `docs/dev/kisa_connector_notes.md` complete rollout. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | Fetch/parser/mapper refinements, regression fixtures, telemetry/docs, access options, and trusted root packaging all landed; README documents offline access strategy. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md | DONE (2025-10-13) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | Listing fetch, parser, mapper, fixtures, telemetry/docs, and archive plan finished; Mongo2Go/libcrypto dependency resolved via bundled OpenSSL noted in ops guide. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-011 | Feed parser attachment fixes, SemVer exact values, regression suites, telemetry/docs updates, and handover complete; ops runbook now details attachment verification + proxy usage. 
| +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | OAuth fetch pipeline, DTO/mapping, tests, and telemetry/docs shipped; monitoring/export integration follow-ups recorded in Ops docs and exporter backlog (completed). | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-008 | Azure AD onboarding (02-008) unblocked fetch/parse/map pipeline; fixtures, telemetry/docs, and Offline Kit guidance published in `docs/ops/concelier-msrc-operations.md`. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-15) | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | CVE data-source selection, fetch pipeline, and docs landed 2025-10-10. 2025-10-15: smoke verified using the seeded mirror fallback; connector now logs a warning and pulls from `seed-data/cve/` until live CVE Services credentials arrive. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | KEV catalog ingestion, fixtures, telemetry, and schema validation completed 2025-10-12; ops dashboard published. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-01-001 | Canonical schema docs refresh
Updated canonical schema + provenance guides with SemVer style, normalized version rules, decision reason change log, and migration notes. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-001 | Concelier-SemVer Playbook
Published merge playbook covering mapper patterns, dedupe flow, indexes, and rollout checklist. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-002 | Normalized versions query guide
Delivered Mongo index/query addendum with `$unwind` recipes, dedupe checks, and operational checklist.
Instructions to work:
DONE Read ./AGENTS.md and docs/AGENTS.md. Document every schema/index/query change produced in Sprint 1-2 leveraging ./src/FASTER_MODELING_AND_NORMALIZATION.md. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | REVIEW | Docs Guild, Plugin Team | DOC4.AUTH-PDG | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-001 | Canonical merger implementation
`CanonicalMerger` ships with freshness/tie-breaker logic, provenance, and unit coverage feeding Merge. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-002 | Field precedence and tie-breaker map
Field precedence tables and tie-breaker metrics wired into the canonical merge flow; docs/tests updated.
Instructions to work:
Read ./AGENTS.md and core AGENTS. Implement the conflict resolver exactly as specified in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md, coordinating with Merge and Storage teammates. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-03-001 | Merge event provenance audit prep
Merge events now persist `fieldDecisions` and analytics-ready provenance snapshots. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
Dual-write/backfill flag delivered; migration + options validated in tests. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Storage tests adjusted for normalized versions/decision reasons.
Instructions to work:
Read ./AGENTS.md and storage AGENTS. Extend merge events with decision reasons and analytics views to support the conflict rules, and deliver the dual-write/backfill for `NormalizedVersions` + `decisionReason` so connectors can roll out safely. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-001 | GHSA/NVD/OSV conflict rules
Merge pipeline consumes `CanonicalMerger` output prior to precedence merge. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-002 | Override metrics instrumentation
Merge events capture per-field decisions; counters/logs align with conflict rules. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-003 | Reference & credit union pipeline
Canonical merge preserves unions with updated tests. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-QA-04-001 | End-to-end conflict regression suite
Added regression tests (`AdvisoryMergeServiceTests`) covering canonical + precedence flow.
Instructions to work:
Read ./AGENTS.md and merge AGENTS. Integrate the canonical merger, instrument metrics, and deliver comprehensive regression tests following ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-002 | GHSA conflict regression fixtures | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures | +| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures
Instructions to work:
Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. | +| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Concelier Conflict Rules
Runbook published at `docs/ops/concelier-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. | +| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-16) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout
Ops review completed, alert thresholds applied, and change log appended in `docs/ops/concelier-conflict-resolution.md`; task closed after connector signals verified. | +| Sprint 3 | Backlog | src/StellaOps.Concelier.Connector.Common/TASKS.md | **TODO (2025-10-15)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. | Tools Guild, BE-Conn-MSRC | FEEDCONN-SHARED-STATE-003 | FEEDCONN-SHARED-STATE-003 Source state seeding helper | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-15) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-04-001 | Advisory schema parity (description/CWE/canonical metric)
Extend `Advisory` and related records with description text, CWE collection, and canonical metric pointer; refresh validation + serializer determinism tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-003 | Canonical merger parity for new fields
Teach `CanonicalMerger` to populate description, CWEResults, and canonical metric pointer with provenance + regression coverage. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-004 | Reference normalization & freshness instrumentation cleanup
Implement URL normalization for reference dedupe, align freshness-sensitive instrumentation, and add analytics tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-004 | Merge pipeline parity for new advisory fields
Ensure merge service + merge events surface description/CWE/canonical metric decisions with updated metrics/tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-005 | Connector coordination for new advisory fields
GHSA/NVD/OSV connectors now ship description, CWE, and canonical metric data with refreshed fixtures; merge coordination log updated and exporters notified. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Exporter.Json/TASKS.md | DONE (2025-10-15) | Team Exporters – JSON | FEEDEXPORT-JSON-04-001 | Surface new advisory fields in JSON exporter
Update schemas/offline bundle + fixtures once model/core parity lands.
2025-10-15: `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests` validated canonical metric/CWE emission. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | DONE (2025-10-15) | Team Exporters – Trivy DB | FEEDEXPORT-TRIVY-04-001 | Propagate new advisory fields into Trivy DB package
Extend Bolt builder, metadata, and regression tests for the expanded schema.
2025-10-15: `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests` confirmed canonical metric/CWE propagation. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-16) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-004 | Harden CVSS fallback so canonical metric ids persist when GitHub omits vectors; extend fixtures and document severity precedence hand-off to Merge. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-04-005 | Map OSV advisories lacking CVSS vectors to canonical metric ids/notes and document CWE provenance quirks; schedule parity fixture updates. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-001 | Stand up canonical VEX claim/consensus records with deterministic serializers so Storage/Exports share a stable contract. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-002 | Implement trust-weighted consensus resolver with baseline policy weights, justification gates, telemetry output, and majority/tie handling. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-003 | Publish shared connector/exporter/attestation abstractions and deterministic query signature utilities for cache/attestation workflows. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-001 | Established policy options & snapshot provider covering baseline weights/overrides. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-002 | Policy evaluator now feeds consensus resolver with immutable snapshots. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-003 | Author policy diagnostics, CLI/WebService surfacing, and documentation updates. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-004 | Implement YAML/JSON schema validation and deterministic diagnostics for operator bundles. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-005 | Add policy change tracking, snapshot digests, and telemetry/logging hooks. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-15) | Team Excititor Storage | EXCITITOR-STORAGE-01-001 | Mongo mapping registry plus raw/export entities and DI extensions in place. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-16) | Team Excititor Storage | EXCITITOR-STORAGE-01-004 | Build provider/consensus/cache class maps and related collections. | +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-15) | Team Excititor Export | EXCITITOR-EXPORT-01-001 | Export engine delivers cache lookup, manifest creation, and policy integration. 
| +| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-17) | Team Excititor Export | EXCITITOR-EXPORT-01-004 | Connect export engine to attestation client and persist Rekor metadata. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-001 | Implement in-toto predicate + DSSE builder providing envelopes for export attestation. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Attestation/TASKS.md | TODO | Team Excititor Attestation | EXCITITOR-ATTEST-01-003 | Verification suite & observability – add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression tests. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Connectors.Abstractions/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors | EXCITITOR-CONN-ABS-01-001 | Deliver shared connector context/base classes so provider plug-ins can be activated via WebService/Worker. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-17) | Team Excititor WebService | EXCITITOR-WEB-01-001 | Scaffold minimal API host, DI, and `/excititor/status` endpoint integrating policy, storage, export, and attestation services. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Team Excititor WebService | EXCITITOR-WEB-01-002 | Ingest & reconcile endpoints – implement `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile` with token scope enforcement and structured run telemetry. |
+| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Team Excititor WebService | EXCITITOR-WEB-01-003 | Export & verify endpoints – add `/excititor/export`, `/excititor/export/{id}`, `/excititor/export/{id}/download`, `/excititor/verify`, returning artifact + attestation metadata with cache awareness. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-17) | Team Excititor Worker | EXCITITOR-WORKER-01-001 | Create Worker host with provider scheduling and logging to drive recurring pulls/reconciliation. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Team Excititor Worker | EXCITITOR-WORKER-01-002 | Resume tokens & retry policy – implement durable resume markers, exponential backoff with jitter, and quarantine for failing connectors per architecture spec. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Team Excititor Worker | EXCITITOR-WORKER-01-003 | Verification & cache GC loops – add scheduled attestation re-verification and cache pruning routines, surfacing metrics for export reuse ratios. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CSAF-01-001 | Implement CSAF normalizer foundation translating provider documents into `VexClaim` entries. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CSAF/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-CSAF-01-002 | Status/justification mapping – normalize CSAF `product_status` + `justification` values into policy-aware enums with audit diagnostics for unsupported codes. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CSAF/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-CSAF-01-003 | CSAF export adapter – provide CSAF export writer producing deterministic documents (per vuln/product) and manifest metadata for attestation. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CYCLONE-01-001 | Implement CycloneDX VEX normalizer capturing `analysis` state and component references. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-CYCLONE-01-002 | Component reference reconciliation – implement helpers to reconcile component/service references against policy expectations and emit diagnostics for missing SBOM links. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-CYCLONE-01-003 | CycloneDX export serializer – provide exporters producing CycloneDX VEX output with canonical ordering and hash-stable manifests. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-OPENVEX-01-001 | Implement OpenVEX normalizer to ingest attestations into canonical claims with provenance. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-OPENVEX-01-002 | Statement merge utilities – add reducers merging multiple OpenVEX statements, resolving conflicts deterministically, and emitting policy diagnostics. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md | TODO | Team Excititor Formats | EXCITITOR-FMT-OPENVEX-01-003 | OpenVEX export writer – provide export serializer generating canonical OpenVEX documents with optional SBOM references and hash-stable ordering. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-001 | Ship Red Hat CSAF provider metadata discovery enabling incremental pulls. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-002 | Fetch CSAF windows with ETag handling, resume tokens, quarantine on schema errors, and persist raw docs. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-003 | Populate provider trust overrides (cosign issuer, identity regex) and provenance hints for policy evaluation/logging. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-004 | Persist resume cursors (last updated timestamp/document hashes) in storage and reload during fetch to avoid duplicates. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-005 | Register connector in Worker/WebService DI, add scheduled jobs, and document CLI triggers for Red Hat CSAF pulls. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-006 | Add CSAF normalization parity fixtures ensuring RHSA-specific metadata is preserved. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-001 | Implement Cisco CSAF endpoint discovery/auth to unlock paginated pulls. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-002 | Implement Cisco CSAF paginated fetch loop with dedupe and raw persistence support. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | TODO | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-003 | Provider trust metadata – emit cosign/PGP trust metadata and advisory provenance hints for policy weighting. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – SUSE | EXCITITOR-CONN-SUSE-01-001 | Build Rancher VEX Hub discovery/subscription path with offline snapshot support. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md | TODO | Team Excititor Connectors – SUSE | EXCITITOR-CONN-SUSE-01-002 | Checkpointed event ingestion – process hub events with resume checkpoints, deduplication, and quarantine path for malformed payloads. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md | TODO | Team Excititor Connectors – SUSE | EXCITITOR-CONN-SUSE-01-003 | Trust metadata & policy hints – emit provider trust configuration (signers, weight overrides) and attach provenance hints for consensus engine. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – MSRC | EXCITITOR-CONN-MS-01-001 | Deliver AAD onboarding/token cache for MSRC CSAF ingestion. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md | TODO | Team Excititor Connectors – MSRC | EXCITITOR-CONN-MS-01-002 | CSAF download pipeline – fetch CSAF packages with retry/backoff, checksum verification, and raw document persistence plus quarantine for schema failures. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md | TODO | Team Excititor Connectors – MSRC | EXCITITOR-CONN-MS-01-003 | Trust metadata & provenance hints – emit cosign/AAD issuer metadata, attach provenance details, and document policy integration. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md | DOING (2025-10-17) | Team Excititor Connectors – Oracle | EXCITITOR-CONN-ORACLE-01-001 | Implement Oracle CSAF catalogue discovery with CPU calendar awareness and offline snapshot import; connector wiring and fixtures underway. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md | TODO | Team Excititor Connectors – Oracle | EXCITITOR-CONN-ORACLE-01-002 | CSAF download & dedupe pipeline – fetch CSAF documents with retry/backoff, checksum validation, revision deduplication, and raw persistence. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md | TODO | Team Excititor Connectors – Oracle | EXCITITOR-CONN-ORACLE-01-003 | Trust metadata + provenance – emit Oracle signing metadata (PGP/cosign) and provenance hints for consensus weighting. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Ubuntu | EXCITITOR-CONN-UBUNTU-01-001 | Implement Ubuntu CSAF discovery and channel selection for USN ingestion. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md | TODO | Team Excititor Connectors – Ubuntu | EXCITITOR-CONN-UBUNTU-01-002 | Incremental fetch & deduplication – fetch CSAF bundles with ETag handling, checksum validation, deduplication, and raw persistence. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md | TODO | Team Excititor Connectors – Ubuntu | EXCITITOR-CONN-UBUNTU-01-003 | Trust metadata & provenance – emit Ubuntu signing metadata (GPG fingerprints) plus provenance hints for policy weighting and diagnostics. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-001 | Wire OCI discovery/auth to fetch OpenVEX attestations for configured images. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-002 | Attestation fetch & verify loop – download DSSE attestations, trigger verification, handle retries/backoff, persist raw statements. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-003 | Provenance metadata & policy hooks – emit image, subject digest, issuer, and trust metadata for policy weighting/logging. |
+| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Cli/TASKS.md | DONE (2025-10-18) | DevEx/CLI | EXCITITOR-CLI-01-001 | Add `excititor` CLI verbs bridging to WebService with consistent auth and offline UX. |
+| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-19) | Team Excititor Core & Policy | EXCITITOR-CORE-02-001 | Context signal schema prep – extend consensus models with severity/KEV/EPSS fields and update canonical serializers.
| +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-19) | Team Excititor Policy | EXCITITOR-POLICY-02-001 | Scoring coefficients & weight ceilings – add α/β options, weight boosts, and validation guidance. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-02-001 | Statement events & scoring signals – create immutable VEX statement store plus consensus extensions with indexes/migrations. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Team Excititor WebService | EXCITITOR-WEB-01-004 | Resolve API & signed responses – expose `/excititor/resolve`, return signed consensus/score envelopes, document auth. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-19) | Team Excititor WebService | EXCITITOR-WEB-01-005 | Mirror distribution endpoints – expose download APIs for downstream Excititor instances. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-002 | Rekor v2 client integration – ship transparency log client with retries and offline queue. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Team Excititor Worker | EXCITITOR-WORKER-01-004 | TTL refresh & stability damper – schedule re-resolve loops and guard against status flapping. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | TODO | Team Excititor Export | EXCITITOR-EXPORT-01-005 | Score & resolve envelope surfaces – include signed consensus/score artifacts in exports. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | TODO | Team Excititor Export | EXCITITOR-EXPORT-01-006 | Quiet provenance packaging – attach quieted-by statement IDs, signers, justification codes to exports and attestations. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | TODO | Team Excititor Export | EXCITITOR-EXPORT-01-007 | Mirror bundle + domain manifest – publish signed consensus bundles for mirrors. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md | TODO | Excititor Connectors – Stella | EXCITITOR-CONN-STELLA-07-001 | Excititor mirror connector – ingest signed mirror bundles and map to VexClaims with resume handling. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md | TODO | Excititor Connectors – Stella | EXCITITOR-CONN-STELLA-07-002 | Normalize mirror bundles into VexClaim sets referencing original provider metadata and mirror provenance. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md | TODO | Excititor Connectors – Stella | EXCITITOR-CONN-STELLA-07-003 | Implement incremental cursor handling per-export digest, support resume, and document configuration for downstream Excititor mirrors. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-001 | Advisory event log & asOf queries – surface immutable statements and replay capability. 
| +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Team Core Engine & Data Science | FEEDCORE-ENGINE-07-002 | Noise prior computation service – learn false-positive priors and expose deterministic summaries. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-003 | Unknown state ledger & confidence seeding – persist unknown flags, seed confidence bands, expose query surface. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-07-001 | Advisory statement & conflict collections – provision Mongo schema/indexes for event-sourced merge. | +| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Merge/TASKS.md | TODO | BE-Merge | FEEDMERGE-ENGINE-07-001 | Conflict sets & explainers – persist conflict materialization and replay hashes for merge decisions. | +| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | TODO | Plugin Platform Guild | PLUGIN-DI-08-001 | Scoped service support in plugin bootstrap
– teach the plugin loader/registrar to surface services with scoped lifetimes, honour `StellaOps.DependencyInjection` metadata, and document the new contract. | +| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | TODO | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-002 | Update Authority plugin integration
– flow scoped services through identity-provider registrars, bootstrap flows, and background jobs; add regression coverage around scoped lifetimes. | +| Sprint 8 | Mongo strengthening | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Team Normalization & Storage Backbone | FEEDSTORAGE-MONGO-08-001 | Causal-consistent Concelier storage sessions
– ensure `AddMongoStorage` registers a scoped session facilitator (causal consistency + majority concerns), update repositories to accept optional session handles, and add integration coverage proving read-your-write and monotonic reads across a replica set/election scenario. | +| Sprint 8 | Mongo strengthening | src/StellaOps.Authority/TASKS.md | BLOCKED (2025-10-19) | Authority Core & Storage Guild | AUTHSTORAGE-MONGO-08-001 | Harden Authority Mongo usage
– scoped sessions with causal consistency pending rate-limiter stream updates; resume once plugin lockout telemetry stabilises. | +| Sprint 8 | Mongo strengthening | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Team Excititor Storage | EXCITITOR-STORAGE-MONGO-08-001 | Causal consistency for Excititor repositories –
Register Mongo options with majority defaults, push session-aware overloads through raw/export/consensus/cache stores, and extend migration/tests to validate causal reads after writes (including GridFS-backed content) under replica-set failover. | +| Sprint 8 | Platform Maintenance | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Team Excititor Worker | EXCITITOR-WORKER-02-001 | Resolve Microsoft.Extensions.Caching.Memory advisory – bump to latest .NET 10 preview, regenerate lockfiles, and rerun worker/webservice tests to clear NU1903. | +| Sprint 8 | Platform Maintenance | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Team Excititor Storage | EXCITITOR-STORAGE-03-001 | Statement backfill tooling – provide CLI/backfill scripts that populate the `vex.statements` log via WebService ingestion and validate severity/KEV/EPSS signal replay. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Exporter.Json/TASKS.md | TODO | Concelier Export Guild | CONCELIER-EXPORT-08-201 | Mirror bundle + domain manifest – produce signed JSON aggregates for `*.stella-ops.org` mirrors. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | TODO | Concelier Export Guild | CONCELIER-EXPORT-08-202 | Mirror-ready Trivy DB bundles – ship domain-specific archives + metadata for downstream sync. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-08-201 | Mirror distribution endpoints – expose domain-scoped index/download APIs with auth/quota. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Concelier mirror connector – fetch mirror manifest, verify signatures, and hydrate canonical DTOs with resume support. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | +| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-003 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | +| Sprint 8 | Mirror Distribution | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-MIRROR-08-001 | Managed mirror deployments for `*.stella-ops.org` – Helm/Compose overlays, CDN, runbooks. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-19) | Team Scanner Core | SCANNER-CORE-09-501 | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. `docs/scanner-core-contracts.md` now carries the canonical JSON snippet + acceptance notes. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-19) | Team Scanner Core | SCANNER-CORE-09-502 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. Added `ScannerLogExtensionsPerformanceTests` to lock ≤5 µs overhead. 
| +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-503 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. | +| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-001 | Buildx driver scaffold + handshake with Scanner.Emit (local CAS). | +| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-002 | OCI annotations + provenance hand-off to Attestor. | +| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-003 | CI demo: minimal SBOM push & backend report wiring. | +| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-004 | Stabilize descriptor nonce derivation so repeated builds emit deterministic placeholders. | +| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-005 | Integrate determinism guard into GitHub/Gitea workflows and archive proof artifacts. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-101 | Minimal API host with Authority enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-102 | `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation support. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-103 | Progress streaming (SSE/JSONL) with correlation IDs and ISO-8601 UTC timestamps, documented in API reference. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-104 | Configuration binding for Mongo, MinIO, queue, feature flags; startup diagnostics and fail-fast policy. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-105 | Policy snapshot loader + schema + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-106 | `/reports` verdict assembly (Feedser+Vexer+Policy) + signed response envelope. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-107 | Expose score inputs, config version, and quiet provenance in `/reports` JSON and signed payload. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-201 | Worker host bootstrap with Authority auth, hosted services, and graceful shutdown semantics. 
| +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-202 | Lease/heartbeat loop with retry+jitter, poison-job quarantine, structured logging. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-203 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-204 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-205 | Harden heartbeat jitter so lease safety margin stays ≥3× and cover with regression tests + optional live queue smoke run. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-001 | Policy schema + binder + diagnostics. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-002 | Policy snapshot store + revision digests. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-003 | `/policy/preview` API (image digest → projected verdict diff). | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-004 | Versioned scoring config with schema validation, trust table, and golden fixtures. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-005 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-CORE-09-006 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | +| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild, Scanner WebService Guild | POLICY-RUNTIME-17-201 | Define runtime reachability feed contract and alignment plan for `SCANNER-RUNTIME-17-401` once Zastava endpoints land; document policy expectations for reachability tags. | +| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-HELM-09-001 | Helm/Compose environment profiles (dev/staging/airgap) with deterministic digests. | +| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | TODO | DevOps Guild, Scanner WebService Guild | DEVOPS-SCANNER-09-204 | Surface `SCANNER__EVENTS__*` environment variables across docker-compose (dev/stage/airgap) and Helm values, defaulting to share the Redis queue DSN. | +| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | TODO | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-205 | Add Notify smoke stage that tails the Redis stream and asserts `scanner.report.ready`/`scanner.scan.completed` reach Notify WebService in staging. | +| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, DevEx | DOCS-ADR-09-001 | Establish ADR process and template. | +| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, Platform Events | DOCS-EVENTS-09-002 | Publish event schema catalog (`docs/events/`) for critical envelopes. 
| +| Sprint 9 | Docs & Governance | docs/TASKS.md | TODO | Platform Events Guild | PLATFORM-EVENTS-09-401 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | +| Sprint 9 | Docs & Governance | docs/TASKS.md | TODO | Runtime Guild | RUNTIME-GUILD-09-402 | Confirm Scanner WebService surfaces `quietedFindingCount` and progress hints to runtime consumers; document readiness checklist. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-301 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-302 | MinIO layout, immutability policies, client abstraction, and configuration binding. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-303 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-401 | Queue abstraction + Redis Streams adapter with ack/claim APIs and idempotency tokens. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-402 | Pluggable backend support (Redis, NATS) with configuration binding, health probes, failover docs. | +| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-403 | Retry + dead-letter strategy with structured logs/metrics for offline deployments. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Implement layer cache store keyed by layer digest with metadata retention per architecture §3.3. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-102 | Build file CAS with dedupe, TTL enforcement, and offline import/export hooks. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-103 | Expose cache metrics/logging and configuration toggles for warm/cold thresholds. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-104 | Implement cache invalidation workflows (layer delete, TTL expiry, diff invalidation). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-201 | Alpine/apk analyzer emitting deterministic components with provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-202 | Debian/dpkg analyzer mapping packages to purl identity with evidence. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-203 | RPM analyzer capturing EVR, file listings, provenance. 
| +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-204 | Shared OS evidence helpers for package identity + provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-205 | Vendor metadata enrichment (source packages, license, CVE hints). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | QA + OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-206 | Determinism harness + fixtures for OS analyzers. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild + DevOps | SCANNER-ANALYZERS-OS-10-207 | Package OS analyzers as restart-time plug-ins (manifest + host registration). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301 | Java analyzer emitting `pkg:maven` with provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DOING (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-302 | Node analyzer handling workspaces/symlinks emitting `pkg:npm`; workspace/symlink coverage and determinism harness in progress. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-303 | Python analyzer reading `*.dist-info`, RECORD hashes, entry points. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-304 | Go analyzer leveraging buildinfo for `pkg:golang` components. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-305 | .NET analyzer parsing `*.deps.json`, assembly metadata, RID variants. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-306 | Rust analyzer detecting crates or falling back to `bin:{sha256}`. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Shared language evidence helpers + usage flag propagation. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-308 | Determinism + fixture harness for language analyzers. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DOING (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309 | Package language analyzers as restart-time plug-ins (manifest + host registration); manifests and DI wiring under development. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-302..309 | Detailed per-language sprint plan (Node, Python, Go, .NET, Rust) with gates and benchmarks. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-401 | POSIX shell AST parser with deterministic output. 
| +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-402 | Command resolution across layered rootfs with evidence attribution. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-403 | Interpreter tracing for shell wrappers to Python/Node/Java launchers. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-404 | Python entry analyzer (venv shebang, module invocation, usage flag). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-405 | Node/Java launcher analyzer capturing script/jar targets. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-406 | Explainability + diagnostics for unresolved constructs with metrics. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | TODO | EntryTrace Guild | SCANNER-ENTRYTRACE-10-407 | Package EntryTrace analyzers as restart-time plug-ins (manifest + host registration). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | TODO | Diff Guild | SCANNER-DIFF-10-501 | Build component differ tracking add/remove/version changes with deterministic ordering. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | TODO | Diff Guild | SCANNER-DIFF-10-502 | Attribute diffs to introducing/removing layers including provenance evidence. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | TODO | Diff Guild | SCANNER-DIFF-10-503 | Produce JSON diff output for inventory vs usage views aligned with API contract. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-601 | Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-602 | Compose usage SBOM leveraging EntryTrace to flag actual usage. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-603 | Generate BOM index sidecar (purl table + roaring bitmap + usage flag). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-604 | Package artifacts for export + attestation with deterministic manifests. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-605 | Emit BOM-Index sidecar schema/fixtures (CRITICAL PATH for SP16). | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-606 | Usage view bit flags integrated with EntryTrace. | +| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-10-607 | Embed scoring inputs, confidence band, and quiet provenance in CycloneDX/DSSE artifacts. | +| Sprint 10 | Benchmarks | bench/TASKS.md | DONE (2025-10-19) | Bench Guild, Scanner Team | BENCH-SCANNER-10-001 | Analyzer microbench harness committed with baseline CSV + CLI hook. 
| +| Sprint 10 | Benchmarks | bench/TASKS.md | TODO | Bench Guild, Language Analyzer Guild | BENCH-SCANNER-10-002 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | +| Sprint 10 | Samples | samples/TASKS.md | DONE (2025-10-19) | Samples Guild, Scanner Team | SAMPLES-10-001 | Sample images, SBOMs, and BOM-Index fixtures published under `samples/`. | +| Sprint 10 | Samples | samples/TASKS.md | TODO | Samples Guild, Policy Guild | SAMPLES-13-004 | Add policy preview/report fixtures showing confidence bands and unknown-age tags. | +| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-PERF-10-001 | Perf smoke job added to CI enforcing <5 s compose budget with regression guard. | +| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-PERF-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | +| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-SEC-10-301 | Address NU1902/NU1903 advisories for `MongoDB.Driver` 2.12.0 and `SharpCompress` 0.23.0 surfaced during scanner cache and worker test runs. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-305A | Parse `*.deps.json` + `runtimeconfig.json`, build RID graph, and normalize to `pkg:nuget` components. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-305B | Extract assembly metadata (strong name, file/product info) and optional Authenticode details when offline cert bundle provided. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-305C | Handle self-contained apps and native assets; merge with EntryTrace usage hints. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-307D | Integrate shared helpers (license mapping, quiet provenance) and concurrency-safe caches. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-308D | Determinism fixtures + benchmark harness; compare to competitor scanners for accuracy/perf. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-309D | Package plug-in (manifest, DI registration) and update Offline Kit instructions. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-304A | Parse Go build info blob (`runtime/debug` format) and `.note.go.buildid`; map to module/version and evidence. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-304B | Implement DWARF-lite reader for VCS metadata + dirty flag; add cache to avoid re-reading identical binaries. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-304C | Fallback heuristics for stripped binaries with deterministic `bin:{sha256}` labeling and quiet provenance. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-307G | Wire shared helpers (license mapping, usage flags) and ensure concurrency-safe buffer reuse. 
| +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-308G | Determinism fixtures + benchmark harness (Vs competitor). | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-309G | Package plug-in manifest + Offline Kit notes; ensure Worker DI registration. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-302C | Surface script metadata (postinstall/preinstall) and policy hints; emit telemetry counters and evidence records. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-307N | Integrate shared helpers for license/licence evidence, canonical JSON serialization, and usage flag propagation. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-308N | Author determinism harness + fixtures for Node analyzer; add benchmark suite. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-309N | Package Node analyzer as restart-time plug-in (manifest, DI registration, Offline Kit notes). | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-303A | STREAM-based parser for `*.dist-info` (`METADATA`, `WHEEL`, `entry_points.txt`) with normalization + evidence capture. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-303B | RECORD hash verifier with chunked hashing, Zip64 support, and mismatch diagnostics. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-303C | Editable install + pip cache detection; integrate EntryTrace hints for runtime usage flags. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-307P | Shared helper integration (license metadata, quiet provenance, component merging). | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-308P | Golden fixtures + determinism harness for Python analyzer; add benchmark and hash throughput reporting. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-309P | Package plug-in (manifest, DI registration) and document Offline Kit bundling of Python stdlib metadata if needed. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-306A | Parse Cargo metadata (`Cargo.lock`, `.fingerprint`, `.metadata`) and map crates to components with evidence. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-306B | Implement heuristic classifier using ELF section names, symbol mangling, and `.comment` data for stripped binaries. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-306C | Integrate binary hash fallback (`bin:{sha256}`) and tie into shared quiet provenance helpers. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-307R | Finalize shared helper usage (license, usage flags) and concurrency-safe caches. 
| +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-308R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | +| Sprint 10 | Backlog | src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md | TODO | TBD | SCANNER-ANALYZERS-LANG-10-309R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-DPOP-11-001 | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-MTLS-11-002 | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | TODO | Signer Guild | SIGNER-API-11-101 | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | TODO | Signer Guild | SIGNER-REF-11-102 | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | TODO | Signer Guild | SIGNER-QUOTA-11-103 | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Guild | ATTESTOR-API-11-201 | `/rekor/entries` submission pipeline with dedupe, proof acquisition, and persistence. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Guild | ATTESTOR-VERIFY-11-202 | `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. | +| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Guild | ATTESTOR-OBS-11-203 | Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. | +| Sprint 11 | UI Integration | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-ATTEST-11-005 | Attestation visibility (Rekor id, status) on Scan Detail. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | TODO | Zastava Core Guild | ZASTAVA-CORE-12-201 | Define runtime event/admission DTOs, hashing helpers, and versioning strategy. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | TODO | Zastava Core Guild | ZASTAVA-CORE-12-202 | Provide configuration/logging/metrics utilities shared by Observer/Webhook. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | TODO | Zastava Core Guild | ZASTAVA-CORE-12-203 | Authority client helpers, OpTok caching, and security guardrails for runtime services. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | TODO | Zastava Core Guild | ZASTAVA-OPS-12-204 | Operational runbooks, alert rules, and dashboard exports for runtime plane. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | TODO | Zastava Observer Guild | ZASTAVA-OBS-12-001 | Container lifecycle watcher emitting deterministic runtime events with buffering. | +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | TODO | Zastava Observer Guild | ZASTAVA-OBS-12-002 | Capture entrypoint traces + loaded libraries, hashing binaries and linking to baseline SBOM. 
| +| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | TODO | Zastava Observer Guild | ZASTAVA-OBS-12-003 | Posture checks for signatures/SBOM/attestation with offline caching. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | TODO | Zastava Observer Guild | ZASTAVA-OBS-12-004 | Batch `/runtime/events` submissions with disk-backed buffer and rate limits. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | TODO | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-101 | Admission controller host with TLS bootstrap and Authority auth. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | TODO | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-102 | Query Scanner `/policy/runtime`, resolve digests, enforce verdicts. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | TODO | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-103 | Caching, fail-open/closed toggles, metrics/logging for admission decisions. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-301 | `/runtime/events` ingestion endpoint with validation, batching, storage hooks. |
+| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-302 | `/policy/runtime` endpoint joining SBOM baseline + policy verdict with TTL guidance. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AUTH-13-001 | Integrate Authority OIDC + DPoP flows with session management. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SCANS-13-002 | Build scans module (list/detail/SBOM/diff/attestation) with performance + accessibility targets. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-VEX-13-003 | Implement VEX explorer + policy editor with preview integration. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-ADMIN-13-004 | Deliver admin area (tenants/clients/quotas/licensing) with RBAC + audit hooks. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SCHED-13-005 | Scheduler panel: schedules CRUD, run history, dry-run preview. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-NOTIFY-13-006 | Notify panel: channels/rules CRUD, deliveries view, test send. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-POLICY-13-007 | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | CLI-RUNTIME-13-005 | Add runtime policy test verbs that consume `/policy/runtime` and display verdicts. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | CLI-OFFLINE-13-006 | Implement offline kit pull/import/status commands with integrity checks. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | CLI-PLUGIN-13-007 | Package non-core CLI verbs as restart-time plug-ins (manifest + loader tests). |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI, Scanner WebService Guild | CLI-RUNTIME-13-008 | Runtime policy contract sync – once `/api/v1/scanner/policy/runtime` exits TODO, verify CLI output against final schema (field names, metadata) and update formatter/tests if the contract moves. Capture joint review notes in docs/09 and link Scanner task sign-off. |
+| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI, QA Guild | CLI-RUNTIME-13-009 | Runtime policy smoke fixture – build Spectre test harness exercising `runtime policy test` against a stubbed backend to lock output shape (table + `--json`) and guard regressions. Integrate into `dotnet test` suite. |
+| Sprint 14 | Release & Offline Ops | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-REL-14-001 | Deterministic build/release pipeline with SBOM/provenance, signing, and manifest generation. |
+| Sprint 14 | Release & Offline Ops | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | DEVOPS-OFFLINE-14-002 | Offline kit packaging workflow with integrity verification and documentation. |
+| Sprint 14 | Release & Offline Ops | ops/deployment/TASKS.md | TODO | Deployment Guild | DEVOPS-OPS-14-003 | Deployment/update/rollback automation and channel management documentation. |
+| Sprint 14 | Release & Offline Ops | ops/licensing/TASKS.md | TODO | Licensing Guild | DEVOPS-LIC-14-004 | Registry token service tied to Authority, plan gating, revocation handling, monitoring. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Models/TASKS.md | TODO | Notify Models Guild | NOTIFY-MODELS-15-101 | Define core Notify DTOs, validation helpers, canonical serialization. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Models/TASKS.md | TODO | Notify Models Guild | NOTIFY-MODELS-15-102 | Publish schema docs and sample payloads for Notify. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Models/TASKS.md | TODO | Notify Models Guild | NOTIFY-MODELS-15-103 | Versioning/migration helpers for rules/templates/deliveries. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Storage.Mongo/TASKS.md | TODO | Notify Storage Guild | NOTIFY-STORAGE-15-201 | Mongo schemas/indexes for rules, channels, deliveries, digests, locks, audit. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Storage.Mongo/TASKS.md | TODO | Notify Storage Guild | NOTIFY-STORAGE-15-202 | Repositories with tenant scoping, soft delete, TTL, causal consistency options. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Storage.Mongo/TASKS.md | TODO | Notify Storage Guild | NOTIFY-STORAGE-15-203 | Delivery history retention and query APIs. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | TODO | Notify Queue Guild | NOTIFY-QUEUE-15-401 | Bus abstraction + Redis Streams adapter with ordering/idempotency. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | TODO | Notify Queue Guild | NOTIFY-QUEUE-15-402 | NATS JetStream adapter with health probes and failover. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | TODO | Notify Queue Guild | NOTIFY-QUEUE-15-403 | Delivery queue with retry/dead-letter + metrics. |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Engine/TASKS.md | TODO | Notify Engine Guild | NOTIFY-ENGINE-15-301 | Rules evaluation core (filters, throttles, idempotency). |
+| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Engine/TASKS.md | TODO | Notify Engine Guild | NOTIFY-ENGINE-15-302 | Action planner + digest coalescer.
| +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Engine/TASKS.md | TODO | Notify Engine Guild | NOTIFY-ENGINE-15-303 | Template rendering engine (Slack/Teams/Email/Webhook). | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Engine/TASKS.md | TODO | Notify Engine Guild | NOTIFY-ENGINE-15-304 | Test-send sandbox + preview utilities. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.WebService/TASKS.md | TODO | Notify WebService Guild | NOTIFY-WEB-15-101 | Minimal API host with Authority enforcement and plug-in loading. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.WebService/TASKS.md | TODO | Notify WebService Guild | NOTIFY-WEB-15-102 | Rules/channel/template CRUD with audit logging. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.WebService/TASKS.md | TODO | Notify WebService Guild | NOTIFY-WEB-15-103 | Delivery history & test-send endpoints. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.WebService/TASKS.md | TODO | Notify WebService Guild | NOTIFY-WEB-15-104 | Configuration binding + startup diagnostics. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Worker/TASKS.md | TODO | Notify Worker Guild | NOTIFY-WORKER-15-201 | Bus subscription + leasing loop with backoff. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Worker/TASKS.md | TODO | Notify Worker Guild | NOTIFY-WORKER-15-202 | Rules evaluation pipeline integration. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Worker/TASKS.md | TODO | Notify Worker Guild | NOTIFY-WORKER-15-203 | Channel dispatch orchestration with retries. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Worker/TASKS.md | TODO | Notify Worker Guild | NOTIFY-WORKER-15-204 | Metrics/telemetry for Notify workers. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-501 | Slack connector with rate-limit aware delivery. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-502 | Slack health/test-send support. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-601 | Teams connector with Adaptive Cards. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-602 | Teams health/test-send support. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Email/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-701 | SMTP connector with TLS + rendering. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Email/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-702 | DKIM + health/test-send flows. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Webhook/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-801 | Webhook connector with signing/retries. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Webhook/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-802 | Webhook health/test-send support. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-503 | Package Slack connector as restart-time plug-in (manifest + host registration). 
| +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-603 | Package Teams connector as restart-time plug-in (manifest + host registration). | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Email/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-703 | Package Email connector as restart-time plug-in (manifest + host registration). | +| Sprint 15 | Notify Foundations | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-EVENTS-15-201 | Emit `scanner.report.ready` + `scanner.scan.completed` events. | +| Sprint 15 | Benchmarks | bench/TASKS.md | TODO | Bench Guild, Notify Team | BENCH-NOTIFY-15-001 | Notify dispatch throughput bench with results CSV. | +| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Webhook/TASKS.md | TODO | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-803 | Package Webhook connector as restart-time plug-in (manifest + host registration). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Define Scheduler DTOs & validation. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-102 | Publish schema docs/sample payloads. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | TODO | Scheduler Models Guild | SCHED-MODELS-16-103 | Versioning/migration helpers for schedules/runs. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | TODO | Scheduler Storage Guild | SCHED-STORAGE-16-201 | Mongo schemas/indexes for Scheduler state (models ready 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | TODO | Scheduler Storage Guild | SCHED-STORAGE-16-202 | Repositories with tenant scoping, TTL, causal consistency (models ready 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | TODO | Scheduler Storage Guild | SCHED-STORAGE-16-203 | Audit + stats materialization for UI (models ready 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | TODO | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Queue abstraction + Redis Streams adapter (samples available 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | TODO | Scheduler Queue Guild | SCHED-QUEUE-16-402 | NATS JetStream adapter with health probes (samples available 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | TODO | Scheduler Queue Guild | SCHED-QUEUE-16-403 | Dead-letter handling + metrics (samples available 2025-10-19). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | TODO | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Ingest BOM-Index into roaring bitmap store. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | TODO | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-302 | Query APIs for ResolveByPurls/ResolveByVulns/ResolveAll. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | TODO | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-303 | Snapshot/compaction/invalidation workflow. 
| +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | DOING | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-300 | **STUB** ImpactIndex ingest/query using fixtures (to be removed by SP16 completion). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-16-101 | Minimal API host with Authority enforcement. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-16-102 | Schedules CRUD (cron validation, pause/resume, audit). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-16-103 | Runs API (list/detail/cancel) + impact previews. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-16-104 | Feedser/Vexer webhook handlers with security enforcement. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-16-201 | Planner loop (cron/event triggers, leases, fairness). | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-16-202 | ImpactIndex targeting and shard planning. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-16-203 | Runner execution invoking Scanner analysis/content refresh. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-16-204 | Emit rescan/report events for Notify/UI. | +| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-16-205 | Metrics/telemetry for Scheduler planners/runners. | +| Sprint 16 | Benchmarks | bench/TASKS.md | TODO | Bench Guild, Scheduler Team | BENCH-IMPACT-16-001 | ImpactIndex throughput bench + RAM profile. | +| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Scanner.Emit/TASKS.md | TODO | Emit Guild | SCANNER-EMIT-17-701 | Record GNU build-id for ELF components and surface it in SBOM/diff outputs. | +| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Zastava.Observer/TASKS.md | TODO | Zastava Observer Guild | ZASTAVA-OBS-17-005 | Collect GNU build-id during runtime observation and attach it to emitted events. | +| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-RUNTIME-17-401 | Persist runtime build-id observations and expose them for debug-symbol correlation. | +| Sprint 17 | Symbol Intelligence & Forensics | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-REL-17-002 | Ship stripped debug artifacts organised by build-id within release/offline kits. | +| Sprint 17 | Symbol Intelligence & Forensics | docs/TASKS.md | TODO | Docs Guild | DOCS-RUNTIME-17-004 | Document build-id workflows for SBOMs, runtime events, and debug-store usage. | diff --git a/SPRINTS_IMPLEMENTION_PLAN.md b/SPRINTS_IMPLEMENTION_PLAN.md deleted file mode 100644 index aaeedffa..00000000 --- a/SPRINTS_IMPLEMENTION_PLAN.md +++ /dev/null @@ -1,295 +0,0 @@ -# StellaOps Multi-Sprint Implementation Plan (Agile Track) - -This plan translates the current `SPRINTS.md` (read the file if you have not) backlog into parallel-friendly execution clusters. 
Each sprint is decomposed into **groups** that can run concurrently without stepping on the same directories. For every group we capture: - -- **Tasks** (ID · est. effort · path) -- **Acceptance metrics** (quantitative targets to reduce rework) -- **Gate** artifacts required before dependent groups can start - -Durations are estimated work sizes (1 d ≈ one focused engineer day). Milestones are gated by artifacts—not calendar dates—to keep us agile and adaptable to competitor pressure. - ---- - -## Sprint 9 – Scanner Core Foundations (ID: SP9, ~3 w) - -### Group SP9-G1 — Core Contracts & Observability (src/StellaOps.Scanner.Core) ~1 w -- Tasks: - - SCANNER-CORE-09-501 · 3 d · `/src/StellaOps.Scanner.Core/TASKS.md` - - SCANNER-CORE-09-502 · 2 d · same path - - SCANNER-CORE-09-503 · 2 d · same path -- Acceptance metrics: DTO round-trip tests stable; middleware adds ≤5 µs per call. -- Gate SP9-G1 → WebService: `scanner-core-contracts.md` snippet plus `ScannerCoreContractsTests` green. - -### Group SP9-G2 — Queue Backbone (src/StellaOps.Scanner.Queue) ~1 w -- Tasks: SCANNER-QUEUE-09-401 (3 d), -402 (2 d), -403 (2 d) · `/src/StellaOps.Scanner.Queue/TASKS.md` -- Acceptance: dequeue latency p95 ≤20 ms at 40 rps; chaos test retains leases. -- Gate: Redis/NATS adapters docs + `QueueLeaseIntegrationTests` passing. -- Status: **DONE (2025-10-19)** – Gate satisfied via Redis/NATS adapter docs and `QueueLeaseIntegrationTests` run under fake clock. - -### Group SP9-G3 — Storage Backbone (src/StellaOps.Scanner.Storage) ~1 w -- Tasks: SCANNER-STORAGE-09-301 (3 d), -302 (2 d), -303 (2 d) -- Acceptance: majority write/read ≤50 ms; TTL verified. -- Gate: migrations checked in; `StorageDualWriteFixture` passes. -- Status: **DONE (2025-10-19)** – Mongo bootstrapper + migrations committed; MinIO dual-write service wired; `StorageDualWriteFixture` green on Mongo2Go. - -### Group SP9-G4 — WebService Host & Policy Surfacing (src/StellaOps.Scanner.WebService) ~1.2 w -- Tasks: SCANNER-WEB-09-101 (2 d), -102 (3 d), -103 (2 d), -104 (2 d), SCANNER-POLICY-09-105 (3 d), SCANNER-POLICY-09-106 (4 d) -- Acceptance: `/api/v1/scans` enqueue p95 ≤50 ms under synthetic load; policy validation errors actionable; `/reports` response signed. -- Gate SP9-G4 → SP10/SP11: `/reports` OpenAPI frozen; sample signed envelope committed in `samples/api/reports/`. -- Status: **IN PROGRESS (2025-10-19)** – Minimal host and `/api/v1/scans` endpoints delivered (SCANNER-WEB-09-101/102 done); progress streaming and policy/report surfaces remain. - -### Group SP9-G5 — Worker Host (src/StellaOps.Scanner.Worker) ~1 w -- Tasks: SCANNER-WORKER-09-201 (3 d), -202 (3 d), -203 (2 d), -204 (2 d), -205 (1 d) -- Acceptance: job lease never drops <3× heartbeat; progress events deterministic. -- Gate: `WorkerBasicScanScenario` integration recorded + optional live queue smoke validation. -- Status: **DONE (2025-10-19)** – Host bootstrap, heartbeat jitter clamp, deterministic stage pipeline, metrics, and Redis-backed smoke harness landed; `WorkerBasicScanScenarioTests` and `RedisWorkerSmokeTests` (flagged) green. - -### Group SP9-G6 — Buildx Plug-in (src/StellaOps.Scanner.Sbomer.BuildXPlugin) ~0.8 w -- Tasks: SP9-BLDX-09-001 (3 d), SP9-BLDX-09-002 (2 d), SP9-BLDX-09-003 (2 d), SP9-BLDX-09-004 (2 d), SP9-BLDX-09-005 (1 d) -- Acceptance: build-time overhead ≤300 ms/layer on 4 vCPU; CAS handshake reliable in CI sample. -- Gate: buildx demo workflow artifact + quickstart doc + determinism regression guard in CI. 
-- Status: **DONE (2025-10-19)** — manifest+CAS scaffold, descriptor/Attestor hand-off, GitHub/Gitea determinism workflows, quickstart update, and golden tests committed. - -### Group SP9-G7 — Policy Engine Core (src/StellaOps.Policy) ~1 w -- Tasks: POLICY-CORE-09-001 (2 d) ✅, -002 (3 d) ✅, -003 (3 d) ✅, -004 (3 d), -005 (4 d), -006 (2 d) -- Acceptance: policy parsing ≥200 files/s; preview diff response <200 ms for 500-component SBOM; quieting logic audited. -- Gate: `policy-schema@1` published; revision digests stored; preview API doc updated. - -### Group SP9-G8 — DevOps Early Guardrails (ops/devops) ~0.4 w -- Tasks: DEVOPS-HELM-09-001 (3 d) — **DONE (2025-10-19)** -- Acceptance: helm/compose profiles for dev/stage/airgap lint + dry-run clean; manifests pinned to digest. -- Gate: profiles merged under `deploy/`; install guide cross-link satisfied via `deploy/compose/` bundles and `docs/21_INSTALL_GUIDE.md`. - -### Group SP9-G9 — Documentation & Events (docs/) ~0.4 w -- Tasks: DOCS-ADR-09-001 (2 d), DOCS-EVENTS-09-002 (2 d) -- Acceptance: ADR process broadcast; event schemas validated via CI. -- Gate: `docs/adr/index.md` linking template; `docs/events/README.md` referencing schemas. -- Status: **DONE (2025-10-19)** – ADR contribution guide + template updates merged, Docs CI Ajv validation wired, events catalog documented, guild announcement recorded. - ---- - -## Sprint 10 – Scanner Analyzers & SBOM (ID: SP10, ~4 w) - -### Group SP10-G1 — OS Analyzer Plug-ins (src/StellaOps.Scanner.Analyzers.OS) ~1 w -- Tasks: SCANNER-ANALYZERS-OS-10-201..207 (durations 2–3 d each) -- Acceptance: analyzer runtime <1.5 s/image; memory <250 MB. -- Gate: plug-ins packaged under `plugins/scanner/analyzers/os/`; determinism CI job green. - -### Group SP10-G2 — Language Analyzer Plug-ins (src/StellaOps.Scanner.Analyzers.Lang) ~1.5 w -- Tasks: SCANNER-ANALYZERS-LANG-10-301..309 -- Acceptance: Node analyzer handles 10 k modules <2 s; Python memory <200 MB. -- Gate: golden outputs stored; plugin manifests present. - -### Group SP10-G3 — EntryTrace Plug-ins (src/StellaOps.Scanner.EntryTrace) ~0.8 w -- Tasks: SCANNER-ENTRYTRACE-10-401..407 -- Acceptance: ≥95 % launcher resolution success on samples; unknown reasons enumerated. -- Gate: entrytrace plug-ins packaged; explainability doc updated. - -### Group SP10-G4 — SBOM Composition & BOM Index (src/StellaOps.Scanner.Diff + Emit) ~1 w -- Tasks: SCANNER-DIFF-10-501..503, SCANNER-EMIT-10-601..606 -- Acceptance: BOM-Index emission <500 ms/image; diff output deterministic across runs. -- Gate SP10-G4 → SP16: `docs/artifacts/bom-index/` schema + fixtures; tests `BOMIndexGoldenIsStable` & `UsageFlagsAreAccurate` green. - -### Group SP10-G5 — Cache Subsystem (src/StellaOps.Scanner.Cache) ~0.6 w -- Tasks: SCANNER-CACHE-10-101..104 -- Acceptance: cache hit instrumentation validated; eviction keeps footprint <5 GB. -- Gate: cache configuration doc; integration test `LayerCacheRoundTrip` green. - -### Group SP10-G6 — Benchmarks & Samples (bench/, samples/, ops/devops) ~0.6 w -- Tasks: BENCH-SCANNER-10-001 (2 d), SAMPLES-10-001 (finish – 3 d), DEVOPS-PERF-10-001 (2 d) -- Acceptance: analyzer benchmark CSV published; perf CI guard ensures SBOM compose <5 s; sample SBOM/BOM-Index committed. -- Gate: bench results stored under `bench/`; `samples/` populated; CI job added. 
- ---- - -## Sprint 11 – Signing Chain Bring-up (ID: SP11, ~3 w) - -### Group SP11-G1 — Authority Sender Constraints (src/StellaOps.Authority) ~0.8 w -- Tasks: AUTH-DPOP-11-001 (3 d), AUTH-MTLS-11-002 (2 d) -- Acceptance: DPoP nonce dance validated; mTLS tokens issued in ≤40 ms. -- Gate: updated Authority OpenAPI; QA scripts verifying DPoP/mTLS. - -### Group SP11-G2 — Signer Service (src/StellaOps.Signer) ~1.2 w -- Tasks: SIGNER-API-11-101 (4 d), SIGNER-REF-11-102 (2 d), SIGNER-QUOTA-11-103 (2 d) -- Acceptance: signing throughput ≥30 req/min; p95 latency ≤200 ms. -- Gate SP11-G2 → Attestor/UI: `/sign/dsse` OpenAPI frozen; signed DSSE bundle in repo; Rekor interop test passing. - -### Group SP11-G3 — Attestor Service (src/StellaOps.Attestor) ~1 w -- Tasks: ATTESTOR-API-11-201 (3 d), ATTESTOR-VERIFY-11-202 (2 d), ATTESTOR-OBS-11-203 (2 d) -- Acceptance: inclusion proof retrieval <500 ms; audit log coverage 100 %. -- Gate: Attestor API doc + verification script. - -### Group SP11-G4 — UI Attestation Hooks (src/StellaOps.UI) ~0.4 w -- Tasks: UI-ATTEST-11-005 (3 d) -- Acceptance: attestation panel renders within 200 ms; Rekor link verified. -- Gate SP11-G4 → SP13-G1: recorded UX walkthrough. - ---- - -## Sprint 12 – Runtime Guardrails (ID: SP12, ~3 w) - -### Group SP12-G1 — Zastava Core (src/StellaOps.Zastava.Core) ~0.8 w -- Tasks: ZASTAVA-CORE-12-201..204 -- Acceptance: DTO tests stable; configuration docs produced. -- Gate: schema doc + logging helpers integrated. - -### Group SP12-G2 — Zastava Observer (src/StellaOps.Zastava.Observer) ~0.8 w -- Tasks: ZASTAVA-OBS-12-001..004 -- Acceptance: observer memory <200 MB; event flush ≤2 s. -- Gate: sample runtime events stored; offline buffer test passes. - -### Group SP12-G3 — Zastava Webhook (src/StellaOps.Zastava.Webhook) ~0.6 w -- Tasks: ZASTAVA-WEBHOOK-12-101..103 -- Acceptance: admission latency p95 ≤45 ms; cache TTL adhered to. -- Gate: TLS rotation procedure documented; readiness probe script. - -### Group SP12-G4 — Scanner Runtime APIs (src/StellaOps.Scanner.WebService) ~0.8 w -- Tasks: SCANNER-RUNTIME-12-301 (2 d), SCANNER-RUNTIME-12-302 (3 d) -- Acceptance: `/runtime/events` handles 500 events/sec; `/policy/runtime` output matches webhook decisions. -- Gate SP12-G4 → SP13/SP15: API documented, fixtures updated. - ---- - -## Sprint 13 – UX & CLI Experience (ID: SP13, ~2 w) - -### Group SP13-G1 — UI Shell & Panels (src/StellaOps.UI) ~1.6 w -- Tasks: UI-AUTH-13-001 (3 d), UI-SCANS-13-002 (4 d), UI-VEX-13-003 (3 d), UI-ADMIN-13-004 (2 d), UI-SCHED-13-005 (3 d), UI-NOTIFY-13-006 (3 d) -- Acceptance: Lighthouse ≥85; Scheduler/Notify panels function against mocked APIs. -- Gate: UI dev server fixtures committed; QA sign-off captured. - -### Group SP13-G2 — CLI Enhancements (src/StellaOps.Cli) ~0.8 w -- Tasks: CLI-RUNTIME-13-005 (3 d), CLI-OFFLINE-13-006 (3 d), CLI-PLUGIN-13-007 (2 d) -- Acceptance: runtime policy CLI completes <1 s for 10 images; offline kit commands resume downloads. -- Gate: CLI plugin manifest doc; smoke tests covering new verbs. - ---- - -## Sprint 14 – Release & Offline Ops (ID: SP14, ~2 w) - -### Group SP14-G1 — Release Automation (ops/devops) ~0.8 w -- Tasks: DEVOPS-REL-14-001 (4 d) -- Acceptance: reproducible build diff tool shows zero drift across two runs; signing pipeline green. -- Gate: signed manifest + provenance published. - -### Group SP14-G2 — Offline Kit Packaging (ops/offline-kit) ~0.6 w -- Tasks: DEVOPS-OFFLINE-14-002 (3 d) -- Acceptance: kit import <5 min with integrity verification CLI. 
-- Gate: kit doc updated; import script included. - -### Group SP14-G3 — Deployment Playbooks (ops/deployment) ~0.4 w -- Tasks: DEVOPS-OPS-14-003 (2 d) -- Acceptance: rollback drill recorded; compatibility matrix produced. -- Gate: playbook PR merged with Ops sign-off. - -### Group SP14-G4 — Licensing Token Service (ops/licensing) ~0.4 w -- Tasks: DEVOPS-LIC-14-004 (2 d) -- Acceptance: token service handles 100 req/min; revocation latency <60 s. -- Gate: monitoring dashboard links; failover doc. - ---- - -## Sprint 15 – Notify Foundations (ID: SP15, ~3 w) - -### Group SP15-G1 — Models & Storage (src/StellaOps.Notify.Models + Storage.Mongo) ~0.8 w -- Tasks: NOTIFY-MODELS-15-101 (2 d), -102 (2 d), -103 (1 d); NOTIFY-STORAGE-15-201 (3 d), -202 (2 d), -203 (1 d) -- Acceptance: rule CRUD latency <120 ms; delivery retention job verified. -- Gate: schema docs + fixtures published. - -### Group SP15-G2 — Engine & Queue (src/StellaOps.Notify.Engine + Queue) ~0.8 w -- Tasks: NOTIFY-ENGINE-15-301..304, NOTIFY-QUEUE-15-401..403 -- Acceptance: rules evaluation ≥5k events/min; queue dead-letter <0.5 %. -- Gate: digest outputs committed; queue config doc updated. - -### Group SP15-G3 — WebService & Worker (src/StellaOps.Notify.WebService + Worker) ~0.8 w -- Tasks: NOTIFY-WEB-15-101..104, NOTIFY-WORKER-15-201..204 -- Acceptance: API p95 <120 ms; worker delivery success ≥99 %. -- Gate: end-to-end fixture run producing delivery record. - -### Group SP15-G4 — Channel Plug-ins (src/StellaOps.Notify.Connectors.*) ~0.6 w -- Tasks: NOTIFY-CONN-SLACK-15-501..503, NOTIFY-CONN-TEAMS-15-601..603, NOTIFY-CONN-EMAIL-15-701..703, NOTIFY-CONN-WEBHOOK-15-801..803 -- Acceptance: channel-specific retry policies verified; rate limits respected. -- Gate: plug-in manifests inside `plugins/notify/**`; test-send docs. - -### Group SP15-G5 — Events & Benchmarks (src/StellaOps.Scanner.WebService + bench) ~0.5 w -- Tasks: SCANNER-EVENTS-15-201 (2 d), BENCH-NOTIFY-15-001 (2 d) -- Acceptance: event emission latency <100 ms; throughput bench results stored. -- Gate: `docs/events/samples/` contains sample payloads; bench CSV in repo. - ---- - -## Sprint 16 – Scheduler Intelligence (ID: SP16, ~4 w) - -### Group SP16-G1 — Models & Storage (src/StellaOps.Scheduler.Models + Storage.Mongo) ~1 w -- Tasks: SCHED-MODELS-16-101 (3 d), -102 (2 d), -103 (2 d); SCHED-STORAGE-16-201 (3 d), -202 (2 d), -203 (2 d) -- Acceptance: schedule CRUD latency <120 ms; run retention TTL enforced. -- Gate: schema doc + integration tests passing. - -### Group SP16-G2 — ImpactIndex & Queue (src/StellaOps.Scheduler.ImpactIndex + Queue + Bench) ~1.2 w -- Tasks: SCHED-IMPACT-16-300 (2 d, DOING), SCHED-IMPACT-16-301 (3 d), -302 (3 d), -303 (2 d); SCHED-QUEUE-16-401..403 (each 2 d); BENCH-IMPACT-16-001 (2 d) -- Acceptance: impact resolve 10k productKeys <300 ms hot; stub removed by sprint end. -- Gate: roaring snapshot stored; bench CSV published; removal plan for stub recorded. - -### Group SP16-G3 — Scheduler WebService (src/StellaOps.Scheduler.WebService) ~0.8 w -- Tasks: SCHED-WEB-16-101..104 (each 2 d) -- Acceptance: preview endpoint <250 ms; webhook security enforced. -- Gate: OpenAPI published; dry-run JSON fixtures stored. - -### Group SP16-G4 — Scheduler Worker (src/StellaOps.Scheduler.Worker) ~1 w -- Tasks: SCHED-WORKER-16-201 (3 d), -202 (2 d), -203 (3 d), -204 (2 d), -205 (2 d) -- Acceptance: planner fairness metrics captured; runner success ≥98 % across 1k sims. -- Gate: event emission to Notify verified; metrics dashboards live. 
- ---- - -## Sprint 17 – Symbol Intelligence & Forensics (ID: SP17, ~2.5 w) - -### Group SP17-G1 — Scanner Forensics (src/StellaOps.Scanner.Emit + WebService) ~1.2 w -- Tasks: SCANNER-EMIT-17-701 (4 d), SCANNER-RUNTIME-17-401 (3 d) -- Acceptance: forensic overlays add ≤150 ms per image; runtime API exposes symbol hints with feature flag. -- Gate: forensic SBOM samples committed; API doc updated. - -### Group SP17-G2 — Zastava Observability (src/StellaOps.Zastava.Observer) ~0.6 w -- Tasks: ZASTAVA-OBS-17-005 (3 d) -- Acceptance: new telemetry surfaces symbol diffs; observer CPU <10 % under load. -- Gate: Grafana dashboard export, alert thresholds defined. - -### Group SP17-G3 — Release Hardening (ops/devops) ~0.4 w -- Tasks: DEVOPS-REL-17-002 (2 d) -- Acceptance: deterministic build verifier job updated to include forensics artifacts. -- Gate: CI pipeline stage `forensics-verify` green. - -### Group SP17-G4 — Documentation (docs/) ~0.3 w -- Tasks: DOCS-RUNTIME-17-004 (2 d) -- Acceptance: runtime forensic guide published with troubleshooting. -- Gate: docs review sign-off; links added to UI help. - ---- - -## Integration Buffers -- **INT-A (0.3 w, after SP10):** Image → SBOM → BOM-Index → Scheduler preview → UI dry-run using fixtures. -- **INT-B (0.3 w, after SP11 & SP15):** SBOM → policy verdict → signed DSSE → Rekor entry → Notify delivery end-to-end. - -## Parallelisation Strategy -- SP9 core modules and SP11 authority upgrades can progress in parallel; scanner clients rely on feature flags while DPoP/mTLS hardening lands. -- SP10 SBOM emission may start alongside Scheduler ImpactIndex using `samples/` fixtures; stub SCHED-IMPACT-16-300 keeps velocity while awaiting roaring index. -- Notify foundations (SP15) can begin once event schemas freeze (delivered in SP9-G9/SP12-G4), consuming canned events until Scanner emits live ones. -- UI (SP13) uses mocked endpoints early, decoupling front-end delivery from backend readiness. - -## Risk Registry - -| Risk ID | Description | Owner | Mitigation | Trigger | -|---------|-------------|-------|-----------|---------| -| R1 | BOM-Index memory blow-up on large fleets | Scheduler ImpactIndex Guild | Shard + mmap plan; monitor BENCH-IMPACT-16-001 | RAM > 8 GB in bench | -| R2 | Buildx plugin latency regression | BuildX Guild | DEVOPS-PERF-10-001 guard; fallback to post-build scan | Buildx job >300 ms/layer | -| R3 | Notify digests flooding Slack | Notify Engine Guild | throttle defaults, BENCH-NOTIFY-15-001 coverage | Dropped messages >1 % | -| R4 | Policy precedence confusion | Policy Guild | ADR, preview API, unit tests | Operator escalation about precedence | -| R5 | ImpactIndex stub lingers | Scheduler ImpactIndex Guild | Track SCHED-IMPACT-16-300 removal in sprint review | Stub present past SP16 | -| R6 | Symbol forensics slows runtime | Scanner Emit Guild | Feature flag; perf tests in SP17-G1 | Forensics adds >150 ms/image | - -## Envelope & ADR Governance -- Event schemas (`docs/events/*.json`) versioned; producers must bump suffix on breaking changes. -- ADR template (`docs/adr/0000-template.md`) mandatory for BOM-Index format, event envelopes, DPoP nonce policy, Rekor migration. - ---- - -**Summary:** The plan keeps high-impact artifacts (policy engine, BOM-Index, signing chain) on the critical path while unlocking parallel tracks (Notify, Scheduler, UI) through early schema freezes and fixtures. Integration buffers ensure cross-team touchpoints are validated continuously, supporting rapid iteration against competitive pressure. 
diff --git a/bench/Scanner.Analyzers/README.md b/bench/Scanner.Analyzers/README.md new file mode 100644 index 00000000..bb81b1a8 --- /dev/null +++ b/bench/Scanner.Analyzers/README.md @@ -0,0 +1,36 @@ +# Scanner Analyzer Microbench Harness + +The bench harness exercises the language analyzers against representative filesystem layouts so that regressions are caught before they ship. + +## Layout +- `run-bench.js` – Node.js script that traverses the sample `node_modules/` and `site-packages/` trees, replicating the package discovery work performed by the upcoming analyzers. +- `config.json` – Declarative list of scenarios the harness executes. Each scenario points at a directory in `samples/`. +- `baseline.csv` – Reference numbers captured on the 4 vCPU warm rig described in `docs/12_PERFORMANCE_WORKBOOK.md`. CI publishes fresh CSVs so perf trends stay visible. + +## Running locally + +```bash +cd bench/Scanner.Analyzers +node run-bench.js --out baseline.csv --samples ../.. +``` + +The harness prints a table to stdout and writes the CSV (if `--out` is specified) with the following headers: + +``` +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +``` + +Use `--iterations` to override the default (5 passes per scenario) and `--threshold-ms` to customize the failure budget. Budgets default to 5 000 ms, aligned with the SBOM compose objective. + +## Adding scenarios +1. Drop the fixture tree under `samples//...`. +2. Append a new scenario entry to `config.json` describing: + - `id` – snake_case scenario name (also used in CSV). + - `label` – human-friendly description shown in logs. + - `root` – path to the directory that will be scanned. + - `matcher` – glob describing files that will be parsed (POSIX `**` patterns). + - `parser` – `node` or `python` to choose the metadata reader. +3. Re-run `node run-bench.js --out baseline.csv`. +4. Commit both the fixture and updated baseline. + +The harness is intentionally dependency-free to remain runnable inside minimal CI runners. diff --git a/bench/Scanner.Analyzers/baseline.csv b/bench/Scanner.Analyzers/baseline.csv new file mode 100644 index 00000000..4940af95 --- /dev/null +++ b/bench/Scanner.Analyzers/baseline.csv @@ -0,0 +1,3 @@ +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +node_monorepo_walk,5,4,233.9428,319.8564,344.4611 +python_site_packages_walk,5,3,72.9166,74.8970,74.9884 diff --git a/bench/Scanner.Analyzers/config.json b/bench/Scanner.Analyzers/config.json new file mode 100644 index 00000000..34558e91 --- /dev/null +++ b/bench/Scanner.Analyzers/config.json @@ -0,0 +1,20 @@ +{ + "thresholdMs": 5000, + "iterations": 5, + "scenarios": [ + { + "id": "node_monorepo_walk", + "label": "Node.js monorepo package.json harvest", + "root": "samples/runtime/npm-monorepo/node_modules", + "matcher": "**/package.json", + "parser": "node" + }, + { + "id": "python_site_packages_walk", + "label": "Python site-packages dist-info crawl", + "root": "samples/runtime/python-venv/lib/python3.11/site-packages", + "matcher": "**/*.dist-info/METADATA", + "parser": "python" + } + ] +} diff --git a/bench/Scanner.Analyzers/lang/README.md b/bench/Scanner.Analyzers/lang/README.md new file mode 100644 index 00000000..6c94894e --- /dev/null +++ b/bench/Scanner.Analyzers/lang/README.md @@ -0,0 +1,12 @@ +# Scanner Language Analyzer Benchmarks + +This directory will capture benchmark results for language analyzers (Node, Python, Go, .NET, Rust). + +Pending tasks: +- LA1: Node analyzer microbench CSV + flamegraph. +- LA2: Python hash throughput CSV. 
+- LA3: Go build info extraction benchmarks. +- LA4: .NET RID dedupe performance matrix. +- LA5: Rust heuristic coverage comparisons. + +Results should be committed as deterministic CSV/JSON outputs with accompanying methodology notes. diff --git a/bench/Scanner.Analyzers/run-bench.js b/bench/Scanner.Analyzers/run-bench.js new file mode 100644 index 00000000..1601265e --- /dev/null +++ b/bench/Scanner.Analyzers/run-bench.js @@ -0,0 +1,249 @@ +#!/usr/bin/env node +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const { performance } = require('perf_hooks'); + +function globToRegExp(pattern) { + let working = pattern + .replace(/\*\*/g, ':::DOUBLE_WILDCARD:::') + .replace(/\*/g, ':::SINGLE_WILDCARD:::'); + working = working.replace(/([.+^${}()|[\]\\])/g, '\\$1'); + working = working + .replace(/:::DOUBLE_WILDCARD:::\//g, '(?:.*/)?') + .replace(/:::DOUBLE_WILDCARD:::/g, '.*') + .replace(/:::SINGLE_WILDCARD:::/g, '[^/]*'); + return new RegExp(`^${working}$`); +} + +function walkFiles(root, matcher) { + const out = []; + const stack = [root]; + while (stack.length) { + const current = stack.pop(); + const stat = fs.statSync(current, { throwIfNoEntry: true }); + if (stat.isDirectory()) { + const entries = fs.readdirSync(current); + for (const entry of entries) { + stack.push(path.join(current, entry)); + } + } else if (stat.isFile()) { + const relativePath = path.relative(root, current).replace(/\\/g, '/'); + if (matcher.test(relativePath)) { + out.push(current); + } + } + } + return out; +} + +function parseArgs(argv) { + const args = { + config: path.join(__dirname, 'config.json'), + iterations: undefined, + thresholdMs: undefined, + out: undefined, + repoRoot: path.join(__dirname, '..', '..'), + }; + + for (let i = 2; i < argv.length; i++) { + const current = argv[i]; + switch (current) { + case '--config': + args.config = argv[++i]; + break; + case '--iterations': + args.iterations = Number(argv[++i]); + break; + case '--threshold-ms': + args.thresholdMs = Number(argv[++i]); + break; + case '--out': + args.out = argv[++i]; + break; + case '--repo-root': + case '--samples': + args.repoRoot = argv[++i]; + break; + default: + throw new Error(`Unknown argument: ${current}`); + } + } + + return args; +} + +function loadConfig(configPath) { + const json = fs.readFileSync(configPath, 'utf8'); + const cfg = JSON.parse(json); + if (!Array.isArray(cfg.scenarios) || cfg.scenarios.length === 0) { + throw new Error('config.scenarios must be a non-empty array'); + } + return cfg; +} + +function ensureWithinRepo(repoRoot, target) { + const relative = path.relative(repoRoot, target); + if (relative === '' || relative === '.') { + return true; + } + return !relative.startsWith('..') && !path.isAbsolute(relative); +} + +function parseNodePackage(contents) { + const parsed = JSON.parse(contents); + if (!parsed.name || !parsed.version) { + throw new Error('package.json missing name/version'); + } + return { name: parsed.name, version: parsed.version }; +} + +function parsePythonMetadata(contents) { + let name; + let version; + for (const line of contents.split(/\r?\n/)) { + if (!name && line.startsWith('Name:')) { + name = line.slice(5).trim(); + } else if (!version && line.startsWith('Version:')) { + version = line.slice(8).trim(); + } + if (name && version) { + break; + } + } + if (!name || !version) { + throw new Error('METADATA missing Name/Version headers'); + } + return { name, version }; +} + +function formatRow(row) { + const cols = [ + row.id.padEnd(28), + 
row.sampleCount.toString().padStart(5), + row.meanMs.toFixed(2).padStart(9), + row.p95Ms.toFixed(2).padStart(9), + row.maxMs.toFixed(2).padStart(9), + ]; + return cols.join(' | '); +} + +function percentile(sortedDurations, percentile) { + if (sortedDurations.length === 0) { + return 0; + } + const rank = (percentile / 100) * (sortedDurations.length - 1); + const lower = Math.floor(rank); + const upper = Math.ceil(rank); + const weight = rank - lower; + if (upper >= sortedDurations.length) { + return sortedDurations[lower]; + } + return sortedDurations[lower] + weight * (sortedDurations[upper] - sortedDurations[lower]); +} + +function main() { + const args = parseArgs(process.argv); + const cfg = loadConfig(args.config); + const iterations = args.iterations ?? cfg.iterations ?? 5; + const thresholdMs = args.thresholdMs ?? cfg.thresholdMs ?? 5000; + + const results = []; + const failures = []; + + for (const scenario of cfg.scenarios) { + const scenarioRoot = path.resolve(args.repoRoot, scenario.root); + if (!ensureWithinRepo(args.repoRoot, scenarioRoot)) { + throw new Error(`Scenario root ${scenario.root} escapes repo root ${args.repoRoot}`); + } + if (!fs.existsSync(scenarioRoot)) { + throw new Error(`Scenario root ${scenarioRoot} does not exist`); + } + + const matcher = globToRegExp(scenario.matcher.replace(/\\/g, '/')); + const durations = []; + let sampleCount = 0; + + for (let attempt = 0; attempt < iterations; attempt++) { + const start = performance.now(); + const files = walkFiles(scenarioRoot, matcher); + if (files.length === 0) { + throw new Error(`Scenario ${scenario.id} matched no files`); + } + + for (const filePath of files) { + const contents = fs.readFileSync(filePath, 'utf8'); + if (scenario.parser === 'node') { + parseNodePackage(contents); + } else if (scenario.parser === 'python') { + parsePythonMetadata(contents); + } else { + throw new Error(`Unknown parser ${scenario.parser} for scenario ${scenario.id}`); + } + } + const end = performance.now(); + durations.push(end - start); + sampleCount = files.length; + } + + durations.sort((a, b) => a - b); + const mean = durations.reduce((acc, value) => acc + value, 0) / durations.length; + const p95 = percentile(durations, 95); + const max = durations[durations.length - 1]; + + if (max > thresholdMs) { + failures.push(`${scenario.id} exceeded threshold: ${(max).toFixed(2)} ms > ${thresholdMs} ms`); + } + + results.push({ + id: scenario.id, + label: scenario.label, + sampleCount, + meanMs: mean, + p95Ms: p95, + maxMs: max, + iterations, + }); + } + + console.log('Scenario | Count | Mean(ms) | P95(ms) | Max(ms)'); + console.log('---------------------------- | ----- | --------- | --------- | ----------'); + for (const row of results) { + console.log(formatRow(row)); + } + + if (args.out) { + const header = 'scenario,iterations,sample_count,mean_ms,p95_ms,max_ms\n'; + const csvRows = results + .map((row) => + [ + row.id, + row.iterations, + row.sampleCount, + row.meanMs.toFixed(4), + row.p95Ms.toFixed(4), + row.maxMs.toFixed(4), + ].join(',') + ) + .join('\n'); + fs.writeFileSync(args.out, header + csvRows + '\n', 'utf8'); + } + + if (failures.length > 0) { + console.error('\nPerformance threshold exceeded:'); + for (const failure of failures) { + console.error(` - ${failure}`); + } + process.exitCode = 1; + } +} + +if (require.main === module) { + try { + main(); + } catch (err) { + console.error(err instanceof Error ? 
err.message : err); + process.exit(1); + } +} diff --git a/bench/TASKS.md b/bench/TASKS.md index e62bc419..9ac28b5c 100644 --- a/bench/TASKS.md +++ b/bench/TASKS.md @@ -2,6 +2,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| BENCH-SCANNER-10-001 | TODO | Bench Guild, Scanner Team | SCANNER-ANALYZERS-LANG-10-303 | Analyzer microbench harness (node_modules, site-packages) + baseline CSV. | Harness committed under `bench/Scanner.Analyzers`; baseline CSV recorded; CI job publishes results. | +| BENCH-SCANNER-10-001 | DONE | Bench Guild, Scanner Team | SCANNER-ANALYZERS-LANG-10-303 | Analyzer microbench harness (node_modules, site-packages) + baseline CSV. | Harness committed under `bench/Scanner.Analyzers`; baseline CSV recorded; CI job publishes results. | +| BENCH-SCANNER-10-002 | TODO | Bench Guild, Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301..309 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | Harness executes analyzer assemblies end-to-end; updated baseline committed; CI trend doc linked. | | BENCH-IMPACT-16-001 | TODO | Bench Guild, Scheduler Team | SCHED-IMPACT-16-301 | ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. | Benchmark script ready; baseline metrics recorded; alert thresholds defined. | | BENCH-NOTIFY-15-001 | TODO | Bench Guild, Notify Team | NOTIFY-ENGINE-15-301 | Notify dispatch throughput bench (vary rule density) with results CSV. | Bench executed; results stored; regression alert configured. | diff --git a/deploy/compose/docker-compose.airgap.yaml b/deploy/compose/docker-compose.airgap.yaml index d73dfe2a..0b2364d3 100644 --- a/deploy/compose/docker-compose.airgap.yaml +++ b/deploy/compose/docker-compose.airgap.yaml @@ -1,5 +1,3 @@ -version: "3.9" - x-release-labels: &release-labels com.stellaops.release.version: "2025.09.2-airgap" com.stellaops.release.channel: "airgap" @@ -164,6 +162,22 @@ services: - stellaops labels: *release-labels + notify-web: + image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2} + restart: unless-stopped + depends_on: + - mongo + - authority + environment: + DOTNET_ENVIRONMENT: Production + volumes: + - ../../etc/notify.prod.yaml:/app/etc/notify.yaml:ro + ports: + - "${NOTIFY_WEB_PORT:-9446}:8446" + networks: + - stellaops + labels: *release-labels + excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68 restart: unless-stopped diff --git a/deploy/compose/docker-compose.dev.yaml b/deploy/compose/docker-compose.dev.yaml index e4c428de..f5b66f2e 100644 --- a/deploy/compose/docker-compose.dev.yaml +++ b/deploy/compose/docker-compose.dev.yaml @@ -1,5 +1,3 @@ -version: "3.9" - x-release-labels: &release-labels com.stellaops.release.version: "2025.10.0-edge" com.stellaops.release.channel: "edge" @@ -162,6 +160,22 @@ services: - stellaops labels: *release-labels + notify-web: + image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.10.0-edge} + restart: unless-stopped + depends_on: + - mongo + - authority + environment: + DOTNET_ENVIRONMENT: Development + volumes: + - ../../etc/notify.dev.yaml:/app/etc/notify.yaml:ro + ports: + - "${NOTIFY_WEB_PORT:-8446}:8446" + networks: + - stellaops + labels: *release-labels + excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285 
restart: unless-stopped diff --git a/deploy/compose/docker-compose.stage.yaml b/deploy/compose/docker-compose.stage.yaml index a1504a82..6cd11e06 100644 --- a/deploy/compose/docker-compose.stage.yaml +++ b/deploy/compose/docker-compose.stage.yaml @@ -1,5 +1,3 @@ -version: "3.9" - x-release-labels: &release-labels com.stellaops.release.version: "2025.09.2" com.stellaops.release.channel: "stable" @@ -162,6 +160,22 @@ services: - stellaops labels: *release-labels + notify-web: + image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2} + restart: unless-stopped + depends_on: + - mongo + - authority + environment: + DOTNET_ENVIRONMENT: Production + volumes: + - ../../etc/notify.stage.yaml:/app/etc/notify.yaml:ro + ports: + - "${NOTIFY_WEB_PORT:-8446}:8446" + networks: + - stellaops + labels: *release-labels + excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa restart: unless-stopped diff --git a/deploy/helm/stellaops/templates/configmaps.yaml b/deploy/helm/stellaops/templates/configmaps.yaml new file mode 100644 index 00000000..62dd8658 --- /dev/null +++ b/deploy/helm/stellaops/templates/configmaps.yaml @@ -0,0 +1,15 @@ +{{- $root := . -}} +{{- range $name, $cfg := .Values.configMaps }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "stellaops.fullname" (dict "root" $root "name" $name) }} + labels: + {{- include "stellaops.labels" (dict "root" $root "name" $name "svc" (dict "class" "config")) | nindent 4 }} +data: +{{- range $fileName, $content := $cfg.data }} + {{ $fileName }}: | +{{ $content | nindent 4 }} +{{- end }} +--- +{{- end }} diff --git a/deploy/helm/stellaops/templates/core.yaml b/deploy/helm/stellaops/templates/core.yaml index 4e155024..8673d071 100644 --- a/deploy/helm/stellaops/templates/core.yaml +++ b/deploy/helm/stellaops/templates/core.yaml @@ -1,5 +1,6 @@ {{- $root := . 
-}} {{- range $name, $svc := .Values.services }} +{{- $configMounts := (default (list) $svc.configMounts) }} apiVersion: apps/v1 kind: Deployment metadata: @@ -53,11 +54,12 @@ spec: containerPort: {{ $port.containerPort }} protocol: {{ default "TCP" $port.protocol }} {{- end }} -{{- else if $svc.service.port }} +{{- else if and $svc.service (hasKey $svc.service "port") }} + {{- $svcService := $svc.service }} ports: - name: {{ printf "%s-http" $name | trunc 63 | trimSuffix "-" }} - containerPort: {{ $svc.service.targetPort | default $svc.service.port }} - protocol: TCP + containerPort: {{ default (index $svcService "port") (index $svcService "targetPort") }} + protocol: {{ default "TCP" (index $svcService "protocol") }} {{- end }} {{- if $svc.resources }} resources: @@ -71,11 +73,25 @@ spec: readinessProbe: {{ toYaml $svc.readinessProbe | nindent 12 }} {{- end }} -{{- if $svc.volumeMounts }} +{{- if or $svc.volumeMounts $configMounts }} volumeMounts: +{{- if $svc.volumeMounts }} {{ toYaml $svc.volumeMounts | nindent 12 }} {{- end }} - {{- if or $svc.volumes $svc.volumeClaims }} +{{- range $mount := $configMounts }} + - name: {{ $mount.name }} + mountPath: {{ $mount.mountPath }} +{{- if $mount.subPath }} + subPath: {{ $mount.subPath }} +{{- end }} +{{- if hasKey $mount "readOnly" }} + readOnly: {{ $mount.readOnly }} +{{- else }} + readOnly: true +{{- end }} +{{- end }} +{{- end }} + {{- if or $svc.volumes (or $svc.volumeClaims $configMounts) }} volumes: {{- if $svc.volumes }} {{ toYaml $svc.volumes | nindent 8 }} @@ -86,6 +102,19 @@ spec: persistentVolumeClaim: claimName: {{ $claim.claimName }} {{- end }} +{{- end }} +{{- range $mount := $configMounts }} + - name: {{ $mount.name }} + configMap: + name: {{ include "stellaops.fullname" (dict "root" $root "name" $mount.configMap) }} +{{- if $mount.items }} + items: +{{ toYaml $mount.items | nindent 12 }} +{{- else if $mount.subPath }} + items: + - key: {{ $mount.subPath }} + path: {{ $mount.subPath }} +{{- end }} {{- end }} {{- end }} {{- if $svc.serviceAccount }} diff --git a/deploy/helm/stellaops/values-airgap.yaml b/deploy/helm/stellaops/values-airgap.yaml index 878ed21c..ee4307f5 100644 --- a/deploy/helm/stellaops/values-airgap.yaml +++ b/deploy/helm/stellaops/values-airgap.yaml @@ -8,6 +8,49 @@ global: pullPolicy: IfNotPresent labels: stellaops.io/channel: airgap + +configMaps: + notify-config: + data: + notify.yaml: | + storage: + driver: mongo + connectionString: "mongodb://notify-mongo.prod.svc.cluster.local:27017" + database: "stellaops_notify" + commandTimeoutSeconds: 60 + + authority: + enabled: true + issuer: "https://authority.stella-ops.org" + metadataAddress: "https://authority.stella-ops.org/.well-known/openid-configuration" + requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + + api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + + plugins: + baseDirectory: "/var/opt/stellaops" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + + telemetry: + enableRequestLogging: true + minimumLogLevel: Warning services: authority: image: 
registry.stella-ops.org/stellaops/authority@sha256:5551a3269b7008cd5aceecf45df018c67459ed519557ccbe48b093b926a39bcc @@ -68,6 +111,17 @@ services: SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-airgap" SCANNER__STORAGE__S3__SECRETACCESSKEY: "airgap-minio-secret" SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" + notify-web: + image: registry.stella-ops.org/stellaops/notify-web:2025.09.2 + service: + port: 8446 + env: + DOTNET_ENVIRONMENT: Production + configMounts: + - name: notify-config + mountPath: /app/etc/notify.yaml + subPath: notify.yaml + configMap: notify-config excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68 env: diff --git a/deploy/helm/stellaops/values-dev.yaml b/deploy/helm/stellaops/values-dev.yaml index b567dadf..fec65e16 100644 --- a/deploy/helm/stellaops/values-dev.yaml +++ b/deploy/helm/stellaops/values-dev.yaml @@ -8,6 +8,49 @@ global: pullPolicy: IfNotPresent labels: stellaops.io/channel: edge + +configMaps: + notify-config: + data: + notify.yaml: | + storage: + driver: mongo + connectionString: "mongodb://notify-mongo.dev.svc.cluster.local:27017" + database: "stellaops_notify_dev" + commandTimeoutSeconds: 30 + + authority: + enabled: true + issuer: "https://authority.dev.stella-ops.local" + metadataAddress: "https://authority.dev.stella-ops.local/.well-known/openid-configuration" + requireHttpsMetadata: false + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify.dev + readScope: notify.read + adminScope: notify.admin + + api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + + plugins: + baseDirectory: "../" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + + telemetry: + enableRequestLogging: true + minimumLogLevel: Debug services: authority: image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd @@ -67,6 +110,17 @@ services: SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops" SCANNER__STORAGE__S3__SECRETACCESSKEY: "dev-minio-secret" SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" + notify-web: + image: registry.stella-ops.org/stellaops/notify-web:2025.10.0-edge + service: + port: 8446 + env: + DOTNET_ENVIRONMENT: Development + configMounts: + - name: notify-config + mountPath: /app/etc/notify.yaml + subPath: notify.yaml + configMap: notify-config excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285 env: diff --git a/deploy/helm/stellaops/values-stage.yaml b/deploy/helm/stellaops/values-stage.yaml index 51064d18..a9a17468 100644 --- a/deploy/helm/stellaops/values-stage.yaml +++ b/deploy/helm/stellaops/values-stage.yaml @@ -8,6 +8,49 @@ global: pullPolicy: IfNotPresent labels: stellaops.io/channel: stable + +configMaps: + notify-config: + data: + notify.yaml: | + storage: + driver: mongo + connectionString: "mongodb://notify-mongo.stage.svc.cluster.local:27017" + database: "stellaops_notify_stage" + commandTimeoutSeconds: 45 + + authority: + enabled: true + issuer: "https://authority.stage.stella-ops.org" + metadataAddress: "https://authority.stage.stella-ops.org/.well-known/openid-configuration" + 
requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + + api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + + plugins: + baseDirectory: "/opt/stellaops" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + + telemetry: + enableRequestLogging: true + minimumLogLevel: Information services: authority: image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 @@ -68,6 +111,17 @@ services: SCANNER__STORAGE__S3__ACCESSKEYID: "stellaops-stage" SCANNER__STORAGE__S3__SECRETACCESSKEY: "stage-minio-secret" SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" + notify-web: + image: registry.stella-ops.org/stellaops/notify-web:2025.09.2 + service: + port: 8446 + env: + DOTNET_ENVIRONMENT: Production + configMounts: + - name: notify-config + mountPath: /app/etc/notify.yaml + subPath: notify.yaml + configMap: notify-config excititor: image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa env: diff --git a/deploy/tools/validate-profiles.sh b/deploy/tools/validate-profiles.sh index bac9cabd..d7148385 100644 --- a/deploy/tools/validate-profiles.sh +++ b/deploy/tools/validate-profiles.sh @@ -11,7 +11,18 @@ compose_profiles=( "docker-compose.airgap.yaml:env/airgap.env.example" ) +docker_ready=false if command -v docker >/dev/null 2>&1; then + if docker compose version >/dev/null 2>&1; then + docker_ready=true + else + echo "⚠️ docker CLI present but Compose plugin unavailable; skipping compose validation" >&2 + fi +else + echo "⚠️ docker CLI not found; skipping compose validation" >&2 +fi + +if [[ "$docker_ready" == "true" ]]; then for entry in "${compose_profiles[@]}"; do IFS=":" read -r compose_file env_file <<<"$entry" printf '→ validating %s with %s\n' "$compose_file" "$env_file" @@ -19,8 +30,6 @@ if command -v docker >/dev/null 2>&1; then --env-file "$COMPOSE_DIR/$env_file" \ -f "$COMPOSE_DIR/$compose_file" config >/dev/null done -else - echo "⚠️ docker CLI not found; skipping compose validation" >&2 fi helm_values=( diff --git a/docs/09_API_CLI_REFERENCE.md b/docs/09_API_CLI_REFERENCE.md index e753762d..f8a52ce8 100755 --- a/docs/09_API_CLI_REFERENCE.md +++ b/docs/09_API_CLI_REFERENCE.md @@ -149,14 +149,114 @@ Client then generates SBOM **only** for the `missing` layers and re‑posts `/sc --- -### 2.3 Policy Endpoints +### 2.3 Policy Endpoints *(preview feature flag: `scanner.features.enablePolicyPreview`)* -| Method | Path | Purpose | -| ------ | ------------------ | ------------------------------------ | -| `GET` | `/policy/export` | Download live YAML ruleset | -| `POST` | `/policy/import` | Upload YAML or Rego; replaces active | -| `POST` | `/policy/validate` | Lint only; returns 400 on error | -| `GET` | `/policy/history` | Paginated change log (audit trail) | +All policy APIs require **`scanner.reports`** scope (or anonymous access while auth is disabled). + +**Fetch schema** + +``` +GET /api/v1/policy/schema +Authorization: Bearer +Accept: application/schema+json +``` + +Returns the embedded `policy-schema@1` JSON schema used by the binder. 
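As a quick illustration (not part of the shipped tooling), pulling the schema for offline linting could look like the sketch below. It assumes Node 18+ with the global `fetch`; `SCANNER_URL` and `SCANNER_TOKEN` are placeholder environment variables standing in for the real endpoint and bearer token.

```js
// Hedged sketch: download policy-schema@1 for offline use.
// SCANNER_URL and SCANNER_TOKEN are illustrative placeholders, not shipped configuration.
'use strict';

const fs = require('fs');

async function main() {
  const base = process.env.SCANNER_URL ?? 'http://localhost:8080';
  const res = await fetch(`${base}/api/v1/policy/schema`, {
    headers: {
      Authorization: `Bearer ${process.env.SCANNER_TOKEN ?? ''}`,
      Accept: 'application/schema+json',
    },
  });
  if (!res.ok) {
    throw new Error(`Schema fetch failed: HTTP ${res.status}`);
  }
  const schema = await res.json();
  fs.writeFileSync('policy-schema@1.json', JSON.stringify(schema, null, 2) + '\n', 'utf8');
  console.log('Saved policy-schema@1.json for offline linting');
}

main().catch((err) => {
  console.error(err instanceof Error ? err.message : err);
  process.exit(1);
});
```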
+ +**Run diagnostics** + +``` +POST /api/v1/policy/diagnostics +Content-Type: application/json +Authorization: Bearer +``` + +```json +{ + "policy": { + "format": "yaml", + "actor": "cli", + "description": "dev override", + "content": "version: \"1.0\"\nrules:\n - name: Quiet Dev\n environments: [dev]\n action:\n type: ignore\n justification: dev waiver\n" + } +} +``` + +**Response 200**: + +```json +{ + "success": false, + "version": "1.0", + "ruleCount": 1, + "errorCount": 0, + "warningCount": 1, + "generatedAt": "2025-10-19T03:25:14.112Z", + "issues": [ + { "code": "policy.rule.quiet.missing_vex", "message": "Quiet flag ignored: rule must specify requireVex justifications.", "severity": "Warning", "path": "$.rules[0]" } + ], + "recommendations": [ + "Review policy warnings and ensure intentional overrides are documented." + ] +} +``` + +`success` is `false` when blocking issues remain; recommendations aggregate YAML ignore rules, VEX include/exclude hints, and vendor precedence guidance. + +**Preview impact** + +``` +POST /api/v1/policy/preview +Authorization: Bearer +Content-Type: application/json +``` + +```json +{ + "imageDigest": "sha256:abc123", + "findings": [ + { "id": "finding-1", "severity": "Critical", "source": "NVD" } + ], + "policy": { + "format": "yaml", + "content": "version: \"1.0\"\nrules:\n - name: Block Critical\n severity: [Critical]\n action: block\n" + } +} +``` + +**Response 200**: + +```json +{ + "success": true, + "policyDigest": "9c5e...", + "revisionId": "preview", + "changed": 1, + "diffs": [ + { + "findingId": "finding-1", + "baseline": {"findingId": "finding-1", "status": "Pass"}, + "projected": { + "findingId": "finding-1", + "status": "Blocked", + "ruleName": "Block Critical", + "ruleAction": "Block", + "score": 5.0, + "configVersion": "1.0", + "inputs": {"severityWeight": 5.0} + }, + "changed": true + } + ], + "issues": [] +} +``` + +- Provide `policy` to preview staged changes; omit it to compare against the active snapshot. +- Baseline verdicts are optional; when omitted, the API synthesises pass baselines before computing diffs. +- Quieted verdicts include `quietedBy` and `quiet` flags; score inputs now surface reachability/vendor trust weights (`reachability.*`, `trustWeight.*`). + +**OpenAPI**: the full API document (including these endpoints) is exposed at `/openapi/v1.json` and can be fetched for tooling or contract regeneration. ### 2.4 Scanner – Queue a Scan Job *(SP9 milestone)* @@ -238,6 +338,96 @@ Accept: application/json Statuses: `Pending`, `Running`, `Succeeded`, `Failed`, `Cancelled`. +### 2.6 Scanner – Stream Progress (SSE / JSONL) + +``` +GET /api/v1/scans/{scanId}/events?format=sse|jsonl +Authorization: Bearer +Accept: text/event-stream +``` + +When `format` is omitted the endpoint emits **Server-Sent Events** (SSE). Specify `format=jsonl` to receive newline-delimited JSON (`application/x-ndjson`). Response headers include `Cache-Control: no-store` and `X-Accel-Buffering: no` so intermediaries avoid buffering the stream. 
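The frame shapes appear in the samples that follow. As a rough consumer sketch for the JSONL variant (assuming Node 18+ with global `fetch`; the scan id, `SCANNER_URL`, and `SCANNER_TOKEN` below are stand-ins, not shipped names), the stream can be tailed like this:

```js
// Hedged sketch: tail scan progress via format=jsonl and print each frame.
'use strict';

async function main() {
  const scanId = process.argv[2];
  if (!scanId) {
    throw new Error('usage: node tail-scan.js <scanId>');
  }
  const base = process.env.SCANNER_URL ?? 'http://localhost:8080';
  const res = await fetch(`${base}/api/v1/scans/${scanId}/events?format=jsonl`, {
    headers: { Authorization: `Bearer ${process.env.SCANNER_TOKEN ?? ''}` },
  });
  if (!res.ok || !res.body) {
    throw new Error(`Stream request failed: HTTP ${res.status}`);
  }

  const decoder = new TextDecoder();
  let buffered = '';
  for await (const chunk of res.body) {
    buffered += decoder.decode(chunk, { stream: true });
    let newline;
    while ((newline = buffered.indexOf('\n')) >= 0) {
      const line = buffered.slice(0, newline).trim();
      buffered = buffered.slice(newline + 1);
      if (!line) continue;
      const frame = JSON.parse(line); // one progress frame per newline-delimited JSON line
      console.log(`#${frame.sequence} ${frame.state} ${frame.message ?? ''}`.trim());
    }
  }
}

main().catch((err) => {
  console.error(err instanceof Error ? err.message : err);
  process.exit(1);
});
```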
+ +**SSE frame** (default): + +``` +id: 1 +event: pending +data: {"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} +``` + +**JSONL frame** (`format=jsonl`): + +```json +{"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} +``` + +- `sequence` is monotonic starting at `1`. +- `correlationId` is deterministic (`{scanId}:{sequence:0000}`) unless a custom identifier is supplied by the publisher. +- `timestamp` is ISO‑8601 UTC with millisecond precision, ensuring deterministic ordering for consumers. +- The stream completes when the client disconnects or the coordinator stops publishing events. + +### 2.7 Scanner – Assemble Report (Signed Envelope) + +``` +POST /api/v1/reports +Authorization: Bearer +Content-Type: application/json +``` + +Request body mirrors policy preview inputs (image digest plus findings). The service evaluates the active policy snapshot, assembles a verdict, and signs the canonical report payload. + +**Response 200**: + +```json +{ + "report": { + "reportId": "report-3def5f362aa475ef14b6", + "imageDigest": "sha256:deadbeef", + "verdict": "blocked", + "policy": { "revisionId": "rev-1", "digest": "27d2ec2b34feedc304fc564d252ecee1c8fa14ea581a5ff5c1ea8963313d5c8d" }, + "summary": { "total": 1, "blocked": 1, "warned": 0, "ignored": 0, "quieted": 0 }, + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "ruleName": "Block Critical", + "ruleAction": "Block", + "score": 40.5, + "configVersion": "1.0", + "inputs": { + "reachabilityWeight": 0.45, + "baseScore": 40.5, + "severityWeight": 90, + "trustWeight": 1, + "trustWeight.NVD": 1, + "reachability.runtime": 0.45 + }, + "quiet": false, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ], + "issues": [] + }, + "dsse": { + "payloadType": "application/vnd.stellaops.report+json", + "payload": "", + "signatures": [ + { + "keyId": "scanner-report-signing", + "algorithm": "hs256", + "signature": "" + } + ] + } +} +``` + +- The `report` object omits null fields and is deterministic (ISO timestamps, sorted keys). +- `dsse` follows the DSSE (Dead Simple Signing Envelope) shape; `payload` is the canonical UTF-8 JSON and `signatures[0].signature` is the base64 HMAC/Ed25519 value depending on configuration. +- A runnable sample envelope is available at `samples/api/reports/report-sample.dsse.json` for tooling tests or signature verification. + **Response 404** – `application/problem+json` payload with type `https://stellaops.org/problems/not-found` when the scan identifier is unknown. > **Tip** – poll `Location` from the submission call until `status` transitions away from `Pending`/`Running`. @@ -332,6 +522,7 @@ See `docs/dev/32_AUTH_CLIENT_GUIDE.md` for recommended profiles (online vs. air- | `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output ` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. | | `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle ` `--signature ` `--key `
`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. | | `stellaops-cli config show` | Display resolved configuration | — | Masks secret values; helpful for air‑gapped installs | +| `stellaops-cli runtime policy test` | Ask Scanner.WebService for runtime verdicts (Webhook parity) | `--image/-i ` (repeatable, comma/space lists supported)
`--file/-f `
`--namespace/--ns `
`--label/-l key=value` (repeatable)
`--json` | Posts to `POST /api/v1/scanner/policy/runtime`, deduplicates image digests, and prints TTL + per-image verdict/signed/SBOM status. Accepts newline/whitespace-delimited stdin when piped; `--json` emits the raw response without additional logging. | When running on an interactive terminal without explicit override flags, the CLI uses Spectre.Console prompts to let you choose per-run ORAS/offline bundle behaviour. diff --git a/docs/11_DATA_SCHEMAS.md b/docs/11_DATA_SCHEMAS.md index db3a92bb..c110a897 100755 --- a/docs/11_DATA_SCHEMAS.md +++ b/docs/11_DATA_SCHEMAS.md @@ -86,16 +86,152 @@ Only enabled when `MONGO_URI` is supplied (for long‑term audit). Schema detail for **policy_versions**: -```jsonc -{ - "_id": "6619e90b8c5e1f76", - "yaml": "version: 1.0\nrules:\n - …", - "rego": null, // filled when Rego uploaded - "authorId": "u_1021", - "created": "2025-07-14T08:15:04Z", - "comment": "Imported via API" -} -``` +Samples live under `samples/api/scheduler/` (e.g., `schedule.json`, `run.json`, `impact-set.json`, `audit.json`) and mirror the canonical serializer output shown below. + +```jsonc +{ + "_id": "6619e90b8c5e1f76", + "yaml": "version: 1.0\nrules:\n - …", + "rego": null, // filled when Rego uploaded + "authorId": "u_1021", + "created": "2025-07-14T08:15:04Z", + "comment": "Imported via API" +} +``` + +### 3.1 Scheduler Sprints 16 Artifacts + +**Collections.** `schedules`, `runs`, `impact_snapshots`, `audit` (module‑local). All documents reuse the canonical JSON emitted by `StellaOps.Scheduler.Models` so agents and fixtures remain deterministic. + +#### 3.1.1 Schedule (`schedules`) + +```jsonc +{ + "_id": "sch_20251018a", + "tenantId": "tenant-alpha", + "name": "Nightly Prod", + "enabled": true, + "cronExpression": "0 2 * * *", + "timezone": "UTC", + "mode": "analysis-only", + "selection": { + "scope": "by-namespace", + "namespaces": ["team-a", "team-b"], + "repositories": ["app/service-api"], + "includeTags": ["canary", "prod"], + "labels": [{"key": "env", "values": ["prod", "staging"]}], + "resolvesTags": true + }, + "onlyIf": {"lastReportOlderThanDays": 7, "policyRevision": "policy@42"}, + "notify": {"onNewFindings": true, "minSeverity": "high", "includeKev": true}, + "limits": {"maxJobs": 1000, "ratePerSecond": 25, "parallelism": 4}, + "subscribers": ["notify.ops"], + "createdAt": "2025-10-18T22:00:00Z", + "createdBy": "svc_scheduler", + "updatedAt": "2025-10-18T22:00:00Z", + "updatedBy": "svc_scheduler" +} +``` + +*Constraints*: arrays are alphabetically sorted; `selection.tenantId` is optional but when present must match `tenantId`. Cron expressions are validated for newline/length, timezones are validated via `TimeZoneInfo`. 
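+
+A hedged sketch of those guards is shown below; the helper name and the 512-character cap are illustrative rather than the canonical `StellaOps.Scheduler.Models` validator:
+
+```csharp
+// Illustrative schedule guard; mirrors the constraints above but is not the shipped validator.
+using System;
+
+static void ValidateSchedule(string cronExpression, string timezone)
+{
+    // Cron expressions must be single-line and bounded (the 512-character cap is an assumption).
+    if (string.IsNullOrWhiteSpace(cronExpression)
+        || cronExpression.Contains('\n')
+        || cronExpression.Length > 512)
+    {
+        throw new ArgumentException("Cron expression must be a single line of bounded length.", nameof(cronExpression));
+    }
+
+    // Throws TimeZoneNotFoundException / InvalidTimeZoneException for unknown or corrupt zones.
+    _ = TimeZoneInfo.FindSystemTimeZoneById(timezone);
+}
+```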
+ +#### 3.1.2 Run (`runs`) + +```jsonc +{ + "_id": "run_20251018_0001", + "tenantId": "tenant-alpha", + "scheduleId": "sch_20251018a", + "trigger": "feedser", + "state": "running", + "stats": { + "candidates": 1280, + "deduped": 910, + "queued": 624, + "completed": 310, + "deltas": 42, + "newCriticals": 7, + "newHigh": 11, + "newMedium": 18, + "newLow": 6 + }, + "reason": {"feedserExportId": "exp-20251018-03"}, + "createdAt": "2025-10-18T22:03:14Z", + "startedAt": "2025-10-18T22:03:20Z", + "finishedAt": null, + "error": null, + "deltas": [ + { + "imageDigest": "sha256:a1b2c3", + "newFindings": 3, + "newCriticals": 1, + "newHigh": 1, + "newMedium": 1, + "newLow": 0, + "kevHits": ["CVE-2025-0002"], + "topFindings": [ + { + "purl": "pkg:rpm/openssl@3.0.12-5.el9", + "vulnerabilityId": "CVE-2025-0002", + "severity": "critical", + "link": "https://ui.internal/scans/sha256:a1b2c3" + } + ], + "attestation": {"uuid": "rekor-314", "verified": true}, + "detectedAt": "2025-10-18T22:03:21Z" + } + ] +} +``` + +Counters are clamped to ≥0, timestamps are converted to UTC, and delta arrays are sorted (critical → info severity precedence, then vulnerability id). Missing `deltas` implies "no change" snapshots. + +#### 3.1.3 Impact Snapshot (`impact_snapshots`) + +```jsonc +{ + "selector": { + "scope": "all-images", + "tenantId": "tenant-alpha" + }, + "images": [ + { + "imageDigest": "sha256:f1e2d3", + "registry": "registry.internal", + "repository": "app/api", + "namespaces": ["team-a"], + "tags": ["prod"], + "usedByEntrypoint": true, + "labels": {"env": "prod"} + } + ], + "usageOnly": true, + "generatedAt": "2025-10-18T22:02:58Z", + "total": 412, + "snapshotId": "impact-20251018-1" +} +``` + +Images are deduplicated and sorted by digest. Label keys are normalised to lowercase to avoid case‑sensitive duplicates during reconciliation. `snapshotId` enables run planners to compare subsequent snapshots for drift. + +#### 3.1.4 Audit (`audit`) + +```jsonc +{ + "_id": "audit_169754", + "tenantId": "tenant-alpha", + "category": "scheduler", + "action": "pause", + "occurredAt": "2025-10-18T22:10:00Z", + "actor": {"actorId": "user_admin", "displayName": "Cluster Admin", "kind": "user"}, + "scheduleId": "sch_20251018a", + "correlationId": "corr-123", + "metadata": {"details": "schedule paused", "reason": "maintenance"}, + "message": "Paused via API" +} +``` + +Metadata keys are lowercased, first‑writer wins (duplicates with different casing are ignored), and optional IDs (`scheduleId`, `runId`) are trimmed when empty. Use the canonical serializer when emitting events so audit digests remain reproducible. --- @@ -133,18 +269,53 @@ await new PolicyValidationCli().RunAsync(new PolicyValidationCliOptions }); ``` -### 4.1 Rego Variant (Advanced – TODO) - -*Accepted but stored as‑is in `rego` field.* -Evaluated via internal **OPA** side‑car once feature graduates from TODO list. - ---- - -## 5 SLSA Attestation Schema ⭑ - -Planned for Q1‑2026 (kept here for early plug‑in authors). - -```jsonc +### 4.1 Rego Variant (Advanced – TODO) + +*Accepted but stored as‑is in `rego` field.* +Evaluated via internal **OPA** side‑car once feature graduates from TODO list. + +### 4.2 Policy Scoring Config (JSON) + +*Schema id.* `https://schemas.stella-ops.org/policy/policy-scoring-schema@1.json` +*Source.* `src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json` (embedded in `StellaOps.Policy`), default fixture at `src/StellaOps.Policy/Schemas/policy-scoring-default.json`. 
+ +```jsonc +{ + "version": "1.0", + "severityWeights": {"Critical": 90, "High": 75, "Unknown": 60, "...": 0}, + "quietPenalty": 45, + "warnPenalty": 15, + "ignorePenalty": 35, + "trustOverrides": {"vendor": 1.0, "distro": 0.85}, + "reachabilityBuckets": {"entrypoint": 1.0, "direct": 0.85, "runtime": 0.45, "unknown": 0.5}, + "unknownConfidence": { + "initial": 0.8, + "decayPerDay": 0.05, + "floor": 0.2, + "bands": [ + {"name": "high", "min": 0.65}, + {"name": "medium", "min": 0.35}, + {"name": "low", "min": 0.0} + ] + } +} +``` + +Validation occurs alongside policy binding (`PolicyScoringConfigBinder`), producing deterministic digests via `PolicyScoringConfigDigest`. Bands are ordered descending by `min` so consumers can resolve confidence tiers deterministically. Reachability buckets are case-insensitive keys (`entrypoint`, `direct`, `indirect`, `runtime`, `unreachable`, `unknown`) with numeric multipliers (default ≤1.0). + +**Runtime usage** +- `trustOverrides` are matched against `finding.tags` (`trust:`) first, then `finding.source`/`finding.vendor`; missing keys default to `1.0`. +- `reachabilityBuckets` consume `finding.tags` with prefix `reachability:` (fallback `usage:` or `unknown`). Missing buckets fall back to `unknown` weight when present, otherwise `1.0`. +- Policy verdicts expose scoring inputs (`severityWeight`, `trustWeight`, `reachabilityWeight`, `baseScore`, penalties) plus unknown-state metadata (`unknownConfidence`, `unknownAgeDays`, `confidenceBand`) for auditability. See `samples/policy/policy-preview-unknown.json` for an end-to-end preview payload. +- Unknown confidence derives from `unknown-age-days:` (preferred) or `unknown-since:` + `observed-at:` tags; with no hints the engine keeps `initial` confidence. Values decay by `decayPerDay` down to `floor`, then resolve to the first matching `bands[].name`. + +--- + +## 5 SLSA Attestation Schema ⭑ + +Planned for Q1‑2026 (kept here for early plug‑in authors). + +```jsonc { "id": "prov_0291", "imageDigest": "sha256:e2b9…", @@ -164,8 +335,70 @@ Planned for Q1‑2026 (kept here for early plug‑in authors). {"uri": "git+https://git…", "digest": {"sha1": "f6a1…"}} ], "rekorLogIndex": 99817 // entry in local Rekor mirror -} -``` +} +``` + +--- + +## 6 Notify Foundations (Rule · Channel · Event) + +*Sprint 15 target* – canonically describe the Notify data shapes that UI, workers, and storage consume. JSON Schemas live under `docs/notify/schemas/` and deterministic fixtures under `docs/notify/samples/`. + +| Artifact | Schema | Sample | +|----------|--------|--------| +| **Rule** (catalogued routing logic) | `docs/notify/schemas/notify-rule@1.json` | `docs/notify/samples/notify-rule@1.sample.json` | +| **Channel** (delivery endpoint definition) | `docs/notify/schemas/notify-channel@1.json` | `docs/notify/samples/notify-channel@1.sample.json` | +| **Template** (rendering payload) | `docs/notify/schemas/notify-template@1.json` | `docs/notify/samples/notify-template@1.sample.json` | +| **Event envelope** (Notify ingest surface) | `docs/notify/schemas/notify-event@1.json` | `docs/notify/samples/notify-event@1.sample.json` | + +### 6.1 Rule highlights (`notify-rule@1`) + +* Keys are lower‑cased camelCase. `schemaVersion` (`notify.rule@1`), `ruleId`, `tenantId`, `name`, `match`, `actions`, `createdAt`, and `updatedAt` are mandatory. +* `match.eventKinds`, `match.verdicts`, and other array selectors are pre‑sorted and case‑normalized (e.g. `scanner.report.ready`). 
+* `actions[].throttle` serialises as ISO 8601 duration (`PT5M`), mirroring worker backoff guardrails. +* `vex` gates let operators exclude accepted/not‑affected justifications; omit the block to inherit default behaviour. +* Use `StellaOps.Notify.Models.NotifySchemaMigration.UpgradeRule(JsonNode)` when deserialising legacy payloads that might lack `schemaVersion` or retain older revisions. +* Soft deletions persist `deletedAt` in Mongo (and disable the rule); repository queries automatically filter them. + +### 6.2 Channel highlights (`notify-channel@1`) + +* `schemaVersion` is pinned to `notify.channel@1` and must accompany persisted documents. +* `type` matches plug‑in identifiers (`slack`, `teams`, `email`, `webhook`, `custom`). +* `config.secretRef` stores an external secret handle (Authority, Vault, K8s). Notify never persists raw credentials. +* Optional `config.limits.timeout` uses ISO 8601 durations identical to rule throttles; concurrency/RPM defaults apply when absent. +* `StellaOps.Notify.Models.NotifySchemaMigration.UpgradeChannel(JsonNode)` backfills the schema version when older documents omit it. +* Channels share the same soft-delete marker (`deletedAt`) so operators can restore prior configuration without purging history. + +### 6.3 Event envelope (`notify-event@1`) + +* Aligns with the platform event contract—`eventId` UUID, RFC 3339 `ts`, tenant isolation enforced. +* Enumerated `kind` covers the initial Notify surface (`scanner.report.ready`, `scheduler.rescan.delta`, `zastava.admission`, etc.). +* `scope.labels`/`scope.attributes` and top-level `attributes` mirror the metadata dictionaries workers surface for templating and audits. +* Notify workers use the same migration helper to wrap event payloads before template rendering, so schema additions remain additive. + +### 6.4 Template highlights (`notify-template@1`) + +* Carries the presentation key (`channelType`, `key`, `locale`) and the raw template body; `schemaVersion` is fixed to `notify.template@1`. +* `renderMode` enumerates supported engines (`markdown`, `html`, `adaptiveCard`, `plainText`, `json`) aligning with `NotifyTemplateRenderMode`. +* `format` signals downstream connector expectations (`slack`, `teams`, `email`, `webhook`, `json`). +* Upgrade legacy definitions with `NotifySchemaMigration.UpgradeTemplate(JsonNode)` to auto-apply the new schema version and ordering. +* Templates also record soft deletes via `deletedAt`; UI/API skip them by default while retaining revision history. + +**Validation loop:** + +```bash +# Validate Notify schemas and samples (matches Docs CI) +for schema in docs/notify/schemas/*.json; do + npx ajv compile -c ajv-formats -s "$schema" +done + +for sample in docs/notify/samples/*.sample.json; do + schema="docs/notify/schemas/$(basename "${sample%.sample.json}").json" + npx ajv validate -c ajv-formats -s "$schema" -d "$sample" +done +``` + +Integration tests can embed the sample fixtures to guarantee deterministic serialisation from the `StellaOps.Notify.Models` DTOs introduced in Sprint 15. 
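+
+A short sketch of the migration helper in use follows; it assumes `UpgradeRule` returns the canonical `JsonNode`, so adjust to the actual DTO surface when wiring real tests:
+
+```csharp
+// Sketch: upgrade a legacy rule document before validation/persistence.
+// Assumes NotifySchemaMigration.UpgradeRule(JsonNode) returns the canonical JsonNode.
+using System;
+using System.IO;
+using System.Text.Json.Nodes;
+using StellaOps.Notify.Models;
+
+var legacy = JsonNode.Parse(File.ReadAllText("docs/notify/samples/notify-rule@1.sample.json"))!;
+var canonical = NotifySchemaMigration.UpgradeRule(legacy);
+
+// After migration the schema version is pinned (notify.rule@1) and keys are canonically ordered.
+Console.WriteLine(canonical?["schemaVersion"]);
+```
+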
---

diff --git a/docs/ARCHITECTURE_AUTHORITY.md b/docs/ARCHITECTURE_AUTHORITY.md
index 38fa91c0..5bad72b3 100644
--- a/docs/ARCHITECTURE_AUTHORITY.md
+++ b/docs/ARCHITECTURE_AUTHORITY.md
@@ -302,6 +302,18 @@ authority:
       auth: { type: "mtls" }
       senderConstraint: "mtls"
       scopes: [ "signer.sign" ]
+    - clientId: notify-web-dev
+      grantTypes: [ "client_credentials" ]
+      audiences: [ "notify.dev" ]
+      auth: { type: "client_secret", secretFile: "/secrets/notify-web-dev.secret" }
+      senderConstraint: "dpop"
+      scopes: [ "notify.read", "notify.admin" ]
+    - clientId: notify-web
+      grantTypes: [ "client_credentials" ]
+      audiences: [ "notify" ]
+      auth: { type: "client_secret", secretFile: "/secrets/notify-web.secret" }
+      senderConstraint: "dpop"
+      scopes: [ "notify.read", "notify.admin" ]
 ```

 ---
diff --git a/docs/ARCHITECTURE_CLI.md b/docs/ARCHITECTURE_CLI.md
index 0ef8ee48..f3a0e68c 100644
--- a/docs/ARCHITECTURE_CLI.md
+++ b/docs/ARCHITECTURE_CLI.md
@@ -89,7 +89,7 @@ src/
 ### 2.6 Runtime (Zastava helper)

-* `runtime policy test --images [--ns --labels k=v,...]` — ask backend `/policy/runtime` like the webhook would.
+* `runtime policy test --image/-i [--file --ns --label key=value --json]` — ask backend `/policy/runtime` like the webhook would (accepts multiple `--image`, comma/space lists, or stdin pipelines).

 ### 2.7 Offline kit
diff --git a/docs/ARCHITECTURE_DEVOPS.md b/docs/ARCHITECTURE_DEVOPS.md
index d66375e9..897da00a 100644
--- a/docs/ARCHITECTURE_DEVOPS.md
+++ b/docs/ARCHITECTURE_DEVOPS.md
@@ -297,6 +297,12 @@ s3://stellaops/
 * **Concelier/Excititor**: raw docs keep **last N windows**; canonical stores permanent.
 * **Attestor**: `entries` permanent; `dedupe` TTL 24–48h.

+### 7.5 Mongo server baseline
+
+* **Minimum supported server:** MongoDB **4.2+**. Driver 3.5.0 removes compatibility shims for 4.0; upstream has already announced 4.0 support will be dropped in upcoming C# driver releases.
+* **Deploy images:** Compose/Helm defaults stay on `mongo:7.x`. For air-gapped installs, refresh Offline Kit bundles so the packaged `mongod` matches ≥4.2.
+* **Upgrade guard:** During rollout, verify replica sets reach FCV `4.2` or above before swapping binaries; automation should hard-stop if FCV is <4.2.
+
 ---

 ## 8) Observability & SLOs (operations)
diff --git a/docs/ARCHITECTURE_EXCITITOR.md b/docs/ARCHITECTURE_EXCITITOR.md
index 5c0eadd2..057836aa 100644
--- a/docs/ARCHITECTURE_EXCITITOR.md
+++ b/docs/ARCHITECTURE_EXCITITOR.md
@@ -112,10 +112,10 @@ disposition: kept|replaced|superseded
 correlation: { replaces?: sha256, replacedBy?: sha256 }
 ```

-**`vex.claims`** (normalized rows; dedupe on providerId+vulnId+productKey+docDigest)
+**`vex.statements`** (immutable normalized rows; append-only event log)

 ```
-_id
+_id: ObjectId
 providerId
 vulnId
 productKey
@@ -127,9 +127,16 @@ lastObserved
 docDigest
 provenance { uri, line?, pointer?, signatureState }
 evidence[] { key, value, locator }
+signals? {
+  severity? { scheme, score?, label?, vector?
} + kev?: bool + epss?: double +} consensusDigest // same as _id indices: - {vulnId:1, productKey:1} @@ -175,6 +187,7 @@ ttl, hits **`vex.migrations`** * ordered migrations applied at bootstrap to ensure indexes. +* `20251019-consensus-signals-statements` introduces the statements log indexes and the `policyRevisionId + evaluatedAt` lookup for consensus — rerun consensus writers once to hydrate newly persisted signals. ### 3.2 Indexing strategy @@ -339,6 +352,10 @@ excititor: platform: 0.7 hub: 0.5 attestation: 0.6 + ceiling: 1.25 + scoring: + alpha: 0.25 + beta: 0.5 providerOverrides: redhat: 1.0 suse: 0.95 @@ -367,6 +384,20 @@ excititor: signaturePolicy: { type: cosign, cosignKeylessRoots: [ "sigstore-root" ] } ``` +### 9.1 WebService endpoints + +With storage configured, the WebService exposes the following ingress and diagnostic APIs: + +* `GET /excititor/status` – returns the active storage configuration and registered artifact stores. +* `GET /excititor/health` – simple liveness probe. +* `POST /excititor/statements` – accepts normalized VEX statements and persists them via `IVexClaimStore`; use this for migrations/backfills. +* `GET /excititor/statements/{vulnId}/{productKey}?since=` – returns the immutable statement log for a vulnerability/product pair. + +Run the ingestion endpoint once after applying migration `20251019-consensus-signals-statements` to repopulate historical statements with the new severity/KEV/EPSS signal fields. + +* `weights.ceiling` raises the deterministic clamp applied to provider tiers/overrides (range 1.0‒5.0). Values outside the range are clamped with warnings so operators can spot typos. +* `scoring.alpha` / `scoring.beta` configure KEV/EPSS boosts for the Phase 1 → Phase 2 scoring pipeline. Defaults (0.25, 0.5) preserve prior behaviour; negative or excessively large values fall back with diagnostics. + --- ## 10) Security model diff --git a/docs/ARCHITECTURE_EXCITITOR_MIRRORS.md b/docs/ARCHITECTURE_EXCITITOR_MIRRORS.md new file mode 100644 index 00000000..bb1d76ed --- /dev/null +++ b/docs/ARCHITECTURE_EXCITITOR_MIRRORS.md @@ -0,0 +1,138 @@ +# architecture_excititor_mirrors.md — Excititor Mirror Distribution + +> **Status:** Draft (Sprint 7). Complements `docs/ARCHITECTURE_EXCITITOR.md` by describing the mirror export surface exposed by `Excititor.WebService` and the configuration hooks used by operators and downstream mirrors. + +--- + +## 0) Purpose + +Excititor publishes canonical VEX consensus data. Operators (or StellaOps-managed mirrors) need a deterministic way to sync those exports into downstream environments. Mirror distribution provides: + +* A declarative map of export bundles (`json`, `jsonl`, `openvex`, `csaf`) reachable via signed HTTP endpoints under `/excititor/mirror`. +* Thin quota/authentication controls on top of the existing export cache so mirrors cannot starve the web service. +* Stable payload shapes that downstream automation can monitor (index → fetch updates → download artifact → verify signature). + +Mirror endpoints are intentionally **read-only**. Write paths (export generation, attestation, cache) remain the responsibility of the export pipeline. + +--- + +## 1) Configuration model + +The web service reads mirror configuration from `Excititor:Mirror` (YAML/JSON/appsettings). Each domain groups a set of exports that share rate limits and authentication rules. 
+ +```yaml +Excititor: + Mirror: + Domains: + - id: primary + displayName: Primary Mirror + requireAuthentication: false + maxIndexRequestsPerHour: 600 + maxDownloadRequestsPerHour: 1200 + exports: + - key: consensus + format: json + filters: + vulnId: CVE-2025-0001 + productKey: pkg:test/demo + sort: + createdAt: false # descending + limit: 1000 + - key: consensus-openvex + format: openvex + filters: + vulnId: CVE-2025-0001 +``` + +### Field reference + +| Field | Required | Description | +| --- | --- | --- | +| `id` | ✅ | Stable identifier. Appears in URLs (`/excititor/mirror/domains/{id}`) and download filenames. | +| `displayName` | – | Human-friendly label surfaced in the `/domains` listing. Falls back to `id`. | +| `requireAuthentication` | – | When `true` the service enforces that the caller is authenticated (Authority token). | +| `maxIndexRequestsPerHour` | – | Per-domain quota for index endpoints. `0`/negative disables the guard. | +| `maxDownloadRequestsPerHour` | – | Per-domain quota for artifact downloads. | +| `exports` | ✅ | Collection of export projections. | + +Export-level fields: + +| Field | Required | Description | +| --- | --- | --- | +| `key` | ✅ | Unique key within the domain. Used in URLs (`/exports/{key}`) and filenames. | +| `format` | ✅ | One of `json`, `jsonl`, `openvex`, `csaf`. Maps to `VexExportFormat`. | +| `filters` | – | Key/value pairs executed via `VexQueryFilter`. Keys must match export data source columns (e.g., `vulnId`, `productKey`). | +| `sort` | – | Key/boolean map (false = descending). | +| `limit`, `offset`, `view` | – | Optional query bounds passed through to the export query. | + +⚠️ **Misconfiguration:** invalid formats or missing keys cause exports to be flagged with `status` in the index response; they are not exposed downstream. + +--- + +## 2) HTTP surface + +Routes are grouped under `/excititor/mirror`. + +| Method | Path | Description | +| --- | --- | --- | +| `GET` | `/domains` | Returns configured domains with quota metadata. | +| `GET` | `/domains/{domainId}` | Domain detail (auth/quota + export keys). `404` for unknown domains. | +| `GET` | `/domains/{domainId}/index` | Lists exports with exportId, query signature, format, artifact digest, attestation metadata, and size. Applies index quota. | +| `GET` | `/domains/{domainId}/exports/{exportKey}` | Returns manifest metadata (single export). `404` if unknown/missing. | +| `GET` | `/domains/{domainId}/exports/{exportKey}/download` | Streams export content from the artifact store. Applies download quota. | + +Responses are serialized via `VexCanonicalJsonSerializer` ensuring stable ordering. Download responses include a content-disposition header naming the file `-.`. + +### Error handling + +* `401` – authentication required (`requireAuthentication=true`). +* `404` – domain/export not found or manifest not persisted. +* `429` – per-domain quota exceeded (`Retry-After` header set in seconds). +* `503` – export misconfiguration (invalid format/query). + +--- + +## 3) Rate limiting + +`MirrorRateLimiter` implements a simple rolling 1-hour window using `IMemoryCache`. Each domain has two quotas: + +* `index` scope → `maxIndexRequestsPerHour` +* `download` scope → `maxDownloadRequestsPerHour` + +`0` or negative limits disable enforcement. Quotas are best-effort (per-instance). For HA deployments, configure sticky routing at the ingress or replace the limiter with a distributed implementation. 
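+
+The behaviour can be approximated with a per-domain counter kept in `IMemoryCache`; the sketch below is a simplified fixed-window illustration, not the shipped `MirrorRateLimiter`:
+
+```csharp
+// Simplified per-domain window check (illustrative; the real limiter lives in Excititor.WebService).
+using System;
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.Caching.Memory;
+
+public sealed class MirrorQuotaSketch
+{
+    private readonly IMemoryCache _cache;
+
+    public MirrorQuotaSketch(IMemoryCache cache) => _cache = cache;
+
+    public bool TryAcquire(string domainId, string scope, int limitPerHour)
+    {
+        if (limitPerHour <= 0)
+        {
+            return true; // zero/negative limits disable enforcement
+        }
+
+        var counter = _cache.GetOrCreate($"mirror:{domainId}:{scope}", entry =>
+        {
+            entry.AbsoluteExpirationRelativeToNow = TimeSpan.FromHours(1);
+            return new StrongBox<int>(0);
+        })!;
+
+        lock (counter)
+        {
+            if (counter.Value >= limitPerHour)
+            {
+                return false; // caller responds 429 with Retry-After
+            }
+
+            counter.Value++;
+            return true;
+        }
+    }
+}
+```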
+ +--- + +## 4) Interaction with export pipeline + +Mirror endpoints consume manifests produced by the export engine (`MongoVexExportStore`). They do **not** trigger new exports. Operators must configure connectors/exporters to keep targeted exports fresh (see `EXCITITOR-EXPORT-01-005/006/007`). + +Recommended workflow: + +1. Define export plans at the export layer (JSON/OpenVEX/CSAF). +2. Configure mirror domains mapping to those plans. +3. Downstream mirror automation: + * `GET /domains/{id}/index` + * Compare `exportId` / `consensusRevision` + * `GET /download` when new + * Verify digest + attestation + +When the export team lands deterministic mirror bundles (Sprint 7 tasks 01-005/006/007), these configurations can be generated automatically. + +--- + +## 5) Operational guidance + +* Track quota utilisation via HTTP 429 metrics (configure structured logging or OTEL counters when rate limiting triggers). +* Mirror domains can be deployed per tenant (e.g., `tenant-a`, `tenant-b`) with different auth requirements. +* Ensure the underlying artifact stores (`FileSystem`, `S3`, offline bundle) retain artefacts long enough for mirrors to sync. +* For air-gapped mirrors, combine mirror endpoints with the Offline Kit (see `docs/24_OFFLINE_KIT.md`). + +--- + +## 6) Future alignment + +* Replace manual export definitions with generated mirror bundle manifests once `EXCITITOR-EXPORT-01-007` ships. +* Extend `/index` payload with quiet-provenance when `EXCITITOR-EXPORT-01-006` adds that metadata. +* Integrate domain manifests with DevOps mirror profiles (`DEVOPS-MIRROR-08-001`) so helm/compose overlays can enable or disable domains declaratively. + diff --git a/docs/ARCHITECTURE_NOTIFY.md b/docs/ARCHITECTURE_NOTIFY.md index 829d0960..0f86f847 100644 --- a/docs/ARCHITECTURE_NOTIFY.md +++ b/docs/ARCHITECTURE_NOTIFY.md @@ -36,6 +36,25 @@ src/ **Dependencies**: Authority (OpToks; DPoP/mTLS), MongoDB, Redis/NATS (bus), HTTP egress to Slack/Teams/Webhooks, SMTP relay for Email. +> **Configuration.** Notify.WebService bootstraps from `notify.yaml` (see `etc/notify.yaml.sample`). Use `storage.driver: mongo` with a production connection string; the optional `memory` driver exists only for tests. Authority settings follow the platform defaults—when running locally without Authority, set `authority.enabled: false` and supply `developmentSigningKey` so JWTs can be validated offline. + +> **Plug-ins.** All channel connectors are packaged under `/plugins/notify`. The ordered load list must start with Slack/Teams before Email/Webhook so chat-first actions are registered deterministically for Offline Kit bundles: +> +> ```yaml +> plugins: +> baseDirectory: "/var/opt/stellaops" +> directory: "plugins/notify" +> orderedPlugins: +> - StellaOps.Notify.Connectors.Slack +> - StellaOps.Notify.Connectors.Teams +> - StellaOps.Notify.Connectors.Email +> - StellaOps.Notify.Connectors.Webhook +> ``` +> +> The Offline Kit job simply copies the `plugins/notify` tree into the air-gapped bundle; the ordered list keeps connector manifests stable across environments. + +> **Authority clients.** Register two OAuth clients in StellaOps Authority: `notify-web-dev` (audience `notify.dev`) for development and `notify-web` (audience `notify`) for staging/production. Both require `notify.read` and `notify.admin` scopes and use DPoP-bound client credentials (`client_secret` in the samples). Reference entries live in `etc/authority.yaml.sample`, with placeholder secrets under `etc/secrets/notify-web*.secret.example`. 
+ --- ## 2) Responsibilities @@ -81,10 +100,32 @@ Notify subscribes to the **internal event bus** (produced by services, escaped J * `scanner.report.ready`: ```json - { "verdict":"fail|warn|pass", - "delta": { "newCritical":1, "newHigh":2, "kev":["CVE-2025-..."] }, - "topFindings":[{"purl":"pkg:rpm/openssl","vulnId":"CVE-2025-...","severity":"critical"}], - "links":{"ui":"https://ui/...","rekor":"https://rekor/..."} } + { + "reportId": "report-3def...", + "verdict": "fail", + "summary": {"total": 12, "blocked": 2, "warned": 3, "ignored": 5, "quieted": 2}, + "delta": {"newCritical": 1, "kev": ["CVE-2025-..."]}, + "links": {"ui": "https://ui/.../reports/report-3def...", "rekor": "https://rekor/..."}, + "dsse": { "...": "..." }, + "report": { "...": "..." } + } + ``` + + Payload embeds both the canonical report document and the DSSE envelope so connectors, Notify, and UI tooling can reuse the signed bytes without re-serialising. + +* `scanner.scan.completed`: + + ```json + { + "reportId": "report-3def...", + "digest": "sha256:...", + "verdict": "fail", + "summary": {"total": 12, "blocked": 2, "warned": 3, "ignored": 5, "quieted": 2}, + "delta": {"newCritical": 1, "kev": ["CVE-2025-..."]}, + "policy": {"revisionId": "rev-42", "digest": "27d2..."}, + "findings": [{"id": "finding-1", "severity": "Critical", "cve": "CVE-2025-...", "reachability": "runtime"}], + "dsse": { "...": "..." } + } ``` * `zastava.admission`: @@ -195,6 +236,8 @@ public interface INotifyConnector { ## 7) Data model (Mongo) +Canonical JSON Schemas for rules/channels/events live in `docs/notify/schemas/`. Sample payloads intended for tests/UI mock responses are captured in `docs/notify/samples/`. + **Database**: `notify` * `rules` @@ -240,6 +283,14 @@ public interface INotifyConnector { Base path: `/api/v1/notify` (Authority OpToks; scopes: `notify.admin` for write, `notify.read` for view). +*All* REST calls require the tenant header `X-StellaOps-Tenant` (matches the canonical `tenantId` stored in Mongo). Payloads are normalised via `NotifySchemaMigration` before persistence to guarantee schema version pinning. + +Authentication today is stubbed with Bearer tokens (`Authorization: Bearer `). When Authority wiring lands, this will switch to OpTok validation + scope enforcement, but the header contract will remain the same. + +Service configuration exposes `notify:auth:*` keys (issuer, audience, signing key, scope names) so operators can wire the Authority JWKS or (in dev) a symmetric test key. `notify:storage:*` keys cover Mongo URI/database/collection overrides. Both sets are required for the new API surface. + +Internal tooling can hit `/internal/notify//normalize` to upgrade legacy JSON and return canonical output used in the docs fixtures. + * **Channels** * `POST /channels` | `GET /channels` | `GET /channels/{id}` | `PATCH /channels/{id}` | `DELETE /channels/{id}` @@ -253,14 +304,18 @@ Base path: `/api/v1/notify` (Authority OpToks; scopes: `notify.admin` for write, * **Deliveries** - * `GET /deliveries?tenant=...&since=...` → list + * `POST /deliveries` → ingest worker delivery state (idempotent via `deliveryId`). 
+ * `GET /deliveries?since=...&status=...&limit=...` → list (most recent first) * `GET /deliveries/{id}` → detail (redacted body + metadata) - * `POST /deliveries/{id}/retry` → force retry (admin) + * `POST /deliveries/{id}/retry` → force retry (admin, future sprint) * **Admin** * `GET /stats` (per tenant counts, last hour/day) * `GET /healthz|readyz` (liveness) + * `POST /locks/acquire` | `POST /locks/release` – worker coordination primitives (short TTL). + * `POST /digests` | `GET /digests/{actionKey}` | `DELETE /digests/{actionKey}` – manage open digest windows. + * `POST /audit` | `GET /audit?since=&limit=` – append/query structured audit trail entries. **Ingestion**: workers do **not** expose public ingestion; they **subscribe** to the internal bus. (Optional `/events/test` for integration testing, admin‑only.) diff --git a/docs/ARCHITECTURE_SCANNER.md b/docs/ARCHITECTURE_SCANNER.md index a296db3e..fae81829 100644 --- a/docs/ARCHITECTURE_SCANNER.md +++ b/docs/ARCHITECTURE_SCANNER.md @@ -190,6 +190,12 @@ When `scanner.events.enabled = true`, the WebService serialises the signed repor * **rpm**: `/var/lib/rpm/Packages` (via librpm or parser) * Record `name`, `version` (epoch/revision), `arch`, source package where present, and **declared file lists**. +> **Data flow note:** Each OS analyzer now writes its canonical output into the shared `ScanAnalysisStore` under +> `analysis.os.packages` (raw results), `analysis.os.fragments` (per-analyzer layer fragments), and contributes to +> `analysis.layers.fragments` (the aggregated view consumed by emit/diff pipelines). Helpers in +> `ScanAnalysisCompositionBuilder` convert these fragments into SBOM composition requests and component graphs so the +> diff/emit stages no longer reach back into individual analyzer implementations. + **B) Language ecosystems (installed state only)** * **Java**: `META-INF/maven/*/pom.properties`, MANIFEST → `pkg:maven/...` @@ -206,6 +212,9 @@ When `scanner.events.enabled = true`, the WebService serialises the signed repor * **ELF**: parse `PT_INTERP`, `DT_NEEDED`, RPATH/RUNPATH, **GNU symbol versions**; map **SONAMEs** to file paths; link executables → libs. * **PE/Mach‑O** (planned M2): import table, delay‑imports; version resources; code signatures. * Map libs back to **OS packages** if possible (via file lists); else emit `bin:{sha256}` components. +* The exported metadata (`stellaops.os.*` properties, license list, source package) feeds policy scoring and export pipelines + directly – Policy evaluates quiet rules against package provenance while Exporters forward the enriched fields into + downstream JSON/Trivy payloads. **D) EntryTrace (ENTRYPOINT/CMD → terminal program)** diff --git a/docs/EXCITITOR_SCORRING.md b/docs/EXCITITOR_SCORRING.md index f6ca89da..61f0eb02 100644 --- a/docs/EXCITITOR_SCORRING.md +++ b/docs/EXCITITOR_SCORRING.md @@ -23,6 +23,27 @@ Safeguards: freeze boosts when product identity is unknown, clamp outputs ≥0, | **Phase 2 – Deterministic score engine** | Implement a scoring component that executes alongside consensus and persists score envelopes with hashes. | Planned task `EXCITITOR-CORE-02-002` (backlog). | | **Phase 3 – Surfacing & enforcement** | Expose scores via WebService/CLI, integrate with Concelier noise priors, and enforce policy-based suppressions. | To be scheduled after Phase 2. 
| +## Policy controls (Phase 1) + +Operators tune scoring inputs through the Excititor policy document: + +```yaml +excititor: + policy: + weights: + vendor: 1.10 # per-tier weight + ceiling: 1.40 # max clamp applied to tiers and overrides (1.0‒5.0) + providerOverrides: + trusted.vendor: 1.35 + scoring: + alpha: 0.30 # KEV boost coefficient (defaults to 0.25) + beta: 0.60 # EPSS boost coefficient (defaults to 0.50) +``` + +* All weights (tiers + overrides) are clamped to `[0, weights.ceiling]` with structured warnings when a value is out of range or not a finite number. +* `weights.ceiling` itself is constrained to `[1.0, 5.0]`, preserving prior behaviour when omitted. +* `scoring.alpha` / `scoring.beta` accept non-negative values up to 5.0; values outside the range fall back to defaults and surface diagnostics to operators. + ## Data model (after Phase 1) ```json diff --git a/docs/README.md b/docs/README.md index 2fefd3d8..04024f20 100755 --- a/docs/README.md +++ b/docs/README.md @@ -38,7 +38,8 @@ Everything here is open‑source and versioned — when you check out a git ta - **08 – Module Architecture Dossiers** - [Scanner](ARCHITECTURE_SCANNER.md) - [Concelier](ARCHITECTURE_CONCELIER.md) - - [Excititor](ARCHITECTURE_EXCITITOR.md) + - [Excititor](ARCHITECTURE_EXCITITOR.md) + - [Excititor Mirrors](ARCHITECTURE_EXCITITOR_MIRRORS.md) - [Signer](ARCHITECTURE_SIGNER.md) - [Attestor](ARCHITECTURE_ATTESTOR.md) - [Authority](ARCHITECTURE_AUTHORITY.md) @@ -52,6 +53,7 @@ Everything here is open‑source and versioned — when you check out a git ta - **10 – [Plug‑in SDK Guide](10_PLUGIN_SDK_GUIDE.md)** - **10 – [Concelier CLI Quickstart](10_CONCELIER_CLI_QUICKSTART.md)** - **10 – [BuildX Generator Quickstart](dev/BUILDX_PLUGIN_QUICKSTART.md)** +- **10 – [Scanner Cache Configuration](dev/SCANNER_CACHE_CONFIGURATION.md)** - **30 – [Excititor Connector Packaging Guide](dev/30_EXCITITOR_CONNECTOR_GUIDE.md)** - **30 – Developer Templates** - [Excititor Connector Skeleton](dev/templates/excititor-connector/) diff --git a/docs/TASKS.md b/docs/TASKS.md index 118b1184..ad2480e8 100644 --- a/docs/TASKS.md +++ b/docs/TASKS.md @@ -3,7 +3,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | DOC7.README-INDEX | DONE (2025-10-17) | Docs Guild | — | Refresh index docs (docs/README.md + root README) after architecture dossier split and Offline Kit overhaul. | ✅ ToC reflects new component architecture docs; ✅ root README highlights updated doc set; ✅ Offline Kit guide linked correctly. | -| DOC4.AUTH-PDG | REVIEW | Docs Guild, Plugin Team | PLG6.DOC | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | ✅ PR merged with polish; ✅ Diagram committed; ✅ Slack handoff posted. | +| DOC4.AUTH-PDG | DONE (2025-10-19) | Docs Guild, Plugin Team | PLG6.DOC | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | ✅ PR merged with polish; ✅ Diagram committed; ✅ Slack handoff posted. | | DOC1.AUTH | DONE (2025-10-12) | Docs Guild, Authority Core | CORE5B.DOC | Draft `docs/11_AUTHORITY.md` covering architecture, configuration, bootstrap flows. | ✅ Architecture + config sections approved by Core; ✅ Samples reference latest options; ✅ Offline note added. 
| | DOC3.Concelier-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Concelier authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. | | DOC5.Concelier-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Concelier-Authority | Produce dedicated Concelier authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. | @@ -11,6 +11,10 @@ | FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Concelier Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/concelier-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. | | DOCS-ADR-09-001 | DONE (2025-10-19) | Docs Guild, DevEx | — | Establish ADR process (`docs/adr/0000-template.md`) and document usage guidelines. | Template published; README snippet linking ADR process; announcement posted (`docs/updates/2025-10-18-docs-guild.md`). | | DOCS-EVENTS-09-002 | DONE (2025-10-19) | Docs Guild, Platform Events | SCANNER-EVENTS-15-201 | Publish event schema catalog (`docs/events/`) for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`. | Schemas validated (Ajv CI hooked); docs/events/README summarises usage; Platform Events notified via `docs/updates/2025-10-18-docs-guild.md`. | +| DOCS-EVENTS-09-003 | DONE (2025-10-19) | Docs Guild | DOCS-EVENTS-09-002 | Add human-readable envelope field references and canonical payload samples for published events, including offline validation workflow. | Tables explain common headers/payload segments; versioned sample payloads committed; README links to validation instructions and samples. | +| DOCS-EVENTS-09-004 | DONE (2025-10-19) | Docs Guild, Scanner WebService | SCANNER-EVENTS-15-201 | Refresh scanner event docs to mirror DSSE-backed report fields, document `scanner.scan.completed`, and capture canonical sample validation. | Schemas updated for new payload shape; README references DSSE reuse and validation test; samples align with emitted events. | +| PLATFORM-EVENTS-09-401 | DONE (2025-10-19) | Platform Events Guild | DOCS-EVENTS-09-003 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | Notify/Scheduler contract suites exercise samples; CI job validates samples with `ajv-cli`; Platform Events changelog notes coverage. | +| RUNTIME-GUILD-09-402 | DONE (2025-10-19) | Runtime Guild | SCANNER-POLICY-09-107 | Confirm Scanner WebService surfaces `quietedFindingCount` and progress hints to runtime consumers; document readiness checklist. | Runtime verification run captures enriched payload; checklist/doc updates merged; stakeholders acknowledge availability. | | DOCS-RUNTIME-17-004 | TODO | Docs Guild, Runtime Guild | SCANNER-EMIT-17-701, ZASTAVA-OBS-17-005, DEVOPS-REL-17-002 | Document build-id workflows: SBOM exposure, runtime event payloads, debug-store layout, and operator guidance for symbol retrieval. | Architecture + operator docs updated with build-id sections, examples show `readelf` output + debuginfod usage, references linked from Offline Kit/Release guides. 
| > Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`. diff --git a/docs/artifacts/bom-index/README.md b/docs/artifacts/bom-index/README.md new file mode 100644 index 00000000..b5e1120e --- /dev/null +++ b/docs/artifacts/bom-index/README.md @@ -0,0 +1,50 @@ +# StellaOps BOM Index (`bom-index@1`) + +The BOM index is a deterministic, offline-friendly sidecar that accelerates queries for +layer-to-component membership and entrypoint usage. It is emitted alongside CycloneDX +SBOMs and consumed by Scheduler/Notify services. + +## File Layout + +Binary little-endian encoding, organised as the following sections: + +1. **Header** + - `magic` (`byte[7]`): ASCII `"BOMIDX1"` identifier. + - `version` (`uint16`): current value `1`. + - `flags` (`uint16`): bit `0` set when entrypoint usage bitmaps are present. + - `imageDigestLength` (`uint16`) + UTF-8 digest string (e.g. `sha256:...`). + - `generatedAt` (`int64`): microseconds since Unix epoch. + - `layerCount` (`uint32`), `componentCount` (`uint32`), `entrypointCount` (`uint32`). + +2. **Layer Table** + - For each layer: `length` (`uint16`) + UTF-8 layer digest (canonical order, base image → top layer). + +3. **Component Table** + - For each component: `length` (`uint16`) + UTF-8 identity (CycloneDX purl when available, otherwise canonical key). + +4. **Component ↦ Layer Bitmaps** + - For each component (matching table order): + - `bitmapLength` (`uint32`). + - Roaring bitmap payload (`Collections.Special.RoaringBitmap.Serialize`) encoding layer indexes that introduce or retain the component. + +5. **Entrypoint Table** *(optional; present when `flags & 0x1 == 1`)* + - For each unique entrypoint/launcher string: `length` (`uint16`) + UTF-8 value (sorted ordinally). + +6. **Component ↦ Entrypoint Bitmaps** *(optional)* + - For each component: roaring bitmap whose set bits reference entrypoint indexes used by EntryTrace. Empty bitmap (`length == 0`) indicates the component is not part of any resolved entrypoint closure. + +## Determinism Guarantees + +* Layer, component, and entrypoint tables are strictly ordered (base → top layer, lexicographically for components and entrypoints). +* Roaring bitmaps are optimised prior to serialisation and always produced from sorted indexes. +* Header timestamp is normalised to microsecond precision using UTC. + +## Sample + +`sample-index.bin` is generated from the integration fixture used in unit tests. It contains: + +* 2 layers: `sha256:layer1`, `sha256:layer2`. +* 3 components: `pkg:npm/a`, `pkg:npm/b`, `pkg:npm/c`. +* Entrypoint bitmaps for `/app/start.sh` and `/app/init.sh`. + +The sample can be decoded with the `BomIndexBuilder` unit tests or any RoaringBitmap implementation compatible with `Collections.Special.RoaringBitmap`. diff --git a/docs/artifacts/bom-index/sample-index.bin b/docs/artifacts/bom-index/sample-index.bin new file mode 100644 index 00000000..95af48b5 Binary files /dev/null and b/docs/artifacts/bom-index/sample-index.bin differ diff --git a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md index 6d4b9be4..ac276b6a 100644 --- a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md +++ b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md @@ -5,12 +5,12 @@ ## 1. Overview Authority plug-ins extend the **StellaOps Authority** service with custom identity providers, credential stores, and client-management logic. 
Unlike Concelier plug-ins (which ingest or export advisories), Authority plug-ins participate directly in authentication flows: -- **Use cases:** integrate corporate directories (LDAP/AD), delegate to external IDPs, enforce bespoke password/lockout policies, or add client provisioning automation. -- **Constraints:** plug-ins load only during service start (no hot-reload), must function without outbound internet access, and must emit deterministic results for identical configuration and input data. -- **Ship targets:** target the same .NET 10 preview as the host, honour offline-first requirements, and provide clear diagnostics so operators can triage issues from `/ready`. +- **Use cases:** integrate corporate directories (LDAP/AD)[^ldap-rfc], delegate to external IDPs, enforce bespoke password/lockout policies, or add client provisioning automation. +- **Constraints:** plug-ins load only during service start (no hot-reload), must function without outbound internet access, and must emit deterministic results for identical configuration input. +- **Ship targets:** build against the host’s .NET 10 preview SDK, honour offline-first requirements, and surface actionable diagnostics so operators can triage issues from `/ready`. ## 2. Architecture Snapshot -Authority hosts follow a deterministic plug-in lifecycle. The flow below can be rendered as a sequence diagram in the final authored documentation, but all touchpoints are described here for offline viewers: +Authority hosts follow a deterministic plug-in lifecycle. The exported diagram (`docs/assets/authority/authority-plugin-lifecycle.svg`) mirrors the steps below; regenerate it from the Mermaid source if you update the flow. 1. **Configuration load** – `AuthorityPluginConfigurationLoader` resolves YAML manifests under `etc/authority.plugins/`. 2. **Assembly discovery** – the shared `PluginHost` scans `PluginBinaries/Authority` for `StellaOps.Authority.Plugin.*.dll` assemblies. @@ -199,6 +199,8 @@ _Source:_ `docs/assets/authority/authority-rate-limit-flow.mmd` - Document any external prerequisites (e.g., CA cert bundle) in your plug-in README. - Update `etc/authority.plugins/.yaml` samples and include deterministic SHA256 hashes for optional bootstrap payloads when distributing Offline Kit artefacts. +[^ldap-rfc]: Lightweight Directory Access Protocol (LDAPv3) specification — [RFC 4511](https://datatracker.ietf.org/doc/html/rfc4511). + ## 12. Checklist & Handoff - ✅ Capabilities declared and validated in automated tests. - ✅ Bootstrap workflows documented (if `bootstrap` capability used) and repeatable. diff --git a/docs/dev/BUILDX_PLUGIN_QUICKSTART.md b/docs/dev/BUILDX_PLUGIN_QUICKSTART.md index 91c5c104..dadcb2dd 100644 --- a/docs/dev/BUILDX_PLUGIN_QUICKSTART.md +++ b/docs/dev/BUILDX_PLUGIN_QUICKSTART.md @@ -20,8 +20,10 @@ dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbome -o out/buildx ``` -- `out/buildx/` now contains `StellaOps.Scanner.Sbomer.BuildXPlugin.dll` and the manifest `stellaops.sbom-indexer.manifest.json`. -- `plugins/scanner/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin/` receives the same artefacts for release packaging. +- `out/buildx/` now contains `StellaOps.Scanner.Sbomer.BuildXPlugin.dll` and the manifest `stellaops.sbom-indexer.manifest.json`. +- `plugins/scanner/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin/` receives the same artefacts for release packaging. 
+- The CI pipeline also tars and signs (SHA-256 manifest) the OS analyzer plug-ins located under + `plugins/scanner/analyzers/os/` so they ship alongside the BuildX generator artefacts. ## 3. Verify the CAS handshake diff --git a/docs/dev/SCANNER_CACHE_CONFIGURATION.md b/docs/dev/SCANNER_CACHE_CONFIGURATION.md new file mode 100644 index 00000000..64895a39 --- /dev/null +++ b/docs/dev/SCANNER_CACHE_CONFIGURATION.md @@ -0,0 +1,108 @@ +# Scanner Cache Configuration Guide + +The scanner cache stores layer-level SBOM fragments and file content that can be reused across scans. This document explains how to configure and operate the cache subsystem introduced in Sprint 10 (Group SP10-G5). + +## 1. Overview + +- **Layer cache** persists SBOM fragments per layer digest under `/layers//` with deterministic metadata (`meta.json`). +- **File CAS** (content-addressable store) keeps deduplicated blobs (e.g., analyzer fixtures, imported SBOM layers) under `/cas///`. +- **Maintenance** runs via `ScannerCacheMaintenanceService`, evicting expired entries and compacting the cache to stay within size limits. +- **Metrics** emit on the `StellaOps.Scanner.Cache` meter with counters for hits, misses, evictions, and byte histograms. +- **Offline workflows** use the CAS import/export helpers to package cache warmups inside the Offline Kit. + +## 2. Configuration keys (`scanner:cache`) + +| Key | Default | Description | +| --- | --- | --- | +| `enabled` | `true` | Globally disable cache if `false`. | +| `rootPath` | `cache/scanner` | Base directory for cache data. Use an SSD-backed path for best warm-scan latency. | +| `layersDirectoryName` | `layers` | Subdirectory for layer cache entries. | +| `fileCasDirectoryName` | `cas` | Subdirectory for file CAS entries. | +| `layerTtl` | `45.00:00:00` | Time-to-live for layer cache entries (`TimeSpan`). `0` disables TTL eviction. | +| `fileTtl` | `30.00:00:00` | Time-to-live for CAS entries. `0` disables TTL eviction. | +| `maxBytes` | `5368709120` (5 GiB) | Hard cap for combined cache footprint. Compaction trims data back to `warmBytesThreshold`. | +| `warmBytesThreshold` | `maxBytes / 5` | Target size after compaction. | +| `coldBytesThreshold` | `maxBytes * 0.8` | Upper bound that triggers compaction. | +| `enableAutoEviction` | `true` | If `false`, callers must invoke `ILayerCacheStore.CompactAsync` / `IFileContentAddressableStore.CompactAsync` manually. | +| `maintenanceInterval` | `00:15:00` | Interval for the maintenance hosted service. | +| `enableFileCas` | `true` | Disable to prevent CAS usage (APIs throw on `PutAsync`). | +| `importDirectory` / `exportDirectory` | `null` | Optional defaults for offline import/export tooling. | + +> **Tip:** configure `scanner:cache:rootPath` to a dedicated volume and mount it into worker containers when running in Kubernetes or Nomad. + +## 3. Metrics + +Instrumentation lives in `ScannerCacheMetrics` on meter `StellaOps.Scanner.Cache`. + +| Instrument | Unit | Description | +| --- | --- | --- | +| `scanner.layer_cache_hits_total` | count | Layer cache hit counter. Tag: `layer`. | +| `scanner.layer_cache_misses_total` | count | Layer cache miss counter. Tag: `layer`. | +| `scanner.layer_cache_evictions_total` | count | Layer entries evicted due to TTL or compaction. Tag: `layer`. | +| `scanner.layer_cache_bytes` | bytes | Histogram of per-entry payload size when stored. | +| `scanner.file_cas_hits_total` | count | File CAS hit counter. Tag: `sha256`. | +| `scanner.file_cas_misses_total` | count | File CAS miss counter. 
Tag: `sha256`. | +| `scanner.file_cas_evictions_total` | count | CAS eviction counter. Tag: `sha256`. | +| `scanner.file_cas_bytes` | bytes | Histogram of CAS payload sizes on insert. | + +## 4. Import / Export workflow + +1. **Export warm cache** + ```bash + dotnet tool run stellaops-cache export --destination ./offline-kit/cache + ``` + Internally this calls `IFileContentAddressableStore.ExportAsync` which copies each CAS entry (metadata + `content.bin`). + +2. **Import on air-gapped hosts** + ```bash + dotnet tool run stellaops-cache import --source ./offline-kit/cache + ``` + The import API merges newer metadata and skips older snapshots automatically. + +3. **Layer cache seeding** + Layer cache entries are deterministic and can be packaged the same way (copy `/layers`). For now we keep seeding optional because layers are larger; follow-up tooling can compress directories as needed. + +## 5. Hosted maintenance loop + +`ScannerCacheMaintenanceService` runs as a background service within Scanner Worker or WebService hosts when `AddScannerCache` is registered. Behaviour: + +- At startup it performs an immediate eviction/compaction run. +- Every `maintenanceInterval` it triggers: + - `ILayerCacheStore.EvictExpiredAsync` + - `ILayerCacheStore.CompactAsync` + - `IFileContentAddressableStore.EvictExpiredAsync` + - `IFileContentAddressableStore.CompactAsync` +- Failures are logged at `Error` with preserved stack traces; the next tick continues normally. + +Set `enableAutoEviction=false` when hosting the cache inside ephemeral build pipelines that want to drive eviction explicitly. + +## 6. API surface summary + +```csharp +public interface ILayerCacheStore +{ + ValueTask TryGetAsync(string layerDigest, CancellationToken ct = default); + Task PutAsync(LayerCachePutRequest request, CancellationToken ct = default); + Task RemoveAsync(string layerDigest, CancellationToken ct = default); + Task EvictExpiredAsync(CancellationToken ct = default); + Task CompactAsync(CancellationToken ct = default); + Task OpenArtifactAsync(string layerDigest, string artifactName, CancellationToken ct = default); +} + +public interface IFileContentAddressableStore +{ + ValueTask TryGetAsync(string sha256, CancellationToken ct = default); + Task PutAsync(FileCasPutRequest request, CancellationToken ct = default); + Task RemoveAsync(string sha256, CancellationToken ct = default); + Task EvictExpiredAsync(CancellationToken ct = default); + Task CompactAsync(CancellationToken ct = default); + Task ExportAsync(string destinationDirectory, CancellationToken ct = default); + Task ImportAsync(string sourceDirectory, CancellationToken ct = default); +} +``` + +Register both stores via `services.AddScannerCache(configuration);` in WebService or Worker hosts. + +--- + +_Last updated: 2025-10-19_ diff --git a/docs/dev/authority-dpop-mtls-plan.md b/docs/dev/authority-dpop-mtls-plan.md new file mode 100644 index 00000000..483978a4 --- /dev/null +++ b/docs/dev/authority-dpop-mtls-plan.md @@ -0,0 +1,140 @@ +# Authority DPoP & mTLS Implementation Plan (2025-10-19) + +## Purpose +- Provide the implementation blueprint for AUTH-DPOP-11-001 and AUTH-MTLS-11-002. +- Unify sender-constraint validation across Authority, downstream services, and clients. +- Capture deterministic, testable steps that unblock UI/Signer guilds depending on DPoP/mTLS hardening. + +## Scope +- Token endpoint validation, issuance, and storage changes inside `StellaOps.Authority`. +- Shared security primitives consumed by Authority, Scanner, Signer, CLI, and UI. 
+- Operator-facing configuration, auditing, and observability. +- Out of scope: PoE enforcement (Signer) and CLI/UI client UX; those teams consume the new capabilities. + +## Design Summary +- Extract the existing Scanner `DpopProofValidator` stack into a shared `StellaOps.Auth.Security` library used by Authority and resource servers. +- Extend Authority configuration (`authority.yaml`) with strongly-typed `senderConstraints.dpop` and `senderConstraints.mtls` sections (map to sample already shown in architecture doc). +- Require DPoP proofs on `/token` when the registered client policy is `senderConstraint=dpop`; bind issued access tokens via `cnf.jkt`. +- Introduce Authority-managed nonce issuance for “high value” audiences (default: `signer`, `attestor`) with Redis-backed persistence and deterministic auditing. +- Enable OAuth 2.0 mTLS (RFC 8705) by storing certificate bindings per client, requesting client certificates at TLS termination, and stamping `cnf.x5t#S256` into issued tokens plus introspection output. +- Surface structured logs and counters for both DPoP and mTLS flows; provide integration tests that cover success, replay, invalid proof, and certificate mismatch cases. + +## AUTH-DPOP-11-001 — Proof Validation & Nonce Handling + +**Shared validator** +- Move `DpopProofValidator`, option types, and replay cache interfaces from `StellaOps.Scanner.Core` into a new assembly `StellaOps.Auth.Security`. +- Provide pluggable caches: `InMemoryDpopReplayCache` (existing) and new `RedisDpopReplayCache` (leveraging the Authority Redis connection). +- Ensure the validator exposes the validated `SecurityKey`, `jti`, and `iat` so Authority can construct the `cnf` claim and compute nonce expiry. + +**Configuration model** +- Extend `StellaOpsAuthorityOptions.Security` with a `SenderConstraints` property containing: + - `Dpop` (`enabled`, `allowedAlgorithms`, `maxAgeSeconds`, `clockSkewSeconds`, `replayWindowSeconds`, `nonce` settings with `enabled`, `ttlSeconds`, `requiredAudiences`, `maxIssuancePerMinute`). + - `Mtls` (`enabled`, `requireChainValidation`, `clientCaBundle`, `allowedSubjectPatterns`, `allowedSanTypes`). +- Bind from YAML (`authority.security.senderConstraints.*`) while preserving backwards compatibility (defaults keep both disabled). + +**Token endpoint pipeline** +- Introduce a scoped OpenIddict handler `ValidateDpopProofHandler` inserted before `ValidateClientCredentialsHandler`. +- Determine the required sender constraint from client metadata: + - Add `AuthorityClientMetadataKeys.SenderConstraint` storing `dpop` or `mtls`. + - Optionally allow per-client overrides for nonce requirement. +- When `dpop` is required: + - Read the `DPoP` header from the ASP.NET request, reject with `invalid_token` + `WWW-Authenticate: DPoP error="invalid_dpop_proof"` if absent. + - Call the shared validator with method/URI. Enforce algorithm allowlist and `iat` window from options. + - Persist the `jkt` thumbprint plus replay cache state in the OpenIddict transaction (`AuthorityOpenIddictConstants.DpopKeyThumbprintProperty`, `DpopIssuedAtProperty`). + - When the requested audience intersects `SenderConstraints.Dpop.Nonce.RequiredAudiences`, require `nonce` in the proof; on first failure respond with HTTP 401, `error="use_dpop_nonce"`, and include `DPoP-Nonce` header (see nonce note below). Cache the rejection reason for audit logging. + +**Nonce service** +- Add `IDpopNonceStore` with methods `IssueAsync(audience, clientId, jkt)` and `TryConsumeAsync(nonce, audience, clientId, jkt)`. 
+- Default implementation `RedisDpopNonceStore` storing SHA-256 hashes of nonces keyed by `audience:clientId:jkt`. TTL comes from `SenderConstraints.Dpop.Nonce.Ttl`. +- Create helper `DpopNonceIssuer` used by `ValidateDpopProofHandler` to issue nonces when missing/expired, enforcing issuance rate limits (per options) and tagging audit/log records. +- On successful validation (nonce supplied and consumed) stamp metadata into the transaction for auditing. +- Update `ClientCredentialsHandlers` to observe nonce enforcement: when a nonce challenge was sent, emit structured audit with `nonce_issued`, `audiences`, and `retry`. + +**Token issuance** +- In `HandleClientCredentialsHandler`, if the transaction contains a validated DPoP key: + - Build `cnf.jkt` using thumbprint from validator. + - Include `auth_time`/`dpop_jti` as needed for diagnostics. + - Persist the thumbprint alongside token metadata in Mongo (extend `AuthorityTokenDocument` with `SenderConstraint`, `KeyThumbprint`, `Nonce` fields). + +**Auditing & observability** +- Emit new audit events: + - `authority.dpop.proof.validated` (success/failure, clientId, audience, thumbprint, nonce status, jti). + - `authority.dpop.nonce.issued` and `authority.dpop.nonce.consumed`. +- Metrics (Prometheus style): + - `authority_dpop_validations_total{result,reason}`. + - `authority_dpop_nonce_issued_total{audience}` and `authority_dpop_nonce_fails_total{reason}`. +- Structured logs include `authority.sender_constraint=dpop`, `authority.dpop_thumbprint`, `authority.dpop_nonce`. + +**Testing** +- Unit tests for the handler pipeline using fake OpenIddict transactions. +- Replay/nonce tests with in-memory and Redis stores. +- Integration tests in `StellaOps.Authority.Tests` covering: + - Valid DPoP proof issuing `cnf.jkt`. + - Missing header → challenge with nonce. + - Replayed `jti` rejected. + - Invalid nonce rejected even after issuance. +- Contract tests to ensure `/.well-known/openid-configuration` advertises `dpop_signing_alg_values_supported` and `dpop_nonce_supported` when enabled. + +## AUTH-MTLS-11-002 — Certificate-Bound Tokens + +**Configuration model** +- Reuse `SenderConstraints.Mtls` described above; include: + - `enforceForAudiences` list (defaults `signer`, `attestor`, `scheduler`). + - `certificateRotationGraceSeconds` for overlap. + - `allowedClientCertificateAuthorities` absolute paths. + +**Kestrel/TLS pipeline** +- Configure Kestrel with `ClientCertificateMode.AllowCertificate` globally and implement middleware that enforces certificate presence only when the resolved client requires mTLS. +- Add `IAuthorityClientCertificateValidator` that validates presented certificate chain, SANs (`dns`, `uri`, optional SPIFFE), and thumbprint matches one of the stored bindings. +- Cache validation results per connection id to avoid rehashing on every request. + +**Client registration & storage** +- Extend `AuthorityClientDocument` with `List` containing: + - `Thumbprint`, `SerialNumber`, `Subject`, `NotBefore`, `NotAfter`, `Sans`, `CreatedAt`, `UpdatedAt`, `Label`. +- Provide admin API mutations (`/admin/clients/{id}/certificates`) for ops tooling (deferred implementation but schema ready). +- Update plugin provisioning store (`StandardClientProvisioningStore`) to map descriptors with certificate bindings and `senderConstraint`. +- Persist binding state in Mongo migrations (index on `{clientId, thumbprint}`). + +**Token issuance & introspection** +- Add a transaction property capturing the validated client certificate thumbprint. 
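+For the stamping step below, note that `x5t#S256` is not a legal C# identifier, so the confirmation claim cannot be built from an anonymous type; serialising a dictionary keeps the member name intact (sketch, assuming the SHA-256 thumbprint is already base64url-encoded per RFC 8705):
+
+```csharp
+using System.Collections.Generic;
+using System.Text.Json;
+
+// Builds the JSON body of the cnf claim for a certificate-bound token (sketch).
+static string BuildMtlsConfirmationClaim(string certificateThumbprintSha256) =>
+    JsonSerializer.Serialize(new Dictionary<string, string>
+    {
+        ["x5t#S256"] = certificateThumbprintSha256,
+    });
+```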
+- `HandleClientCredentialsHandler`: + - When mTLS required, ensure certificate info present; reject otherwise. + - Stamp `cnf` claim: `principal.SetClaim("cnf", JsonSerializer.Serialize(new { x5t#S256 = thumbprint }))`. + - Store binding metadata in issued token document for audit. +- Update `ValidateAccessTokenHandler` and introspection responses to surface `cnf.x5t#S256`. +- Ensure refresh tokens (if ever enabled) copy the binding data. + +**Auditing & observability** +- Audit events: + - `authority.mtls.handshake` (success/failure, clientId, thumbprint, issuer, subject). + - `authority.mtls.binding.missing` when a required client posts without a cert. +- Metrics: + - `authority_mtls_handshakes_total{result}`. + - `authority_mtls_certificate_rotations_total`. +- Logs include `authority.sender_constraint=mtls`, `authority.mtls_thumbprint`, `authority.mtls_subject`. + +**Testing** +- Unit tests for certificate validation rules (SAN mismatches, expiry, CA trust). +- Integration tests running Kestrel with test certificates: + - Successful token issuance with bound certificate. + - Request without certificate → `invalid_client`. + - Token introspection reveals `cnf.x5t#S256`. + - Rotation scenario (old + new cert allowed during grace window). + +## Implementation Checklist + +**DPoP work-stream** +1. Extract shared validator into `StellaOps.Auth.Security`; update Scanner references. +2. Introduce configuration classes and bind from YAML/environment. +3. Implement nonce store (Redis + in-memory), handler integration, and OpenIddict transaction plumbing. +4. Stamp `cnf.jkt`, audit events, and metrics; update Mongo documents and migrations. +5. Extend docs: `docs/ARCHITECTURE_AUTHORITY.md`, `docs/security/audit-events.md`, `docs/security/rate-limits.md`, CLI/UI references. + +**mTLS work-stream** +1. Extend client document/schema and provisioning stores with certificate bindings + sender constraint flag. +2. Configure Kestrel/middleware for optional client certificates and validation service. +3. Update token issuance/introspection to honour certificate bindings and emit `cnf.x5t#S256`. +4. Add auditing/metrics and integration tests (happy path + failure). +5. Refresh operator documentation (`docs/ops/authority-backup-restore.md`, `docs/ops/authority-monitoring.md`, sample `authority.yaml`) to cover certificate lifecycle. + +Both streams should conclude with `dotnet test src/StellaOps.Authority.sln` and documentation cross-links so dependent guilds can unblock UI/Signer work. diff --git a/docs/events/README.md b/docs/events/README.md index bd3deb37..080641d2 100644 --- a/docs/events/README.md +++ b/docs/events/README.md @@ -3,12 +3,47 @@ Platform services publish strongly typed events; the JSON Schemas in this directory define those envelopes. File names follow `@.json` so producers and consumers can negotiate contracts explicitly. ## Catalog -- `scanner.report.ready@1.json` — emitted by Scanner.WebService once a signed report is persisted. Consumers: Notify, UI timeline. +- `scanner.report.ready@1.json` — emitted by Scanner.WebService once a signed report is persisted (payload embeds the canonical report plus DSSE envelope). Consumers: Notify, UI timeline. +- `scanner.scan.completed@1.json` — emitted alongside the signed report to capture scan outcomes/summary data for downstream automation. Consumers: Notify, Scheduler backfills, UI timelines. - `scheduler.rescan.delta@1.json` — emitted by Scheduler when BOM-Index diffs require fresh scans. Consumers: Notify, Policy Engine. 
- `attestor.logged@1.json` — emitted by Attestor after storing the Rekor inclusion proof. Consumers: UI attestation panel, Governance exports. Additive payload changes (new optional fields) can stay within the same version. Any breaking change (removing a field, tightening validation, altering semantics) must increment the `@` suffix and update downstream consumers. +## Envelope structure +All event envelopes share the same deterministic header. Use the following table as the quick reference when emitting or parsing events: + +| Field | Type | Notes | +|-------|------|-------| +| `eventId` | `uuid` | Must be globally unique per occurrence; producers log duplicates as fatal. | +| `kind` | `string` | Fixed per schema (e.g., `scanner.report.ready`). Downstream services reject unknown kinds or versions. | +| `tenant` | `string` | Multi‑tenant isolation key; mirror the value recorded in queue/Mongo metadata. | +| `ts` | `date-time` | RFC 3339 UTC timestamp. Use monotonic clocks or atomic offsets so ordering survives retries. | +| `scope` | `object` | Optional block used when the event concerns a specific image or repository. See schema for required fields (e.g., `repo`, `digest`). | +| `payload` | `object` | Event-specific body. Schemas allow additional properties so producers can add optional hints (e.g., `reportId`, `quietedFindingCount`) without breaking consumers. For scanner events, payloads embed both the canonical report document and the DSSE envelope so consumers can reuse signatures without recomputing them. See `docs/runtime/SCANNER_RUNTIME_READINESS.md` for the runtime consumer checklist covering these hints. | + +When adding new optional fields, document the behaviour in the schema’s `description` block and update the consumer checklist in the next sprint sync. + +## Canonical samples & validation +Reference payloads live under `docs/events/samples/`, mirroring the schema version (`@.sample.json`). They illustrate common field combinations, including the optional attributes that downstream teams rely on for UI affordances and audit trails. Scanner samples reuse the exact DSSE envelope checked into `samples/api/reports/report-sample.dsse.json`, and a unit test (`ReportSamplesTests`) guards that the payload/base64 remain canonical. + +Run the following loop offline to validate both schemas and samples: + +```bash +# Validate schemas (same check as CI) +for schema in docs/events/*.json; do + npx ajv compile -c ajv-formats -s "$schema" +done + +# Validate canonical samples against their schemas +for sample in docs/events/samples/*.sample.json; do + schema="docs/events/$(basename "${sample%.sample.json}").json" + npx ajv validate -c ajv-formats -s "$schema" -d "$sample" +done +``` + +Consumers can copy the samples into integration tests to guarantee backwards compatibility. When emitting new event versions, include a matching sample and update this README so air-gapped operators stay in sync. + ## CI validation The Docs CI workflow (`.gitea/workflows/docs.yml`) installs `ajv-cli` and compiles every schema on pull requests. Run the same check locally before opening a PR: @@ -25,6 +60,6 @@ If a schema references additional files, include `-r` flags so CI and local runs ## Working with schemas - Producers should validate outbound payloads using the matching schema during unit tests. - Consumers should pin to a specific version and log when encountering unknown versions to catch missing migrations early. 
-- Store real payload samples under `samples/events/` (mirrors the schema version) to aid contract testing. +- Store real payload samples under `docs/events/samples/` (mirrors the schema version) and mirror them into `samples/events/` when you need fixtures in integration repositories. Contact the Platform Events group in Docs Guild if you need help shaping a new event or version strategy. diff --git a/docs/events/samples/attestor.logged@1.sample.json b/docs/events/samples/attestor.logged@1.sample.json new file mode 100644 index 00000000..a95a5651 --- /dev/null +++ b/docs/events/samples/attestor.logged@1.sample.json @@ -0,0 +1,21 @@ +{ + "eventId": "1fdcaa1a-7a27-4154-8bac-cf813d8f4f6f", + "kind": "attestor.logged", + "tenant": "tenant-acme-solar", + "ts": "2025-10-18T15:45:27+00:00", + "payload": { + "artifactSha256": "sha256:8927d9151ad3f44e61a9c647511f9a31af2b4d245e7e031fe5cb4a0e8211c5d9", + "dsseEnvelopeDigest": "sha256:51c1dd189d5f16cfe87e82841d67b4fbc27d6fa9f5a09af0cd7e18945fb4c2a9", + "rekor": { + "index": 563421, + "url": "https://rekor.example/api/v1/log/entries/d6d0f897e7244edc9cb0bb2c68b05c96", + "uuid": "d6d0f897e7244edc9cb0bb2c68b05c96" + }, + "signer": "cosign-stellaops", + "subject": { + "name": "scanner/report/sha256-0f0a8de5c1f93d6716b7249f6f4ea3a8", + "type": "report" + } + }, + "attributes": {} +} diff --git a/docs/events/samples/scanner.report.ready@1.sample.json b/docs/events/samples/scanner.report.ready@1.sample.json new file mode 100644 index 00000000..b3e6226b --- /dev/null +++ b/docs/events/samples/scanner.report.ready@1.sample.json @@ -0,0 +1,70 @@ +{ + "eventId": "6d2d1b77-f3c3-4f70-8a9d-6f2d0c8801ab", + "kind": "scanner.report.ready", + "tenant": "tenant-alpha", + "ts": "2025-10-19T12:34:56+00:00", + "scope": { + "namespace": "acme/edge", + "repo": "api", + "digest": "sha256:feedface", + "labels": {}, + "attributes": {} + }, + "payload": { + "delta": { + "kev": ["CVE-2024-9999"], + "newCritical": 1 + }, + "dsse": { + "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", + "payloadType": "application/vnd.stellaops.report\u002Bjson", + "signatures": [{ + "algorithm": "hs256", + "keyId": "test-key", + "signature": "signature-value" + }] + }, + "generatedAt": "2025-10-19T12:34:56+00:00", + "links": { + "ui": "https://scanner.example/ui/reports/report-abc" + }, + "quietedFindingCount": 0, + "report": { + "generatedAt": "2025-10-19T12:34:56+00:00", + "imageDigest": "sha256:feedface", + "issues": [], + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "reportId": "report-abc", + "summary": { + "blocked": 1, + "ignored": 0, + "quieted": 0, + "total": 1, + "warned": 0 + }, + "verdict": "blocked", + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "score": 47.5, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ] + }, + "reportId": "report-abc", + "summary": { + "blocked": 1, + "ignored": 0, + "quieted": 0, + "total": 1, + "warned": 0 + }, + "verdict": "fail" + }, + "attributes": {} +} diff --git a/docs/events/samples/scanner.scan.completed@1.sample.json 
b/docs/events/samples/scanner.scan.completed@1.sample.json new file mode 100644 index 00000000..b603ff2d --- /dev/null +++ b/docs/events/samples/scanner.scan.completed@1.sample.json @@ -0,0 +1,78 @@ +{ + "eventId": "08a6de24-4a94-4d14-8432-9d14f36f6da3", + "kind": "scanner.scan.completed", + "tenant": "tenant-alpha", + "ts": "2025-10-19T12:34:56+00:00", + "scope": { + "namespace": "acme/edge", + "repo": "api", + "digest": "sha256:feedface", + "labels": {}, + "attributes": {} + }, + "payload": { + "delta": { + "kev": ["CVE-2024-9999"], + "newCritical": 1 + }, + "digest": "sha256:feedface", + "dsse": { + "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", + "payloadType": "application/vnd.stellaops.report\u002Bjson", + "signatures": [{ + "algorithm": "hs256", + "keyId": "test-key", + "signature": "signature-value" + }] + }, + "findings": [ + { + "cve": "CVE-2024-9999", + "id": "finding-1", + "reachability": "runtime", + "severity": "Critical" + } + ], + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "report": { + "generatedAt": "2025-10-19T12:34:56+00:00", + "imageDigest": "sha256:feedface", + "issues": [], + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "reportId": "report-abc", + "summary": { + "blocked": 1, + "ignored": 0, + "quieted": 0, + "total": 1, + "warned": 0 + }, + "verdict": "blocked", + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "score": 47.5, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ] + }, + "reportId": "report-abc", + "summary": { + "blocked": 1, + "ignored": 0, + "quieted": 0, + "total": 1, + "warned": 0 + }, + "verdict": "fail" + }, + "attributes": {} +} diff --git a/docs/events/samples/scheduler.rescan.delta@1.sample.json b/docs/events/samples/scheduler.rescan.delta@1.sample.json new file mode 100644 index 00000000..2326bf21 --- /dev/null +++ b/docs/events/samples/scheduler.rescan.delta@1.sample.json @@ -0,0 +1,20 @@ +{ + "eventId": "51d0ef8d-3a17-4af3-b2d7-4ad3db3d9d2c", + "kind": "scheduler.rescan.delta", + "tenant": "tenant-acme-solar", + "ts": "2025-10-18T15:40:11+00:00", + "payload": { + "impactedDigests": [ + "sha256:0f0a8de5c1f93d6716b7249f6f4ea3a8db451dc3f3c3ff823f53c9cbde5d5e8a", + "sha256:ab921f9679dd8d0832f3710a4df75dbadbd58c2d95f26a4d4efb2fa8c3d9b4ce" + ], + "reason": "policy-change:scoring/v2", + "scheduleId": "rescan-weekly-critical", + "summary": { + "newCritical": 0, + "newHigh": 1, + "total": 4 + } + }, + "attributes": {} +} diff --git a/docs/events/scanner.report.ready@1.json b/docs/events/scanner.report.ready@1.json index b3376c3f..9282abef 100644 --- a/docs/events/scanner.report.ready@1.json +++ b/docs/events/scanner.report.ready@1.json @@ -21,14 +21,28 @@ "type": "object", "required": ["verdict", "delta", "links"], "properties": { + "reportId": {"type": "string"}, + "generatedAt": {"type": "string", "format": "date-time"}, "verdict": {"enum": ["pass", "warn", "fail"]}, + "summary": { + "type": "object", + "properties": { + "total": {"type": "integer", "minimum": 0}, + "blocked": {"type": "integer", "minimum": 0}, 
+ "warned": {"type": "integer", "minimum": 0}, + "ignored": {"type": "integer", "minimum": 0}, + "quieted": {"type": "integer", "minimum": 0} + }, + "additionalProperties": false + }, "delta": { "type": "object", "properties": { "newCritical": {"type": "integer", "minimum": 0}, "newHigh": {"type": "integer", "minimum": 0}, "kev": {"type": "array", "items": {"type": "string"}} - } + }, + "additionalProperties": false }, "links": { "type": "object", @@ -37,6 +51,30 @@ "rekor": {"type": "string", "format": "uri"} }, "additionalProperties": false + }, + "quietedFindingCount": {"type": "integer", "minimum": 0}, + "report": {"type": "object"}, + "dsse": { + "type": "object", + "required": ["payloadType", "payload", "signatures"], + "properties": { + "payloadType": {"type": "string"}, + "payload": {"type": "string"}, + "signatures": { + "type": "array", + "items": { + "type": "object", + "required": ["keyId", "algorithm", "signature"], + "properties": { + "keyId": {"type": "string"}, + "algorithm": {"type": "string"}, + "signature": {"type": "string"} + }, + "additionalProperties": false + } + } + }, + "additionalProperties": false } }, "additionalProperties": true diff --git a/docs/events/scanner.scan.completed@1.json b/docs/events/scanner.scan.completed@1.json new file mode 100644 index 00000000..65d368b1 --- /dev/null +++ b/docs/events/scanner.scan.completed@1.json @@ -0,0 +1,97 @@ +{ + "$id": "https://stella-ops.org/schemas/events/scanner.scan.completed@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["eventId", "kind", "tenant", "ts", "scope", "payload"], + "properties": { + "eventId": {"type": "string", "format": "uuid"}, + "kind": {"const": "scanner.scan.completed"}, + "tenant": {"type": "string"}, + "ts": {"type": "string", "format": "date-time"}, + "scope": { + "type": "object", + "required": ["repo", "digest"], + "properties": { + "namespace": {"type": "string"}, + "repo": {"type": "string"}, + "digest": {"type": "string"} + } + }, + "payload": { + "type": "object", + "required": ["reportId", "digest", "verdict", "summary"], + "properties": { + "reportId": {"type": "string"}, + "digest": {"type": "string"}, + "verdict": {"enum": ["pass", "warn", "fail"]}, + "summary": { + "type": "object", + "properties": { + "total": {"type": "integer", "minimum": 0}, + "blocked": {"type": "integer", "minimum": 0}, + "warned": {"type": "integer", "minimum": 0}, + "ignored": {"type": "integer", "minimum": 0}, + "quieted": {"type": "integer", "minimum": 0} + }, + "additionalProperties": false + }, + "delta": { + "type": "object", + "properties": { + "newCritical": {"type": "integer", "minimum": 0}, + "newHigh": {"type": "integer", "minimum": 0}, + "kev": {"type": "array", "items": {"type": "string"}} + }, + "additionalProperties": false + }, + "policy": { + "type": "object", + "properties": { + "revisionId": {"type": "string"}, + "digest": {"type": "string"} + }, + "additionalProperties": false + }, + "findings": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "severity": {"type": "string"}, + "cve": {"type": "string"}, + "purl": {"type": "string"}, + "reachability": {"type": "string"} + }, + "additionalProperties": true + } + }, + "report": {"type": "object"}, + "dsse": { + "type": "object", + "required": ["payloadType", "payload", "signatures"], + "properties": { + "payloadType": {"type": "string"}, + "payload": {"type": "string"}, + "signatures": { + "type": "array", + "items": { + "type": 
"object", + "required": ["keyId", "algorithm", "signature"], + "properties": { + "keyId": {"type": "string"}, + "algorithm": {"type": "string"}, + "signature": {"type": "string"} + }, + "additionalProperties": false + } + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + } + }, + "additionalProperties": false +} diff --git a/docs/notify/samples/notify-channel@1.sample.json b/docs/notify/samples/notify-channel@1.sample.json new file mode 100644 index 00000000..4f400e5e --- /dev/null +++ b/docs/notify/samples/notify-channel@1.sample.json @@ -0,0 +1,32 @@ +{ + "schemaVersion": "notify.channel@1", + "channelId": "channel-slack-sec-ops", + "tenantId": "tenant-01", + "name": "slack:sec-ops", + "type": "slack", + "displayName": "SecOps Slack", + "description": "Primary incident response channel.", + "config": { + "secretRef": "ref://notify/channels/slack/sec-ops", + "target": "#sec-ops", + "properties": { + "workspace": "stellaops-sec" + }, + "limits": { + "concurrency": 2, + "requestsPerMinute": 60, + "timeout": "PT10S" + } + }, + "enabled": true, + "labels": { + "team": "secops" + }, + "metadata": { + "createdByTask": "NOTIFY-MODELS-15-102" + }, + "createdBy": "ops:amir", + "createdAt": "2025-10-18T17:02:11+00:00", + "updatedBy": "ops:amir", + "updatedAt": "2025-10-18T17:45:00+00:00" +} diff --git a/docs/notify/samples/notify-event@1.sample.json b/docs/notify/samples/notify-event@1.sample.json new file mode 100644 index 00000000..c0a6d708 --- /dev/null +++ b/docs/notify/samples/notify-event@1.sample.json @@ -0,0 +1,34 @@ +{ + "eventId": "8a8d6a2f-9315-49fe-9d52-8fec79ec7aeb", + "kind": "scanner.report.ready", + "version": "1", + "tenant": "tenant-01", + "ts": "2025-10-19T03:58:42+00:00", + "actor": "scanner-webservice", + "scope": { + "namespace": "prod-payment", + "repo": "ghcr.io/acme/api", + "digest": "sha256:79c1f9e5...", + "labels": { + "environment": "production" + }, + "attributes": {} + }, + "payload": { + "delta": { + "kev": [ + "CVE-2025-40123" + ], + "newCritical": 1, + "newHigh": 2 + }, + "links": { + "rekor": "https://rekor.stella.local/api/v1/log/entries/1", + "ui": "https://ui.stella.local/reports/sha256-79c1f9e5" + }, + "verdict": "fail" + }, + "attributes": { + "correlationId": "scan-23a6" + } +} diff --git a/docs/notify/samples/notify-rule@1.sample.json b/docs/notify/samples/notify-rule@1.sample.json new file mode 100644 index 00000000..a9a2708f --- /dev/null +++ b/docs/notify/samples/notify-rule@1.sample.json @@ -0,0 +1,63 @@ +{ + "schemaVersion": "notify.rule@1", + "ruleId": "rule-secops-critical", + "tenantId": "tenant-01", + "name": "Critical digests to SecOps", + "description": "Escalate KEV-tagged findings to on-call feeds.", + "enabled": true, + "match": { + "eventKinds": [ + "scanner.report.ready", + "scheduler.rescan.delta" + ], + "namespaces": [ + "prod-*" + ], + "repositories": [], + "digests": [], + "labels": [], + "componentPurls": [], + "minSeverity": "high", + "verdicts": [], + "kevOnly": true, + "vex": { + "includeAcceptedJustifications": false, + "includeRejectedJustifications": false, + "includeUnknownJustifications": false, + "justificationKinds": [ + "component-remediated", + "not-affected" + ] + } + }, + "actions": [ + { + "actionId": "email-digest", + "channel": "email:soc", + "digest": "hourly", + "template": "digest", + "enabled": true, + "metadata": { + "locale": "en-us" + } + }, + { + "actionId": "slack-oncall", + "channel": "slack:sec-ops", + "template": "concise", + "throttle": "PT5M", + "metadata": {}, + "enabled": 
true + } + ], + "labels": { + "team": "secops" + }, + "metadata": { + "source": "sprint-15" + }, + "createdBy": "ops:zoya", + "createdAt": "2025-10-19T04:12:27+00:00", + "updatedBy": "ops:zoya", + "updatedAt": "2025-10-19T04:45:03+00:00" +} diff --git a/docs/notify/samples/notify-template@1.sample.json b/docs/notify/samples/notify-template@1.sample.json new file mode 100644 index 00000000..9a62a32e --- /dev/null +++ b/docs/notify/samples/notify-template@1.sample.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "notify.template@1", + "templateId": "tmpl-slack-concise", + "tenantId": "tenant-01", + "channelType": "slack", + "key": "concise", + "locale": "en-us", + "body": "{{severity_icon payload.delta.newCritical}} {{summary}}", + "description": "Slack concise message for high severity findings.", + "renderMode": "markdown", + "format": "slack", + "metadata": { + "version": "2025-10-19" + }, + "createdBy": "ops:zoya", + "createdAt": "2025-10-19T05:00:00+00:00", + "updatedBy": "ops:zoya", + "updatedAt": "2025-10-19T05:45:00+00:00" +} diff --git a/docs/notify/schemas/notify-channel@1.json b/docs/notify/schemas/notify-channel@1.json new file mode 100644 index 00000000..a785c27c --- /dev/null +++ b/docs/notify/schemas/notify-channel@1.json @@ -0,0 +1,73 @@ +{ + "$id": "https://stella-ops.org/schemas/notify/notify-channel@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Notify Channel", + "type": "object", + "required": [ + "schemaVersion", + "channelId", + "tenantId", + "name", + "type", + "config", + "enabled", + "createdAt", + "updatedAt" + ], + "properties": { + "schemaVersion": {"type": "string", "const": "notify.channel@1"}, + "channelId": {"type": "string"}, + "tenantId": {"type": "string"}, + "name": {"type": "string"}, + "type": { + "type": "string", + "enum": ["slack", "teams", "email", "webhook", "custom"] + }, + "displayName": {"type": "string"}, + "description": {"type": "string"}, + "config": {"$ref": "#/$defs/channelConfig"}, + "enabled": {"type": "boolean"}, + "labels": {"$ref": "#/$defs/stringMap"}, + "metadata": {"$ref": "#/$defs/stringMap"}, + "createdBy": {"type": "string"}, + "createdAt": {"type": "string", "format": "date-time"}, + "updatedBy": {"type": "string"}, + "updatedAt": {"type": "string", "format": "date-time"} + }, + "additionalProperties": false, + "$defs": { + "channelConfig": { + "type": "object", + "required": ["secretRef"], + "properties": { + "secretRef": {"type": "string"}, + "target": {"type": "string"}, + "endpoint": {"type": "string", "format": "uri"}, + "properties": {"$ref": "#/$defs/stringMap"}, + "limits": {"$ref": "#/$defs/channelLimits"} + }, + "additionalProperties": false + }, + "channelLimits": { + "type": "object", + "properties": { + "concurrency": {"type": "integer", "minimum": 1}, + "requestsPerMinute": {"type": "integer", "minimum": 1}, + "timeout": { + "type": "string", + "pattern": "^P(T.*)?$", + "description": "ISO 8601 duration" + }, + "maxBatchSize": {"type": "integer", "minimum": 1} + }, + "additionalProperties": false + }, + "stringMap": { + "type": "object", + "patternProperties": { + ".*": {"type": "string"} + }, + "additionalProperties": false + } + } +} diff --git a/docs/notify/schemas/notify-event@1.json b/docs/notify/schemas/notify-event@1.json new file mode 100644 index 00000000..0ac40285 --- /dev/null +++ b/docs/notify/schemas/notify-event@1.json @@ -0,0 +1,56 @@ +{ + "$id": "https://stella-ops.org/schemas/notify/notify-event@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": 
"Notify Event Envelope", + "type": "object", + "required": ["eventId", "kind", "tenant", "ts", "payload"], + "properties": { + "eventId": {"type": "string", "format": "uuid"}, + "kind": { + "type": "string", + "description": "Event kind identifier (e.g. scanner.report.ready).", + "enum": [ + "scanner.report.ready", + "scanner.scan.completed", + "scheduler.rescan.delta", + "attestor.logged", + "zastava.admission", + "feedser.export.completed", + "vexer.export.completed" + ] + }, + "version": {"type": "string"}, + "tenant": {"type": "string"}, + "ts": {"type": "string", "format": "date-time"}, + "actor": {"type": "string"}, + "scope": { + "type": "object", + "properties": { + "namespace": {"type": "string"}, + "repo": {"type": "string"}, + "digest": {"type": "string"}, + "component": {"type": "string"}, + "image": {"type": "string"}, + "labels": {"$ref": "#/$defs/stringMap"}, + "attributes": {"$ref": "#/$defs/stringMap"} + }, + "additionalProperties": false + }, + "payload": { + "type": "object", + "description": "Event specific body; see individual schemas for shapes.", + "additionalProperties": true + }, + "attributes": {"$ref": "#/$defs/stringMap"} + }, + "additionalProperties": false, + "$defs": { + "stringMap": { + "type": "object", + "patternProperties": { + ".*": {"type": "string"} + }, + "additionalProperties": false + } + } +} diff --git a/docs/notify/schemas/notify-rule@1.json b/docs/notify/schemas/notify-rule@1.json new file mode 100644 index 00000000..77203836 --- /dev/null +++ b/docs/notify/schemas/notify-rule@1.json @@ -0,0 +1,96 @@ +{ + "$id": "https://stella-ops.org/schemas/notify/notify-rule@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Notify Rule", + "type": "object", + "required": [ + "schemaVersion", + "ruleId", + "tenantId", + "name", + "enabled", + "match", + "actions", + "createdAt", + "updatedAt" + ], + "properties": { + "schemaVersion": {"type": "string", "const": "notify.rule@1"}, + "ruleId": {"type": "string"}, + "tenantId": {"type": "string"}, + "name": {"type": "string"}, + "description": {"type": "string"}, + "enabled": {"type": "boolean"}, + "match": {"$ref": "#/$defs/ruleMatch"}, + "actions": { + "type": "array", + "minItems": 1, + "items": {"$ref": "#/$defs/ruleAction"} + }, + "labels": {"$ref": "#/$defs/stringMap"}, + "metadata": {"$ref": "#/$defs/stringMap"}, + "createdBy": {"type": "string"}, + "createdAt": {"type": "string", "format": "date-time"}, + "updatedBy": {"type": "string"}, + "updatedAt": {"type": "string", "format": "date-time"} + }, + "additionalProperties": false, + "$defs": { + "ruleMatch": { + "type": "object", + "properties": { + "eventKinds": {"$ref": "#/$defs/stringArray"}, + "namespaces": {"$ref": "#/$defs/stringArray"}, + "repositories": {"$ref": "#/$defs/stringArray"}, + "digests": {"$ref": "#/$defs/stringArray"}, + "labels": {"$ref": "#/$defs/stringArray"}, + "componentPurls": {"$ref": "#/$defs/stringArray"}, + "minSeverity": {"type": "string"}, + "verdicts": {"$ref": "#/$defs/stringArray"}, + "kevOnly": {"type": "boolean"}, + "vex": {"$ref": "#/$defs/ruleMatchVex"} + }, + "additionalProperties": false + }, + "ruleMatchVex": { + "type": "object", + "properties": { + "includeAcceptedJustifications": {"type": "boolean"}, + "includeRejectedJustifications": {"type": "boolean"}, + "includeUnknownJustifications": {"type": "boolean"}, + "justificationKinds": {"$ref": "#/$defs/stringArray"} + }, + "additionalProperties": false + }, + "ruleAction": { + "type": "object", + "required": ["actionId", "channel", 
"enabled"], + "properties": { + "actionId": {"type": "string"}, + "channel": {"type": "string"}, + "template": {"type": "string"}, + "digest": {"type": "string"}, + "throttle": { + "type": "string", + "pattern": "^P(T.*)?$", + "description": "ISO 8601 duration" + }, + "locale": {"type": "string"}, + "enabled": {"type": "boolean"}, + "metadata": {"$ref": "#/$defs/stringMap"} + }, + "additionalProperties": false + }, + "stringArray": { + "type": "array", + "items": {"type": "string"} + }, + "stringMap": { + "type": "object", + "patternProperties": { + ".*": {"type": "string"} + }, + "additionalProperties": false + } + } +} diff --git a/docs/notify/schemas/notify-template@1.json b/docs/notify/schemas/notify-template@1.json new file mode 100644 index 00000000..527d382d --- /dev/null +++ b/docs/notify/schemas/notify-template@1.json @@ -0,0 +1,55 @@ +{ + "$id": "https://stella-ops.org/schemas/notify/notify-template@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Notify Template", + "type": "object", + "required": [ + "schemaVersion", + "templateId", + "tenantId", + "channelType", + "key", + "locale", + "body", + "renderMode", + "format", + "createdAt", + "updatedAt" + ], + "properties": { + "schemaVersion": {"type": "string", "const": "notify.template@1"}, + "templateId": {"type": "string"}, + "tenantId": {"type": "string"}, + "channelType": { + "type": "string", + "enum": ["slack", "teams", "email", "webhook", "custom"] + }, + "key": {"type": "string"}, + "locale": {"type": "string"}, + "body": {"type": "string"}, + "description": {"type": "string"}, + "renderMode": { + "type": "string", + "enum": ["markdown", "html", "adaptiveCard", "plainText", "json"] + }, + "format": { + "type": "string", + "enum": ["slack", "teams", "email", "webhook", "json"] + }, + "metadata": {"$ref": "#/$defs/stringMap"}, + "createdBy": {"type": "string"}, + "createdAt": {"type": "string", "format": "date-time"}, + "updatedBy": {"type": "string"}, + "updatedAt": {"type": "string", "format": "date-time"} + }, + "additionalProperties": false, + "$defs": { + "stringMap": { + "type": "object", + "patternProperties": { + ".*": {"type": "string"} + }, + "additionalProperties": false + } + } +} diff --git a/docs/ops/authority-backup-restore.md b/docs/ops/authority-backup-restore.md index bef10c8c..cc08ccc5 100644 --- a/docs/ops/authority-backup-restore.md +++ b/docs/ops/authority-backup-restore.md @@ -86,7 +86,7 @@ - **Air-gapped replication:** replicate archives via the Offline Update Kit transport channels; never attach USB devices without scanning. - **Retention:** maintain 30 daily snapshots + 12 monthly archival copies. Rotate encryption keys annually. - **Key compromise:** if signing keys are suspected compromised, restore from the latest clean backup, rotate via OPS3 (see `ops/authority/key-rotation.sh` and `docs/11_AUTHORITY.md`), and publish a revocation notice. -- **Mongo version:** keep dump/restore images pinned to the deployment version (compose uses `mongo:7`). Restoring across major versions requires a compatibility review. +- **Mongo version:** keep dump/restore images pinned to the deployment version (compose uses `mongo:7`). Driver 3.5.0 requires MongoDB **4.2+**—clusters still on 4.0 must be upgraded before restore, and future driver releases will drop 4.0 entirely. citeturn1open1 ## Verification Checklist - [ ] `/ready` reports all identity providers ready. 
diff --git a/docs/runtime/SCANNER_RUNTIME_READINESS.md b/docs/runtime/SCANNER_RUNTIME_READINESS.md new file mode 100644 index 00000000..470b6eae --- /dev/null +++ b/docs/runtime/SCANNER_RUNTIME_READINESS.md @@ -0,0 +1,81 @@ +# Scanner Runtime Readiness Checklist + +Last updated: 2025-10-19 + +This runbook confirms that Scanner.WebService now surfaces the metadata Runtime Guild consumers requested: quieted finding counts in the signed report events and progress hints on the scan event stream. Follow the checklist before relying on these fields in production automation. + +--- + +## 1. Prerequisites + +- Scanner.WebService release includes **SCANNER-POLICY-09-107** (adds quieted provenance and score inputs to `/reports`). +- Docs repository at commit containing `docs/events/scanner.report.ready@1.json` with `quietedFindingCount`. +- Access to a Scanner environment (staging or sandbox) with an image capable of producing policy verdicts. + +--- + +## 2. Verify quieted finding hints + +1. **Trigger a report** – run a scan that produces at least one quieted finding (policy with `quiet: true`). After the scan completes, call: + ```http + POST /api/v1/reports + Authorization: Bearer + Content-Type: application/json + ``` + Ensure the JSON response contains `report.summary.quieted` and that the DSSE payload mirrors the same count. +2. **Check emitted event** – pull the latest `scanner.report.ready` event (from the queue or sample capture). Confirm the payload includes: + - `quietedFindingCount` equal to the `summary.quieted` value. + - Updated `summary` block with the quieted counter. +3. **Schema validation** – optionally validate the payload against `docs/events/scanner.report.ready@1.json` to guarantee downstream compatibility: + ```bash + npx ajv validate -c ajv-formats \ + -s docs/events/scanner.report.ready@1.json \ + -d + ``` + (Use `npm install --no-save ajv ajv-cli ajv-formats` once per clone.) + +> Snapshot fixtures: see `docs/events/samples/scanner.report.ready@1.sample.json` for a canonical event that already carries `quietedFindingCount`. + +--- + +## 3. Verify progress hints (SSE / JSONL) + +Scanner streams structured progress messages for each scan. The `data` map inside every frame carries the hints Runtime systems consume (force flag, client metadata, additional stage-specific attributes). + +1. **Submit a scan** with custom metadata (for example `pipeline=github`, `build=1234`). +2. **Stream events**: + ```http + GET /api/v1/scans/{scanId}/events?format=jsonl + Authorization: Bearer + Accept: application/x-ndjson + ``` +3. **Confirm payload** – each frame should resemble: + ```json + { + "scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8", + "sequence": 1, + "state": "Pending", + "message": "queued", + "timestamp": "2025-10-19T03:12:45.118Z", + "correlationId": "2f6c17f9b3f548e2a28b9c412f4d63f8:0001", + "data": { + "force": false, + "meta.pipeline": "github" + } + } + ``` + Subsequent frames include additional hints as analyzers progress (e.g., `stage`, `meta.*`, or analyzer-provided keys). Ensure newline-delimited JSON consumers preserve the `data` dictionary when forwarding to runtime dashboards. + +> The same frame structure is documented in `docs/09_API_CLI_REFERENCE.md` §2.6. Copy that snippet into integration tests to keep compatibility. + +--- + +## 4. Sign-off matrix + +| Stakeholder | Checklist | Status | Notes | +|-------------|-----------|--------|-------| +| Runtime Guild | Sections 2 & 3 completed | ☐ | Capture sample payloads for webhook regression tests. 
| +| Notify Guild | `quietedFindingCount` consumed in notifications | ☐ | Update templates after Runtime sign-off. | +| Docs Guild | Checklist published & linked from updates | ☑ | 2025-10-19 | + +Mark the stakeholder boxes as each team completes its validation. Once all checks are green, update `docs/TASKS.md` to reflect task completion. diff --git a/docs/scanner-core-contracts.md b/docs/scanner-core-contracts.md index 7add5dcb..cf51cb30 100644 --- a/docs/scanner-core-contracts.md +++ b/docs/scanner-core-contracts.md @@ -2,18 +2,58 @@ The **Scanner Core** library provides shared contracts, observability helpers, and security utilities consumed by `Scanner.WebService`, `Scanner.Worker`, analyzers, and tooling. These primitives guarantee deterministic identifiers, timestamps, and log context for all scanning flows. -## DTOs +## Canonical DTOs - `ScanJob` & `ScanJobStatus` – canonical job metadata (image reference/digest, tenant, correlation ID, timestamps, failure details). Constructors normalise timestamps to UTC microsecond precision and canonicalise image digests. Round-trips with `JsonSerializerDefaults.Web` using `ScannerJsonOptions`. - `ScanProgressEvent` & `ScanStage`/`ScanProgressEventKind` – stage-level progress surface for queue/stream consumers. Includes deterministic sequence numbers, optional progress percentage, attributes, and attached `ScannerError`. - `ScannerError` & `ScannerErrorCode` – shared error taxonomy spanning queue, analyzers, storage, exporters, and signing. Carries severity, retryability, structured details, and microsecond-precision timestamps. - `ScanJobId` – strongly-typed identifier rendered as `Guid` (lowercase `N` format) with deterministic parsing. +### Canonical JSON samples + +The golden fixtures consumed by `ScannerCoreContractsTests` document the wire shape shared with downstream services. They live under `src/StellaOps.Scanner.Core.Tests/Fixtures/` and a representative extract is shown below. + +```json +{ + "id": "8f4cc9c582454b9d9b4f5ae049631b7d", + "status": "running", + "imageReference": "registry.example.com/stellaops/scanner:1.2.3", + "imageDigest": "sha256:abcdef", + "createdAt": "2025-10-18T14:30:15.123456+00:00", + "updatedAt": "2025-10-18T14:30:20.123456+00:00", + "correlationId": "scan-analyzeoperatingsystem-8f4cc9c582454b9d9b4f5ae049631b7d", + "tenantId": "tenant-a", + "metadata": { + "requestId": "req-1234", + "source": "ci" + }, + "failure": { + "code": "analyzerFailure", + "severity": "error", + "message": "Analyzer failed to parse layer", + "timestamp": "2025-10-18T14:30:15.123456+00:00", + "retryable": false, + "stage": "AnalyzeOperatingSystem", + "component": "os-analyzer", + "details": { + "layerDigest": "sha256:deadbeef", + "attempt": "1" + } + } +} +``` + +Progress events follow the same conventions (`jobId`, `stage`, `kind`, `timestamp`, `attributes`, optional embedded `ScannerError`). The fixtures are verified via deterministic JSON comparison in every CI run. + ## Deterministic helpers - `ScannerIdentifiers` – derives `ScanJobId`, correlation IDs, and SHA-256 hashes from normalised inputs (image reference/digest, tenant, salt). Ensures case-insensitive stability and reproducible metric keys. - `ScannerTimestamps` – trims to microsecond precision, provides ISO-8601 (`yyyy-MM-ddTHH:mm:ss.ffffffZ`) rendering, and parsing helpers. - `ScannerJsonOptions` – standard JSON options (web defaults, camel-case enums) shared by services/tests. 
+- `ScanAnalysisStore` & `ScanAnalysisKeys` – shared in-memory analysis cache flowing through Worker stages. OS analyzers populate + `analysis.os.packages` (raw output), `analysis.os.fragments` (per-analyzer component fragments), and merge into + `analysis.layers.fragments` so emit/diff stages can compose SBOMs and diffs without knowledge of individual analyzer + implementations. ## Observability primitives @@ -22,9 +62,74 @@ The **Scanner Core** library provides shared contracts, observability helpers, a - `ScannerCorrelationContext` & `ScannerCorrelationContextAccessor` – ambient correlation propagation via `AsyncLocal` for log scopes, metrics, and diagnostics. - `ScannerLogExtensions` – `ILogger` scopes for jobs/progress events with automatic correlation context push, minimal allocations, and consistent structured fields. +### Observability overhead validation + +A micro-benchmark executed on 2025-10-19 (4 vCPU runner, .NET 10.0.100-rc.1) measured the average scope cost across 1 000 000 iterations: + +| Scope | Mean (µs/call) | +|-------|----------------| +| `BeginScanScope` (logger attached) | 0.80 | +| `BeginScanScope` (noop logger) | 0.31 | +| `BeginProgressScope` | 0.57 | + +To reproduce, run `dotnet test src/StellaOps.Scanner.Core.Tests -c Release` (see `ScannerLogExtensionsPerformanceTests`) or copy the snippet below into a throwaway `dotnet run` console project and execute it with `dotnet run -c Release`: + +```csharp +using System.Collections.Generic; +using System.Diagnostics; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Observability; +using StellaOps.Scanner.Core.Utility; + +var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => true)); +var logger = factory.CreateLogger("bench"); + +var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "benchmark"); +var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, nameof(ScanStage.AnalyzeOperatingSystem)); +var now = ScannerTimestamps.Normalize(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); + +var job = new ScanJob(jobId, ScanJobStatus.Running, "registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", now, now, correlationId, "tenant-a", new Dictionary(StringComparer.Ordinal) { ["requestId"] = "req-bench" }); +var progress = new ScanProgressEvent(jobId, ScanStage.AnalyzeOperatingSystem, ScanProgressEventKind.Progress, 42, now, 10.5, "benchmark", new Dictionary(StringComparer.Ordinal) { ["sample"] = "true" }); + +Console.WriteLine("Scanner Core Observability micro-bench (1,000,000 iterations)"); +Report("BeginScanScope (logger)", Measure(static ctx => ctx.Logger.BeginScanScope(ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); +Report("BeginScanScope (no logger)", Measure(static ctx => ScannerLogExtensions.BeginScanScope(null, ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); +Report("BeginProgressScope", Measure(static ctx => ctx.Logger.BeginProgressScope(ctx.Progress!, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer", progress))); + +static double Measure(Func factory, ScopeContext context) +{ + const int iterations = 1_000_000; + for (var i = 0; i < 10_000; i++) + { + using var scope = factory(context); + } + + GC.Collect(); + 
GC.WaitForPendingFinalizers(); + GC.Collect(); + + var sw = Stopwatch.StartNew(); + for (var i = 0; i < iterations; i++) + { + using var scope = factory(context); + } + + sw.Stop(); + return sw.Elapsed.TotalSeconds * 1_000_000 / iterations; +} + +static void Report(string label, double microseconds) + => Console.WriteLine($"{label,-28}: {microseconds:F3} µs"); + +readonly record struct ScopeContext(ILogger Logger, ScanJob Job, string? Stage, string? Component, ScanProgressEvent? Progress = null); +``` + +Both guardrails enforce the ≤ 5 µs acceptance target for SP9-G1. + ## Security utilities -- `AuthorityTokenSource` – caches short-lived OpToks per audience+scope using deterministic keys and refresh skew (default 30 s). Integrates with `StellaOps.Auth.Client`. +- `AuthorityTokenSource` – caches short-lived OpToks per audience+scope using deterministic keys and refresh skew (default 30 s). Integrates with `StellaOps.Auth.Client`. - `DpopProofValidator` – validates DPoP proofs (alg allowlist, `htm`/`htu`, nonce, replay window, signature) backed by pluggable `IDpopReplayCache`. Ships with `InMemoryDpopReplayCache` for restart-only deployments. - `RestartOnlyPluginGuard` – enforces restart-time plug-in registration (deterministic path normalisation; throws if new plug-ins added post-seal). - `ServiceCollectionExtensions.AddScannerAuthorityCore` – DI helper wiring Authority client, OpTok source, DPoP validation, replay cache, and plug-in guard. @@ -33,10 +138,10 @@ The **Scanner Core** library provides shared contracts, observability helpers, a Unit tests (`StellaOps.Scanner.Core.Tests`) assert: -- DTO JSON round-trips are stable and deterministic. +- DTO JSON round-trips are stable and deterministic (`ScannerCoreContractsTests` + golden fixtures). - Identifier/hash helpers ignore case and emit lowercase hex. - Timestamp normalisation retains UTC semantics. -- Log scopes push/pop correlation context predictably. +- Log scopes push/pop correlation context predictably while staying under the 5 µs envelope. - Authority token caching honours refresh skew and invalidation. - DPoP validator accepts valid proofs, rejects nonce mismatch/replay, and enforces signature validation. - Restart-only plug-in guard blocks runtime additions post-seal. diff --git a/docs/updates/2025-10-19-docs-guild.md b/docs/updates/2025-10-19-docs-guild.md new file mode 100644 index 00000000..dd8375ce --- /dev/null +++ b/docs/updates/2025-10-19-docs-guild.md @@ -0,0 +1,12 @@ +# Docs Guild Update — 2025-10-19 + +**Subject:** Event envelope reference & canonical samples +**Audience:** Docs Guild, Platform Events, Runtime Guild + +- Extended `docs/events/README.md` with envelope field tables, offline validation commands, and guidance for optional payload fields. +- Added canonical sample payloads under `docs/events/samples/` for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, and `attestor.logged@1`; validated them with `ajv-cli` to match the published schemas. +- Documented the validation loop so air-gapped operators can mirror the CI checks before rolling new event versions. + +Next steps: +- Platform Events to embed the canonical samples into their contract tests. +- Runtime Guild checklist for quieted finding counts & progress hints published in `docs/runtime/SCANNER_RUNTIME_READINESS.md`; gather stakeholder sign-off. 
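+A consumer-side contract test that embeds the committed sample stays small; the sketch below only asserts envelope invariants (the path and assertions are illustrative, and teams should layer their own schema and canonical-ordering checks on top):
+
+```csharp
+using System;
+using System.IO;
+using System.Text.Json;
+using Xunit;
+
+public sealed class ScannerReportReadySampleTests
+{
+    // Relative to the repository root; adjust for the test project's working directory.
+    private const string SamplePath = "docs/events/samples/scanner.report.ready@1.sample.json";
+
+    [Fact]
+    public void CanonicalSample_KeepsEnvelopeInvariants()
+    {
+        using var doc = JsonDocument.Parse(File.ReadAllText(SamplePath));
+        var root = doc.RootElement;
+
+        Assert.True(Guid.TryParse(root.GetProperty("eventId").GetString(), out _));
+        Assert.Equal("scanner.report.ready", root.GetProperty("kind").GetString());
+        Assert.True(DateTimeOffset.TryParse(root.GetProperty("ts").GetString(), out _));
+
+        var payload = root.GetProperty("payload");
+        Assert.Equal(
+            payload.GetProperty("summary").GetProperty("quieted").GetInt32(),
+            payload.GetProperty("quietedFindingCount").GetInt32());
+    }
+}
+```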
diff --git a/docs/updates/2025-10-19-platform-events.md b/docs/updates/2025-10-19-platform-events.md new file mode 100644 index 00000000..91539d43 --- /dev/null +++ b/docs/updates/2025-10-19-platform-events.md @@ -0,0 +1,10 @@ +# Platform Events Update — 2025-10-19 + +**Subject:** Canonical event samples enforced across tests & CI +**Audience:** Platform Events Guild, Notify Guild, Scheduler Guild, Docs Guild + +- Scanner WebService contract tests deserialize `scanner.report.ready@1` and `scanner.scan.completed@1` samples, validating DSSE payloads and canonical ordering via `NotifyCanonicalJsonSerializer`. +- Notify and Scheduler model suites now round-trip the published event samples (including `attestor.logged@1` and `scheduler.rescan.delta@1`) to catch drift in consumer expectations. +- Docs CI (`.gitea/workflows/docs.yml`) validates every sample against its schema with `ajv-cli`, keeping offline bundles and repositories aligned. + +No additional follow-ups — downstream teams can rely on the committed samples for integration coverage. diff --git a/docs/updates/2025-10-19-scanner-policy.md b/docs/updates/2025-10-19-scanner-policy.md new file mode 100644 index 00000000..a4f1e3d0 --- /dev/null +++ b/docs/updates/2025-10-19-scanner-policy.md @@ -0,0 +1,5 @@ +# 2025-10-19 – Scanner ↔ Policy Sync + +- Scanner WebService now emits `scanner.report.ready` and `scanner.scan.completed` via Redis Streams when `scanner.events.enabled=true`; DSSE envelopes are embedded verbatim to keep Notify/UI consumers in sync. +- Config plumbing introduces `scanner:events:*` settings (driver, DSN, stream, publish timeout) with validation and Redis-backed publisher wiring. +- Policy Guild coordination task `POLICY-RUNTIME-17-201` opened to track Zastava runtime feed contract; `SCANNER-RUNTIME-17-401` now depends on it so reachability tags stay aligned once runtime endpoints ship. diff --git a/docs/updates/2025-10-19-scheduler-storage.md b/docs/updates/2025-10-19-scheduler-storage.md new file mode 100644 index 00000000..6bb20369 --- /dev/null +++ b/docs/updates/2025-10-19-scheduler-storage.md @@ -0,0 +1,8 @@ +# Scheduler Storage Update — 2025-10-19 + +**Subject:** Mongo bootstrap + canonical fixtures +**Audience:** Scheduler Storage Guild, Scheduler WebService/Worker teams + +- Added `StellaOps.Scheduler.Storage.Mongo` bootstrap (`AddSchedulerMongoStorage`) with collection/index migrations for schedules, runs (incl. TTL), impact snapshots, audit, and locks. +- Introduced Mongo2Go-backed tests that round-trip the published scheduler samples (`samples/api/scheduler/*.json`) to ensure canonical JSON stays intact. +- `ISchedulerMongoInitializer.EnsureMigrationsAsync` now provides the single entry point for WebService/Worker hosts to apply migrations at startup. diff --git a/etc/authority.yaml.sample b/etc/authority.yaml.sample index d9d887ad..002a189f 100644 --- a/etc/authority.yaml.sample +++ b/etc/authority.yaml.sample @@ -83,6 +83,29 @@ plugins: - password - mfa +# OAuth client registrations issued by Authority. These examples cover Notify WebService +# in dev (notify.dev audience) and production (notify audience). Replace the secret files +# with paths to your sealed credentials before enabling bootstrap mode. 
+clients: + - clientId: "notify-web-dev" + displayName: "Notify WebService (dev)" + grantTypes: [ "client_credentials" ] + audiences: [ "notify.dev" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web-dev.secret" + - clientId: "notify-web" + displayName: "Notify WebService" + grantTypes: [ "client_credentials" ] + audiences: [ "notify" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web.secret" + # CIDR ranges that bypass network-sensitive policies (e.g. on-host cron jobs). # Keep the list tight: localhost is sufficient for most air-gapped installs. bypassNetworks: diff --git a/etc/notify.dev.yaml b/etc/notify.dev.yaml new file mode 100644 index 00000000..2a287196 --- /dev/null +++ b/etc/notify.dev.yaml @@ -0,0 +1,43 @@ +# Notify WebService configuration — development + +storage: + driver: mongo + connectionString: "mongodb://notify-mongo.dev.svc.cluster.local:27017" + database: "stellaops_notify_dev" + commandTimeoutSeconds: 30 + +authority: + enabled: true + issuer: "https://authority.dev.stella-ops.local" + metadataAddress: "https://authority.dev.stella-ops.local/.well-known/openid-configuration" + requireHttpsMetadata: false + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify.dev + readScope: notify.read + adminScope: notify.admin + +api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + +plugins: + baseDirectory: "../" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + +telemetry: + enableRequestLogging: true + minimumLogLevel: Debug + +# Development override: when the Authority service is not available, set +# authority.enabled: false and authority.developmentSigningKey to a 32+ byte secret. 
diff --git a/etc/notify.prod.yaml b/etc/notify.prod.yaml new file mode 100644 index 00000000..ae4026d8 --- /dev/null +++ b/etc/notify.prod.yaml @@ -0,0 +1,40 @@ +# Notify WebService configuration — production + +storage: + driver: mongo + connectionString: "mongodb://notify-mongo.prod.svc.cluster.local:27017" + database: "stellaops_notify" + commandTimeoutSeconds: 60 + +authority: + enabled: true + issuer: "https://authority.stella-ops.org" + metadataAddress: "https://authority.stella-ops.org/.well-known/openid-configuration" + requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + +api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + +plugins: + baseDirectory: "/var/opt/stellaops" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + +telemetry: + enableRequestLogging: true + minimumLogLevel: Warning diff --git a/etc/notify.stage.yaml b/etc/notify.stage.yaml new file mode 100644 index 00000000..8a844d8e --- /dev/null +++ b/etc/notify.stage.yaml @@ -0,0 +1,40 @@ +# Notify WebService configuration — staging + +storage: + driver: mongo + connectionString: "mongodb://notify-mongo.stage.svc.cluster.local:27017" + database: "stellaops_notify_stage" + commandTimeoutSeconds: 45 + +authority: + enabled: true + issuer: "https://authority.stage.stella-ops.org" + metadataAddress: "https://authority.stage.stella-ops.org/.well-known/openid-configuration" + requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + +api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + +plugins: + baseDirectory: "/opt/stellaops" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + +telemetry: + enableRequestLogging: true + minimumLogLevel: Information diff --git a/etc/notify.yaml.sample b/etc/notify.yaml.sample new file mode 100644 index 00000000..c8615ce4 --- /dev/null +++ b/etc/notify.yaml.sample @@ -0,0 +1,46 @@ +# Notify WebService sample configuration + +storage: + # Use "mongo" for production deployments; set to "memory" only for tests/dev harnesses. 
+ driver: mongo + connectionString: "mongodb://localhost:27017" + database: "stellaops_notify" + commandTimeoutSeconds: 30 + +authority: + enabled: true + issuer: "https://authority.stella-ops.local" + metadataAddress: "https://authority.stella-ops.local/.well-known/openid-configuration" + requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + +api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + +plugins: + baseDirectory: "../" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + +telemetry: + enableRequestLogging: true + minimumLogLevel: Information + +# When running in development without Authority, set the following instead: +# authority: +# enabled: false +# developmentSigningKey: "change-me-32-bytes-minimum-signing-key" diff --git a/etc/secrets/.gitkeep b/etc/secrets/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/etc/secrets/notify-web-dev.secret.example b/etc/secrets/notify-web-dev.secret.example new file mode 100644 index 00000000..0cf17bea --- /dev/null +++ b/etc/secrets/notify-web-dev.secret.example @@ -0,0 +1,3 @@ +# Replace this file with the actual client secret for the notify-web-dev Authority client. +# Store the secret with restrictive permissions (chmod 600) and mount/read-only in deployments. +NOTIFY_WEB_DEV_CLIENT_SECRET=change-me-dev diff --git a/etc/secrets/notify-web.secret.example b/etc/secrets/notify-web.secret.example new file mode 100644 index 00000000..e28f9849 --- /dev/null +++ b/etc/secrets/notify-web.secret.example @@ -0,0 +1,3 @@ +# Replace this file with the production client secret for the notify-web Authority client. +# Keep outside source control and mount via secrets manager in Kubernetes/offline kit bundles. +NOTIFY_WEB_CLIENT_SECRET=change-me-prod diff --git a/ops/devops/TASKS.md b/ops/devops/TASKS.md index 1c35e064..9ca62b13 100644 --- a/ops/devops/TASKS.md +++ b/ops/devops/TASKS.md @@ -3,7 +3,11 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | DEVOPS-HELM-09-001 | DONE | DevOps Guild | SCANNER-WEB-09-101 | Create Helm/Compose environment profiles (dev, staging, airgap) with deterministic digests. | Profiles committed under `deploy/`; docs updated; CI smoke deploy passes. | -| DEVOPS-PERF-10-001 | TODO | DevOps Guild | BENCH-SCANNER-10-001 | Add perf smoke job (SBOM compose <5 s target) to CI. | CI job runs sample build verifying <5 s; alerts configured. | +| DEVOPS-SCANNER-09-204 | TODO | DevOps Guild, Scanner WebService Guild | SCANNER-EVENTS-15-201 | Surface `SCANNER__EVENTS__*` environment variables across docker-compose (dev/stage/airgap) and Helm values, defaulting to share the Redis queue DSN. | Compose/Helm configs ship enabled Redis event publishing with documented overrides; lint jobs updated; docs cross-link to new knobs. | +| DEVOPS-SCANNER-09-205 | TODO | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-204 | Add Notify smoke stage that tails the Redis stream and asserts `scanner.report.ready`/`scanner.scan.completed` reach Notify WebService in staging. 
| CI job reads Redis stream during scanner smoke deploy, confirms Notify ingestion via API, alerts on failure. | +| DEVOPS-PERF-10-001 | DONE | DevOps Guild | BENCH-SCANNER-10-001 | Add perf smoke job (SBOM compose <5 s target) to CI. | CI job runs sample build verifying <5 s; alerts configured. | +| DEVOPS-PERF-10-002 | TODO | DevOps Guild | BENCH-SCANNER-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | CI exports JSON for dashboards; Grafana panel wired; Ops on-call doc updated with alert hook. | | DEVOPS-REL-14-001 | TODO | DevOps Guild | SIGNER-API-11-101, ATTESTOR-API-11-201 | Deterministic build/release pipeline with SBOM/provenance, signing, manifest generation. | CI pipeline produces signed images + SBOM/attestations, manifests published with verified hashes, docs updated. | | DEVOPS-REL-17-002 | TODO | DevOps Guild | DEVOPS-REL-14-001, SCANNER-EMIT-17-701 | Persist stripped-debug artifacts organised by GNU build-id and bundle them into release/offline kits with checksum manifests. | CI job writes `.debug` files under `artifacts/debug/.build-id/`, manifest + checksums published, offline kit includes cache, smoke job proves symbol lookup via build-id. | | DEVOPS-MIRROR-08-001 | TODO | DevOps Guild | DEVOPS-REL-14-001 | Stand up managed mirror profiles for `*.stella-ops.org` (Concelier/Excititor), including Helm/Compose overlays, multi-tenant secrets, CDN caching, and sync documentation. | Infra overlays committed, CI smoke deploy hits mirror endpoints, runbooks published for downstream sync and quota management. | +| DEVOPS-SEC-10-301 | DOING | DevOps Guild | — | Address NU1902/NU1903 advisories for `MongoDB.Driver` 2.12.0 and `SharpCompress` 0.23.0 surfaced during scanner cache and worker test runs. | Dependencies bumped to patched releases, audit logs free of NU1902/NU1903 warnings, regression tests green, change log documents upgrade guidance. 
| diff --git a/out/tmp-cdx/Program.cs b/out/tmp-cdx/Program.cs new file mode 100644 index 00000000..edd3a910 --- /dev/null +++ b/out/tmp-cdx/Program.cs @@ -0,0 +1,6 @@ +using System; +using CycloneDX.Models; + +var dependenciesProperty = typeof(Dependency).GetProperty("Dependencies")!; +Console.WriteLine(dependenciesProperty.PropertyType); +Console.WriteLine(dependenciesProperty.PropertyType.GenericTypeArguments[0]); diff --git a/out/tmp-cdx/tmp-cdx.csproj b/out/tmp-cdx/tmp-cdx.csproj new file mode 100644 index 00000000..f39c5c77 --- /dev/null +++ b/out/tmp-cdx/tmp-cdx.csproj @@ -0,0 +1,15 @@ + + + + Exe + net10.0 + tmp_cdx + enable + enable + + + + + + + diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json new file mode 100644 index 00000000..ac1f17c0 --- /dev/null +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json @@ -0,0 +1,22 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.java", + "displayName": "StellaOps Java / Maven Analyzer", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Java.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Java.JavaLanguageAnalyzer" + }, + "capabilities": [ + "language-analyzer", + "java", + "maven" + ], + "metadata": { + "org.stellaops.analyzer.language": "java", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true" + } +} diff --git a/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json new file mode 100644 index 00000000..aad798dd --- /dev/null +++ b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.apk", + "displayName": "StellaOps Alpine APK Analyzer", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Apk.dll" + }, + "capabilities": [ + "os-analyzer", + "apk" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "apk" + } +} diff --git a/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json new file mode 100644 index 00000000..a4126885 --- /dev/null +++ b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.dpkg", + "displayName": "StellaOps Debian dpkg Analyzer", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Dpkg.dll" + }, + "capabilities": [ + "os-analyzer", + "dpkg" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "dpkg" + } +} diff --git a/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json new file mode 100644 index 00000000..ae12ba03 --- /dev/null +++ b/plugins/scanner/analyzers/os/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.rpm", + "displayName": "StellaOps RPM Analyzer", + "version": 
"0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Rpm.dll" + }, + "capabilities": [ + "os-analyzer", + "rpm" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "rpm" + } +} diff --git a/samples/TASKS.md b/samples/TASKS.md index 699b39ce..e24b2131 100644 --- a/samples/TASKS.md +++ b/samples/TASKS.md @@ -2,4 +2,5 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SAMPLES-10-001 | TODO | Samples Guild, Scanner Team | SCANNER-EMIT-10-605 | Curate sample images (nginx, alpine+busybox, distroless+go, .NET AOT, python venv, npm monorepo) with expected SBOM/BOM-Index sidecars. | Samples committed under `samples/`; golden SBOM/BOM-Index files present; documented usage. | +| SAMPLES-10-001 | DONE | Samples Guild, Scanner Team | SCANNER-EMIT-10-605 | Curate sample images (nginx, alpine+busybox, distroless+go, .NET AOT, python venv, npm monorepo) with expected SBOM/BOM-Index sidecars. | Samples committed under `samples/`; golden SBOM/BOM-Index files present; documented usage. | +| SAMPLES-13-004 | TODO | Samples Guild, Policy Guild | POLICY-CORE-09-006, UI-POLICY-13-007 | Add policy preview/report fixtures showing confidence bands and unknown-age tags. | Confidence sample (`samples/policy/policy-preview-unknown.json`) reviewed, documented usage in UI dev guide, ajv validation hook updated. | diff --git a/samples/api/reports/report-sample.dsse.json b/samples/api/reports/report-sample.dsse.json new file mode 100644 index 00000000..18730bcb --- /dev/null +++ b/samples/api/reports/report-sample.dsse.json @@ -0,0 +1,52 @@ +{ + "report": { + "reportId": "report-3def5f362aa475ef14b6", + "imageDigest": "sha256:deadbeef", + "generatedAt": "2025-10-19T08:28:09.3699267+00:00", + "verdict": "blocked", + "policy": { + "revisionId": "rev-1", + "digest": "27d2ec2b34feedc304fc564d252ecee1c8fa14ea581a5ff5c1ea8963313d5c8d" + }, + "summary": { + "total": 1, + "blocked": 1, + "warned": 0, + "ignored": 0, + "quieted": 0 + }, + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "ruleName": "Block Critical", + "ruleAction": "Block", + "score": 40.5, + "configVersion": "1.0", + "inputs": { + "reachabilityWeight": 0.45, + "baseScore": 40.5, + "severityWeight": 90, + "trustWeight": 1, + "trustWeight.NVD": 1, + "reachability.runtime": 0.45 + }, + "quiet": false, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ], + "issues": [] + }, + "dsse": { + "payloadType": "application/vnd.stellaops.report+json", + "payload": 
"eyJyZXBvcnRJZCI6InJlcG9ydC0zZGVmNWYzNjJhYTQ3NWVmMTRiNiIsImltYWdlRGlnZXN0Ijoic2hhMjU2OmRlYWRiZWVmIiwiZ2VuZXJhdGVkQXQiOiIyMDI1LTEwLTE5VDA4OjI4OjA5LjM2OTkyNjcrMDA6MDAiLCJ2ZXJkaWN0IjoiYmxvY2tlZCIsInBvbGljeSI6eyJyZXZpc2lvbklkIjoicmV2LTEiLCJkaWdlc3QiOiIyN2QyZWMyYjM0ZmVlZGMzMDRmYzU2NGQyNTJlY2VlMWM4ZmExNGVhNTgxYTVmZjVjMWVhODk2MzMxM2Q1YzhkIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwicnVsZU5hbWUiOiJCbG9jayBDcml0aWNhbCIsInJ1bGVBY3Rpb24iOiJCbG9jayIsInNjb3JlIjo0MC41LCJjb25maWdWZXJzaW9uIjoiMS4wIiwiaW5wdXRzIjp7InJlYWNoYWJpbGl0eVdlaWdodCI6MC40NSwiYmFzZVNjb3JlIjo0MC41LCJzZXZlcml0eVdlaWdodCI6OTAsInRydXN0V2VpZ2h0IjoxLCJ0cnVzdFdlaWdodC5OVkQiOjEsInJlYWNoYWJpbGl0eS5ydW50aW1lIjowLjQ1fSwicXVpZXQiOmZhbHNlLCJzb3VyY2VUcnVzdCI6Ik5WRCIsInJlYWNoYWJpbGl0eSI6InJ1bnRpbWUifV0sImlzc3VlcyI6W119", + "signatures": [ + { + "keyId": "scanner-report-signing", + "algorithm": "hs256", + "signature": "s3qnWeRsYs+QA/nO84Us8G2xjZcvphc2P7KnOdTVwQs=" + } + ] + } +} diff --git a/samples/api/scheduler/audit.json b/samples/api/scheduler/audit.json new file mode 100644 index 00000000..0c65e7fd --- /dev/null +++ b/samples/api/scheduler/audit.json @@ -0,0 +1,19 @@ +{ + "id": "audit_169754", + "tenantId": "tenant-alpha", + "category": "scheduler", + "action": "pause", + "occurredAt": "2025-10-18T22:10:00+00:00", + "actor": { + "actorId": "user_admin", + "displayName": "Cluster Admin", + "kind": "user" + }, + "scheduleId": "sch_20251018a", + "correlationId": "corr-123", + "metadata": { + "details": "schedule paused", + "reason": "maintenance" + }, + "message": "Paused via API" +} diff --git a/samples/api/scheduler/impact-set.json b/samples/api/scheduler/impact-set.json new file mode 100644 index 00000000..1375878c --- /dev/null +++ b/samples/api/scheduler/impact-set.json @@ -0,0 +1,34 @@ +{ + "schemaVersion": "scheduler.impact-set@1", + "selector": { + "scope": "all-images", + "tenantId": "tenant-alpha", + "namespaces": [], + "repositories": [], + "digests": [], + "includeTags": [], + "labels": [], + "resolvesTags": false + }, + "images": [ + { + "imageDigest": "sha256:f1e2d3", + "registry": "registry.internal", + "repository": "app/api", + "namespaces": [ + "team-a" + ], + "tags": [ + "prod" + ], + "usedByEntrypoint": true, + "labels": { + "env": "prod" + } + } + ], + "usageOnly": true, + "generatedAt": "2025-10-18T22:02:58+00:00", + "total": 412, + "snapshotId": "impact-20251018-1" +} diff --git a/samples/api/scheduler/run.json b/samples/api/scheduler/run.json new file mode 100644 index 00000000..117b6b24 --- /dev/null +++ b/samples/api/scheduler/run.json @@ -0,0 +1,50 @@ +{ + "schemaVersion": "scheduler.run@1", + "id": "run_20251018_0001", + "tenantId": "tenant-alpha", + "scheduleId": "sch_20251018a", + "trigger": "feedser", + "state": "running", + "stats": { + "candidates": 1280, + "deduped": 910, + "queued": 624, + "completed": 310, + "deltas": 42, + "newCriticals": 7, + "newHigh": 11, + "newMedium": 18, + "newLow": 6 + }, + "reason": { + "feedserExportId": "exp-20251018-03" + }, + "createdAt": "2025-10-18T22:03:14+00:00", + "startedAt": "2025-10-18T22:03:20+00:00", + "deltas": [ + { + "imageDigest": "sha256:a1b2c3", + "newFindings": 3, + "newCriticals": 1, + "newHigh": 1, + "newMedium": 1, + "newLow": 0, + "kevHits": [ + "CVE-2025-0002" + ], + "topFindings": [ + { + "purl": "pkg:rpm/openssl@3.0.12-5.el9", + "vulnerabilityId": "CVE-2025-0002", + "severity": "critical", + "link": 
"https://ui.internal/scans/sha256:a1b2c3" + } + ], + "attestation": { + "uuid": "rekor-314", + "verified": true + }, + "detectedAt": "2025-10-18T22:03:21+00:00" + } + ] +} diff --git a/samples/api/scheduler/schedule.json b/samples/api/scheduler/schedule.json new file mode 100644 index 00000000..8746586e --- /dev/null +++ b/samples/api/scheduler/schedule.json @@ -0,0 +1,57 @@ +{ + "schemaVersion": "scheduler.schedule@1", + "id": "sch_20251018a", + "tenantId": "tenant-alpha", + "name": "Nightly Prod", + "enabled": true, + "cronExpression": "0 2 * * *", + "timezone": "UTC", + "mode": "analysis-only", + "selection": { + "scope": "by-namespace", + "tenantId": "tenant-alpha", + "namespaces": [ + "team-a", + "team-b" + ], + "repositories": [ + "app/service-api" + ], + "digests": [], + "includeTags": [ + "canary", + "prod" + ], + "labels": [ + { + "key": "env", + "values": [ + "prod", + "staging" + ] + } + ], + "resolvesTags": true + }, + "onlyIf": { + "lastReportOlderThanDays": 7, + "policyRevision": "policy@42" + }, + "notify": { + "onNewFindings": true, + "minSeverity": "high", + "includeKev": true + }, + "limits": { + "maxJobs": 1000, + "ratePerSecond": 25, + "parallelism": 4 + }, + "subscribers": [ + "notify.ops" + ], + "createdAt": "2025-10-18T22:00:00+00:00", + "createdBy": "svc_scheduler", + "updatedAt": "2025-10-18T22:00:00+00:00", + "updatedBy": "svc_scheduler" +} diff --git a/samples/policy/policy-preview-unknown.json b/samples/policy/policy-preview-unknown.json new file mode 100644 index 00000000..ed00dac1 --- /dev/null +++ b/samples/policy/policy-preview-unknown.json @@ -0,0 +1,98 @@ +{ + "previewRequest": { + "imageDigest": "sha256:7dbe0c9a5d4f1c8184007e9d94dbe55928f8a2db5ab9c1c2d4a2f7bbcdfe1234", + "findings": [ + { + "id": "library:pkg/openssl@1.1.1w", + "severity": "Unknown", + "source": "NVD", + "tags": [ + "trust:vendor", + "reachability:unknown", + "unknown-age-days:5" + ] + }, + { + "id": "library:pkg/zlib@1.3.1", + "severity": "High", + "source": "NVD", + "tags": [ + "state:unknown", + "reachability:runtime", + "unknown-since:2025-10-10T00:00:00Z", + "observed-at:2025-10-19T12:00:00Z" + ] + } + ] + }, + "previewResponse": { + "success": true, + "policyDigest": "8a0f72f8dc5c51c46991db3bba34e9b3c0c8e944a7a6d0a9c29a9aa6b8439876", + "revisionId": "rev-42", + "changed": 2, + "diffs": [ + { + "findingId": "library:pkg/openssl@1.1.1w", + "baseline": { + "findingId": "library:pkg/openssl@1.1.1w", + "status": "Pass", + "score": 0, + "configVersion": "1.0" + }, + "projected": { + "findingId": "library:pkg/openssl@1.1.1w", + "status": "Blocked", + "ruleName": "Block vendor unknowns", + "ruleAction": "block", + "score": 19.5, + "configVersion": "1.0", + "inputs": { + "severityWeight": 50, + "trustWeight": 0.65, + "reachabilityWeight": 0.6, + "baseScore": 19.5, + "trustWeight.vendor": 0.65, + "reachability.unknown": 0.6, + "unknownConfidence": 0.55, + "unknownAgeDays": 5 + }, + "unknownConfidence": 0.55, + "confidenceBand": "medium", + "unknownAgeDays": 5 + }, + "changed": true + }, + { + "findingId": "library:pkg/zlib@1.3.1", + "baseline": { + "findingId": "library:pkg/zlib@1.3.1", + "status": "Pass", + "score": 0, + "configVersion": "1.0" + }, + "projected": { + "findingId": "library:pkg/zlib@1.3.1", + "status": "Warned", + "ruleName": "Runtime mitigation required", + "ruleAction": "warn", + "score": 33.75, + "configVersion": "1.0", + "inputs": { + "severityWeight": 75, + "trustWeight": 1, + "reachabilityWeight": 0.45, + "baseScore": 33.75, + "reachability.runtime": 0.45, + 
"warnPenalty": 15, + "unknownConfidence": 0.35, + "unknownAgeDays": 9 + }, + "unknownConfidence": 0.35, + "confidenceBand": "medium", + "unknownAgeDays": 9 + }, + "changed": true + } + ] + } +} diff --git a/samples/runtime/README.md b/samples/runtime/README.md new file mode 100644 index 00000000..0f33d711 --- /dev/null +++ b/samples/runtime/README.md @@ -0,0 +1,6 @@ +# Runtime Fixtures + +Supporting filesystem snippets consumed by analyzer microbenchmarks and integration tests. They are intentionally lightweight yet deterministic so they can be committed to the repository without bloating history. + +- `npm-monorepo/` – trimmed `node_modules/` tree for workspace-style Node.js projects. +- `python-venv/` – selected `site-packages/` entries highlighting `*.dist-info` metadata. diff --git a/samples/runtime/npm-monorepo/README.md b/samples/runtime/npm-monorepo/README.md new file mode 100644 index 00000000..9abc0b49 --- /dev/null +++ b/samples/runtime/npm-monorepo/README.md @@ -0,0 +1,9 @@ +# NPM Monorepo Fixture + +This fixture represents a trimmed monorepo layout used by the analyzer microbench. It contains four packages under `node_modules/` with realistic metadata and dependency edges. + +- `@stella/core` depends on `lodash`. +- `@stella/web` depends on `@stella/core` and `rxjs`. +- Third-party packages expose standard `name`, `version`, and `license` fields. + +The files are intentionally small so that the bench harness focuses on directory traversal and metadata parsing overhead. diff --git a/samples/runtime/npm-monorepo/node_modules/@stella/core/package.json b/samples/runtime/npm-monorepo/node_modules/@stella/core/package.json new file mode 100644 index 00000000..41e3d1f4 --- /dev/null +++ b/samples/runtime/npm-monorepo/node_modules/@stella/core/package.json @@ -0,0 +1,9 @@ +{ + "name": "@stella/core", + "version": "2.0.0", + "description": "Core services shared by the sample monorepo.", + "dependencies": { + "lodash": "4.17.21" + }, + "license": "Apache-2.0" +} diff --git a/samples/runtime/npm-monorepo/node_modules/@stella/web/package.json b/samples/runtime/npm-monorepo/node_modules/@stella/web/package.json new file mode 100644 index 00000000..1223a1e8 --- /dev/null +++ b/samples/runtime/npm-monorepo/node_modules/@stella/web/package.json @@ -0,0 +1,10 @@ +{ + "name": "@stella/web", + "version": "1.5.3", + "description": "Web layer in the sample monorepo.", + "dependencies": { + "@stella/core": "2.0.0", + "rxjs": "7.8.1" + }, + "license": "MIT" +} diff --git a/samples/runtime/npm-monorepo/node_modules/lodash/package.json b/samples/runtime/npm-monorepo/node_modules/lodash/package.json new file mode 100644 index 00000000..8b8447d6 --- /dev/null +++ b/samples/runtime/npm-monorepo/node_modules/lodash/package.json @@ -0,0 +1,6 @@ +{ + "name": "lodash", + "version": "4.17.21", + "description": "Lodash modular utilities.", + "license": "MIT" +} diff --git a/samples/runtime/npm-monorepo/node_modules/rxjs/package.json b/samples/runtime/npm-monorepo/node_modules/rxjs/package.json new file mode 100644 index 00000000..f910d93c --- /dev/null +++ b/samples/runtime/npm-monorepo/node_modules/rxjs/package.json @@ -0,0 +1,6 @@ +{ + "name": "rxjs", + "version": "7.8.1", + "description": "Reactive Extensions for modern JavaScript.", + "license": "Apache-2.0" +} diff --git a/samples/runtime/python-venv/README.md b/samples/runtime/python-venv/README.md new file mode 100644 index 00000000..4df4b6eb --- /dev/null +++ b/samples/runtime/python-venv/README.md @@ -0,0 +1,5 @@ +# Python Virtual Environment Fixture + 
+The fixture mimics a trimmed `site-packages/` layout with three common dependencies (`requests`, `urllib3`, `certifi`). Each package exposes a `*.dist-info/METADATA` file so the analyzer bench can validate parsing performance and header extraction. + +Files intentionally omit wheels and bytecode to keep the tree compact while still realistic. diff --git a/samples/runtime/python-venv/lib/python3.11/site-packages/certifi-2024.6.2.dist-info/METADATA b/samples/runtime/python-venv/lib/python3.11/site-packages/certifi-2024.6.2.dist-info/METADATA new file mode 100644 index 00000000..3561c0c4 --- /dev/null +++ b/samples/runtime/python-venv/lib/python3.11/site-packages/certifi-2024.6.2.dist-info/METADATA @@ -0,0 +1,5 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.6.2 +Summary: Mozilla SSL Certificates. +License: MPL-2.0 diff --git a/samples/runtime/python-venv/lib/python3.11/site-packages/requests-2.32.0.dist-info/METADATA b/samples/runtime/python-venv/lib/python3.11/site-packages/requests-2.32.0.dist-info/METADATA new file mode 100644 index 00000000..c6339adf --- /dev/null +++ b/samples/runtime/python-venv/lib/python3.11/site-packages/requests-2.32.0.dist-info/METADATA @@ -0,0 +1,7 @@ +Metadata-Version: 2.1 +Name: requests +Version: 2.32.0 +Summary: Python HTTP for Humans. +License: Apache-2.0 +Requires-Dist: urllib3 (>=1.21.1,<3) +Requires-Dist: certifi (>=2017.4.17) diff --git a/samples/runtime/python-venv/lib/python3.11/site-packages/urllib3-2.2.1.dist-info/METADATA b/samples/runtime/python-venv/lib/python3.11/site-packages/urllib3-2.2.1.dist-info/METADATA new file mode 100644 index 00000000..f4cb7f98 --- /dev/null +++ b/samples/runtime/python-venv/lib/python3.11/site-packages/urllib3-2.2.1.dist-info/METADATA @@ -0,0 +1,5 @@ +Metadata-Version: 2.1 +Name: urllib3 +Version: 2.2.1 +Summary: HTTP library with thread-safe connection pooling. +License: MIT diff --git a/samples/scanner/README.md b/samples/scanner/README.md new file mode 100644 index 00000000..ec078a7c --- /dev/null +++ b/samples/scanner/README.md @@ -0,0 +1,12 @@ +# Scanner Samples + +Curated SBOM and BOM Index fixtures covering representative container types referenced throughout Sprint 10. Each sample folder under `images/` corresponds to a container profile, while `../runtime` holds trimmed filesystem fixtures used by analyzer and perf tests. + +| Sample | Highlights | +| ------ | ---------- | +| `nginx` | Alpine packages with mixed inventory/runtime coverage. | +| `alpine-busybox` | Minimal BusyBox rootfs with musl runtime linkage. | +| `distroless-go` | Go binary with Distroless base and Go build-info evidence. | +| `dotnet-aot` | Ahead-of-time compiled .NET worker exposing NuGet dependencies. | +| `python-venv` | Python virtualenv with `*.dist-info` evidence. | +| `npm-monorepo` | Node workspace packages resolved via `package.json`. | diff --git a/samples/scanner/images/alpine-busybox/README.md b/samples/scanner/images/alpine-busybox/README.md new file mode 100644 index 00000000..e6926236 --- /dev/null +++ b/samples/scanner/images/alpine-busybox/README.md @@ -0,0 +1,3 @@ +# Alpine + BusyBox Sample + +Fixtures showcase the tiny Alpine image that powers many minimal containers. BusyBox and musl appear in usage because they back the entrypoint shell, while alpine-baselayout remains inventory-only. 
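To make the fixture layout concrete, the sketch below reads one of the `bom-index.json` files and lists the components whose `usage` includes `runtime` (the same components that appear in the corresponding `usage.cdx.json`, while the rest stay inventory-only). The record shapes are inferred from the sample JSON for illustration; they are not the scanner's own model types.

```csharp
// Illustrative reader for the `stellaops/bom-index@1` fixtures in this directory.
using System.Linq;
using System.Text.Json;

var json = await File.ReadAllTextAsync("samples/scanner/images/alpine-busybox/bom-index.json");
var index = JsonSerializer.Deserialize<BomIndex>(json, new JsonSerializerOptions
{
    PropertyNameCaseInsensitive = true,
})!;

Console.WriteLine($"{index.Schema} generated {index.GeneratedAt:u}");

// Components flagged with "runtime" usage back the usage.cdx.json view.
foreach (var component in index.Components.Where(c => c.Usage.Contains("runtime")))
{
    Console.WriteLine($"{component.Purl} <- {component.Evidence?.Path} (layer {component.LayerDigest})");
}

// Record shapes derived from the sample JSON, not from production model classes.
sealed record BomIndex(string Schema, DateTimeOffset GeneratedAt, IReadOnlyList<BomIndexComponent> Components);
sealed record BomIndexComponent(string Purl, string LayerDigest, IReadOnlyList<string> Usage, IReadOnlyList<string> Licenses, BomIndexEvidence? Evidence);
sealed record BomIndexEvidence(string Kind, string Path);
```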
diff --git a/samples/scanner/images/alpine-busybox/bom-index.json b/samples/scanner/images/alpine-busybox/bom-index.json new file mode 100644 index 00000000..f1d539fa --- /dev/null +++ b/samples/scanner/images/alpine-busybox/bom-index.json @@ -0,0 +1,42 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "docker.io/library/alpine", + "digest": "sha256:9a214327ec7df5bc8f1d3f12171873be7d778fdbf473d6f9a63d5de6c6bfb2d3", + "tag": "3.20" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:apk/alpine/busybox@1.36.1-r2?arch=x86_64", + "layerDigest": "sha256:5555555555555555555555555555555555555555555555555555555555555555", + "usage": ["inventory", "runtime"], + "licenses": ["GPL-2.0-only"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + }, + { + "purl": "pkg:apk/alpine/musl@1.2.5-r0?arch=x86_64", + "layerDigest": "sha256:6666666666666666666666666666666666666666666666666666666666666666", + "usage": ["inventory", "runtime"], + "licenses": ["MIT"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + }, + { + "purl": "pkg:apk/alpine/alpine-baselayout@3.4.3-r0?arch=x86_64", + "layerDigest": "sha256:7777777777777777777777777777777777777777777777777777777777777777", + "usage": ["inventory"], + "licenses": ["GPL-2.0-only"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + } + ] +} diff --git a/samples/scanner/images/alpine-busybox/inventory.cdx.json b/samples/scanner/images/alpine-busybox/inventory.cdx.json new file mode 100644 index 00000000..5316fde7 --- /dev/null +++ b/samples/scanner/images/alpine-busybox/inventory.cdx.json @@ -0,0 +1,34 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "alpine-busybox", + "version": "3.20", + "bomRef": "pkg:docker/library/alpine@sha256:9a214327ec7df5bc8f1d3f12171873be7d778fdbf473d6f9a63d5de6c6bfb2d3" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:apk/alpine/busybox@1.36.1-r2?arch=x86_64", + "name": "busybox", + "version": "1.36.1-r2" + }, + { + "type": "library", + "bomRef": "pkg:apk/alpine/musl@1.2.5-r0?arch=x86_64", + "name": "musl", + "version": "1.2.5-r0" + }, + { + "type": "application", + "bomRef": "pkg:apk/alpine/alpine-baselayout@3.4.3-r0?arch=x86_64", + "name": "alpine-baselayout", + "version": "3.4.3-r0" + } + ] +} diff --git a/samples/scanner/images/alpine-busybox/usage.cdx.json b/samples/scanner/images/alpine-busybox/usage.cdx.json new file mode 100644 index 00000000..97cafec6 --- /dev/null +++ b/samples/scanner/images/alpine-busybox/usage.cdx.json @@ -0,0 +1,28 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "alpine-busybox", + "version": "3.20", + "bomRef": "pkg:docker/library/alpine@sha256:9a214327ec7df5bc8f1d3f12171873be7d778fdbf473d6f9a63d5de6c6bfb2d3" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:apk/alpine/busybox@1.36.1-r2?arch=x86_64", + "name": "busybox", + "version": "1.36.1-r2" + }, + { + "type": "library", + "bomRef": "pkg:apk/alpine/musl@1.2.5-r0?arch=x86_64", + "name": "musl", + "version": "1.2.5-r0" + } + ] +} diff --git a/samples/scanner/images/distroless-go/README.md b/samples/scanner/images/distroless-go/README.md new file mode 
100644 index 00000000..f6ae05b3 --- /dev/null +++ b/samples/scanner/images/distroless-go/README.md @@ -0,0 +1,3 @@ +# Distroless + Go Sample + +Demonstrates a Go binary shipped on top of Distroless. Only the compiled service appears in the usage SBOM, while the Go standard library remains inventory-only and still tracked in the BOM Index. diff --git a/samples/scanner/images/distroless-go/bom-index.json b/samples/scanner/images/distroless-go/bom-index.json new file mode 100644 index 00000000..36056e9a --- /dev/null +++ b/samples/scanner/images/distroless-go/bom-index.json @@ -0,0 +1,32 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "gcr.io/distroless/base", + "digest": "sha256:0dd2f0f15c9f8abfba6a0ce0d7d6a24e2e1071c977733f6e77cbe51b87f15ad9", + "tag": "nonroot" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:golang/github.com/stellaops/sample-service@v1.4.0", + "layerDigest": "sha256:8888888888888888888888888888888888888888888888888888888888888888", + "usage": ["inventory", "runtime"], + "licenses": ["Apache-2.0"], + "evidence": { + "kind": "go-buildinfo", + "path": "/workspace/service" + } + }, + { + "purl": "pkg:golang/std@go1.22.5", + "layerDigest": "sha256:9999999999999999999999999999999999999999999999999999999999999999", + "usage": ["inventory"], + "licenses": ["BSD-3-Clause"], + "evidence": { + "kind": "go-buildinfo", + "path": "/workspace/service" + } + } + ] +} diff --git a/samples/scanner/images/distroless-go/inventory.cdx.json b/samples/scanner/images/distroless-go/inventory.cdx.json new file mode 100644 index 00000000..faa931e4 --- /dev/null +++ b/samples/scanner/images/distroless-go/inventory.cdx.json @@ -0,0 +1,34 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "distroless-go", + "version": "2025.10.0", + "bomRef": "pkg:docker/gcr.io/distroless/base@sha256:0dd2f0f15c9f8abfba6a0ce0d7d6a24e2e1071c977733f6e77cbe51b87f15ad9" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:golang/github.com/stellaops/sample-service@v1.4.0", + "name": "github.com/stellaops/sample-service", + "version": "v1.4.0", + "properties": [ + { + "name": "stellaops.entrypoint", + "value": "/workspace/service" + } + ] + }, + { + "type": "library", + "bomRef": "pkg:golang/std@go1.22.5", + "name": "golang-stdlib", + "version": "go1.22.5" + } + ] +} diff --git a/samples/scanner/images/distroless-go/usage.cdx.json b/samples/scanner/images/distroless-go/usage.cdx.json new file mode 100644 index 00000000..86ba1e4a --- /dev/null +++ b/samples/scanner/images/distroless-go/usage.cdx.json @@ -0,0 +1,22 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "distroless-go", + "version": "2025.10.0", + "bomRef": "pkg:docker/gcr.io/distroless/base@sha256:0dd2f0f15c9f8abfba6a0ce0d7d6a24e2e1071c977733f6e77cbe51b87f15ad9" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:golang/github.com/stellaops/sample-service@v1.4.0", + "name": "github.com/stellaops/sample-service", + "version": "v1.4.0" + } + ] +} diff --git a/samples/scanner/images/dotnet-aot/README.md b/samples/scanner/images/dotnet-aot/README.md new file mode 100644 index 00000000..74ebe590 --- /dev/null +++ b/samples/scanner/images/dotnet-aot/README.md @@ -0,0 
+1,3 @@ +# .NET AOT Sample + +An ahead-of-time compiled worker showcasing how native .NET deployments appear in SBOM outputs. The BOM Index ties NuGet packages back to the generated `deps.json` evidence. diff --git a/samples/scanner/images/dotnet-aot/bom-index.json b/samples/scanner/images/dotnet-aot/bom-index.json new file mode 100644 index 00000000..cec86869 --- /dev/null +++ b/samples/scanner/images/dotnet-aot/bom-index.json @@ -0,0 +1,52 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "registry.stella-ops.org/sample/dotnet-aot", + "digest": "sha256:5be6f3ad9d2b1e4fcb4c6f40d9c664fca97f5b4d9ccb8e1d8f970e8b2bce1123", + "tag": "1.0.0" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:nuget/Sample.Worker@1.0.0", + "layerDigest": "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "usage": ["inventory", "runtime"], + "licenses": ["MIT"], + "evidence": { + "kind": "deps-json", + "path": "/app/Sample.Worker.deps.json" + } + }, + { + "purl": "pkg:nuget/Microsoft.Extensions.Hosting@8.0.0", + "layerDigest": "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "usage": ["inventory"], + "licenses": ["MIT"], + "evidence": { + "kind": "deps-json", + "path": "/app/Sample.Worker.deps.json" + } + }, + { + "purl": "pkg:nuget/System.Text.Json@8.0.0", + "layerDigest": "sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", + "usage": ["inventory", "runtime"], + "licenses": ["MIT"], + "evidence": { + "kind": "deps-json", + "path": "/app/Sample.Worker.deps.json" + } + }, + { + "purl": "pkg:nuget/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross@8.0.0", + "layerDigest": "sha256:dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd", + "usage": ["inventory"], + "licenses": ["MIT"], + "evidence": { + "kind": "deps-json", + "path": "/app/Sample.Worker.deps.json" + } + } + ] +} diff --git a/samples/scanner/images/dotnet-aot/inventory.cdx.json b/samples/scanner/images/dotnet-aot/inventory.cdx.json new file mode 100644 index 00000000..fb7dd614 --- /dev/null +++ b/samples/scanner/images/dotnet-aot/inventory.cdx.json @@ -0,0 +1,40 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "dotnet-aot", + "version": "8.0.0", + "bomRef": "pkg:docker/stellaops/sample-dotnet-aot@sha256:5be6f3ad9d2b1e4fcb4c6f40d9c664fca97f5b4d9ccb8e1d8f970e8b2bce1123" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:nuget/Sample.Worker@1.0.0", + "name": "Sample.Worker", + "version": "1.0.0" + }, + { + "type": "library", + "bomRef": "pkg:nuget/Microsoft.Extensions.Hosting@8.0.0", + "name": "Microsoft.Extensions.Hosting", + "version": "8.0.0" + }, + { + "type": "library", + "bomRef": "pkg:nuget/System.Text.Json@8.0.0", + "name": "System.Text.Json", + "version": "8.0.0" + }, + { + "type": "library", + "bomRef": "pkg:nuget/Microsoft.NETCore.App.Runtime.AOT.win-x64.Cross@8.0.0", + "name": "NativeAotRuntime", + "version": "8.0.0" + } + ] +} diff --git a/samples/scanner/images/dotnet-aot/usage.cdx.json b/samples/scanner/images/dotnet-aot/usage.cdx.json new file mode 100644 index 00000000..76cd71f1 --- /dev/null +++ b/samples/scanner/images/dotnet-aot/usage.cdx.json @@ -0,0 +1,28 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { 
+ "type": "container", + "name": "dotnet-aot", + "version": "8.0.0", + "bomRef": "pkg:docker/stellaops/sample-dotnet-aot@sha256:5be6f3ad9d2b1e4fcb4c6f40d9c664fca97f5b4d9ccb8e1d8f970e8b2bce1123" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:nuget/Sample.Worker@1.0.0", + "name": "Sample.Worker", + "version": "1.0.0" + }, + { + "type": "library", + "bomRef": "pkg:nuget/System.Text.Json@8.0.0", + "name": "System.Text.Json", + "version": "8.0.0" + } + ] +} diff --git a/samples/scanner/images/nginx/README.md b/samples/scanner/images/nginx/README.md new file mode 100644 index 00000000..40700a15 --- /dev/null +++ b/samples/scanner/images/nginx/README.md @@ -0,0 +1,3 @@ +# Nginx Inventory Sample + +CycloneDX inventory, usage, and BOM Index fixtures for the `docker.io/library/nginx:1.25.4` image. The SBOMs capture base Alpine packages and the BOM Index links each component to the layer that introduced it. diff --git a/samples/scanner/images/nginx/bom-index.json b/samples/scanner/images/nginx/bom-index.json new file mode 100644 index 00000000..e6447d0d --- /dev/null +++ b/samples/scanner/images/nginx/bom-index.json @@ -0,0 +1,52 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "docker.io/library/nginx", + "digest": "sha256:8f47d7c6b538c0d9533b78913cba3d5e671e7c4b4e7c6a2bb9a1a1c4d4f8e123", + "tag": "1.25.4" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:apk/alpine/nginx@1.25.4-r1?arch=x86_64", + "layerDigest": "sha256:1111111111111111111111111111111111111111111111111111111111111111", + "usage": ["inventory", "runtime"], + "licenses": ["BSD-2-Clause"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + }, + { + "purl": "pkg:apk/alpine/openssl@3.2.2-r0?arch=x86_64", + "layerDigest": "sha256:2222222222222222222222222222222222222222222222222222222222222222", + "usage": ["inventory", "runtime"], + "licenses": ["Apache-2.0"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + }, + { + "purl": "pkg:apk/alpine/pcre2@10.42-r1?arch=x86_64", + "layerDigest": "sha256:3333333333333333333333333333333333333333333333333333333333333333", + "usage": ["inventory"], + "licenses": ["BSD-3-Clause"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + }, + { + "purl": "pkg:apk/alpine/zlib@1.3-r2?arch=x86_64", + "layerDigest": "sha256:4444444444444444444444444444444444444444444444444444444444444444", + "usage": ["inventory"], + "licenses": ["Zlib"], + "evidence": { + "kind": "apk-database", + "path": "/lib/apk/db/installed" + } + } + ] +} diff --git a/samples/scanner/images/nginx/inventory.cdx.json b/samples/scanner/images/nginx/inventory.cdx.json new file mode 100644 index 00000000..027b3825 --- /dev/null +++ b/samples/scanner/images/nginx/inventory.cdx.json @@ -0,0 +1,53 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "nginx", + "version": "1.25.4", + "bomRef": "pkg:docker/library/nginx@sha256:8f47d7c6b538c0d9533b78913cba3d5e671e7c4b4e7c6a2bb9a1a1c4d4f8e123" + }, + "tools": [ + { + "name": "StellaOps Scanner", + "version": "10.0.0-preview1" + } + ] + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:apk/alpine/nginx@1.25.4-r1?arch=x86_64", + "name": "nginx", + "version": "1.25.4-r1", + "licenses": [ + { + "license": { + "id": "2BSD" + } + } + ] + }, + 
{ + "type": "library", + "bomRef": "pkg:apk/alpine/openssl@3.2.2-r0?arch=x86_64", + "name": "openssl", + "version": "3.2.2-r0" + }, + { + "type": "library", + "bomRef": "pkg:apk/alpine/pcre2@10.42-r1?arch=x86_64", + "name": "pcre2", + "version": "10.42-r1" + }, + { + "type": "library", + "bomRef": "pkg:apk/alpine/zlib@1.3-r2?arch=x86_64", + "name": "zlib", + "version": "1.3-r2" + } + ] +} diff --git a/samples/scanner/images/nginx/usage.cdx.json b/samples/scanner/images/nginx/usage.cdx.json new file mode 100644 index 00000000..00e5a6fa --- /dev/null +++ b/samples/scanner/images/nginx/usage.cdx.json @@ -0,0 +1,28 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "nginx", + "version": "1.25.4", + "bomRef": "pkg:docker/library/nginx@sha256:8f47d7c6b538c0d9533b78913cba3d5e671e7c4b4e7c6a2bb9a1a1c4d4f8e123" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:apk/alpine/nginx@1.25.4-r1?arch=x86_64", + "name": "nginx", + "version": "1.25.4-r1" + }, + { + "type": "library", + "bomRef": "pkg:apk/alpine/openssl@3.2.2-r0?arch=x86_64", + "name": "openssl", + "version": "3.2.2-r0" + } + ] +} diff --git a/samples/scanner/images/npm-monorepo/README.md b/samples/scanner/images/npm-monorepo/README.md new file mode 100644 index 00000000..1d390558 --- /dev/null +++ b/samples/scanner/images/npm-monorepo/README.md @@ -0,0 +1,3 @@ +# NPM Monorepo Sample + +Mirrors the fixture under `samples/runtime/npm-monorepo`. The SBOMs highlight the workspace packages plus transitive dependencies, and the BOM Index pins evidence to individual `package.json` files. diff --git a/samples/scanner/images/npm-monorepo/bom-index.json b/samples/scanner/images/npm-monorepo/bom-index.json new file mode 100644 index 00000000..f3d0dbff --- /dev/null +++ b/samples/scanner/images/npm-monorepo/bom-index.json @@ -0,0 +1,52 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "registry.stella-ops.org/samples/npm-monorepo", + "digest": "sha256:1cf2ab9d373086ed5bd1a8f4aa6f491f8844bbb0d6be8df449c16ad6c8fa7c55", + "tag": "2025.10.0" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:npm/%40stella/web@1.5.3", + "layerDigest": "sha256:1212121212121212121212121212121212121212121212121212121212121212", + "usage": ["inventory", "runtime"], + "licenses": ["MIT"], + "evidence": { + "kind": "package-json", + "path": "node_modules/@stella/web/package.json" + } + }, + { + "purl": "pkg:npm/%40stella/core@2.0.0", + "layerDigest": "sha256:1313131313131313131313131313131313131313131313131313131313131313", + "usage": ["inventory", "runtime"], + "licenses": ["Apache-2.0"], + "evidence": { + "kind": "package-json", + "path": "node_modules/@stella/core/package.json" + } + }, + { + "purl": "pkg:npm/lodash@4.17.21", + "layerDigest": "sha256:1414141414141414141414141414141414141414141414141414141414141414", + "usage": ["inventory"], + "licenses": ["MIT"], + "evidence": { + "kind": "package-json", + "path": "node_modules/lodash/package.json" + } + }, + { + "purl": "pkg:npm/rxjs@7.8.1", + "layerDigest": "sha256:1515151515151515151515151515151515151515151515151515151515151515", + "usage": ["inventory", "runtime"], + "licenses": ["Apache-2.0"], + "evidence": { + "kind": "package-json", + "path": "node_modules/rxjs/package.json" + } + } + ] +} diff --git a/samples/scanner/images/npm-monorepo/inventory.cdx.json 
b/samples/scanner/images/npm-monorepo/inventory.cdx.json new file mode 100644 index 00000000..3f97f660 --- /dev/null +++ b/samples/scanner/images/npm-monorepo/inventory.cdx.json @@ -0,0 +1,40 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "npm-monorepo", + "version": "2025.10.0", + "bomRef": "pkg:docker/registry.stella-ops.org/samples/npm-monorepo@sha256:1cf2ab9d373086ed5bd1a8f4aa6f491f8844bbb0d6be8df449c16ad6c8fa7c55" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:npm/%40stella/core@2.0.0", + "name": "@stella/core", + "version": "2.0.0" + }, + { + "type": "application", + "bomRef": "pkg:npm/%40stella/web@1.5.3", + "name": "@stella/web", + "version": "1.5.3" + }, + { + "type": "library", + "bomRef": "pkg:npm/lodash@4.17.21", + "name": "lodash", + "version": "4.17.21" + }, + { + "type": "library", + "bomRef": "pkg:npm/rxjs@7.8.1", + "name": "rxjs", + "version": "7.8.1" + } + ] +} diff --git a/samples/scanner/images/npm-monorepo/usage.cdx.json b/samples/scanner/images/npm-monorepo/usage.cdx.json new file mode 100644 index 00000000..b5ac2e6a --- /dev/null +++ b/samples/scanner/images/npm-monorepo/usage.cdx.json @@ -0,0 +1,34 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "npm-monorepo", + "version": "2025.10.0", + "bomRef": "pkg:docker/registry.stella-ops.org/samples/npm-monorepo@sha256:1cf2ab9d373086ed5bd1a8f4aa6f491f8844bbb0d6be8df449c16ad6c8fa7c55" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:npm/%40stella/web@1.5.3", + "name": "@stella/web", + "version": "1.5.3" + }, + { + "type": "application", + "bomRef": "pkg:npm/%40stella/core@2.0.0", + "name": "@stella/core", + "version": "2.0.0" + }, + { + "type": "library", + "bomRef": "pkg:npm/rxjs@7.8.1", + "name": "rxjs", + "version": "7.8.1" + } + ] +} diff --git a/samples/scanner/images/python-venv/README.md b/samples/scanner/images/python-venv/README.md new file mode 100644 index 00000000..eb8cfa26 --- /dev/null +++ b/samples/scanner/images/python-venv/README.md @@ -0,0 +1,3 @@ +# Python Virtualenv Sample + +Pairs with the runtime fixture under `samples/runtime/python-venv`. The SBOMs highlight how requests pulls in urllib3 and certifi, and the BOM Index records the `*.dist-info/METADATA` evidence paths used by the Python analyzer. 
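The dist-info fixtures are small enough to show the parsing they exercise. The following is a rough sketch of RFC 822-style header extraction over a `METADATA` file, assuming only the fields present in the fixtures (`Name`, `Version`, `License`, repeated `Requires-Dist`); the actual Python analyzer has its own parser and may behave differently.

```csharp
// Rough sketch of *.dist-info/METADATA header extraction over the fixture files.
using System;
using System.Collections.Generic;
using System.IO;

var metadataPath = "samples/runtime/python-venv/lib/python3.11/site-packages/requests-2.32.0.dist-info/METADATA";
var headers = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);

foreach (var line in File.ReadLines(metadataPath))
{
    if (string.IsNullOrWhiteSpace(line))
    {
        break; // A blank line ends the header block; anything after is the long description.
    }

    var separator = line.IndexOf(':');
    if (separator <= 0)
    {
        continue;
    }

    var key = line[..separator].Trim();
    var value = line[(separator + 1)..].Trim();
    if (!headers.TryGetValue(key, out var values))
    {
        headers[key] = values = new List<string>();
    }
    values.Add(value); // Keys such as Requires-Dist legitimately repeat.
}

Console.WriteLine($"{headers["Name"][0]} {headers["Version"][0]} ({headers["License"][0]})");
foreach (var dependency in headers.TryGetValue("Requires-Dist", out var deps) ? deps : new List<string>())
{
    Console.WriteLine($"  requires {dependency}");
}
```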
diff --git a/samples/scanner/images/python-venv/bom-index.json b/samples/scanner/images/python-venv/bom-index.json new file mode 100644 index 00000000..f2b1b631 --- /dev/null +++ b/samples/scanner/images/python-venv/bom-index.json @@ -0,0 +1,42 @@ +{ + "schema": "stellaops/bom-index@1", + "image": { + "repository": "docker.io/library/python", + "digest": "sha256:dbed08b7d9675c2be627bbecac182a04c36d3f4ffd542c4fba7c7a850a6578dc", + "tag": "3.12-slim" + }, + "generatedAt": "2025-10-19T00:00:00Z", + "generator": "stellaops/scanner@10.0.0-preview1", + "components": [ + { + "purl": "pkg:pypi/requests@2.32.0", + "layerDigest": "sha256:eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", + "usage": ["inventory", "runtime"], + "licenses": ["Apache-2.0"], + "evidence": { + "kind": "dist-info", + "path": "lib/python3.11/site-packages/requests-2.32.0.dist-info/METADATA" + } + }, + { + "purl": "pkg:pypi/urllib3@2.2.1", + "layerDigest": "sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "usage": ["inventory", "runtime"], + "licenses": ["MIT"], + "evidence": { + "kind": "dist-info", + "path": "lib/python3.11/site-packages/urllib3-2.2.1.dist-info/METADATA" + } + }, + { + "purl": "pkg:pypi/certifi@2024.6.2", + "layerDigest": "sha256:0000000000000000000000000000000000000000000000000000000000000000", + "usage": ["inventory"], + "licenses": ["MPL-2.0"], + "evidence": { + "kind": "dist-info", + "path": "lib/python3.11/site-packages/certifi-2024.6.2.dist-info/METADATA" + } + } + ] +} diff --git a/samples/scanner/images/python-venv/inventory.cdx.json b/samples/scanner/images/python-venv/inventory.cdx.json new file mode 100644 index 00000000..70365128 --- /dev/null +++ b/samples/scanner/images/python-venv/inventory.cdx.json @@ -0,0 +1,34 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "python-venv", + "version": "3.12-slim", + "bomRef": "pkg:docker/library/python@sha256:dbed08b7d9675c2be627bbecac182a04c36d3f4ffd542c4fba7c7a850a6578dc" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:pypi/requests@2.32.0", + "name": "requests", + "version": "2.32.0" + }, + { + "type": "library", + "bomRef": "pkg:pypi/urllib3@2.2.1", + "name": "urllib3", + "version": "2.2.1" + }, + { + "type": "library", + "bomRef": "pkg:pypi/certifi@2024.6.2", + "name": "certifi", + "version": "2024.6.2" + } + ] +} diff --git a/samples/scanner/images/python-venv/usage.cdx.json b/samples/scanner/images/python-venv/usage.cdx.json new file mode 100644 index 00000000..15c1f9e5 --- /dev/null +++ b/samples/scanner/images/python-venv/usage.cdx.json @@ -0,0 +1,28 @@ +{ + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "version": 1, + "metadata": { + "timestamp": "2025-10-19T00:00:00Z", + "component": { + "type": "container", + "name": "python-venv", + "version": "3.12-slim", + "bomRef": "pkg:docker/library/python@sha256:dbed08b7d9675c2be627bbecac182a04c36d3f4ffd542c4fba7c7a850a6578dc" + } + }, + "components": [ + { + "type": "application", + "bomRef": "pkg:pypi/requests@2.32.0", + "name": "requests", + "version": "2.32.0" + }, + { + "type": "library", + "bomRef": "pkg:pypi/urllib3@2.2.1", + "name": "urllib3", + "version": "2.2.1" + } + ] +} diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 424371d8..1f61b4a9 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -9,6 +9,11 @@ true 
$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\buildx\')) true + $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\os\')) + true + $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\lang\')) + true + true @@ -18,11 +23,11 @@ - + - + diff --git a/src/Directory.Build.targets b/src/Directory.Build.targets index 92c102a3..14b78768 100644 --- a/src/Directory.Build.targets +++ b/src/Directory.Build.targets @@ -47,4 +47,38 @@ + + + + $(ScannerOsAnalyzerPluginOutputRoot)\$(MSBuildProjectName) + + + + + + + + + + + + + + + + + $(ScannerLangAnalyzerPluginOutputRoot)\$(MSBuildProjectName) + + + + + + + + + + + + + diff --git a/src/StellaOps.Attestor/AGENTS.md b/src/StellaOps.Attestor/AGENTS.md new file mode 100644 index 00000000..9f6bf997 --- /dev/null +++ b/src/StellaOps.Attestor/AGENTS.md @@ -0,0 +1,21 @@ +# Attestor Guild + +## Mission +Operate the StellaOps Attestor service: accept signed DSSE envelopes from the Signer over mTLS, submit them to Rekor v2, persist inclusion proofs, and expose verification APIs for downstream services and operators. + +## Teams On Call +- Team 11 (Attestor API) +- Team 12 (Attestor Observability) — partners on logging, metrics, and alerting + +## Operating Principles +- Enforce mTLS + Authority tokens for every submission; never accept anonymous callers. +- Deterministic hashing, canonical JSON, and idempotent Rekor interactions (`bundleSha256` is the source of truth). +- Persist everything (entries, dedupe, audit) before acknowledging; background jobs must be resumable. +- Structured logs + metrics for each stage (`validate`, `submit`, `proof`, `persist`, `archive`). +- Update `TASKS.md`, architecture docs, and tests whenever behaviour changes. + +## Key Directories +- `src/StellaOps.Attestor/StellaOps.Attestor.WebService/` — Minimal API host and HTTP surface. +- `src/StellaOps.Attestor/StellaOps.Attestor.Core/` — Domain contracts, submission/verification pipelines. +- `src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/` — Mongo, Redis, Rekor, and archival implementations. +- `src/StellaOps.Attestor/StellaOps.Attestor.Tests/` — Unit and integration tests. diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs new file mode 100644 index 00000000..2a113f97 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs @@ -0,0 +1,42 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Audit; + +public sealed class AttestorAuditRecord +{ + public string Action { get; init; } = string.Empty; + + public string Result { get; init; } = string.Empty; + + public string? RekorUuid { get; init; } + + public long? Index { get; init; } + + public string ArtifactSha256 { get; init; } = string.Empty; + + public string BundleSha256 { get; init; } = string.Empty; + + public string Backend { get; init; } = string.Empty; + + public long LatencyMs { get; init; } + + public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow; + + public CallerDescriptor Caller { get; init; } = new(); + + public IDictionary Metadata { get; init; } = new Dictionary(); + + public sealed class CallerDescriptor + { + public string? Subject { get; init; } + + public string? Audience { get; init; } + + public string? ClientId { get; init; } + + public string? 
MtlsThumbprint { get; init; } + + public string? Tenant { get; init; } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs new file mode 100644 index 00000000..63e68b81 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs @@ -0,0 +1,45 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Attestor.Core.Observability; + +public sealed class AttestorMetrics : IDisposable +{ + public const string MeterName = "StellaOps.Attestor"; + + private readonly Meter _meter; + private bool _disposed; + + public AttestorMetrics() + { + _meter = new Meter(MeterName); + SubmitTotal = _meter.CreateCounter("attestor.submit_total", description: "Total submission attempts grouped by result and backend."); + SubmitLatency = _meter.CreateHistogram("attestor.submit_latency_seconds", unit: "s", description: "Submission latency in seconds per backend."); + ProofFetchTotal = _meter.CreateCounter("attestor.proof_fetch_total", description: "Proof fetch attempts grouped by result."); + VerifyTotal = _meter.CreateCounter("attestor.verify_total", description: "Verification attempts grouped by result."); + DedupeHitsTotal = _meter.CreateCounter("attestor.dedupe_hits_total", description: "Number of dedupe hits by outcome."); + ErrorTotal = _meter.CreateCounter("attestor.errors_total", description: "Total errors grouped by type."); + } + + public Counter SubmitTotal { get; } + + public Histogram SubmitLatency { get; } + + public Counter ProofFetchTotal { get; } + + public Counter VerifyTotal { get; } + + public Counter DedupeHitsTotal { get; } + + public Counter ErrorTotal { get; } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _meter.Dispose(); + _disposed = true; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs new file mode 100644 index 00000000..9001b00f --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs @@ -0,0 +1,144 @@ +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Options; + +/// +/// Strongly typed configuration for the Attestor service. +/// +public sealed class AttestorOptions +{ + public string Listen { get; set; } = "https://0.0.0.0:8444"; + + public SecurityOptions Security { get; set; } = new(); + + public RekorOptions Rekor { get; set; } = new(); + + public MongoOptions Mongo { get; set; } = new(); + + public RedisOptions Redis { get; set; } = new(); + + public S3Options S3 { get; set; } = new(); + + public QuotaOptions Quotas { get; set; } = new(); + + public TelemetryOptions Telemetry { get; set; } = new(); + + public sealed class SecurityOptions + { + public MtlsOptions Mtls { get; set; } = new(); + + public AuthorityOptions Authority { get; set; } = new(); + + public SignerIdentityOptions SignerIdentity { get; set; } = new(); + } + + public sealed class MtlsOptions + { + public bool RequireClientCertificate { get; set; } = true; + + public string? CaBundle { get; set; } + } + + public sealed class AuthorityOptions + { + public string? Issuer { get; set; } + + public string? JwksUrl { get; set; } + + public string? 
RequireSenderConstraint { get; set; } + + public bool RequireHttpsMetadata { get; set; } = true; + + public IList Audiences { get; set; } = new List(); + + public IList RequiredScopes { get; set; } = new List(); + } + + public sealed class SignerIdentityOptions + { + public IList Mode { get; set; } = new List { "keyless", "kms" }; + + public IList FulcioRoots { get; set; } = new List(); + + public IList AllowedSans { get; set; } = new List(); + + public IList KmsKeys { get; set; } = new List(); + } + + public sealed class RekorOptions + { + public RekorBackendOptions Primary { get; set; } = new(); + + public RekorMirrorOptions Mirror { get; set; } = new(); + } + + public class RekorBackendOptions + { + public string? Url { get; set; } + + public int ProofTimeoutMs { get; set; } = 15_000; + + public int PollIntervalMs { get; set; } = 250; + + public int MaxAttempts { get; set; } = 60; + } + + public sealed class RekorMirrorOptions : RekorBackendOptions + { + public bool Enabled { get; set; } + } + + public sealed class MongoOptions + { + public string? Uri { get; set; } + + public string Database { get; set; } = "attestor"; + + public string EntriesCollection { get; set; } = "entries"; + + public string DedupeCollection { get; set; } = "dedupe"; + + public string AuditCollection { get; set; } = "audit"; + } + + public sealed class RedisOptions + { + public string? Url { get; set; } + + public string? DedupePrefix { get; set; } = "attestor:dedupe:"; + } + + public sealed class S3Options + { + public bool Enabled { get; set; } + + public string? Endpoint { get; set; } + + public string? Bucket { get; set; } + + public string? Prefix { get; set; } + + public string? ObjectLockMode { get; set; } + + public bool UseTls { get; set; } = true; + } + + public sealed class QuotaOptions + { + public PerCallerQuotaOptions PerCaller { get; set; } = new(); + } + + public sealed class PerCallerQuotaOptions + { + public int Qps { get; set; } = 50; + + public int Burst { get; set; } = 100; + } + + public sealed class TelemetryOptions + { + public bool EnableLogging { get; set; } = true; + + public bool EnableTracing { get; set; } = false; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs new file mode 100644 index 00000000..fe02c239 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs @@ -0,0 +1,18 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Core.Rekor; + +public interface IRekorClient +{ + Task SubmitAsync( + AttestorSubmissionRequest request, + RekorBackend backend, + CancellationToken cancellationToken = default); + + Task GetProofAsync( + string rekorUuid, + RekorBackend backend, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs new file mode 100644 index 00000000..f872a5b1 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs @@ -0,0 +1,16 @@ +using System; + +namespace StellaOps.Attestor.Core.Rekor; + +public sealed class RekorBackend +{ + public required string Name { get; init; } + + public required Uri Url { get; init; } + + public TimeSpan ProofTimeout { get; init; } = TimeSpan.FromSeconds(15); + + public TimeSpan PollInterval { get; init; } = TimeSpan.FromMilliseconds(250); + + 
public int MaxAttempts { get; init; } = 60; +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs new file mode 100644 index 00000000..e2f511a6 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Attestor.Core.Rekor; + +public sealed class RekorProofResponse +{ + [JsonPropertyName("checkpoint")] + public RekorCheckpoint? Checkpoint { get; set; } + + [JsonPropertyName("inclusion")] + public RekorInclusionProof? Inclusion { get; set; } + + public sealed class RekorCheckpoint + { + [JsonPropertyName("origin")] + public string? Origin { get; set; } + + [JsonPropertyName("size")] + public long Size { get; set; } + + [JsonPropertyName("rootHash")] + public string? RootHash { get; set; } + + [JsonPropertyName("timestamp")] + public DateTimeOffset? Timestamp { get; set; } + } + + public sealed class RekorInclusionProof + { + [JsonPropertyName("leafHash")] + public string? LeafHash { get; set; } + + [JsonPropertyName("path")] + public IReadOnlyList Path { get; set; } = Array.Empty(); + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs new file mode 100644 index 00000000..d59f6520 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs @@ -0,0 +1,21 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Attestor.Core.Rekor; + +public sealed class RekorSubmissionResponse +{ + [JsonPropertyName("uuid")] + public string Uuid { get; set; } = string.Empty; + + [JsonPropertyName("index")] + public long? Index { get; set; } + + [JsonPropertyName("logURL")] + public string? LogUrl { get; set; } + + [JsonPropertyName("status")] + public string Status { get; set; } = "included"; + + [JsonPropertyName("proof")] + public RekorProofResponse? 
Proof { get; set; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj b/src/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj new file mode 100644 index 00000000..ecc3af66 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj @@ -0,0 +1,9 @@ + + + net10.0 + preview + enable + enable + true + + diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs new file mode 100644 index 00000000..5ee9bdec --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Storage; + +public sealed class AttestorArchiveBundle +{ + public string RekorUuid { get; init; } = string.Empty; + + public string ArtifactSha256 { get; init; } = string.Empty; + + public string BundleSha256 { get; init; } = string.Empty; + + public byte[] CanonicalBundleJson { get; init; } = Array.Empty(); + + public byte[] ProofJson { get; init; } = Array.Empty(); + + public IReadOnlyDictionary Metadata { get; init; } = new Dictionary(); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs new file mode 100644 index 00000000..94672075 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs @@ -0,0 +1,82 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Storage; + +/// +/// Canonical representation of a Rekor entry persisted in Mongo. +/// +public sealed class AttestorEntry +{ + public string RekorUuid { get; init; } = string.Empty; + + public ArtifactDescriptor Artifact { get; init; } = new(); + + public string BundleSha256 { get; init; } = string.Empty; + + public long? Index { get; init; } + + public ProofDescriptor? Proof { get; init; } + + public LogDescriptor Log { get; init; } = new(); + + public DateTimeOffset CreatedAt { get; init; } + + public string Status { get; init; } = "pending"; + + public SignerIdentityDescriptor SignerIdentity { get; init; } = new(); + + public sealed class ArtifactDescriptor + { + public string Sha256 { get; init; } = string.Empty; + + public string Kind { get; init; } = string.Empty; + + public string? ImageDigest { get; init; } + + public string? SubjectUri { get; init; } + } + + public sealed class ProofDescriptor + { + public CheckpointDescriptor? Checkpoint { get; init; } + + public InclusionDescriptor? Inclusion { get; init; } + } + + public sealed class CheckpointDescriptor + { + public string? Origin { get; init; } + + public long Size { get; init; } + + public string? RootHash { get; init; } + + public DateTimeOffset? Timestamp { get; init; } + } + + public sealed class InclusionDescriptor + { + public string? LeafHash { get; init; } + + public IReadOnlyList Path { get; init; } = Array.Empty(); + } + + public sealed class LogDescriptor + { + public string Url { get; init; } = string.Empty; + + public string? LogId { get; init; } + } + + public sealed class SignerIdentityDescriptor + { + public string Mode { get; init; } = string.Empty; + + public string? Issuer { get; init; } + + public string? SubjectAlternativeName { get; init; } + + public string? 
KeyId { get; init; } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs new file mode 100644 index 00000000..6d265ba2 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs @@ -0,0 +1,9 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Storage; + +public interface IAttestorArchiveStore +{ + Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs new file mode 100644 index 00000000..3d093d97 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs @@ -0,0 +1,10 @@ +using StellaOps.Attestor.Core.Audit; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Storage; + +public interface IAttestorAuditSink +{ + Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs new file mode 100644 index 00000000..b2e2b60e --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs @@ -0,0 +1,12 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Storage; + +public interface IAttestorDedupeStore +{ + Task<string?> TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default); + + Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs new file mode 100644 index 00000000..f8a2ae71 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Storage; + +public interface IAttestorEntryRepository +{ + Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default); + + Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default); + + Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs new file mode 100644 index 00000000..6ca0f081 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs @@ -0,0 +1,79 @@ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Attestor.Core.Submission; + +/// +/// Incoming submission payload for /api/v1/rekor/entries.
+/// +public sealed class AttestorSubmissionRequest +{ + [JsonPropertyName("bundle")] + public SubmissionBundle Bundle { get; set; } = new(); + + [JsonPropertyName("meta")] + public SubmissionMeta Meta { get; set; } = new(); + + public sealed class SubmissionBundle + { + [JsonPropertyName("dsse")] + public DsseEnvelope Dsse { get; set; } = new(); + + [JsonPropertyName("certificateChain")] + public IList CertificateChain { get; set; } = new List(); + + [JsonPropertyName("mode")] + public string Mode { get; set; } = "keyless"; + } + + public sealed class DsseEnvelope + { + [JsonPropertyName("payloadType")] + public string PayloadType { get; set; } = string.Empty; + + [JsonPropertyName("payload")] + public string PayloadBase64 { get; set; } = string.Empty; + + [JsonPropertyName("signatures")] + public IList Signatures { get; set; } = new List(); + } + + public sealed class DsseSignature + { + [JsonPropertyName("keyid")] + public string? KeyId { get; set; } + + [JsonPropertyName("sig")] + public string Signature { get; set; } = string.Empty; + } + + public sealed class SubmissionMeta + { + [JsonPropertyName("artifact")] + public ArtifactInfo Artifact { get; set; } = new(); + + [JsonPropertyName("bundleSha256")] + public string BundleSha256 { get; set; } = string.Empty; + + [JsonPropertyName("logPreference")] + public string LogPreference { get; set; } = "primary"; + + [JsonPropertyName("archive")] + public bool Archive { get; set; } = true; + } + + public sealed class ArtifactInfo + { + [JsonPropertyName("sha256")] + public string Sha256 { get; set; } = string.Empty; + + [JsonPropertyName("kind")] + public string Kind { get; set; } = string.Empty; + + [JsonPropertyName("imageDigest")] + public string? ImageDigest { get; set; } + + [JsonPropertyName("subjectUri")] + public string? SubjectUri { get; set; } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs new file mode 100644 index 00000000..85399b83 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs @@ -0,0 +1,59 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Attestor.Core.Submission; + +/// +/// Result returned to callers after processing a submission. +/// +public sealed class AttestorSubmissionResult +{ + [JsonPropertyName("uuid")] + public string? Uuid { get; set; } + + [JsonPropertyName("index")] + public long? Index { get; set; } + + [JsonPropertyName("proof")] + public RekorProof? Proof { get; set; } + + [JsonPropertyName("logURL")] + public string? LogUrl { get; set; } + + [JsonPropertyName("status")] + public string Status { get; set; } = "pending"; + + public sealed class RekorProof + { + [JsonPropertyName("checkpoint")] + public Checkpoint? Checkpoint { get; set; } + + [JsonPropertyName("inclusion")] + public InclusionProof? Inclusion { get; set; } + } + + public sealed class Checkpoint + { + [JsonPropertyName("origin")] + public string? Origin { get; set; } + + [JsonPropertyName("size")] + public long Size { get; set; } + + [JsonPropertyName("rootHash")] + public string? RootHash { get; set; } + + [JsonPropertyName("timestamp")] + public string? Timestamp { get; set; } + } + + public sealed class InclusionProof + { + [JsonPropertyName("leafHash")] + public string? 
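// --- Illustrative sketch (annotation, not part of the patch) ---
// The JsonPropertyName attributes on AttestorSubmissionRequest above imply a request
// body shaped roughly as follows; all values are placeholders and digests are truncated:
//
//   {
//     "bundle": {
//       "dsse": {
//         "payloadType": "application/vnd.in-toto+json",
//         "payload": "<base64 in-toto statement>",
//         "signatures": [ { "keyid": "fulcio", "sig": "<base64 signature>" } ]
//       },
//       "certificateChain": [ "-----BEGIN CERTIFICATE----- ..." ],
//       "mode": "keyless"
//     },
//     "meta": {
//       "artifact": { "sha256": "ab12...", "kind": "sbom", "imageDigest": "sha256:ab12..." },
//       "bundleSha256": "cd34...",
//       "logPreference": "primary",
//       "archive": true
//     }
//   }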
LeafHash { get; set; } + + [JsonPropertyName("path")] + public IReadOnlyList Path { get; init; } = Array.Empty(); + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs new file mode 100644 index 00000000..361c6fb5 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Attestor.Core.Submission; + +public sealed class AttestorSubmissionValidationResult +{ + public AttestorSubmissionValidationResult(byte[] canonicalBundle) + { + CanonicalBundle = canonicalBundle; + } + + public byte[] CanonicalBundle { get; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs new file mode 100644 index 00000000..00798027 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs @@ -0,0 +1,167 @@ +using System; +using System.Buffers.Text; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Submission; + +public sealed class AttestorSubmissionValidator +{ + private static readonly string[] AllowedKinds = ["sbom", "report", "vex-export"]; + + private readonly IDsseCanonicalizer _canonicalizer; + + public AttestorSubmissionValidator(IDsseCanonicalizer canonicalizer) + { + _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer)); + } + + public async Task ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + if (request.Bundle is null) + { + throw new AttestorValidationException("bundle_missing", "Submission bundle payload is required."); + } + + if (request.Bundle.Dsse is null) + { + throw new AttestorValidationException("dsse_missing", "DSSE envelope is required."); + } + + if (string.IsNullOrWhiteSpace(request.Bundle.Dsse.PayloadType)) + { + throw new AttestorValidationException("payload_type_missing", "DSSE payloadType is required."); + } + + if (string.IsNullOrWhiteSpace(request.Bundle.Dsse.PayloadBase64)) + { + throw new AttestorValidationException("payload_missing", "DSSE payload must be provided."); + } + + if (request.Bundle.Dsse.Signatures.Count == 0) + { + throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required."); + } + + if (request.Meta is null) + { + throw new AttestorValidationException("meta_missing", "Submission metadata is required."); + } + + if (request.Meta.Artifact is null) + { + throw new AttestorValidationException("artifact_missing", "Artifact metadata is required."); + } + + if (string.IsNullOrWhiteSpace(request.Meta.Artifact.Sha256)) + { + throw new AttestorValidationException("artifact_sha_missing", "Artifact sha256 is required."); + } + + if (!IsHex(request.Meta.Artifact.Sha256, expectedLength: 64)) + { + throw new AttestorValidationException("artifact_sha_invalid", "Artifact sha256 must be a 64-character hex string."); + } + + if (string.IsNullOrWhiteSpace(request.Meta.BundleSha256)) + { + throw new AttestorValidationException("bundle_sha_missing", "bundleSha256 is required."); + } + + if (!IsHex(request.Meta.BundleSha256, expectedLength: 64)) + { + throw new 
AttestorValidationException("bundle_sha_invalid", "bundleSha256 must be a 64-character hex string."); + } + + if (Array.IndexOf(AllowedKinds, request.Meta.Artifact.Kind) < 0) + { + throw new AttestorValidationException("artifact_kind_invalid", $"Artifact kind '{request.Meta.Artifact.Kind}' is not supported."); + } + + if (!Base64UrlDecode(request.Bundle.Dsse.PayloadBase64, out _)) + { + throw new AttestorValidationException("payload_invalid_base64", "DSSE payload must be valid base64."); + } + + var canonical = await _canonicalizer.CanonicalizeAsync(request, cancellationToken).ConfigureAwait(false); + Span hash = stackalloc byte[32]; + if (!SHA256.TryHashData(canonical, hash, out _)) + { + throw new AttestorValidationException("bundle_sha_failure", "Failed to compute canonical bundle hash."); + } + + var hashHex = Convert.ToHexString(hash).ToLowerInvariant(); + if (!string.Equals(hashHex, request.Meta.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + throw new AttestorValidationException("bundle_sha_mismatch", "bundleSha256 does not match canonical DSSE hash."); + } + + if (!string.Equals(request.Meta.LogPreference, "primary", StringComparison.OrdinalIgnoreCase) + && !string.Equals(request.Meta.LogPreference, "mirror", StringComparison.OrdinalIgnoreCase) + && !string.Equals(request.Meta.LogPreference, "both", StringComparison.OrdinalIgnoreCase)) + { + throw new AttestorValidationException("log_preference_invalid", "logPreference must be 'primary', 'mirror', or 'both'."); + } + + return new AttestorSubmissionValidationResult(canonical); + } + + private static bool IsHex(string value, int expectedLength) + { + if (value.Length != expectedLength) + { + return false; + } + + foreach (var ch in value) + { + var isHex = ch is >= '0' and <= '9' or >= 'a' and <= 'f' or >= 'A' and <= 'F'; + if (!isHex) + { + return false; + } + } + + return true; + } + + private static bool Base64UrlDecode(string value, out byte[] bytes) + { + try + { + bytes = Convert.FromBase64String(Normalise(value)); + return true; + } + catch (FormatException) + { + bytes = Array.Empty(); + return false; + } + } + + private static string Normalise(string value) + { + if (value.Contains('-') || value.Contains('_')) + { + Span buffer = value.ToCharArray(); + for (var i = 0; i < buffer.Length; i++) + { + buffer[i] = buffer[i] switch + { + '-' => '+', + '_' => '/', + _ => buffer[i] + }; + } + + var padding = 4 - (buffer.Length % 4); + return padding == 4 ? 
new string(buffer) : new string(buffer) + new string('=', padding); + } + + return value; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs new file mode 100644 index 00000000..30c6ab65 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs @@ -0,0 +1,14 @@ +using System; + +namespace StellaOps.Attestor.Core.Submission; + +public sealed class AttestorValidationException : Exception +{ + public AttestorValidationException(string code, string message) + : base(message) + { + Code = code; + } + + public string Code { get; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs new file mode 100644 index 00000000..5dfabc89 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs @@ -0,0 +1,12 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Submission; + +public interface IAttestorSubmissionService +{ + Task SubmitAsync( + AttestorSubmissionRequest request, + SubmissionContext context, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs new file mode 100644 index 00000000..80675dd6 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs @@ -0,0 +1,9 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Attestor.Core.Submission; + +public interface IDsseCanonicalizer +{ + Task CanonicalizeAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs new file mode 100644 index 00000000..510b84a3 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs @@ -0,0 +1,21 @@ +using System.Security.Cryptography.X509Certificates; + +namespace StellaOps.Attestor.Core.Submission; + +/// +/// Ambient information about the caller used for policy and audit decisions. +/// +public sealed class SubmissionContext +{ + public required string CallerSubject { get; init; } + + public required string CallerAudience { get; init; } + + public required string? CallerClientId { get; init; } + + public required string? CallerTenant { get; init; } + + public X509Certificate2? ClientCertificate { get; init; } + + public string? 
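// --- Illustrative sketch (annotation, not part of the patch) ---
// meta.bundleSha256 must be the SHA-256 of the *canonical* DSSE form, otherwise
// ValidateAsync above rejects the request with bundle_sha_mismatch. A caller-side
// sketch, assuming the same IDsseCanonicalizer implementation is available to the caller:
//
//   var canonical = await canonicalizer.CanonicalizeAsync(request);
//   request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
//
//   var validator = new AttestorSubmissionValidator(canonicalizer);
//   try
//   {
//       var validated = await validator.ValidateAsync(request);  // validated.CanonicalBundle holds the canonical bytes
//   }
//   catch (AttestorValidationException ex)
//   {
//       // ex.Code carries the machine-readable reason, e.g. "artifact_kind_invalid"
//   }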
MtlsThumbprint { get; init; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs new file mode 100644 index 00000000..5becffcb --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs @@ -0,0 +1,14 @@ +using System; + +namespace StellaOps.Attestor.Core.Verification; + +public sealed class AttestorVerificationException : Exception +{ + public AttestorVerificationException(string code, string message) + : base(message) + { + Code = code; + } + + public string Code { get; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs new file mode 100644 index 00000000..bcc268b4 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Attestor.Core.Verification; + +/// +/// Payload accepted by the verification service. +/// +public sealed class AttestorVerificationRequest +{ + public string? Uuid { get; set; } + + public Submission.AttestorSubmissionRequest.SubmissionBundle? Bundle { get; set; } + + public string? ArtifactSha256 { get; set; } + + public bool RefreshProof { get; set; } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs new file mode 100644 index 00000000..b4a49b54 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs @@ -0,0 +1,21 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Attestor.Core.Verification; + +public sealed class AttestorVerificationResult +{ + public bool Ok { get; init; } + + public string? Uuid { get; init; } + + public long? Index { get; init; } + + public string? 
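// --- Illustrative sketch (annotation, not part of the patch) ---
// AttestorVerificationRequest above identifies an entry either by Rekor UUID, by
// re-supplying the bundle, or by artifact digest. A minimal lookup by UUID through
// IAttestorVerificationService (declared just below); the UUID value is a placeholder:
//
//   var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
//   {
//       Uuid = "24296fb24b8ad77a...",
//       RefreshProof = true
//   });
//   // result.Ok, result.Status and result.Issues describe the outcome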
LogUrl { get; init; } + + public DateTimeOffset CheckedAt { get; init; } = DateTimeOffset.UtcNow; + + public string Status { get; init; } = "unknown"; + + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs new file mode 100644 index 00000000..b19ee203 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs @@ -0,0 +1,12 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Core.Verification; + +public interface IAttestorVerificationService +{ + Task VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default); + + Task GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..98cbea92 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Attestor.Tests")] diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs new file mode 100644 index 00000000..7de8d48e --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs @@ -0,0 +1,157 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Infrastructure.Rekor; + +internal sealed class HttpRekorClient : IRekorClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private readonly HttpClient _httpClient; + private readonly ILogger _logger; + + public HttpRekorClient(HttpClient httpClient, ILogger logger) + { + _httpClient = httpClient; + _logger = logger; + } + + public async Task SubmitAsync(AttestorSubmissionRequest request, RekorBackend backend, CancellationToken cancellationToken = default) + { + var submissionUri = BuildUri(backend.Url, "api/v2/log/entries"); + + using var httpRequest = new HttpRequestMessage(HttpMethod.Post, submissionUri) + { + Content = JsonContent.Create(BuildSubmissionPayload(request), options: SerializerOptions) + }; + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.Conflict) + { + var message = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Rekor reported a conflict: {message}"); + } + + response.EnsureSuccessStatusCode(); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: 
cancellationToken).ConfigureAwait(false); + + var root = document.RootElement; + + long? index = null; + if (root.TryGetProperty("index", out var indexElement) && indexElement.TryGetInt64(out var indexValue)) + { + index = indexValue; + } + + return new RekorSubmissionResponse + { + Uuid = root.TryGetProperty("uuid", out var uuidElement) ? uuidElement.GetString() ?? string.Empty : string.Empty, + Index = index, + LogUrl = root.TryGetProperty("logURL", out var urlElement) ? urlElement.GetString() ?? backend.Url.ToString() : backend.Url.ToString(), + Status = root.TryGetProperty("status", out var statusElement) ? statusElement.GetString() ?? "included" : "included", + Proof = TryParseProof(root.TryGetProperty("proof", out var proofElement) ? proofElement : default) + }; + } + + public async Task GetProofAsync(string rekorUuid, RekorBackend backend, CancellationToken cancellationToken = default) + { + var proofUri = BuildUri(backend.Url, $"api/v2/log/entries/{rekorUuid}/proof"); + + using var request = new HttpRequestMessage(HttpMethod.Get, proofUri); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotFound) + { + _logger.LogDebug("Rekor proof for {Uuid} not found", rekorUuid); + return null; + } + + response.EnsureSuccessStatusCode(); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + + return TryParseProof(document.RootElement); + } + + private static object BuildSubmissionPayload(AttestorSubmissionRequest request) + { + var signatures = new List(); + foreach (var sig in request.Bundle.Dsse.Signatures) + { + signatures.Add(new { keyid = sig.KeyId, sig = sig.Signature }); + } + + return new + { + entries = new[] + { + new + { + dsseEnvelope = new + { + payload = request.Bundle.Dsse.PayloadBase64, + payloadType = request.Bundle.Dsse.PayloadType, + signatures + } + } + } + }; + } + + private static RekorProofResponse? TryParseProof(JsonElement proofElement) + { + if (proofElement.ValueKind == JsonValueKind.Undefined || proofElement.ValueKind == JsonValueKind.Null) + { + return null; + } + + var checkpointElement = proofElement.TryGetProperty("checkpoint", out var cp) ? cp : default; + var inclusionElement = proofElement.TryGetProperty("inclusion", out var inc) ? inc : default; + + return new RekorProofResponse + { + Checkpoint = checkpointElement.ValueKind == JsonValueKind.Object + ? new RekorProofResponse.RekorCheckpoint + { + Origin = checkpointElement.TryGetProperty("origin", out var origin) ? origin.GetString() : null, + Size = checkpointElement.TryGetProperty("size", out var size) && size.TryGetInt64(out var sizeValue) ? sizeValue : 0, + RootHash = checkpointElement.TryGetProperty("rootHash", out var rootHash) ? rootHash.GetString() : null, + Timestamp = checkpointElement.TryGetProperty("timestamp", out var ts) && ts.ValueKind == JsonValueKind.String && DateTimeOffset.TryParse(ts.GetString(), out var dto) ? dto : null + } + : null, + Inclusion = inclusionElement.ValueKind == JsonValueKind.Object + ? new RekorProofResponse.RekorInclusionProof + { + LeafHash = inclusionElement.TryGetProperty("leafHash", out var leaf) ? leaf.GetString() : null, + Path = inclusionElement.TryGetProperty("path", out var pathElement) && pathElement.ValueKind == JsonValueKind.Array + ? 
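// --- Illustrative sketch (annotation, not part of the patch) ---
// BuildSubmissionPayload above serialises (with JsonSerializerDefaults.Web) into the
// Rekor v2 entry-creation shape, roughly:
//
//   {
//     "entries": [
//       {
//         "dsseEnvelope": {
//           "payload": "<base64>",
//           "payloadType": "application/vnd.in-toto+json",
//           "signatures": [ { "keyid": "fulcio", "sig": "<base64>" } ]
//         }
//       }
//     ]
//   }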
pathElement.EnumerateArray().Select(p => p.GetString() ?? string.Empty).ToArray() + : Array.Empty() + } + : null + }; + } + + private static Uri BuildUri(Uri baseUri, string relative) + { + if (!relative.StartsWith("/", StringComparison.Ordinal)) + { + relative = "/" + relative; + } + + return new Uri(baseUri, relative); + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs new file mode 100644 index 00000000..4449f7fe --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs @@ -0,0 +1,71 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Infrastructure.Rekor; + +internal sealed class StubRekorClient : IRekorClient +{ + private readonly ILogger _logger; + + public StubRekorClient(ILogger logger) + { + _logger = logger; + } + + public Task SubmitAsync(AttestorSubmissionRequest request, RekorBackend backend, CancellationToken cancellationToken = default) + { + var uuid = Guid.NewGuid().ToString(); + _logger.LogInformation("Stub Rekor submission for bundle {BundleSha} -> {Uuid}", request.Meta.BundleSha256, uuid); + + var proof = new RekorProofResponse + { + Checkpoint = new RekorProofResponse.RekorCheckpoint + { + Origin = backend.Url.Host, + Size = 1, + RootHash = request.Meta.BundleSha256, + Timestamp = DateTimeOffset.UtcNow + }, + Inclusion = new RekorProofResponse.RekorInclusionProof + { + LeafHash = request.Meta.BundleSha256, + Path = Array.Empty() + } + }; + + var response = new RekorSubmissionResponse + { + Uuid = uuid, + Index = Random.Shared.NextInt64(1, long.MaxValue), + LogUrl = new Uri(backend.Url, $"/api/v2/log/entries/{uuid}").ToString(), + Status = "included", + Proof = proof + }; + + return Task.FromResult(response); + } + + public Task GetProofAsync(string rekorUuid, RekorBackend backend, CancellationToken cancellationToken = default) + { + _logger.LogInformation("Stub Rekor proof fetch for {Uuid}", rekorUuid); + return Task.FromResult(new RekorProofResponse + { + Checkpoint = new RekorProofResponse.RekorCheckpoint + { + Origin = backend.Url.Host, + Size = 1, + RootHash = string.Empty, + Timestamp = DateTimeOffset.UtcNow + }, + Inclusion = new RekorProofResponse.RekorInclusionProof + { + LeafHash = string.Empty, + Path = Array.Empty() + } + }); + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..cc845da9 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs @@ -0,0 +1,118 @@ +using System; +using Amazon.Runtime; +using Amazon.S3; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StackExchange.Redis; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Infrastructure.Rekor; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Submission; +using StellaOps.Attestor.Core.Verification; 
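// --- Illustrative sketch (annotation, not part of the patch) ---
// AddAttestorInfrastructure below expects AttestorOptions to be bound beforehand; the
// configuration section name and host wiring here are assumptions for illustration only:
//
//   builder.Services.Configure<AttestorOptions>(builder.Configuration.GetSection("attestor"));
//   builder.Services.AddAttestorInfrastructure();
//
// With Redis.Url left empty the dedupe store falls back to the in-memory implementation,
// whereas Mongo.Uri is mandatory and the MongoClient registration throws at resolution time.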
+using StellaOps.Attestor.Infrastructure.Verification; + +namespace StellaOps.Attestor.Infrastructure; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddHttpClient(client => + { + client.Timeout = TimeSpan.FromSeconds(30); + }); + services.AddSingleton(sp => sp.GetRequiredService()); + + services.AddSingleton(sp => + { + var options = sp.GetRequiredService>().Value; + if (string.IsNullOrWhiteSpace(options.Mongo.Uri)) + { + throw new InvalidOperationException("Attestor MongoDB connection string is not configured."); + } + + return new MongoClient(options.Mongo.Uri); + }); + + services.AddSingleton(sp => + { + var opts = sp.GetRequiredService>().Value; + var client = sp.GetRequiredService(); + var databaseName = MongoUrl.Create(opts.Mongo.Uri).DatabaseName ?? opts.Mongo.Database; + return client.GetDatabase(databaseName); + }); + + services.AddSingleton(sp => + { + var opts = sp.GetRequiredService>().Value; + var database = sp.GetRequiredService(); + return database.GetCollection(opts.Mongo.EntriesCollection); + }); + + services.AddSingleton(sp => + { + var opts = sp.GetRequiredService>().Value; + var database = sp.GetRequiredService(); + return database.GetCollection(opts.Mongo.AuditCollection); + }); + + services.AddSingleton(); + services.AddSingleton(); + + + services.AddSingleton(sp => + { + var options = sp.GetRequiredService>().Value; + if (string.IsNullOrWhiteSpace(options.Redis.Url)) + { + return new InMemoryAttestorDedupeStore(); + } + + var multiplexer = sp.GetRequiredService(); + return new RedisAttestorDedupeStore(multiplexer, sp.GetRequiredService>()); + }); + + services.AddSingleton(sp => + { + var options = sp.GetRequiredService>().Value; + if (string.IsNullOrWhiteSpace(options.Redis.Url)) + { + throw new InvalidOperationException("Redis connection string is required when redis dedupe is enabled."); + } + + return ConnectionMultiplexer.Connect(options.Redis.Url); + }); + + services.AddSingleton(sp => + { + var options = sp.GetRequiredService>().Value; + if (options.S3.Enabled && !string.IsNullOrWhiteSpace(options.S3.Endpoint) && !string.IsNullOrWhiteSpace(options.S3.Bucket)) + { + var config = new AmazonS3Config + { + ServiceURL = options.S3.Endpoint, + ForcePathStyle = true, + UseHttp = !options.S3.UseTls + }; + + var client = new AmazonS3Client(FallbackCredentialsFactory.GetCredentials(), config); + return new S3AttestorArchiveStore(client, sp.GetRequiredService>(), sp.GetRequiredService>()); + } + + return new NullAttestorArchiveStore(sp.GetRequiredService>()); + }); + + return services; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj new file mode 100644 index 00000000..b7afdf82 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj @@ -0,0 +1,21 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs new file mode 100644 index 00000000..4ef28708 --- /dev/null +++ 
b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Concurrent; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class InMemoryAttestorDedupeStore : IAttestorDedupeStore +{ + private readonly ConcurrentDictionary _store = new(); + + public Task TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + if (_store.TryGetValue(bundleSha256, out var entry)) + { + if (entry.ExpiresAt > DateTimeOffset.UtcNow) + { + return Task.FromResult(entry.Uuid); + } + + _store.TryRemove(bundleSha256, out _); + } + + return Task.FromResult(null); + } + + public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default) + { + _store[bundleSha256] = (rekorUuid, DateTimeOffset.UtcNow.Add(ttl)); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs new file mode 100644 index 00000000..2d875934 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs @@ -0,0 +1,115 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class MongoAttestorAuditSink : IAttestorAuditSink +{ + private readonly IMongoCollection _collection; + + public MongoAttestorAuditSink(IMongoCollection collection) + { + _collection = collection; + } + + public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default) + { + var document = AttestorAuditDocument.FromRecord(record); + return _collection.InsertOneAsync(document, cancellationToken: cancellationToken); + } + + internal sealed class AttestorAuditDocument + { + [BsonId] + public ObjectId Id { get; set; } + + [BsonElement("ts")] + public BsonDateTime Timestamp { get; set; } = BsonDateTime.Create(DateTime.UtcNow); + + [BsonElement("action")] + public string Action { get; set; } = string.Empty; + + [BsonElement("result")] + public string Result { get; set; } = string.Empty; + + [BsonElement("rekorUuid")] + public string? RekorUuid { get; set; } + + [BsonElement("index")] + public long? 
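// --- Illustrative sketch (annotation, not part of the patch) ---
// The in-memory dedupe store above checks expiry lazily on read; a round trip as it
// might appear in StellaOps.Attestor.Tests (reachable via the InternalsVisibleTo above):
//
//   var store = new InMemoryAttestorDedupeStore();
//   await store.SetAsync(bundleSha256, rekorUuid, TimeSpan.FromHours(48));
//   var hit = await store.TryGetExistingAsync(bundleSha256);   // rekorUuid until the TTL lapses, then null
//
// Expired entries are only evicted when the same key is read again.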
Index { get; set; } + + [BsonElement("artifactSha256")] + public string ArtifactSha256 { get; set; } = string.Empty; + + [BsonElement("bundleSha256")] + public string BundleSha256 { get; set; } = string.Empty; + + [BsonElement("backend")] + public string Backend { get; set; } = string.Empty; + + [BsonElement("latencyMs")] + public long LatencyMs { get; set; } + + [BsonElement("caller")] + public CallerDocument Caller { get; set; } = new(); + + [BsonElement("metadata")] + public BsonDocument Metadata { get; set; } = new(); + + public static AttestorAuditDocument FromRecord(AttestorAuditRecord record) + { + var metadata = new BsonDocument(); + foreach (var kvp in record.Metadata) + { + metadata[kvp.Key] = kvp.Value; + } + + return new AttestorAuditDocument + { + Id = ObjectId.GenerateNewId(), + Timestamp = BsonDateTime.Create(record.Timestamp.UtcDateTime), + Action = record.Action, + Result = record.Result, + RekorUuid = record.RekorUuid, + Index = record.Index, + ArtifactSha256 = record.ArtifactSha256, + BundleSha256 = record.BundleSha256, + Backend = record.Backend, + LatencyMs = record.LatencyMs, + Caller = new CallerDocument + { + Subject = record.Caller.Subject, + Audience = record.Caller.Audience, + ClientId = record.Caller.ClientId, + MtlsThumbprint = record.Caller.MtlsThumbprint, + Tenant = record.Caller.Tenant + }, + Metadata = metadata + }; + } + + internal sealed class CallerDocument + { + [BsonElement("subject")] + public string? Subject { get; set; } + + [BsonElement("audience")] + public string? Audience { get; set; } + + [BsonElement("clientId")] + public string? ClientId { get; set; } + + [BsonElement("mtlsThumbprint")] + public string? MtlsThumbprint { get; set; } + + [BsonElement("tenant")] + public string? Tenant { get; set; } + } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs new file mode 100644 index 00000000..d5cf7127 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs @@ -0,0 +1,245 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository +{ + private readonly IMongoCollection _entries; + + public MongoAttestorEntryRepository(IMongoCollection entries) + { + _entries = entries; + } + + public async Task GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.BundleSha256, bundleSha256); + var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToDomain(); + } + + public async Task GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Id, rekorUuid); + var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToDomain(); + } + + public async Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Artifact.Sha256, artifactSha256); + var documents = await 
_entries.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false); + return documents.ConvertAll(static doc => doc.ToDomain()); + } + + public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) + { + var document = AttestorEntryDocument.FromDomain(entry); + var filter = Builders.Filter.Eq(x => x.Id, document.Id); + await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + + [BsonIgnoreExtraElements] + internal sealed class AttestorEntryDocument + { + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("artifact")] + public ArtifactDocument Artifact { get; set; } = new(); + + [BsonElement("bundleSha256")] + public string BundleSha256 { get; set; } = string.Empty; + + [BsonElement("index")] + public long? Index { get; set; } + + [BsonElement("proof")] + public ProofDocument? Proof { get; set; } + + [BsonElement("log")] + public LogDocument Log { get; set; } = new(); + + [BsonElement("createdAt")] + public BsonDateTime CreatedAt { get; set; } = BsonDateTime.Create(System.DateTimeOffset.UtcNow); + + [BsonElement("status")] + public string Status { get; set; } = "pending"; + + [BsonElement("signerIdentity")] + public SignerIdentityDocument SignerIdentity { get; set; } = new(); + + public static AttestorEntryDocument FromDomain(AttestorEntry entry) + { + return new AttestorEntryDocument + { + Id = entry.RekorUuid, + Artifact = new ArtifactDocument + { + Sha256 = entry.Artifact.Sha256, + Kind = entry.Artifact.Kind, + ImageDigest = entry.Artifact.ImageDigest, + SubjectUri = entry.Artifact.SubjectUri + }, + BundleSha256 = entry.BundleSha256, + Index = entry.Index, + Proof = entry.Proof is null ? null : new ProofDocument + { + Checkpoint = entry.Proof.Checkpoint is null ? null : new CheckpointDocument + { + Origin = entry.Proof.Checkpoint.Origin, + Size = entry.Proof.Checkpoint.Size, + RootHash = entry.Proof.Checkpoint.RootHash, + Timestamp = entry.Proof.Checkpoint.Timestamp is null + ? null + : BsonDateTime.Create(entry.Proof.Checkpoint.Timestamp.Value) + }, + Inclusion = entry.Proof.Inclusion is null ? null : new InclusionDocument + { + LeafHash = entry.Proof.Inclusion.LeafHash, + Path = entry.Proof.Inclusion.Path + } + }, + Log = new LogDocument + { + Url = entry.Log.Url, + LogId = entry.Log.LogId + }, + CreatedAt = BsonDateTime.Create(entry.CreatedAt.UtcDateTime), + Status = entry.Status, + SignerIdentity = new SignerIdentityDocument + { + Mode = entry.SignerIdentity.Mode, + Issuer = entry.SignerIdentity.Issuer, + SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName, + KeyId = entry.SignerIdentity.KeyId + } + }; + } + + public AttestorEntry ToDomain() + { + return new AttestorEntry + { + RekorUuid = Id, + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = Artifact.Sha256, + Kind = Artifact.Kind, + ImageDigest = Artifact.ImageDigest, + SubjectUri = Artifact.SubjectUri + }, + BundleSha256 = BundleSha256, + Index = Index, + Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor + { + Checkpoint = Proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor + { + Origin = Proof.Checkpoint.Origin, + Size = Proof.Checkpoint.Size, + RootHash = Proof.Checkpoint.RootHash, + Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime() + }, + Inclusion = Proof.Inclusion is null ? 
null : new AttestorEntry.InclusionDescriptor + { + LeafHash = Proof.Inclusion.LeafHash, + Path = Proof.Inclusion.Path + } + }, + Log = new AttestorEntry.LogDescriptor + { + Url = Log.Url, + LogId = Log.LogId + }, + CreatedAt = CreatedAt.ToUniversalTime(), + Status = Status, + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = SignerIdentity.Mode, + Issuer = SignerIdentity.Issuer, + SubjectAlternativeName = SignerIdentity.SubjectAlternativeName, + KeyId = SignerIdentity.KeyId + } + }; + } + + internal sealed class ArtifactDocument + { + [BsonElement("sha256")] + public string Sha256 { get; set; } = string.Empty; + + [BsonElement("kind")] + public string Kind { get; set; } = string.Empty; + + [BsonElement("imageDigest")] + public string? ImageDigest { get; set; } + + [BsonElement("subjectUri")] + public string? SubjectUri { get; set; } + } + + internal sealed class ProofDocument + { + [BsonElement("checkpoint")] + public CheckpointDocument? Checkpoint { get; set; } + + [BsonElement("inclusion")] + public InclusionDocument? Inclusion { get; set; } + } + + internal sealed class CheckpointDocument + { + [BsonElement("origin")] + public string? Origin { get; set; } + + [BsonElement("size")] + public long Size { get; set; } + + [BsonElement("rootHash")] + public string? RootHash { get; set; } + + [BsonElement("timestamp")] + public BsonDateTime? Timestamp { get; set; } + } + + internal sealed class InclusionDocument + { + [BsonElement("leafHash")] + public string? LeafHash { get; set; } + + [BsonElement("path")] + public IReadOnlyList Path { get; set; } = System.Array.Empty(); + } + + internal sealed class LogDocument + { + [BsonElement("url")] + public string Url { get; set; } = string.Empty; + + [BsonElement("logId")] + public string? LogId { get; set; } + } + + internal sealed class SignerIdentityDocument + { + [BsonElement("mode")] + public string Mode { get; set; } = string.Empty; + + [BsonElement("issuer")] + public string? Issuer { get; set; } + + [BsonElement("san")] + public string? SubjectAlternativeName { get; set; } + + [BsonElement("kid")] + public string? 
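// --- Illustrative sketch (annotation, not part of the patch) ---
// MongoAttestorEntryRepository filters on bundleSha256 and artifact.sha256, so the entries
// collection benefits from secondary indexes on both fields. The diff does not include that
// bootstrap; a hypothetical one-off setup could look like:
//
//   await entries.Indexes.CreateManyAsync(new[]
//   {
//       new CreateIndexModel<AttestorEntryDocument>(
//           Builders<AttestorEntryDocument>.IndexKeys.Ascending(x => x.BundleSha256)),
//       new CreateIndexModel<AttestorEntryDocument>(
//           Builders<AttestorEntryDocument>.IndexKeys.Ascending(x => x.Artifact.Sha256)),
//   });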
KeyId { get; set; } + } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs new file mode 100644 index 00000000..f1c81794 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs @@ -0,0 +1,22 @@ +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class NullAttestorArchiveStore : IAttestorArchiveStore +{ + private readonly ILogger _logger; + + public NullAttestorArchiveStore(ILogger logger) + { + _logger = logger; + } + + public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) + { + _logger.LogDebug("Archive disabled; skipping bundle {BundleSha}", bundle.BundleSha256); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs new file mode 100644 index 00000000..cef4d9e9 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs @@ -0,0 +1,34 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Options; +using StackExchange.Redis; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class RedisAttestorDedupeStore : IAttestorDedupeStore +{ + private readonly IDatabase _database; + private readonly string _prefix; + + public RedisAttestorDedupeStore(IConnectionMultiplexer multiplexer, IOptions options) + { + _database = multiplexer.GetDatabase(); + _prefix = options.Value.Redis.DedupePrefix ?? "attestor:dedupe:"; + } + + public async Task TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + var value = await _database.StringGetAsync(BuildKey(bundleSha256)).ConfigureAwait(false); + return value.HasValue ? 
value.ToString() : null; + } + + public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default) + { + return _database.StringSetAsync(BuildKey(bundleSha256), rekorUuid, ttl); + } + + private RedisKey BuildKey(string bundleSha256) => new RedisKey(_prefix + bundleSha256); +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs new file mode 100644 index 00000000..f2b427a5 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs @@ -0,0 +1,72 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Amazon.S3; +using Amazon.S3.Model; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Infrastructure.Storage; + +internal sealed class S3AttestorArchiveStore : IAttestorArchiveStore, IDisposable +{ + private readonly IAmazonS3 _s3; + private readonly AttestorOptions.S3Options _options; + private readonly ILogger _logger; + private bool _disposed; + + public S3AttestorArchiveStore(IAmazonS3 s3, IOptions options, ILogger logger) + { + _s3 = s3; + _options = options.Value.S3; + _logger = logger; + } + + public async Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(_options.Bucket)) + { + _logger.LogWarning("S3 archive bucket is not configured; skipping archive for bundle {Bundle}", bundle.BundleSha256); + return; + } + + var prefix = _options.Prefix ?? 
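// --- Illustrative sketch (annotation, not part of the patch) ---
// With the default "attest/" prefix, ArchiveBundleAsync below lays objects out as:
//
//   attest/dsse/{bundleSha256}.json    canonical DSSE bundle
//   attest/proof/{rekorUuid}.json      inclusion proof (written only when present)
//   attest/meta/{rekorUuid}.json       metadata dictionary (e.g. logUrl, status)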
"attest/"; + + await PutObjectAsync(prefix + "dsse/" + bundle.BundleSha256 + ".json", bundle.CanonicalBundleJson, cancellationToken).ConfigureAwait(false); + if (bundle.ProofJson.Length > 0) + { + await PutObjectAsync(prefix + "proof/" + bundle.RekorUuid + ".json", bundle.ProofJson, cancellationToken).ConfigureAwait(false); + } + + var metadataObject = JsonSerializer.SerializeToUtf8Bytes(bundle.Metadata); + await PutObjectAsync(prefix + "meta/" + bundle.RekorUuid + ".json", metadataObject, cancellationToken).ConfigureAwait(false); + } + + private Task PutObjectAsync(string key, byte[] content, CancellationToken cancellationToken) + { + using var stream = new MemoryStream(content); + var request = new PutObjectRequest + { + BucketName = _options.Bucket, + Key = key, + InputStream = stream, + AutoCloseStream = false + }; + return _s3.PutObjectAsync(request, cancellationToken); + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _s3.Dispose(); + _disposed = true; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs new file mode 100644 index 00000000..ee36ef73 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs @@ -0,0 +1,284 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Infrastructure.Submission; + +internal sealed class AttestorSubmissionService : IAttestorSubmissionService +{ + private static readonly TimeSpan DedupeTtl = TimeSpan.FromHours(48); + + private readonly AttestorSubmissionValidator _validator; + private readonly IAttestorEntryRepository _repository; + private readonly IAttestorDedupeStore _dedupeStore; + private readonly IRekorClient _rekorClient; + private readonly IAttestorArchiveStore _archiveStore; + private readonly IAttestorAuditSink _auditSink; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly AttestorOptions _options; + private readonly AttestorMetrics _metrics; + + public AttestorSubmissionService( + AttestorSubmissionValidator validator, + IAttestorEntryRepository repository, + IAttestorDedupeStore dedupeStore, + IRekorClient rekorClient, + IAttestorArchiveStore archiveStore, + IAttestorAuditSink auditSink, + IOptions options, + ILogger logger, + TimeProvider timeProvider, + AttestorMetrics metrics) + { + _validator = validator; + _repository = repository; + _dedupeStore = dedupeStore; + _rekorClient = rekorClient; + _archiveStore = archiveStore; + _auditSink = auditSink; + _logger = logger; + _timeProvider = timeProvider; + _options = options.Value; + _metrics = metrics; + } + + public async Task SubmitAsync( + AttestorSubmissionRequest request, + SubmissionContext context, + CancellationToken cancellationToken = default) + { + var start = System.Diagnostics.Stopwatch.GetTimestamp(); + + var validation = await _validator.ValidateAsync(request, cancellationToken).ConfigureAwait(false); + + var canonicalBundle = validation.CanonicalBundle; + + 
var dedupeUuid = await _dedupeStore.TryGetExistingAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrEmpty(dedupeUuid)) + { + _logger.LogInformation("Dedupe hit for bundle {BundleSha256} -> {RekorUuid}", request.Meta.BundleSha256, dedupeUuid); + _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "hit")); + var existing = await _repository.GetByUuidAsync(dedupeUuid, cancellationToken).ConfigureAwait(false) + ?? await _repository.GetByBundleShaAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false); + + if (existing is not null) + { + _metrics.SubmitTotal.Add(1, + new KeyValuePair("result", "dedupe"), + new KeyValuePair("backend", "cache")); + return ToResult(existing); + } + } + else + { + _metrics.DedupeHitsTotal.Add(1, new KeyValuePair("result", "miss")); + } + + var primaryBackend = BuildBackend("primary", _options.Rekor.Primary); + RekorSubmissionResponse submissionResponse; + try + { + submissionResponse = await _rekorClient.SubmitAsync(request, primaryBackend, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "submit")); + _logger.LogError(ex, "Failed to submit bundle {BundleSha} to Rekor backend {Backend}", request.Meta.BundleSha256, primaryBackend.Name); + throw; + } + + var proof = submissionResponse.Proof; + if (proof is null && string.Equals(submissionResponse.Status, "included", StringComparison.OrdinalIgnoreCase)) + { + try + { + proof = await _rekorClient.GetProofAsync(submissionResponse.Uuid, primaryBackend, cancellationToken).ConfigureAwait(false); + _metrics.ProofFetchTotal.Add(1, + new KeyValuePair("result", proof is null ? "missing" : "ok")); + } + catch (Exception ex) + { + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "proof_fetch")); + _logger.LogWarning(ex, "Proof fetch failed for {Uuid} on backend {Backend}", submissionResponse.Uuid, primaryBackend.Name); + } + } + + var entry = CreateEntry(request, submissionResponse, proof, context, canonicalBundle); + await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false); + await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false); + + if (request.Meta.Archive) + { + var archiveBundle = new AttestorArchiveBundle + { + RekorUuid = entry.RekorUuid, + ArtifactSha256 = entry.Artifact.Sha256, + BundleSha256 = entry.BundleSha256, + CanonicalBundleJson = canonicalBundle, + ProofJson = proof is null ? Array.Empty() : JsonSerializer.SerializeToUtf8Bytes(proof, JsonSerializerOptions.Default), + Metadata = new Dictionary + { + ["logUrl"] = entry.Log.Url, + ["status"] = entry.Status + } + }; + + try + { + await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to archive bundle {BundleSha}", entry.BundleSha256); + _metrics.ErrorTotal.Add(1, new KeyValuePair("type", "archive")); + } + } + + var elapsed = System.Diagnostics.Stopwatch.GetElapsedTime(start, System.Diagnostics.Stopwatch.GetTimestamp()); + _metrics.SubmitTotal.Add(1, + new KeyValuePair("result", submissionResponse.Status ?? 
"unknown"), + new KeyValuePair("backend", primaryBackend.Name)); + _metrics.SubmitLatency.Record(elapsed.TotalSeconds, new KeyValuePair("backend", primaryBackend.Name)); + await WriteAuditAsync(request, context, entry, submissionResponse, (long)elapsed.TotalMilliseconds, cancellationToken).ConfigureAwait(false); + + return ToResult(entry); + } + + private static AttestorSubmissionResult ToResult(AttestorEntry entry) + { + return new AttestorSubmissionResult + { + Uuid = entry.RekorUuid, + Index = entry.Index, + LogUrl = entry.Log.Url, + Status = entry.Status, + Proof = entry.Proof is null ? null : new AttestorSubmissionResult.RekorProof + { + Checkpoint = entry.Proof.Checkpoint is null ? null : new AttestorSubmissionResult.Checkpoint + { + Origin = entry.Proof.Checkpoint.Origin, + Size = entry.Proof.Checkpoint.Size, + RootHash = entry.Proof.Checkpoint.RootHash, + Timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O") + }, + Inclusion = entry.Proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof + { + LeafHash = entry.Proof.Inclusion.LeafHash, + Path = entry.Proof.Inclusion.Path + } + } + }; + } + + private AttestorEntry CreateEntry( + AttestorSubmissionRequest request, + RekorSubmissionResponse submission, + RekorProofResponse? proof, + SubmissionContext context, + byte[] canonicalBundle) + { + var now = _timeProvider.GetUtcNow(); + return new AttestorEntry + { + RekorUuid = submission.Uuid, + Artifact = new AttestorEntry.ArtifactDescriptor + { + Sha256 = request.Meta.Artifact.Sha256, + Kind = request.Meta.Artifact.Kind, + ImageDigest = request.Meta.Artifact.ImageDigest, + SubjectUri = request.Meta.Artifact.SubjectUri + }, + BundleSha256 = request.Meta.BundleSha256, + Index = submission.Index, + Proof = proof is null ? null : new AttestorEntry.ProofDescriptor + { + Checkpoint = proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor + { + Origin = proof.Checkpoint.Origin, + Size = proof.Checkpoint.Size, + RootHash = proof.Checkpoint.RootHash, + Timestamp = proof.Checkpoint.Timestamp + }, + Inclusion = proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor + { + LeafHash = proof.Inclusion.LeafHash, + Path = proof.Inclusion.Path + } + }, + Log = new AttestorEntry.LogDescriptor + { + Url = submission.LogUrl ?? string.Empty, + LogId = null + }, + CreatedAt = now, + Status = submission.Status ?? "included", + SignerIdentity = new AttestorEntry.SignerIdentityDescriptor + { + Mode = request.Bundle.Mode, + Issuer = context.CallerAudience, + SubjectAlternativeName = context.CallerSubject, + KeyId = context.CallerClientId + } + }; + } + + private Task WriteAuditAsync( + AttestorSubmissionRequest request, + SubmissionContext context, + AttestorEntry entry, + RekorSubmissionResponse submission, + long latencyMs, + CancellationToken cancellationToken) + { + var record = new AttestorAuditRecord + { + Action = "submit", + Result = submission.Status ?? 
"included", + RekorUuid = submission.Uuid, + Index = submission.Index, + ArtifactSha256 = request.Meta.Artifact.Sha256, + BundleSha256 = request.Meta.BundleSha256, + Backend = "primary", + LatencyMs = latencyMs, + Timestamp = _timeProvider.GetUtcNow(), + Caller = new AttestorAuditRecord.CallerDescriptor + { + Subject = context.CallerSubject, + Audience = context.CallerAudience, + ClientId = context.CallerClientId, + MtlsThumbprint = context.MtlsThumbprint, + Tenant = context.CallerTenant + } + }; + + return _auditSink.WriteAsync(record, cancellationToken); + } + + private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options) + { + if (string.IsNullOrWhiteSpace(options.Url)) + { + throw new InvalidOperationException($"Rekor backend '{name}' is not configured."); + } + + return new RekorBackend + { + Name = name, + Url = new Uri(options.Url, UriKind.Absolute), + ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs), + PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs), + MaxAttempts = options.MaxAttempts + }; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs new file mode 100644 index 00000000..2f78bcf6 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs @@ -0,0 +1,49 @@ +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Submission; + +namespace StellaOps.Attestor.Infrastructure.Submission; + +public sealed class DefaultDsseCanonicalizer : IDsseCanonicalizer +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = false, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + public Task CanonicalizeAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default) + { + var node = new JsonObject + { + ["payloadType"] = request.Bundle.Dsse.PayloadType, + ["payload"] = request.Bundle.Dsse.PayloadBase64, + ["signatures"] = CreateSignaturesArray(request) + }; + + var json = node.ToJsonString(SerializerOptions); + return Task.FromResult(JsonSerializer.SerializeToUtf8Bytes(JsonNode.Parse(json)!, SerializerOptions)); + } + + private static JsonArray CreateSignaturesArray(AttestorSubmissionRequest request) + { + var array = new JsonArray(); + foreach (var signature in request.Bundle.Dsse.Signatures) + { + var obj = new JsonObject + { + ["sig"] = signature.Signature + }; + if (!string.IsNullOrWhiteSpace(signature.KeyId)) + { + obj["keyid"] = signature.KeyId; + } + + array.Add(obj); + } + + return array; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs new file mode 100644 index 00000000..28251220 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs @@ -0,0 +1,261 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Storage; +using 
StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Verification; +using System.Security.Cryptography; + +namespace StellaOps.Attestor.Infrastructure.Verification; + +internal sealed class AttestorVerificationService : IAttestorVerificationService +{ + private readonly IAttestorEntryRepository _repository; + private readonly IDsseCanonicalizer _canonicalizer; + private readonly IRekorClient _rekorClient; + private readonly ILogger _logger; + private readonly AttestorOptions _options; + + public AttestorVerificationService( + IAttestorEntryRepository repository, + IDsseCanonicalizer canonicalizer, + IRekorClient rekorClient, + IOptions options, + ILogger logger) + { + _repository = repository; + _canonicalizer = canonicalizer; + _rekorClient = rekorClient; + _logger = logger; + _options = options.Value; + } + + public async Task VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var entry = await ResolveEntryAsync(request, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + throw new AttestorVerificationException("not_found", "No attestor entry matched the supplied query."); + } + + var issues = new List(); + + if (request.Bundle is not null) + { + var canonicalBundle = await _canonicalizer.CanonicalizeAsync(new AttestorSubmissionRequest + { + Bundle = request.Bundle, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = entry.Artifact.Sha256, + Kind = entry.Artifact.Kind + }, + BundleSha256 = entry.BundleSha256 + } + }, cancellationToken).ConfigureAwait(false); + + var computedHash = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(canonicalBundle)).ToLowerInvariant(); + if (!string.Equals(computedHash, entry.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + issues.Add("Bundle hash does not match stored canonical hash."); + } + } + + if (request.RefreshProof || entry.Proof is null) + { + var backend = BuildBackend("primary", _options.Rekor.Primary); + try + { + var proof = await _rekorClient.GetProofAsync(entry.RekorUuid, backend, cancellationToken).ConfigureAwait(false); + if (proof is not null) + { + var updated = CloneWithProof(entry, proof.ToProofDescriptor()); + await _repository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); + entry = updated; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to refresh proof for entry {Uuid}", entry.RekorUuid); + issues.Add("Proof refresh failed: " + ex.Message); + } + } + + var ok = issues.Count == 0 && string.Equals(entry.Status, "included", StringComparison.OrdinalIgnoreCase); + + return new AttestorVerificationResult + { + Ok = ok, + Uuid = entry.RekorUuid, + Index = entry.Index, + LogUrl = entry.Log.Url, + Status = entry.Status, + Issues = issues, + CheckedAt = DateTimeOffset.UtcNow + }; + } + + public Task GetEntryAsync(string rekorUuid, bool refreshProof, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(rekorUuid)) + { + throw new ArgumentException("Value cannot be null or whitespace.", nameof(rekorUuid)); + } + + return ResolveEntryByUuidAsync(rekorUuid, refreshProof, cancellationToken); + } + + private async Task ResolveEntryAsync(AttestorVerificationRequest request, CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(request.Uuid)) + { + return await ResolveEntryByUuidAsync(request.Uuid, 
request.RefreshProof, cancellationToken).ConfigureAwait(false); + } + + if (request.Bundle is not null) + { + var canonical = await _canonicalizer.CanonicalizeAsync(new AttestorSubmissionRequest + { + Bundle = request.Bundle, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = string.Empty, + Kind = string.Empty + } + } + }, cancellationToken).ConfigureAwait(false); + + var bundleSha = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(canonical)).ToLowerInvariant(); + return await ResolveEntryByBundleShaAsync(bundleSha, request.RefreshProof, cancellationToken).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(request.ArtifactSha256)) + { + return await ResolveEntryByArtifactAsync(request.ArtifactSha256, request.RefreshProof, cancellationToken).ConfigureAwait(false); + } + + throw new AttestorVerificationException("invalid_query", "At least one of uuid, bundle, or artifactSha256 must be provided."); + } + + private async Task ResolveEntryByUuidAsync(string uuid, bool refreshProof, CancellationToken cancellationToken) + { + var entry = await _repository.GetByUuidAsync(uuid, cancellationToken).ConfigureAwait(false); + if (entry is null || !refreshProof) + { + return entry; + } + + var backend = BuildBackend("primary", _options.Rekor.Primary); + try + { + var proof = await _rekorClient.GetProofAsync(uuid, backend, cancellationToken).ConfigureAwait(false); + if (proof is not null) + { + var updated = CloneWithProof(entry, proof.ToProofDescriptor()); + await _repository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); + entry = updated; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to refresh proof for entry {Uuid}", uuid); + } + + return entry; + } + + private async Task ResolveEntryByBundleShaAsync(string bundleSha, bool refreshProof, CancellationToken cancellationToken) + { + var entry = await _repository.GetByBundleShaAsync(bundleSha, cancellationToken).ConfigureAwait(false); + if (entry is null || !refreshProof) + { + return entry; + } + + return await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false); + } + + private async Task ResolveEntryByArtifactAsync(string artifactSha256, bool refreshProof, CancellationToken cancellationToken) + { + var entries = await _repository.GetByArtifactShaAsync(artifactSha256, cancellationToken).ConfigureAwait(false); + var entry = entries.OrderByDescending(e => e.CreatedAt).FirstOrDefault(); + if (entry is null) + { + return null; + } + + return refreshProof + ? await ResolveEntryByUuidAsync(entry.RekorUuid, true, cancellationToken).ConfigureAwait(false) + : entry; + } + + private static AttestorEntry CloneWithProof(AttestorEntry entry, AttestorEntry.ProofDescriptor? 
proof) + { + return new AttestorEntry + { + RekorUuid = entry.RekorUuid, + Artifact = entry.Artifact, + BundleSha256 = entry.BundleSha256, + Index = entry.Index, + Proof = proof, + Log = entry.Log, + CreatedAt = entry.CreatedAt, + Status = entry.Status, + SignerIdentity = entry.SignerIdentity + }; + } + + private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options) + { + if (string.IsNullOrWhiteSpace(options.Url)) + { + throw new InvalidOperationException($"Rekor backend '{name}' is not configured."); + } + + return new RekorBackend + { + Name = name, + Url = new Uri(options.Url, UriKind.Absolute), + ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs), + PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs), + MaxAttempts = options.MaxAttempts + }; + } +} + +internal static class RekorProofResponseExtensions +{ + public static AttestorEntry.ProofDescriptor ToProofDescriptor(this RekorProofResponse response) + { + return new AttestorEntry.ProofDescriptor + { + Checkpoint = response.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor + { + Origin = response.Checkpoint.Origin, + Size = response.Checkpoint.Size, + RootHash = response.Checkpoint.RootHash, + Timestamp = response.Checkpoint.Timestamp + }, + Inclusion = response.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor + { + LeafHash = response.Inclusion.LeafHash, + Path = response.Inclusion.Path + } + }; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs new file mode 100644 index 00000000..c0c706f6 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs @@ -0,0 +1,120 @@ +using System; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Infrastructure.Rekor; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Submission; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorSubmissionServiceTests +{ + [Fact] + public async Task SubmitAsync_ReturnsDeterministicUuid_OnDuplicateBundle() + { + var options = Options.Create(new AttestorOptions + { + Redis = new AttestorOptions.RedisOptions + { + Url = string.Empty + }, + Rekor = new AttestorOptions.RekorOptions + { + Primary = new AttestorOptions.RekorBackendOptions + { + Url = "https://rekor.stellaops.test", + ProofTimeoutMs = 1000, + PollIntervalMs = 50, + MaxAttempts = 2 + } + } + }); + + var canonicalizer = new DefaultDsseCanonicalizer(); + var validator = new AttestorSubmissionValidator(canonicalizer); + var repository = new InMemoryAttestorEntryRepository(); + var dedupeStore = new InMemoryAttestorDedupeStore(); + var rekorClient = new StubRekorClient(new NullLogger()); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var logger = new NullLogger(); + using var metrics = new AttestorMetrics(); + var service = new AttestorSubmissionService( + validator, + repository, + dedupeStore, + rekorClient, + archiveStore, + auditSink, + options, + logger, + TimeProvider.System, + metrics); + + var request = 
CreateValidRequest(canonicalizer); + var context = new SubmissionContext + { + CallerSubject = "urn:stellaops:signer", + CallerAudience = "attestor", + CallerClientId = "signer-service", + CallerTenant = "default", + ClientCertificate = null, + MtlsThumbprint = "00" + }; + + var first = await service.SubmitAsync(request, context); + var second = await service.SubmitAsync(request, context); + + Assert.NotNull(first.Uuid); + Assert.Equal(first.Uuid, second.Uuid); + + var stored = await repository.GetByBundleShaAsync(request.Meta.BundleSha256); + Assert.NotNull(stored); + Assert.Equal(first.Uuid, stored!.RekorUuid); + } + + private static AttestorSubmissionRequest CreateValidRequest(DefaultDsseCanonicalizer canonicalizer) + { + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Mode = "keyless", + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = "application/vnd.in-toto+json", + PayloadBase64 = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes("{}")), + Signatures = + { + new AttestorSubmissionRequest.DsseSignature + { + KeyId = "test", + Signature = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32)) + } + } + } + }, + Meta = new AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = new string('a', 64), + Kind = "sbom" + }, + LogPreference = "primary", + Archive = false + } + }; + + var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult(); + request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + return request; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs new file mode 100644 index 00000000..840c1cd3 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs @@ -0,0 +1,194 @@ +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Core.Verification; +using StellaOps.Attestor.Infrastructure.Storage; +using StellaOps.Attestor.Infrastructure.Submission; +using StellaOps.Attestor.Infrastructure.Verification; +using StellaOps.Attestor.Infrastructure.Rekor; +using StellaOps.Attestor.Core.Observability; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class AttestorVerificationServiceTests +{ + [Fact] + public async Task VerifyAsync_ReturnsOk_ForExistingUuid() + { + var options = Options.Create(new AttestorOptions + { + Redis = new AttestorOptions.RedisOptions + { + Url = string.Empty + }, + Rekor = new AttestorOptions.RekorOptions + { + Primary = new AttestorOptions.RekorBackendOptions + { + Url = "https://rekor.stellaops.test", + ProofTimeoutMs = 1000, + PollIntervalMs = 50, + MaxAttempts = 2 + } + } + }); + + using var metrics = new AttestorMetrics(); + var canonicalizer = new DefaultDsseCanonicalizer(); + var repository = new InMemoryAttestorEntryRepository(); + var dedupeStore = new InMemoryAttestorDedupeStore(); + var rekorClient = new StubRekorClient(new NullLogger()); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var submissionService = new 
AttestorSubmissionService( + new AttestorSubmissionValidator(canonicalizer), + repository, + dedupeStore, + rekorClient, + archiveStore, + auditSink, + options, + new NullLogger(), + TimeProvider.System, + metrics); + + var submission = CreateSubmissionRequest(canonicalizer); + var context = new SubmissionContext + { + CallerSubject = "urn:stellaops:signer", + CallerAudience = "attestor", + CallerClientId = "signer-service", + CallerTenant = "default" + }; + + var response = await submissionService.SubmitAsync(submission, context); + + var verificationService = new AttestorVerificationService( + repository, + canonicalizer, + rekorClient, + options, + new NullLogger()); + + var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest + { + Uuid = response.Uuid + }); + + Assert.True(verifyResult.Ok); + Assert.Equal(response.Uuid, verifyResult.Uuid); + Assert.Empty(verifyResult.Issues); + } + + [Fact] + public async Task VerifyAsync_FlagsTamperedBundle() + { + var options = Options.Create(new AttestorOptions + { + Redis = new AttestorOptions.RedisOptions { Url = string.Empty }, + Rekor = new AttestorOptions.RekorOptions + { + Primary = new AttestorOptions.RekorBackendOptions + { + Url = "https://rekor.example/", + ProofTimeoutMs = 1000, + PollIntervalMs = 50, + MaxAttempts = 2 + } + } + }); + + using var metrics = new AttestorMetrics(); + var canonicalizer = new DefaultDsseCanonicalizer(); + var repository = new InMemoryAttestorEntryRepository(); + var dedupeStore = new InMemoryAttestorDedupeStore(); + var rekorClient = new StubRekorClient(new NullLogger()); + var archiveStore = new NullAttestorArchiveStore(new NullLogger()); + var auditSink = new InMemoryAttestorAuditSink(); + var submissionService = new AttestorSubmissionService( + new AttestorSubmissionValidator(canonicalizer), + repository, + dedupeStore, + rekorClient, + archiveStore, + auditSink, + options, + new NullLogger(), + TimeProvider.System, + metrics); + + var submission = CreateSubmissionRequest(canonicalizer); + var context = new SubmissionContext + { + CallerSubject = "urn:stellaops:signer", + CallerAudience = "attestor", + CallerClientId = "signer-service", + CallerTenant = "default" + }; + + var response = await submissionService.SubmitAsync(submission, context); + + var verificationService = new AttestorVerificationService( + repository, + canonicalizer, + rekorClient, + options, + new NullLogger()); + + var tamperedBundle = submission.Bundle; + tamperedBundle.Dsse.PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"tampered\":true}")); + + var result = await verificationService.VerifyAsync(new AttestorVerificationRequest + { + Uuid = response.Uuid, + Bundle = tamperedBundle + }); + + Assert.False(result.Ok); + Assert.Contains(result.Issues, issue => issue.Contains("Bundle hash", StringComparison.OrdinalIgnoreCase)); + } + + private static AttestorSubmissionRequest CreateSubmissionRequest(DefaultDsseCanonicalizer canonicalizer) + { + var payload = Encoding.UTF8.GetBytes("{}"); + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Mode = "keyless", + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = "application/vnd.in-toto+json", + PayloadBase64 = Convert.ToBase64String(payload), + Signatures = + { + new AttestorSubmissionRequest.DsseSignature + { + KeyId = "test", + Signature = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32)) + } + } + } + }, + Meta = new 
AttestorSubmissionRequest.SubmissionMeta + { + Artifact = new AttestorSubmissionRequest.ArtifactInfo + { + Sha256 = new string('a', 64), + Kind = "sbom" + }, + LogPreference = "primary", + Archive = false + } + }; + + var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult(); + request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant(); + return request; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs new file mode 100644 index 00000000..2bc27e43 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs @@ -0,0 +1,149 @@ +using System; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Attestor.Core.Rekor; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Infrastructure.Rekor; +using Xunit; + +namespace StellaOps.Attestor.Tests; + +public sealed class HttpRekorClientTests +{ + [Fact] + public async Task SubmitAsync_ParsesResponse() + { + var payload = new + { + uuid = "123", + index = 42, + logURL = "https://rekor.example/api/v2/log/entries/123", + status = "included", + proof = new + { + checkpoint = new { origin = "rekor", size = 10, rootHash = "abc", timestamp = "2025-10-19T00:00:00Z" }, + inclusion = new { leafHash = "leaf", path = new[] { "p1", "p2" } } + } + }; + + var client = CreateClient(HttpStatusCode.Created, payload); + var rekorClient = new HttpRekorClient(client, NullLogger.Instance); + + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = "application/json", + PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{}")), + Signatures = { new AttestorSubmissionRequest.DsseSignature { Signature = "sig" } } + } + } + }; + + var backend = new RekorBackend + { + Name = "primary", + Url = new Uri("https://rekor.example/"), + ProofTimeout = TimeSpan.FromSeconds(1), + PollInterval = TimeSpan.FromMilliseconds(100), + MaxAttempts = 1 + }; + + var response = await rekorClient.SubmitAsync(request, backend); + + Assert.Equal("123", response.Uuid); + Assert.Equal(42, response.Index); + Assert.Equal("included", response.Status); + Assert.NotNull(response.Proof); + Assert.Equal("leaf", response.Proof!.Inclusion!.LeafHash); + } + + [Fact] + public async Task SubmitAsync_ThrowsOnConflict() + { + var client = CreateClient(HttpStatusCode.Conflict, new { error = "duplicate" }); + var rekorClient = new HttpRekorClient(client, NullLogger.Instance); + + var request = new AttestorSubmissionRequest + { + Bundle = new AttestorSubmissionRequest.SubmissionBundle + { + Dsse = new AttestorSubmissionRequest.DsseEnvelope + { + PayloadType = "application/json", + PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{}")), + Signatures = { new AttestorSubmissionRequest.DsseSignature { Signature = "sig" } } + } + } + }; + + var backend = new RekorBackend + { + Name = "primary", + Url = new Uri("https://rekor.example/"), + ProofTimeout = TimeSpan.FromSeconds(1), + PollInterval = TimeSpan.FromMilliseconds(100), + MaxAttempts = 1 + }; + + await Assert.ThrowsAsync(() => rekorClient.SubmitAsync(request, backend)); + } + + [Fact] + public async Task GetProofAsync_ReturnsNullOnNotFound() 
+ { + var client = CreateClient(HttpStatusCode.NotFound, new { }); + var rekorClient = new HttpRekorClient(client, NullLogger.Instance); + + var backend = new RekorBackend + { + Name = "primary", + Url = new Uri("https://rekor.example/"), + ProofTimeout = TimeSpan.FromSeconds(1), + PollInterval = TimeSpan.FromMilliseconds(100), + MaxAttempts = 1 + }; + + var proof = await rekorClient.GetProofAsync("abc", backend); + Assert.Null(proof); + } + + private static HttpClient CreateClient(HttpStatusCode statusCode, object payload) + { + var handler = new StubHandler(statusCode, payload); + return new HttpClient(handler) + { + BaseAddress = new Uri("https://rekor.example/") + }; + } + + private sealed class StubHandler : HttpMessageHandler + { + private readonly HttpStatusCode _statusCode; + private readonly object _payload; + + public StubHandler(HttpStatusCode statusCode, object payload) + { + _statusCode = statusCode; + _payload = payload; + } + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var json = JsonSerializer.Serialize(_payload); + var response = new HttpResponseMessage(_statusCode) + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + + return Task.FromResult(response); + } + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj new file mode 100644 index 00000000..e55ec8a6 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj @@ -0,0 +1,25 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs new file mode 100644 index 00000000..77d2359d --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Attestor.Core.Audit; +using StellaOps.Attestor.Core.Storage; + +namespace StellaOps.Attestor.Tests; + +internal sealed class InMemoryAttestorEntryRepository : IAttestorEntryRepository +{ + private readonly ConcurrentDictionary _entries = new(); + + public Task GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default) + { + var entry = _entries.Values.FirstOrDefault(e => string.Equals(e.BundleSha256, bundleSha256, StringComparison.OrdinalIgnoreCase)); + return Task.FromResult(entry); + } + + public Task GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default) + { + _entries.TryGetValue(rekorUuid, out var entry); + return Task.FromResult(entry); + } + + public Task> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default) + { + var entries = _entries.Values + .Where(e => string.Equals(e.Artifact.Sha256, artifactSha256, StringComparison.OrdinalIgnoreCase)) + .OrderBy(e => e.CreatedAt) + .ToList(); + + return Task.FromResult>(entries); + } + + public Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default) + { + _entries[entry.RekorUuid] = entry; + return Task.CompletedTask; + } +} + +internal sealed class InMemoryAttestorAuditSink : IAttestorAuditSink +{ + public List Records { get; } = new(); + + public Task 
WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default) + { + Records.Add(record); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs b/src/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs new file mode 100644 index 00000000..a577ee58 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs @@ -0,0 +1,234 @@ +using System.Collections.Generic; +using System.Security.Claims; +using System.Security.Cryptography.X509Certificates; +using Serilog; +using Serilog.Events; +using StellaOps.Attestor.Core.Options; +using StellaOps.Attestor.Core.Submission; +using StellaOps.Attestor.Infrastructure; +using StellaOps.Configuration; +using StellaOps.Auth.ServerIntegration; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using OpenTelemetry.Metrics; +using StellaOps.Attestor.Core.Observability; +using StellaOps.Attestor.Core.Verification; +using Microsoft.AspNetCore.Server.Kestrel.Https; + +const string ConfigurationSection = "attestor"; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "ATTESTOR_"; + options.BindingSection = ConfigurationSection; +}); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + loggerConfiguration + .MinimumLevel.Information() + .MinimumLevel.Override("Microsoft", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +var attestorOptions = builder.Configuration.BindOptions(ConfigurationSection); + +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton(attestorOptions); + +builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(ConfigurationSection)) + .ValidateOnStart(); + +builder.Services.AddProblemDetails(); +builder.Services.AddEndpointsApiExplorer(); +builder.Services.AddAttestorInfrastructure(); +builder.Services.AddHttpContextAccessor(); +builder.Services.AddHealthChecks() + .AddCheck("self", () => HealthCheckResult.Healthy()); + +builder.Services.AddOpenTelemetry() + .WithMetrics(metricsBuilder => + { + metricsBuilder.AddMeter(AttestorMetrics.MeterName); + metricsBuilder.AddAspNetCoreInstrumentation(); + metricsBuilder.AddRuntimeInstrumentation(); + }); + +if (attestorOptions.Security.Authority is { Issuer: not null } authority) +{ + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = authority.Issuer!; + resourceOptions.RequireHttpsMetadata = authority.RequireHttpsMetadata; + if (!string.IsNullOrWhiteSpace(authority.JwksUrl)) + { + resourceOptions.MetadataAddress = authority.JwksUrl; + } + + foreach (var audience in authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + + foreach (var scope in authority.RequiredScopes) + { + resourceOptions.RequiredScopes.Add(scope); + } + }); + + builder.Services.AddAuthorization(options => + { + options.AddPolicy("attestor:write", policy => + { + policy.RequireAuthenticatedUser(); + policy.RequireClaim("scope", authority.RequiredScopes); + }); + }); +} +else +{ + builder.Services.AddAuthorization(); +} + +builder.WebHost.ConfigureKestrel(kestrel => +{ + kestrel.ConfigureHttpsDefaults(https => + { + if (attestorOptions.Security.Mtls.RequireClientCertificate) + { + https.ClientCertificateMode = 
ClientCertificateMode.RequireCertificate; + } + }); +}); + +var app = builder.Build(); + +app.UseSerilogRequestLogging(); + +app.UseExceptionHandler(static handler => +{ + handler.Run(async context => + { + var result = Results.Problem(statusCode: StatusCodes.Status500InternalServerError); + await result.ExecuteAsync(context); + }); +}); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/health/ready"); +app.MapHealthChecks("/health/live"); + +app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, HttpContext httpContext, IAttestorSubmissionService submissionService, CancellationToken cancellationToken) => +{ + var certificate = httpContext.Connection.ClientCertificate; + if (certificate is null) + { + return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required"); + } + + var user = httpContext.User; + if (user?.Identity is not { IsAuthenticated: true }) + { + return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required"); + } + + var submissionContext = BuildSubmissionContext(user, certificate); + + try + { + var result = await submissionService.SubmitAsync(request, submissionContext, cancellationToken).ConfigureAwait(false); + return Results.Ok(result); + } + catch (AttestorValidationException validationEx) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: validationEx.Message, extensions: new Dictionary + { + ["code"] = validationEx.Code + }); + } +}) +.RequireAuthorization("attestor:write"); + +app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => +{ + var entry = await verificationService.GetEntryAsync(uuid, refresh is true, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + return Results.NotFound(); + } + + return Results.Ok(new + { + uuid = entry.RekorUuid, + index = entry.Index, + proof = entry.Proof is null ? null : new + { + checkpoint = entry.Proof.Checkpoint is null ? null : new + { + origin = entry.Proof.Checkpoint.Origin, + size = entry.Proof.Checkpoint.Size, + rootHash = entry.Proof.Checkpoint.RootHash, + timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O") + }, + inclusion = entry.Proof.Inclusion is null ? null : new + { + leafHash = entry.Proof.Inclusion.LeafHash, + path = entry.Proof.Inclusion.Path + } + }, + logURL = entry.Log.Url, + status = entry.Status, + artifact = new + { + sha256 = entry.Artifact.Sha256, + kind = entry.Artifact.Kind, + imageDigest = entry.Artifact.ImageDigest, + subjectUri = entry.Artifact.SubjectUri + } + }); +}).RequireAuthorization("attestor:write"); + +app.MapPost("/api/v1/rekor/verify", async (AttestorVerificationRequest request, IAttestorVerificationService verificationService, CancellationToken cancellationToken) => +{ + try + { + var result = await verificationService.VerifyAsync(request, cancellationToken).ConfigureAwait(false); + return Results.Ok(result); + } + catch (AttestorVerificationException ex) + { + return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: ex.Message, extensions: new Dictionary + { + ["code"] = ex.Code + }); + } +}).RequireAuthorization("attestor:write"); + +app.Run(); + +static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certificate2 certificate) +{ + var subject = user.FindFirst("sub")?.Value ?? certificate.Subject; + var audience = user.FindFirst("aud")?.Value ?? 
string.Empty; + var clientId = user.FindFirst("client_id")?.Value; + var tenant = user.FindFirst("tenant")?.Value; + + return new SubmissionContext + { + CallerSubject = subject, + CallerAudience = audience, + CallerClientId = clientId, + CallerTenant = tenant, + ClientCertificate = certificate, + MtlsThumbprint = certificate.Thumbprint + }; +} diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj b/src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj new file mode 100644 index 00000000..abf6c462 --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj @@ -0,0 +1,30 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.sln b/src/StellaOps.Attestor/StellaOps.Attestor.sln new file mode 100644 index 00000000..ddd42f0b --- /dev/null +++ b/src/StellaOps.Attestor/StellaOps.Attestor.sln @@ -0,0 +1,118 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Core", "StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj", "{C0FE77EB-933C-4E47-8195-758AB049157A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastructure", "StellaOps.Attestor.Infrastructure\StellaOps.Attestor.Infrastructure.csproj", "{996D74F8-8683-45FA-90AB-DA7ACE78D4B3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{B238B098-32B1-4875-99A7-393A63AC3CCF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\StellaOps.Configuration\StellaOps.Configuration.csproj", "{988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{82EFA477-307D-4B47-A4CF-1627F076D60A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{21327A4F-2586-49F8-9D4A-3840DE64C48E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|x64.ActiveCfg = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|x64.Build.0 = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|x86.ActiveCfg = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Debug|x86.Build.0 = Debug|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|Any 
CPU.Build.0 = Release|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|x64.ActiveCfg = Release|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|x64.Build.0 = Release|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|x86.ActiveCfg = Release|Any CPU + {C0FE77EB-933C-4E47-8195-758AB049157A}.Release|x86.Build.0 = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|x64.ActiveCfg = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|x64.Build.0 = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|x86.ActiveCfg = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Debug|x86.Build.0 = Debug|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|Any CPU.Build.0 = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|x64.ActiveCfg = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|x64.Build.0 = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|x86.ActiveCfg = Release|Any CPU + {996D74F8-8683-45FA-90AB-DA7ACE78D4B3}.Release|x86.Build.0 = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|x64.ActiveCfg = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|x64.Build.0 = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|x86.ActiveCfg = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Debug|x86.Build.0 = Debug|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|Any CPU.Build.0 = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|x64.ActiveCfg = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|x64.Build.0 = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|x86.ActiveCfg = Release|Any CPU + {B238B098-32B1-4875-99A7-393A63AC3CCF}.Release|x86.Build.0 = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|x64.ActiveCfg = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|x64.Build.0 = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|x86.ActiveCfg = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Debug|x86.Build.0 = Debug|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|Any CPU.Build.0 = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|x64.ActiveCfg = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|x64.Build.0 = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|x86.ActiveCfg = Release|Any CPU + {988E2AC7-50E0-4845-B1C2-BA4931F2FFD7}.Release|x86.Build.0 = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|x64.ActiveCfg = Debug|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|x64.Build.0 = Debug|Any CPU + 
{82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|x86.ActiveCfg = Debug|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Debug|x86.Build.0 = Debug|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|Any CPU.Build.0 = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|x64.ActiveCfg = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|x64.Build.0 = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|x86.ActiveCfg = Release|Any CPU + {82EFA477-307D-4B47-A4CF-1627F076D60A}.Release|x86.Build.0 = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|x64.ActiveCfg = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|x64.Build.0 = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|x86.ActiveCfg = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Debug|x86.Build.0 = Debug|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|Any CPU.Build.0 = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|x64.ActiveCfg = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|x64.Build.0 = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|x86.ActiveCfg = Release|Any CPU + {21327A4F-2586-49F8-9D4A-3840DE64C48E}.Release|x86.Build.0 = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|x64.ActiveCfg = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|x64.Build.0 = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|x86.ActiveCfg = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Debug|x86.Build.0 = Debug|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|Any CPU.Build.0 = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x64.ActiveCfg = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x64.Build.0 = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.ActiveCfg = Release|Any CPU + {4B7592CD-D67C-4F4D-82FE-DF99BAAC4275}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Attestor/TASKS.md b/src/StellaOps.Attestor/TASKS.md new file mode 100644 index 00000000..6dc39983 --- /dev/null +++ b/src/StellaOps.Attestor/TASKS.md @@ -0,0 +1,11 @@ +# Attestor Guild Task Board (UTC 2025-10-19) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTESTOR-API-11-201 | DONE (2025-10-19) | Attestor Guild | — | `/rekor/entries` submission pipeline with dedupe, proof acquisition, and persistence. | ✅ `POST /api/v1/rekor/entries` enforces mTLS + Authority OpTok, validates DSSE bundles, and handles dual-log preferences.
✅ Redis/Mongo idempotency returns existing UUID on duplicate `bundleSha256` without re-submitting to Rekor.
✅ Rekor driver fetches inclusion proofs (or schedules async fetch) and persists canonical entry/proof metadata.
✅ Optional archive path stores DSSE/proof bundles to MinIO/S3; integration tests cover success/pending/error flows. | +| ATTESTOR-VERIFY-11-202 | DONE (2025-10-19) | Attestor Guild | — | `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. | ✅ `GET /api/v1/rekor/entries/{uuid}` surfaces cached entries with optional backend refresh and handles not-found/refresh flows.
✅ `POST /api/v1/rekor/verify` accepts UUID, bundle, or artifact hash inputs; verifies DSSE signatures, Merkle proofs, and checkpoint anchors.
✅ Verification output returns `{ok, uuid, index, logURL, checkedAt}` with failure diagnostics for invalid proofs.
✅ Unit/integration tests exercise cache hits, backend refresh, invalid bundle/proof scenarios, and checkpoint trust anchor enforcement. | +| ATTESTOR-OBS-11-203 | DONE (2025-10-19) | Attestor Guild | — | Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. | ✅ Structured logs, metrics, and optional traces record submission latency, proof fetch outcomes, verification results, and Rekor error buckets with correlation IDs.
✅ mTLS enforcement hardened (peer allowlist, SAN checks, rate limiting) and documented; TLS settings audited for modern ciphers only.
✅ Alerting/dashboard pack covers error rates, proof backlog, Redis/Mongo health, and archive job failures; runbook updated.
✅ Archive workflow includes retention policy jobs, failure alerts, and periodic verification of stored bundles and proofs. | + +> Remark (2025-10-19): Wave 0 prerequisites reviewed (none outstanding); Attestor Guild tasks moved to DOING for execution. +> Remark (2025-10-19): `/rekor/entries` submission service implemented with Mongo/Redis persistence, optional S3 archival, Rekor HTTP client, and OpenTelemetry metrics; verification APIs (`/rekor/entries/{uuid}`, `/rekor/verify`) added with proof refresh and canonical hash checks. Remaining: integrate real Rekor endpoints in staging and expand failure-mode tests. +> Remark (2025-10-19): Added Rekor mock client + integration harness to unblock attestor verification testing without external connectivity. Follow-up tasks to wire staging Rekor and record retry/error behavior still pending. diff --git a/src/StellaOps.Scanner.Core/Security/DpopProofValidator.cs b/src/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs similarity index 83% rename from src/StellaOps.Scanner.Core/Security/DpopProofValidator.cs rename to src/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs index 6b236be7..a2999c2f 100644 --- a/src/StellaOps.Scanner.Core/Security/DpopProofValidator.cs +++ b/src/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs @@ -1,14 +1,15 @@ +using System.IdentityModel.Tokens.Jwt; using System.Linq; using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Microsoft.IdentityModel.Tokens; -using System.IdentityModel.Tokens.Jwt; -namespace StellaOps.Scanner.Core.Security; +namespace StellaOps.Auth.Security.Dpop; +/// +/// Validates DPoP proofs following RFC 9449. +/// public sealed class DpopProofValidator : IDpopProofValidator { private static readonly string ProofType = "dpop+jwt"; @@ -24,10 +25,7 @@ public sealed class DpopProofValidator : IDpopProofValidator TimeProvider? timeProvider = null, ILogger? logger = null) { - if (options is null) - { - throw new ArgumentNullException(nameof(options)); - } + ArgumentNullException.ThrowIfNull(options); var cloned = options.Value ?? throw new InvalidOperationException("DPoP options must be provided."); cloned.Validate(); @@ -138,11 +136,13 @@ public sealed class DpopProofValidator : IDpopProofValidator return DpopValidationResult.Failure("invalid_token", "DPoP proof issued in the future."); } - if (now - issuedAt > options.ProofLifetime + options.AllowedClockSkew) + if (now - issuedAt > options.GetMaximumAge()) { return DpopValidationResult.Failure("invalid_token", "DPoP proof expired."); } + string? 
actualNonce = null; + if (nonce is not null) { if (!payloadElement.TryGetProperty("nonce", out var nonceElement) || nonceElement.ValueKind != JsonValueKind.String) @@ -150,11 +150,17 @@ public sealed class DpopProofValidator : IDpopProofValidator return DpopValidationResult.Failure("invalid_token", "DPoP proof missing nonce claim."); } - if (!string.Equals(nonceElement.GetString(), nonce, StringComparison.Ordinal)) + actualNonce = nonceElement.GetString(); + + if (!string.Equals(actualNonce, nonce, StringComparison.Ordinal)) { return DpopValidationResult.Failure("invalid_token", "DPoP nonce mismatch."); } } + else if (payloadElement.TryGetProperty("nonce", out var nonceElement) && nonceElement.ValueKind == JsonValueKind.String) + { + actualNonce = nonceElement.GetString(); + } var jwtId = jtiElement.GetString()!; @@ -185,7 +191,17 @@ public sealed class DpopProofValidator : IDpopProofValidator return DpopValidationResult.Failure("replay", "DPoP proof already used."); } - return DpopValidationResult.Success(jwk, jwtId, issuedAt); + return DpopValidationResult.Success(jwk, jwtId, issuedAt, actualNonce); + } + + private static string NormalizeHtu(Uri uri) + { + var builder = new UriBuilder(uri) + { + Fragment = null, + Query = null + }; + return builder.Uri.ToString(); } private static bool TryDecodeSegment(string token, int segmentIndex, out JsonElement element, out string? error) @@ -200,16 +216,16 @@ public sealed class DpopProofValidator : IDpopProofValidator return false; } - if (segmentIndex < 0 || segmentIndex > 1) + if (segmentIndex < 0 || segmentIndex > 2) { - error = "Segment index must be 0 or 1."; + error = "Segment index out of range."; return false; } try { - var jsonBytes = Base64UrlEncoder.DecodeBytes(segments[segmentIndex]); - using var document = JsonDocument.Parse(jsonBytes); + var json = Base64UrlEncoder.Decode(segments[segmentIndex]); + using var document = JsonDocument.Parse(json); element = document.RootElement.Clone(); return true; } @@ -220,29 +236,23 @@ public sealed class DpopProofValidator : IDpopProofValidator } } - private static string NormalizeHtu(Uri uri) + private static class NullReplayCache { - var builder = new UriBuilder(uri) - { - Fragment = string.Empty - }; + public static readonly IDpopReplayCache Instance = new Noop(); - builder.Host = builder.Host.ToLowerInvariant(); - builder.Scheme = builder.Scheme.ToLowerInvariant(); - - if ((builder.Scheme == "http" && builder.Port == 80) || (builder.Scheme == "https" && builder.Port == 443)) + private sealed class Noop : IDpopReplayCache { - builder.Port = -1; + public ValueTask TryStoreAsync(string jwtId, DateTimeOffset expiresAt, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(jwtId); + return ValueTask.FromResult(true); + } } - - return builder.Uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped); - } - - private sealed class NullReplayCache : IDpopReplayCache - { - public static NullReplayCache Instance { get; } = new(); - - public ValueTask TryStoreAsync(string jwtId, DateTimeOffset expiresAt, CancellationToken cancellationToken = default) - => ValueTask.FromResult(true); } } + +file static class DpopValidationOptionsExtensions +{ + public static TimeSpan GetMaximumAge(this DpopValidationOptions options) + => options.ProofLifetime + options.AllowedClockSkew; +} diff --git a/src/StellaOps.Scanner.Core/Security/DpopValidationOptions.cs b/src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs similarity index 73% 
diff --git a/src/StellaOps.Scanner.Core/Security/DpopValidationOptions.cs b/src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs
similarity index 73%
rename from src/StellaOps.Scanner.Core/Security/DpopValidationOptions.cs
rename to src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs
index 347885f2..9a8c8e8f 100644
--- a/src/StellaOps.Scanner.Core/Security/DpopValidationOptions.cs
+++ b/src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs
@@ -1,8 +1,12 @@
 using System.Collections.Immutable;
+using System.Collections.Generic;
 using System.Linq;
 
-namespace StellaOps.Scanner.Core.Security;
+namespace StellaOps.Auth.Security.Dpop;
 
+/// <summary>
+/// Configures acceptable algorithms and replay windows for DPoP proof validation.
+/// </summary>
 public sealed class DpopValidationOptions
 {
     private readonly HashSet<string> allowedAlgorithms = new(StringComparer.Ordinal);
@@ -13,14 +17,29 @@ public sealed class DpopValidationOptions
         allowedAlgorithms.Add("ES384");
     }
 
+    /// <summary>
+    /// Maximum age a proof is considered valid relative to the proof's iat claim.
+    /// </summary>
     public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2);
 
+    /// <summary>
+    /// Allowed clock skew when evaluating iat.
+    /// </summary>
     public TimeSpan AllowedClockSkew { get; set; } = TimeSpan.FromSeconds(30);
 
+    /// <summary>
+    /// Duration a successfully validated proof is tracked to prevent replay.
+    /// </summary>
    public TimeSpan ReplayWindow { get; set; } = TimeSpan.FromMinutes(5);
 
+    /// <summary>
+    /// Algorithms (JWA) permitted for DPoP proofs.
+    /// </summary>
     public ISet<string> AllowedAlgorithms => allowedAlgorithms;
 
+    /// <summary>
+    /// Normalised, upper-case representation of allowed algorithms.
+    /// </summary>
     public IReadOnlySet<string> NormalizedAlgorithms { get; private set; } = ImmutableHashSet<string>.Empty;
 
     public void Validate()
diff --git a/src/StellaOps.Scanner.Core/Security/DpopValidationResult.cs b/src/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs
similarity index 66%
rename from src/StellaOps.Scanner.Core/Security/DpopValidationResult.cs
rename to src/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs
index 02ae27fb..8f1c07c1 100644
--- a/src/StellaOps.Scanner.Core/Security/DpopValidationResult.cs
+++ b/src/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs
@@ -1,10 +1,13 @@
 using Microsoft.IdentityModel.Tokens;
 
-namespace StellaOps.Scanner.Core.Security;
+namespace StellaOps.Auth.Security.Dpop;
 
+/// <summary>
+/// Represents the outcome of DPoP proof validation.
+/// </summary>
 public sealed class DpopValidationResult
 {
-    private DpopValidationResult(bool success, string? errorCode, string? errorDescription, SecurityKey? key, string? jwtId, DateTimeOffset? issuedAt)
+    private DpopValidationResult(bool success, string? errorCode, string? errorDescription, SecurityKey? key, string? jwtId, DateTimeOffset? issuedAt, string? nonce)
     {
         IsValid = success;
         ErrorCode = errorCode;
@@ -12,6 +15,7 @@ public sealed class DpopValidationResult
         PublicKey = key;
         JwtId = jwtId;
         IssuedAt = issuedAt;
+        Nonce = nonce;
     }
 
     public bool IsValid { get; }
@@ -26,9 +30,11 @@ public sealed class DpopValidationResult
 
     public DateTimeOffset? IssuedAt { get; }
 
-    public static DpopValidationResult Success(SecurityKey key, string jwtId, DateTimeOffset issuedAt)
-        => new(true, null, null, key, jwtId, issuedAt);
+    public string? Nonce { get; }
+
+    public static DpopValidationResult Success(SecurityKey key, string jwtId, DateTimeOffset issuedAt, string?
nonce) + => new(true, null, null, key, jwtId, issuedAt, nonce); public static DpopValidationResult Failure(string code, string description) - => new(false, code, description, null, null, null); + => new(false, code, description, null, null, null, null); } diff --git a/src/StellaOps.Scanner.Core/Security/IDpopProofValidator.cs b/src/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs similarity index 67% rename from src/StellaOps.Scanner.Core/Security/IDpopProofValidator.cs rename to src/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs index d6c0a61e..61f96564 100644 --- a/src/StellaOps.Scanner.Core/Security/IDpopProofValidator.cs +++ b/src/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs @@ -1,7 +1,4 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Scanner.Core.Security; +namespace StellaOps.Auth.Security.Dpop; public interface IDpopProofValidator { diff --git a/src/StellaOps.Scanner.Core/Security/IDpopReplayCache.cs b/src/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs similarity index 62% rename from src/StellaOps.Scanner.Core/Security/IDpopReplayCache.cs rename to src/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs index 758d3933..9058f487 100644 --- a/src/StellaOps.Scanner.Core/Security/IDpopReplayCache.cs +++ b/src/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs @@ -1,7 +1,4 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Scanner.Core.Security; +namespace StellaOps.Auth.Security.Dpop; public interface IDpopReplayCache { diff --git a/src/StellaOps.Scanner.Core/Security/InMemoryDpopReplayCache.cs b/src/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs similarity index 91% rename from src/StellaOps.Scanner.Core/Security/InMemoryDpopReplayCache.cs rename to src/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs index e8cc7ff4..d76f78a5 100644 --- a/src/StellaOps.Scanner.Core/Security/InMemoryDpopReplayCache.cs +++ b/src/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs @@ -1,9 +1,10 @@ using System.Collections.Concurrent; -using System.Threading; -using System.Threading.Tasks; -namespace StellaOps.Scanner.Core.Security; +namespace StellaOps.Auth.Security.Dpop; +/// +/// In-memory replay cache intended for single-process deployments or tests. +/// public sealed class InMemoryDpopReplayCache : IDpopReplayCache { private readonly ConcurrentDictionary entries = new(StringComparer.Ordinal); diff --git a/src/StellaOps.Auth.Security/README.md b/src/StellaOps.Auth.Security/README.md new file mode 100644 index 00000000..501388ab --- /dev/null +++ b/src/StellaOps.Auth.Security/README.md @@ -0,0 +1,3 @@ +# StellaOps.Auth.Security + +Shared sender-constraint helpers (DPoP proof validation, replay caches, future mTLS utilities) used by Authority, Scanner, Signer, and other StellaOps services. This package centralises primitives so services remain deterministic while honouring proof-of-possession guarantees. diff --git a/src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj b/src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj new file mode 100644 index 00000000..bb6f8314 --- /dev/null +++ b/src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj @@ -0,0 +1,37 @@ + + + net10.0 + preview + enable + enable + true + + + Sender-constrained authentication primitives (DPoP, mTLS) shared across StellaOps services. 
+ StellaOps.Auth.Security + StellaOps + StellaOps + stellaops;dpop;mtls;oauth2;security + AGPL-3.0-or-later + https://stella-ops.org + https://git.stella-ops.org/stella-ops.org/git.stella-ops.org + git + true + true + true + snupkg + README.md + 1.0.0-preview.1 + + + + + + + + + + + + + diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs index a0fb6f8d..ec441ebc 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs @@ -1,6 +1,7 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; +using MongoDB.Driver; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Authority.Plugin.Standard.Storage; using StellaOps.Authority.Storage.Mongo.Documents; @@ -46,19 +47,19 @@ public class StandardClientProvisioningStoreTests { public Dictionary Documents { get; } = new(StringComparer.OrdinalIgnoreCase); - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { Documents.TryGetValue(clientId, out var document); return ValueTask.FromResult(document); } - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { Documents[document.ClientId] = document; return ValueTask.CompletedTask; } - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { var removed = Documents.Remove(clientId); return ValueTask.FromResult(removed); @@ -69,16 +70,16 @@ public class StandardClientProvisioningStoreTests { public List Upserts { get; } = new(); - public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken) + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { Upserts.Add(document); return ValueTask.CompletedTask; } - public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken) + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(true); - public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) => ValueTask.FromResult>(Array.Empty()); } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs index 5c95104c..edaad0a9 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs @@ -319,13 +319,13 @@ internal sealed class CapturingLoggerProvider : ILoggerProvider internal sealed class StubRevocationStore : IAuthorityRevocationStore { - public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken) + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.CompletedTask; - public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken) + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(false); - public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult>(Array.Empty()); } @@ -333,18 +333,18 @@ internal sealed class InMemoryClientStore : IAuthorityClientStore { private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { clients.TryGetValue(clientId, out var document); return ValueTask.FromResult(document); } - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { clients[document.ClientId] = document; return ValueTask.CompletedTask; } - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) => ValueTask.FromResult(clients.Remove(clientId)); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj index e80ccc23..7fac3274 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj @@ -9,4 +9,7 @@ + + + diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj index e122e8ae..60dcb2b4 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs index 5a8ff2e1..6765cae6 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs @@ -60,6 +60,21 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore document.Properties[key] = value; } + if (registration.Properties.TryGetValue(AuthorityClientMetadataKeys.SenderConstraint, out var senderConstraintRaw)) + { + var normalizedConstraint = NormalizeSenderConstraint(senderConstraintRaw); + if (normalizedConstraint is not null) + { + document.SenderConstraint = normalizedConstraint; + document.Properties[AuthorityClientMetadataKeys.SenderConstraint] = normalizedConstraint; + } + else + { + document.SenderConstraint = null; + document.Properties.Remove(AuthorityClientMetadataKeys.SenderConstraint); + } + } + await clientStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false); await revocationStore.RemoveAsync("client", registration.ClientId, cancellationToken).ConfigureAwait(false); @@ -147,4 +162,20 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore return value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); } + + private static string? NormalizeSenderConstraint(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value.Trim() switch + { + { Length: 0 } => null, + var constraint when string.Equals(constraint, "dpop", StringComparison.OrdinalIgnoreCase) => "dpop", + var constraint when string.Equals(constraint, "mtls", StringComparison.OrdinalIgnoreCase) => "mtls", + _ => null + }; + } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs index 96e4d631..69e7660d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs @@ -9,4 +9,5 @@ public static class AuthorityClientMetadataKeys public const string AllowedScopes = "allowedScopes"; public const string RedirectUris = "redirectUris"; public const string PostLogoutRedirectUris = "postLogoutRedirectUris"; + public const string SenderConstraint = "senderConstraint"; } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs new file mode 100644 index 00000000..bcf7b4f6 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs @@ -0,0 +1,45 @@ +using MongoDB.Bson.Serialization.Attributes; +using System.Collections.Generic; + +namespace StellaOps.Authority.Storage.Mongo.Documents; + +/// +/// Captures certificate metadata associated with an mTLS-bound client. +/// +[BsonIgnoreExtraElements] +public sealed class AuthorityClientCertificateBinding +{ + [BsonElement("thumbprint")] + public string Thumbprint { get; set; } = string.Empty; + + [BsonElement("serialNumber")] + [BsonIgnoreIfNull] + public string? SerialNumber { get; set; } + + [BsonElement("subject")] + [BsonIgnoreIfNull] + public string? Subject { get; set; } + + [BsonElement("issuer")] + [BsonIgnoreIfNull] + public string? Issuer { get; set; } + + [BsonElement("notBefore")] + public DateTimeOffset? NotBefore { get; set; } + + [BsonElement("notAfter")] + public DateTimeOffset? NotAfter { get; set; } + + [BsonElement("subjectAlternativeNames")] + public List SubjectAlternativeNames { get; set; } = new(); + + [BsonElement("label")] + [BsonIgnoreIfNull] + public string? Label { get; set; } + + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow; + + [BsonElement("updatedAt")] + public DateTimeOffset UpdatedAt { get; set; } = DateTimeOffset.UtcNow; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs index 42b8699b..7a98d2cb 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs @@ -1,5 +1,6 @@ using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; +using System.Collections.Generic; namespace StellaOps.Authority.Storage.Mongo.Documents; @@ -50,6 +51,13 @@ public sealed class AuthorityClientDocument [BsonIgnoreIfNull] public string? Plugin { get; set; } + [BsonElement("senderConstraint")] + [BsonIgnoreIfNull] + public string? 
SenderConstraint { get; set; } + + [BsonElement("certificateBindings")] + public List CertificateBindings { get; set; } = new(); + [BsonElement("createdAt")] public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow; diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs index 9b48024d..6b007a78 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs @@ -7,6 +7,7 @@ using StellaOps.Authority.Storage.Mongo.Initialization; using StellaOps.Authority.Storage.Mongo.Migrations; using StellaOps.Authority.Storage.Mongo.Options; using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Authority.Storage.Mongo.Sessions; namespace StellaOps.Authority.Storage.Mongo.Extensions; @@ -56,6 +57,8 @@ public static class ServiceCollectionExtensions services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.AddScoped(); + services.AddSingleton(static sp => { var database = sp.GetRequiredService(); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs index 2f21c239..f63b7265 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs @@ -16,7 +16,13 @@ internal sealed class AuthorityClientCollectionInitializer : IAuthorityCollectio new CreateIndexOptions { Name = "client_id_unique", Unique = true }), new CreateIndexModel( Builders.IndexKeys.Ascending(c => c.Disabled), - new CreateIndexOptions { Name = "client_disabled" }) + new CreateIndexOptions { Name = "client_disabled" }), + new CreateIndexModel( + Builders.IndexKeys.Ascending(c => c.SenderConstraint), + new CreateIndexOptions { Name = "client_sender_constraint" }), + new CreateIndexModel( + Builders.IndexKeys.Ascending("certificateBindings.thumbprint"), + new CreateIndexOptions { Name = "client_cert_thumbprints" }) }; await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs new file mode 100644 index 00000000..9e27915c --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs @@ -0,0 +1,128 @@ +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Authority.Storage.Mongo.Options; + +namespace StellaOps.Authority.Storage.Mongo.Sessions; + +public interface IAuthorityMongoSessionAccessor : IAsyncDisposable +{ + ValueTask GetSessionAsync(CancellationToken cancellationToken = default); +} + +internal sealed class AuthorityMongoSessionAccessor : IAuthorityMongoSessionAccessor +{ + private readonly IMongoClient client; + private readonly AuthorityMongoOptions options; + private readonly object gate = new(); + private Task? 
sessionTask; + private IClientSessionHandle? session; + private bool disposed; + + public AuthorityMongoSessionAccessor( + IMongoClient client, + IOptions options) + { + this.client = client ?? throw new ArgumentNullException(nameof(client)); + this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + } + + public async ValueTask GetSessionAsync(CancellationToken cancellationToken = default) + { + ObjectDisposedException.ThrowIf(disposed, this); + + var existing = Volatile.Read(ref session); + if (existing is not null) + { + return existing; + } + + Task startTask; + + lock (gate) + { + if (session is { } cached) + { + return cached; + } + + sessionTask ??= StartSessionInternalAsync(cancellationToken); + startTask = sessionTask; + } + + try + { + var handle = await startTask.WaitAsync(cancellationToken).ConfigureAwait(false); + + if (session is null) + { + lock (gate) + { + if (session is null) + { + session = handle; + sessionTask = Task.FromResult(handle); + } + } + } + + return handle; + } + catch + { + lock (gate) + { + if (ReferenceEquals(sessionTask, startTask)) + { + sessionTask = null; + } + } + + throw; + } + } + + private async Task StartSessionInternalAsync(CancellationToken cancellationToken) + { + var sessionOptions = new ClientSessionOptions + { + CausalConsistency = true, + DefaultTransactionOptions = new TransactionOptions( + readPreference: ReadPreference.Primary, + readConcern: ReadConcern.Majority, + writeConcern: WriteConcern.WMajority.With(wTimeout: options.CommandTimeout)) + }; + + var handle = await client.StartSessionAsync(sessionOptions, cancellationToken).ConfigureAwait(false); + return handle; + } + + public ValueTask DisposeAsync() + { + if (disposed) + { + return ValueTask.CompletedTask; + } + + disposed = true; + + IClientSessionHandle? handle; + + lock (gate) + { + handle = session; + session = null; + sessionTask = null; + } + + if (handle is not null) + { + handle.Dispose(); + } + + GC.SuppressFinalize(this); + return ValueTask.CompletedTask; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj index 44d7ac06..6c973997 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj @@ -7,7 +7,7 @@ true - + diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs index 48c0629f..7f8d5c99 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs @@ -1,5 +1,7 @@ using System; using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; @@ -12,11 +14,19 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS public AuthorityBootstrapInviteStore(IMongoCollection collection) => this.collection = collection ?? 
throw new ArgumentNullException(nameof(collection)); - public async ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken) + public async ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); - await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + if (session is { }) + { + await collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + return document; } @@ -25,7 +35,8 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS string expectedType, DateTimeOffset now, string? reservedBy, - CancellationToken cancellationToken) + CancellationToken cancellationToken, + IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(token)) { @@ -33,8 +44,9 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS } var normalizedToken = token.Trim(); + var tokenFilter = Builders.Filter.Eq(i => i.Token, normalizedToken); var filter = Builders.Filter.And( - Builders.Filter.Eq(i => i.Token, normalizedToken), + tokenFilter, Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Pending)); var update = Builders.Update @@ -47,14 +59,31 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS ReturnDocument = ReturnDocument.After }; - var invite = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + AuthorityBootstrapInviteDocument? invite; + if (session is { }) + { + invite = await collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + } + else + { + invite = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + } if (invite is null) { - var existing = await collection - .Find(i => i.Token == normalizedToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + AuthorityBootstrapInviteDocument? 
existing; + if (session is { }) + { + existing = await collection.Find(session, tokenFilter) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + else + { + existing = await collection.Find(tokenFilter) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } if (existing is null) { @@ -76,60 +105,76 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS if (!string.Equals(invite.Type, expectedType, StringComparison.OrdinalIgnoreCase)) { - await ReleaseAsync(normalizedToken, cancellationToken).ConfigureAwait(false); + await ReleaseAsync(normalizedToken, cancellationToken, session).ConfigureAwait(false); return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, invite); } if (invite.ExpiresAt <= now) { - await MarkExpiredAsync(normalizedToken, cancellationToken).ConfigureAwait(false); + await MarkExpiredAsync(normalizedToken, cancellationToken, session).ConfigureAwait(false); return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Expired, invite); } return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Reserved, invite); } - public async ValueTask ReleaseAsync(string token, CancellationToken cancellationToken) + public async ValueTask ReleaseAsync(string token, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(token)) { return false; } - var result = await collection.UpdateOneAsync( - Builders.Filter.And( - Builders.Filter.Eq(i => i.Token, token.Trim()), - Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)), - Builders.Update - .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Pending) - .Set(i => i.ReservedAt, null) - .Set(i => i.ReservedBy, null), - cancellationToken: cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.And( + Builders.Filter.Eq(i => i.Token, token.Trim()), + Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)); + var update = Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Pending) + .Set(i => i.ReservedAt, null) + .Set(i => i.ReservedBy, null); + + UpdateResult result; + if (session is { }) + { + result = await collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } return result.ModifiedCount > 0; } - public async ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken) + public async ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { if (string.IsNullOrWhiteSpace(token)) { return false; } - var result = await collection.UpdateOneAsync( - Builders.Filter.And( - Builders.Filter.Eq(i => i.Token, token.Trim()), - Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)), - Builders.Update - .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Consumed) - .Set(i => i.ConsumedAt, consumedAt) - .Set(i => i.ConsumedBy, consumedBy), - cancellationToken: cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.And( + Builders.Filter.Eq(i => i.Token, token.Trim()), + Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)); + var update = Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Consumed) + .Set(i => i.ConsumedAt, consumedAt) + .Set(i => i.ConsumedBy, consumedBy); + + UpdateResult result; + if (session is { }) + { + result = await collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } return result.ModifiedCount > 0; } - public async ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken) + public async ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken, IClientSessionHandle? session = null) { var filter = Builders.Filter.And( Builders.Filter.Lte(i => i.ExpiresAt, now), @@ -142,25 +187,49 @@ internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteS .Set(i => i.ReservedAt, null) .Set(i => i.ReservedBy, null); - var expired = await collection.Find(filter) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); + List expired; + if (session is { }) + { + expired = await collection.Find(session, filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + } + else + { + expired = await collection.Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + } if (expired.Count == 0) { return Array.Empty(); } - await collection.UpdateManyAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + if (session is { }) + { + await collection.UpdateManyAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.UpdateManyAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } return expired; } - private async Task MarkExpiredAsync(string token, CancellationToken cancellationToken) + private async Task MarkExpiredAsync(string token, CancellationToken cancellationToken, IClientSessionHandle? 
session) { - await collection.UpdateOneAsync( - Builders.Filter.Eq(i => i.Token, token), - Builders.Update.Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired), - cancellationToken: cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(i => i.Token, token); + var update = Builders.Update.Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired); + + if (session is { }) + { + await collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs index b7f0bc2e..fcc320b8 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs @@ -1,5 +1,7 @@ using Microsoft.Extensions.Logging; using MongoDB.Driver; +using System.Threading; +using System.Threading.Tasks; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -20,7 +22,7 @@ internal sealed class AuthorityClientStore : IAuthorityClientStore this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + public async ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(clientId)) { @@ -28,12 +30,15 @@ internal sealed class AuthorityClientStore : IAuthorityClientStore } var id = clientId.Trim(); - return await collection.Find(c => c.ClientId == id) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(c => c.ClientId, id); + var cursor = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await cursor.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) + public async ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); @@ -42,7 +47,15 @@ internal sealed class AuthorityClientStore : IAuthorityClientStore var filter = Builders.Filter.Eq(c => c.ClientId, document.ClientId); var options = new ReplaceOptions { IsUpsert = true }; - var result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + ReplaceOneResult result; + if (session is { }) + { + result = await collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } if (result.UpsertedId is not null) { @@ -50,7 +63,7 @@ internal sealed class AuthorityClientStore : IAuthorityClientStore } } - public async ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken) + public async ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { if (string.IsNullOrWhiteSpace(clientId)) { @@ -58,7 +71,18 @@ internal sealed class AuthorityClientStore : IAuthorityClientStore } var id = clientId.Trim(); - var result = await collection.DeleteOneAsync(c => c.ClientId == id, cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(c => c.ClientId, id); + + DeleteResult result; + if (session is { }) + { + result = await collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + } + return result.DeletedCount > 0; } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs index 48442a6d..832aa9bf 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs @@ -1,5 +1,9 @@ using Microsoft.Extensions.Logging; using MongoDB.Driver; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -17,11 +21,19 @@ internal sealed class AuthorityLoginAttemptStore : IAuthorityLoginAttemptStore this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask InsertAsync(AuthorityLoginAttemptDocument document, CancellationToken cancellationToken) + public async ValueTask InsertAsync(AuthorityLoginAttemptDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); - await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + if (session is { }) + { + await collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + logger.LogDebug( "Recorded authority audit event {EventType} for subject '{SubjectId}' with outcome {Outcome}.", document.EventType, @@ -29,7 +41,7 @@ internal sealed class AuthorityLoginAttemptStore : IAuthorityLoginAttemptStore document.Outcome); } - public async ValueTask> ListRecentAsync(string subjectId, int limit, CancellationToken cancellationToken) + public async ValueTask> ListRecentAsync(string subjectId, int limit, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { if (string.IsNullOrWhiteSpace(subjectId) || limit <= 0) { @@ -38,14 +50,22 @@ internal sealed class AuthorityLoginAttemptStore : IAuthorityLoginAttemptStore var normalized = subjectId.Trim(); - var cursor = await collection.FindAsync( - Builders.Filter.Eq(a => a.SubjectId, normalized), - new FindOptions - { - Sort = Builders.Sort.Descending(a => a.OccurredAt), - Limit = limit - }, - cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(a => a.SubjectId, normalized); + var options = new FindOptions + { + Sort = Builders.Sort.Descending(a => a.OccurredAt), + Limit = limit + }; + + IAsyncCursor cursor; + if (session is { }) + { + cursor = await collection.FindAsync(session, filter, options, cancellationToken).ConfigureAwait(false); + } + else + { + cursor = await collection.FindAsync(filter, options, cancellationToken).ConfigureAwait(false); + } return await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs index 081c0a33..554607d4 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs @@ -22,10 +22,14 @@ internal sealed class AuthorityRevocationExportStateStore : IAuthorityRevocation this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask GetAsync(CancellationToken cancellationToken) + public async ValueTask GetAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) { var filter = Builders.Filter.Eq(d => d.Id, StateId); - return await collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } public async ValueTask UpdateAsync( @@ -33,7 +37,8 @@ internal sealed class AuthorityRevocationExportStateStore : IAuthorityRevocation long newSequence, string bundleId, DateTimeOffset issuedAt, - CancellationToken cancellationToken) + CancellationToken cancellationToken, + IClientSessionHandle? session = null) { if (newSequence <= 0) { @@ -66,7 +71,16 @@ internal sealed class AuthorityRevocationExportStateStore : IAuthorityRevocation try { - var result = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + AuthorityRevocationExportStateDocument? 
result; + if (session is { }) + { + result = await collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + } + if (result is null) { throw new InvalidOperationException("Revocation export state update conflict."); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs index 1e8d47ea..37d751f7 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs @@ -22,7 +22,7 @@ internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken) + public async ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); @@ -48,10 +48,10 @@ internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore var now = DateTimeOffset.UtcNow; document.UpdatedAt = now; - var existing = await collection - .Find(filter) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + var existing = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); if (existing is null) { @@ -63,11 +63,19 @@ internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore document.CreatedAt = existing.CreatedAt; } - await collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + var options = new ReplaceOptions { IsUpsert = true }; + if (session is { }) + { + await collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } logger.LogDebug("Upserted Authority revocation entry {Category}:{RevocationId}.", document.Category, document.RevocationId); } - public async ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken) + public async ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { if (string.IsNullOrWhiteSpace(category) || string.IsNullOrWhiteSpace(revocationId)) { @@ -78,7 +86,15 @@ internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore Builders.Filter.Eq(d => d.Category, category.Trim()), Builders.Filter.Eq(d => d.RevocationId, revocationId.Trim())); - var result = await collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + DeleteResult result; + if (session is { }) + { + result = await collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.DeleteOneAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false); + } if (result.DeletedCount > 0) { logger.LogInformation("Removed Authority revocation entry {Category}:{RevocationId}.", category, revocationId); @@ -88,14 +104,17 @@ internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore return false; } - public async ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + public async ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) { var filter = Builders.Filter.Or( Builders.Filter.Eq(d => d.ExpiresAt, null), Builders.Filter.Gt(d => d.ExpiresAt, asOf)); - var documents = await collection - .Find(filter) + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + var documents = await query .Sort(Builders.Sort.Ascending(d => d.Category).Ascending(d => d.RevocationId)) .ToListAsync(cancellationToken) .ConfigureAwait(false); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs index c8f52f38..e574a815 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs @@ -1,5 +1,8 @@ using Microsoft.Extensions.Logging; using MongoDB.Driver; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -20,7 +23,7 @@ internal sealed class AuthorityScopeStore : IAuthorityScopeStore this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask FindByNameAsync(string name, CancellationToken cancellationToken) + public async ValueTask FindByNameAsync(string name, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(name)) { @@ -28,18 +31,30 @@ internal sealed class AuthorityScopeStore : IAuthorityScopeStore } var normalized = name.Trim(); - return await collection.Find(s => s.Name == normalized) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(s => s.Name, normalized); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask> ListAsync(CancellationToken cancellationToken) + public async ValueTask> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { - var cursor = await collection.FindAsync(FilterDefinition.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); + IAsyncCursor cursor; + if (session is { }) + { + cursor = await collection.FindAsync(session, FilterDefinition.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + cursor = await collection.FindAsync(FilterDefinition.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); + } + return await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask UpsertAsync(AuthorityScopeDocument document, CancellationToken cancellationToken) + public async ValueTask UpsertAsync(AuthorityScopeDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); @@ -48,14 +63,23 @@ internal sealed class AuthorityScopeStore : IAuthorityScopeStore var filter = Builders.Filter.Eq(s => s.Name, document.Name); var options = new ReplaceOptions { IsUpsert = true }; - var result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + ReplaceOneResult result; + if (session is { }) + { + result = await collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + if (result.UpsertedId is not null) { logger.LogInformation("Inserted Authority scope {ScopeName}.", document.Name); } } - public async ValueTask DeleteByNameAsync(string name, CancellationToken cancellationToken) + public async ValueTask DeleteByNameAsync(string name, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(name)) { @@ -63,7 +87,18 @@ internal sealed class AuthorityScopeStore : IAuthorityScopeStore } var normalized = name.Trim(); - var result = await collection.DeleteOneAsync(s => s.Name == normalized, cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(s => s.Name, normalized); + + DeleteResult result; + if (session is { }) + { + result = await collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.DeleteOneAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false); + } + return result.DeletedCount > 0; } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs index da2c4477..11830324 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs @@ -1,10 +1,12 @@ using System; using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.Logging; using MongoDB.Bson; using MongoDB.Driver; -using System.Linq; -using System.Globalization; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -22,15 +24,23 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } - public async ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken) + public async ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); - await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + if (session is { }) + { + await collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + logger.LogDebug("Inserted Authority token {TokenId}.", document.TokenId); } - public async ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken) + public async ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(tokenId)) { @@ -38,12 +48,15 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore } var id = tokenId.Trim(); - return await collection.Find(t => t.TokenId == id) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(t => t.TokenId, id); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken) + public async ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(referenceId)) { @@ -51,9 +64,12 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore } var id = referenceId.Trim(); - return await collection.Find(t => t.ReferenceId == id) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(t => t.ReferenceId, id); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } public async ValueTask UpdateStatusAsync( @@ -63,7 +79,8 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, - CancellationToken cancellationToken) + CancellationToken cancellationToken, + IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(tokenId)) { @@ -82,16 +99,29 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore .Set(t => t.RevokedReasonDescription, reasonDescription) .Set(t => t.RevokedMetadata, metadata is null ? 
null : new Dictionary(metadata, StringComparer.OrdinalIgnoreCase)); - var result = await collection.UpdateOneAsync( - Builders.Filter.Eq(t => t.TokenId, tokenId.Trim()), - update, - cancellationToken: cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(t => t.TokenId, tokenId.Trim()); + + UpdateResult result; + if (session is { }) + { + result = await collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } logger.LogDebug("Updated token {TokenId} status to {Status} (matched {Matched}).", tokenId, status, result.MatchedCount); } - public async ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken) + public async ValueTask RecordUsageAsync( + string tokenId, + string? remoteAddress, + string? userAgent, + DateTimeOffset observedAt, + CancellationToken cancellationToken, + IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(tokenId)) { @@ -104,10 +134,11 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore } var id = tokenId.Trim(); - var token = await collection - .Find(t => t.TokenId == id) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(t => t.TokenId, id); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + var token = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); if (token is null) { @@ -147,10 +178,14 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore } var update = Builders.Update.Set(t => t.Devices, token.Devices); - await collection.UpdateOneAsync( - Builders.Filter.Eq(t => t.TokenId, id), - update, - cancellationToken: cancellationToken).ConfigureAwait(false); + if (session is { }) + { + await collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } return new TokenUsageUpdateResult(suspicious ? TokenUsageUpdateStatus.SuspectedReplay : TokenUsageUpdateStatus.Recorded, normalizedAddress, normalizedAgent); } @@ -170,14 +205,22 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore }; } - public async ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken) + public async ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { var filter = Builders.Filter.And( Builders.Filter.Not( Builders.Filter.Eq(t => t.Status, "revoked")), Builders.Filter.Lt(t => t.ExpiresAt, threshold)); - var result = await collection.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false); + DeleteResult result; + if (session is { }) + { + result = await collection.DeleteManyAsync(session, filter, options: null, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.DeleteManyAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false); + } if (result.DeletedCount > 0) { logger.LogInformation("Deleted {Count} expired Authority tokens.", result.DeletedCount); @@ -186,7 +229,7 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore return result.DeletedCount; } - public async ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken) + public async ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? session = null) { var filter = Builders.Filter.Eq(t => t.Status, "revoked"); @@ -197,8 +240,11 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore Builders.Filter.Gt(t => t.RevokedAt, threshold)); } - var documents = await collection - .Find(filter) + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + var documents = await query .Sort(Builders.Sort.Ascending(t => t.RevokedAt).Ascending(t => t.TokenId)) .ToListAsync(cancellationToken) .ConfigureAwait(false); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs index 03242665..b4e2f679 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs @@ -1,5 +1,7 @@ using Microsoft.Extensions.Logging; using MongoDB.Driver; +using System.Threading; +using System.Threading.Tasks; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -20,20 +22,23 @@ internal sealed class AuthorityUserStore : IAuthorityUserStore this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async ValueTask FindBySubjectIdAsync(string subjectId, CancellationToken cancellationToken) + public async ValueTask FindBySubjectIdAsync(string subjectId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(subjectId)) { return null; } - return await collection - .Find(u => u.SubjectId == subjectId) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var normalized = subjectId.Trim(); + var filter = Builders.Filter.Eq(u => u.SubjectId, normalized); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask FindByNormalizedUsernameAsync(string normalizedUsername, CancellationToken cancellationToken) + public async ValueTask FindByNormalizedUsernameAsync(string normalizedUsername, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { if (string.IsNullOrWhiteSpace(normalizedUsername)) { @@ -42,13 +47,15 @@ internal sealed class AuthorityUserStore : IAuthorityUserStore var normalised = normalizedUsername.Trim(); - return await collection - .Find(u => u.NormalizedUsername == normalised) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); + var filter = Builders.Filter.Eq(u => u.NormalizedUsername, normalised); + var query = session is { } + ? collection.Find(session, filter) + : collection.Find(filter); + + return await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); } - public async ValueTask UpsertAsync(AuthorityUserDocument document, CancellationToken cancellationToken) + public async ValueTask UpsertAsync(AuthorityUserDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(document); @@ -57,9 +64,15 @@ internal sealed class AuthorityUserStore : IAuthorityUserStore var filter = Builders.Filter.Eq(u => u.SubjectId, document.SubjectId); var options = new ReplaceOptions { IsUpsert = true }; - var result = await collection - .ReplaceOneAsync(filter, document, options, cancellationToken) - .ConfigureAwait(false); + ReplaceOneResult result; + if (session is { }) + { + result = await collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } if (result.UpsertedId is not null) { @@ -67,7 +80,7 @@ internal sealed class AuthorityUserStore : IAuthorityUserStore } } - public async ValueTask DeleteBySubjectIdAsync(string subjectId, CancellationToken cancellationToken) + public async ValueTask DeleteBySubjectIdAsync(string subjectId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { if (string.IsNullOrWhiteSpace(subjectId)) { @@ -75,7 +88,18 @@ internal sealed class AuthorityUserStore : IAuthorityUserStore } var normalised = subjectId.Trim(); - var result = await collection.DeleteOneAsync(u => u.SubjectId == normalised, cancellationToken).ConfigureAwait(false); + var filter = Builders.Filter.Eq(u => u.SubjectId, normalised); + + DeleteResult result; + if (session is { }) + { + result = await collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); + } + else + { + result = await collection.DeleteOneAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false); + } + return result.DeletedCount > 0; } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs index c0a51bc5..3c04c551 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs @@ -1,18 +1,19 @@ +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityBootstrapInviteStore { - ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken); + ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken, IClientSessionHandle? 
session = null); - ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken); + ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask ReleaseAsync(string token, CancellationToken cancellationToken); + ValueTask ReleaseAsync(string token, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken); + ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken); + ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken, IClientSessionHandle? session = null); } public enum BootstrapInviteReservationStatus diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs index 67778ab8..1e4704bf 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs @@ -1,12 +1,13 @@ +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityClientStore { - ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken); + ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken); + ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken); + ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs index f97d884b..1e28f7eb 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs @@ -1,10 +1,11 @@ +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityLoginAttemptStore { - ValueTask InsertAsync(AuthorityLoginAttemptDocument document, CancellationToken cancellationToken); + ValueTask InsertAsync(AuthorityLoginAttemptDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask> ListRecentAsync(string subjectId, int limit, CancellationToken cancellationToken); + ValueTask> ListRecentAsync(string subjectId, int limit, CancellationToken cancellationToken, IClientSessionHandle? 
session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs index ff023777..38d3ba60 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs @@ -1,18 +1,20 @@ using System; using System.Threading; using System.Threading.Tasks; +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityRevocationExportStateStore { - ValueTask GetAsync(CancellationToken cancellationToken); + ValueTask GetAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null); ValueTask UpdateAsync( long expectedSequence, long newSequence, string bundleId, DateTimeOffset issuedAt, - CancellationToken cancellationToken); + CancellationToken cancellationToken, + IClientSessionHandle? session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs index 57eda334..8d3ffced 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs @@ -2,15 +2,16 @@ using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityRevocationStore { - ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken); + ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken); + ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken); + ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs index f51cdc87..fcb59c2e 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs @@ -1,14 +1,15 @@ +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityScopeStore { - ValueTask FindByNameAsync(string name, CancellationToken cancellationToken); + ValueTask FindByNameAsync(string name, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask> ListAsync(CancellationToken cancellationToken); + ValueTask> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null); - ValueTask UpsertAsync(AuthorityScopeDocument document, CancellationToken cancellationToken); + ValueTask UpsertAsync(AuthorityScopeDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask DeleteByNameAsync(string name, CancellationToken cancellationToken); + ValueTask DeleteByNameAsync(string name, CancellationToken cancellationToken, IClientSessionHandle? session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs index f4bb918a..83ecbf45 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs @@ -1,16 +1,17 @@ using System; using System.Collections.Generic; +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityTokenStore { - ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken); + ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken); + ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken); + ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null); ValueTask UpdateStatusAsync( string tokenId, @@ -19,13 +20,14 @@ public interface IAuthorityTokenStore string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, - CancellationToken cancellationToken); + CancellationToken cancellationToken, + IClientSessionHandle? session = null); - ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken); + ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken); + ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken); + ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? 
session = null); } public enum TokenUsageUpdateStatus diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs index 6f7cdf55..275c5911 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs @@ -1,14 +1,15 @@ +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; public interface IAuthorityUserStore { - ValueTask FindBySubjectIdAsync(string subjectId, CancellationToken cancellationToken); + ValueTask FindBySubjectIdAsync(string subjectId, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask FindByNormalizedUsernameAsync(string normalizedUsername, CancellationToken cancellationToken); + ValueTask FindByNormalizedUsernameAsync(string normalizedUsername, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask UpsertAsync(AuthorityUserDocument document, CancellationToken cancellationToken); + ValueTask UpsertAsync(AuthorityUserDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null); - ValueTask DeleteBySubjectIdAsync(string subjectId, CancellationToken cancellationToken); + ValueTask DeleteBySubjectIdAsync(string subjectId, CancellationToken cancellationToken, IClientSessionHandle? session = null); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs index b707b251..a4aa4e81 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs @@ -8,6 +8,7 @@ using Microsoft.Extensions.Time.Testing; using StellaOps.Authority.Bootstrap; using StellaOps.Authority.Storage.Mongo.Documents; using StellaOps.Authority.Storage.Mongo.Stores; +using MongoDB.Driver; using StellaOps.Cryptography.Audit; using Xunit; @@ -65,19 +66,19 @@ public sealed class BootstrapInviteCleanupServiceTests public bool ExpireCalled { get; private set; } - public ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken) + public ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) => throw new NotImplementedException(); - public ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken) + public ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null)); - public ValueTask ReleaseAsync(string token, CancellationToken cancellationToken) + public ValueTask ReleaseAsync(string token, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(false); - public ValueTask MarkConsumedAsync(string token, string? 
consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken) + public ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(false); - public ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken) + public ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ExpireCalled = true; return ValueTask.FromResult(invites); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs index 4fe34c90..dbd51bcf 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs @@ -13,11 +13,13 @@ using StellaOps.Authority.OpenIddict; using StellaOps.Authority.OpenIddict.Handlers; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.RateLimiting; using StellaOps.Cryptography.Audit; using Xunit; using MongoDB.Bson; +using MongoDB.Driver; using static StellaOps.Authority.Tests.OpenIddict.TestHelpers; namespace StellaOps.Authority.Tests.OpenIddict; @@ -127,6 +129,7 @@ public class ClientCredentialsHandlersTests var descriptor = CreateDescriptor(clientDocument); var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor); var tokenStore = new TestTokenStore(); + var sessionAccessor = new NullMongoSessionAccessor(); var authSink = new TestAuthEventSink(); var metadataAccessor = new TestRateLimiterMetadataAccessor(); var validateHandler = new ValidateClientCredentialsHandler( @@ -148,10 +151,11 @@ public class ClientCredentialsHandlersTests var handler = new HandleClientCredentialsHandler( registry, tokenStore, + sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance); - var persistHandler = new PersistTokensHandler(tokenStore, TimeProvider.System, TestActivitySource, NullLogger.Instance); + var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance); var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); @@ -202,8 +206,10 @@ public class TokenValidationHandlersTests var metadataAccessor = new TestRateLimiterMetadataAccessor(); var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); var handler = new ValidateAccessTokenHandler( tokenStore, + sessionAccessor, new TestClientStore(CreateClient()), CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())), metadataAccessor, @@ -248,8 +254,10 @@ public class TokenValidationHandlersTests var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); var auditSinkSuccess = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); var handler = new ValidateAccessTokenHandler( new TestTokenStore(), + sessionAccessor, new TestClientStore(clientDocument), registry, metadataAccessorSuccess, @@ -313,8 +321,10 @@ public class TokenValidationHandlersTests clientDocument.ClientId = "agent"; var 
auditSink = new TestAuthEventSink(); var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var sessionAccessorReplay = new NullMongoSessionAccessor(); var handler = new ValidateAccessTokenHandler( tokenStore, + sessionAccessorReplay, new TestClientStore(clientDocument), registry, metadataAccessor, @@ -360,19 +370,19 @@ internal sealed class TestClientStore : IAuthorityClientStore } } - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) { clients.TryGetValue(clientId, out var document); return ValueTask.FromResult(document); } - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { clients[document.ClientId] = document; return ValueTask.CompletedTask; } - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken) + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(clients.Remove(clientId)); } @@ -382,28 +392,28 @@ internal sealed class TestTokenStore : IAuthorityTokenStore public Func? UsageCallback { get; set; } - public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken) + public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) { Inserted = document; return ValueTask.CompletedTask; } - public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken) + public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(Inserted is not null && string.Equals(Inserted.TokenId, tokenId, StringComparison.OrdinalIgnoreCase) ? Inserted : null); - public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken) + public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(null); - public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken) + public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.CompletedTask; - public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken) + public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult(0L); - public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken) + public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); - public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken) + public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? session = null) => ValueTask.FromResult>(Array.Empty()); } @@ -516,6 +526,14 @@ internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMet public void SetTag(string name, string? value) => metadata.SetTag(name, value); } +internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor +{ + public ValueTask GetSessionAsync(CancellationToken cancellationToken = default) + => ValueTask.FromResult(null!); + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; +} + internal static class TestHelpers { public static AuthorityClientDocument CreateClient( diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs index 3bd67374..fb74dd91 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs @@ -16,6 +16,7 @@ using StellaOps.Authority.Storage.Mongo; using StellaOps.Authority.Storage.Mongo.Documents; using StellaOps.Authority.Storage.Mongo.Extensions; using StellaOps.Authority.Storage.Mongo.Initialization; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Concelier.Testing; using StellaOps.Authority.RateLimiting; @@ -59,9 +60,11 @@ public sealed class TokenPersistenceIntegrationTests var authSink = new TestAuthEventSink(); var metadataAccessor = new TestRateLimiterMetadataAccessor(); + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger.Instance); - var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, clock, TestActivitySource, NullLogger.Instance); - var persistHandler = new PersistTokensHandler(tokenStore, clock, TestActivitySource, NullLogger.Instance); + var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger.Instance); + var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger.Instance); var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger"); transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(15); @@ -151,8 +154,11 @@ public sealed class TokenPersistenceIntegrationTests var metadataAccessor = new TestRateLimiterMetadataAccessor(); var auditSink = new TestAuthEventSink(); + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); var handler = new ValidateAccessTokenHandler( tokenStore, + sessionAccessor, clientStore, registry, metadataAccessor, @@ -249,6 +255,107 @@ public sealed class TokenPersistenceIntegrationTests }); } + [Fact] + public async Task 
MongoSessions_ProvideReadYourWriteAfterPrimaryElection() + { + await ResetCollectionsAsync(); + + var clock = new FakeTimeProvider(DateTimeOffset.UtcNow); + await using var provider = await BuildMongoProviderAsync(clock); + + var tokenStore = provider.GetRequiredService(); + + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); + var session = await sessionAccessor.GetSessionAsync(CancellationToken.None); + + var tokenId = $"election-token-{Guid.NewGuid():N}"; + var document = new AuthorityTokenDocument + { + TokenId = tokenId, + Type = OpenIddictConstants.TokenTypeHints.AccessToken, + SubjectId = "session-subject", + ClientId = "session-client", + Scope = new List { "jobs:read" }, + Status = "valid", + CreatedAt = clock.GetUtcNow(), + ExpiresAt = clock.GetUtcNow().AddMinutes(30) + }; + + await tokenStore.InsertAsync(document, CancellationToken.None, session); + + await StepDownPrimaryAsync(fixture.Client, CancellationToken.None); + + AuthorityTokenDocument? fetched = null; + for (var attempt = 0; attempt < 5; attempt++) + { + try + { + fetched = await tokenStore.FindByTokenIdAsync(tokenId, CancellationToken.None, session); + if (fetched is not null) + { + break; + } + } + catch (MongoException) + { + await Task.Delay(250); + } + } + + Assert.NotNull(fetched); + Assert.Equal(tokenId, fetched!.TokenId); + } + + private static async Task StepDownPrimaryAsync(IMongoClient client, CancellationToken cancellationToken) + { + var admin = client.GetDatabase("admin"); + try + { + var command = new BsonDocument + { + { "replSetStepDown", 5 }, + { "force", true } + }; + + await admin.RunCommandAsync(command, cancellationToken: cancellationToken); + } + catch (MongoCommandException) + { + // Expected when the current primary steps down. + } + catch (MongoConnectionException) + { + // Connection may drop during election; ignore and continue. + } + + await WaitForPrimaryAsync(admin, cancellationToken); + } + + private static async Task WaitForPrimaryAsync(IMongoDatabase adminDatabase, CancellationToken cancellationToken) + { + for (var attempt = 0; attempt < 40; attempt++) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var status = await adminDatabase.RunCommandAsync(new BsonDocument { { "replSetGetStatus", 1 } }, cancellationToken: cancellationToken); + if (status.TryGetValue("myState", out var state) && state.ToInt32() == 1) + { + return; + } + } + catch (MongoCommandException) + { + // Ignore intermediate states and retry. 
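// A minimal sketch of the kind of accessor the test above resolves as IAuthorityMongoSessionAccessor:
// a per-scope, causally consistent session that yields read-your-write semantics across a primary
// election. The class name and wiring are assumptions for illustration; the actual
// StellaOps.Authority.Storage.Mongo.Sessions implementation may differ. Majority read/write concerns
// are assumed to be configured on MongoClientSettings (ReadConcern.Majority, WriteConcern.WMajority)
// rather than on the session itself.
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

internal sealed class CausallyConsistentSessionAccessorSketch : IAsyncDisposable
{
    private readonly IMongoClient client;
    private IClientSessionHandle? session;

    public CausallyConsistentSessionAccessorSketch(IMongoClient client)
        => this.client = client ?? throw new ArgumentNullException(nameof(client));

    public async ValueTask<IClientSessionHandle> GetSessionAsync(CancellationToken cancellationToken = default)
    {
        // Reuse one session per scope so an insert and the follow-up read share the same
        // causal-consistency timeline, even after replSetStepDown forces a new primary.
        session ??= await client.StartSessionAsync(
            new ClientSessionOptions { CausalConsistency = true },
            cancellationToken).ConfigureAwait(false);

        return session;
    }

    public ValueTask DisposeAsync()
    {
        session?.Dispose();
        return ValueTask.CompletedTask;
    }
}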
+ } + + await Task.Delay(250, cancellationToken); + } + + throw new TimeoutException("Replica set primary election did not complete in time."); + } + private async Task ResetCollectionsAsync() { var tokens = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Tokens); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj b/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj index ce847ce2..9ed95cc7 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj @@ -9,4 +9,7 @@ + + + diff --git a/src/StellaOps.Authority/StellaOps.Authority.sln b/src/StellaOps.Authority/StellaOps.Authority.sln index 500f2cfd..08e7b319 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.sln +++ b/src/StellaOps.Authority/StellaOps.Authority.sln @@ -55,6 +55,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Test EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "..\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -377,6 +379,18 @@ Global {159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x64.Build.0 = Release|Any CPU {159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x86.ActiveCfg = Release|Any CPU {159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x86.Build.0 = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|x64.ActiveCfg = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|x64.Build.0 = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|x86.ActiveCfg = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Debug|x86.Build.0 = Debug|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|Any CPU.Build.0 = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|x64.ActiveCfg = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|x64.Build.0 = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|x86.ActiveCfg = Release|Any CPU + {ACEFD2D2-D4B9-47FB-91F2-1EA94C28D93C}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs index 8ee2ac5c..820f68aa 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs @@ -10,10 +10,12 @@ using OpenIddict.Abstractions; using OpenIddict.Extensions; using OpenIddict.Server; using OpenIddict.Server.AspNetCore; +using MongoDB.Driver; using StellaOps.Auth.Abstractions; using 
StellaOps.Authority.OpenIddict; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.RateLimiting; using StellaOps.Cryptography.Audit; @@ -237,6 +239,7 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< { private readonly IAuthorityIdentityProviderRegistry registry; private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; private readonly TimeProvider clock; private readonly ActivitySource activitySource; private readonly ILogger logger; @@ -244,12 +247,14 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< public HandleClientCredentialsHandler( IAuthorityIdentityProviderRegistry registry, IAuthorityTokenStore tokenStore, + IAuthorityMongoSessionAccessor sessionAccessor, TimeProvider clock, ActivitySource activitySource, ILogger logger) { this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); + this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); this.clock = clock ?? throw new ArgumentNullException(nameof(clock)); this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); @@ -339,7 +344,8 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); } - await PersistTokenAsync(context, document, tokenId, grantedScopes, activity).ConfigureAwait(false); + var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false); + await PersistTokenAsync(context, document, tokenId, grantedScopes, session, activity).ConfigureAwait(false); context.Principal = principal; context.HandleRequest(); @@ -388,6 +394,7 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< AuthorityClientDocument document, string tokenId, IReadOnlyCollection scopes, + IClientSessionHandle session, Activity? 
activity) { if (context.IsRejected) @@ -413,7 +420,7 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler< ExpiresAt = expiresAt }; - await tokenStore.InsertAsync(record, context.CancellationToken).ConfigureAwait(false); + await tokenStore.InsertAsync(record, context.CancellationToken, session).ConfigureAwait(false); context.Transaction.Properties[AuthorityOpenIddictConstants.TokenTransactionProperty] = record; activity?.SetTag("authority.token_id", tokenId); } diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs index 95efe539..5ff25e9d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs @@ -6,6 +6,7 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using OpenIddict.Abstractions; using OpenIddict.Server; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; namespace StellaOps.Authority.OpenIddict.Handlers; @@ -13,17 +14,20 @@ namespace StellaOps.Authority.OpenIddict.Handlers; internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler { private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; private readonly TimeProvider clock; private readonly ILogger logger; private readonly ActivitySource activitySource; public HandleRevocationRequestHandler( IAuthorityTokenStore tokenStore, + IAuthorityMongoSessionAccessor sessionAccessor, TimeProvider clock, ActivitySource activitySource, ILogger logger) { this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); + this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); this.clock = clock ?? throw new ArgumentNullException(nameof(clock)); this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); @@ -43,14 +47,15 @@ internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler< } var token = request.Token.Trim(); - var document = await tokenStore.FindByTokenIdAsync(token, context.CancellationToken).ConfigureAwait(false); + var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false); + var document = await tokenStore.FindByTokenIdAsync(token, context.CancellationToken, session).ConfigureAwait(false); if (document is null) { var tokenId = TryExtractTokenId(token); if (!string.IsNullOrWhiteSpace(tokenId)) { - document = await tokenStore.FindByTokenIdAsync(tokenId!, context.CancellationToken).ConfigureAwait(false); + document = await tokenStore.FindByTokenIdAsync(tokenId!, context.CancellationToken, session).ConfigureAwait(false); } } @@ -70,7 +75,8 @@ internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler< "client_request", null, null, - context.CancellationToken).ConfigureAwait(false); + context.CancellationToken, + session).ConfigureAwait(false); logger.LogInformation("Token {TokenId} revoked via revocation endpoint.", document.TokenId); activity?.SetTag("authority.token_id", document.TokenId); diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs index ee161b56..258a4120 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs @@ -10,7 +10,9 @@ using Microsoft.Extensions.Logging; using OpenIddict.Abstractions; using OpenIddict.Extensions; using OpenIddict.Server; +using MongoDB.Driver; using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; namespace StellaOps.Authority.OpenIddict.Handlers; @@ -18,17 +20,20 @@ namespace StellaOps.Authority.OpenIddict.Handlers; internal sealed class PersistTokensHandler : IOpenIddictServerHandler { private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; private readonly TimeProvider clock; private readonly ActivitySource activitySource; private readonly ILogger logger; public PersistTokensHandler( IAuthorityTokenStore tokenStore, + IAuthorityMongoSessionAccessor sessionAccessor, TimeProvider clock, ActivitySource activitySource, ILogger logger) { this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); + this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); this.clock = clock ?? throw new ArgumentNullException(nameof(clock)); this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); @@ -47,30 +52,31 @@ internal sealed class PersistTokensHandler : IOpenIddictServerHandler"); } catch (Exception ex) diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs index eefab879..4411cb5f 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs @@ -8,10 +8,12 @@ using OpenIddict.Abstractions; using OpenIddict.Extensions; using OpenIddict.Server; using StellaOps.Auth.Abstractions; +using MongoDB.Driver; using StellaOps.Authority.OpenIddict; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Authority.RateLimiting; using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Cryptography.Audit; @@ -20,6 +22,7 @@ namespace StellaOps.Authority.OpenIddict.Handlers; internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler { private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; private readonly IAuthorityClientStore clientStore; private readonly IAuthorityIdentityProviderRegistry registry; private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; @@ -30,6 +33,7 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler logger) { this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); + this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); this.metadataAccessor = metadataAccessor ?? 
throw new ArgumentNullException(nameof(metadataAccessor)); @@ -74,10 +79,12 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler }; }); +builder.WebHost.ConfigureKestrel(options => +{ + options.ConfigureHttpsDefaults(https => + { + https.ClientCertificateMode = ClientCertificateMode.AllowCertificate; + https.CheckCertificateRevocation = true; + }); +}); + builder.Configuration.AddConfiguration(authorityConfiguration.Configuration); builder.Host.UseSerilog((context, _, loggerConfiguration) => @@ -86,6 +99,28 @@ builder.Services.TryAddSingleton(_ => TimeProvider.System); builder.Services.TryAddSingleton(); builder.Services.TryAddSingleton(); +#if STELLAOPS_AUTH_SECURITY +var senderConstraints = authorityOptions.Security.SenderConstraints; + +builder.Services.AddOptions() + .Configure(options => + { + options.ProofLifetime = senderConstraints.Dpop.ProofLifetime; + options.AllowedClockSkew = senderConstraints.Dpop.AllowedClockSkew; + options.ReplayWindow = senderConstraints.Dpop.ReplayWindow; + + options.AllowedAlgorithms.Clear(); + foreach (var algorithm in senderConstraints.Dpop.NormalizedAlgorithms) + { + options.AllowedAlgorithms.Add(algorithm); + } + }) + .PostConfigure(static options => options.Validate()); + +builder.Services.TryAddSingleton(provider => new InMemoryDpopReplayCache(provider.GetService())); +builder.Services.TryAddSingleton(); +#endif + builder.Services.AddRateLimiter(rateLimiterOptions => { AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); diff --git a/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj b/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj index 8b65f5b3..d687ac4f 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj +++ b/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj @@ -5,6 +5,7 @@ enable enable true + $(DefineConstants);STELLAOPS_AUTH_SECURITY @@ -22,6 +23,7 @@ + diff --git a/src/StellaOps.Authority/TASKS.md b/src/StellaOps.Authority/TASKS.md index e8b07e7e..ac86d2f5 100644 --- a/src/StellaOps.Authority/TASKS.md +++ b/src/StellaOps.Authority/TASKS.md @@ -19,6 +19,11 @@ | AUTHCORE-BUILD-OPENIDDICT | DONE (2025-10-14) | Authority Core | SEC2.HOST | Adapt host/audit handlers for OpenIddict 6.4 API surface (no `OpenIddictServerTransaction`) and restore Authority solution build. | ✅ Build `dotnet build src/StellaOps.Authority.sln` succeeds; ✅ Audit correlation + tamper logging verified under new abstractions; ✅ Tests updated. | | AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. | | AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. | -| AUTHSTORAGE-MONGO-08-001 | TODO | Authority Core & Storage Guild | — | Harden Mongo session usage with causal consistency for mutations and follow-up reads. | • Scoped middleware/service creates `IClientSessionHandle` with causal consistency + majority read/write concerns
• Stores accept optional session parameter and reuse it for write + immediate reads
• GraphQL/HTTP pipelines updated to flow session through post-mutation queries
• Replica-set integration test exercises primary election and verifies read-your-write guarantees | +| AUTHSTORAGE-MONGO-08-001 | DONE (2025-10-19) | Authority Core & Storage Guild | — | Harden Mongo session usage with causal consistency for mutations and follow-up reads. | • Scoped middleware/service creates `IClientSessionHandle` with causal consistency + majority read/write concerns
• Stores accept optional session parameter and reuse it for write + immediate reads
• GraphQL/HTTP pipelines updated to flow session through post-mutation queries
• Replica-set integration test exercises primary election and verifies read-your-write guarantees | +| AUTH-DPOP-11-001 | DOING (2025-10-19) | Authority Core & Security Guild | — | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | • DPoP proof validator verifies method/uri/hash, jwk thumbprint, and replay nonce per spec
• Nonce issuance endpoint integrated with audit + rate limits; high-value audiences enforce nonce requirement
• Integration tests cover success/failure paths (expired nonce, replay, invalid proof) and docs outline operator configuration | +| AUTH-MTLS-11-002 | DOING (2025-10-19) | Authority Core & Security Guild | — | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | • Client registration stores certificate bindings and enforces SAN/thumbprint validation during token issuance
• Token endpoint returns certificate-bound access tokens + PoP proof metadata; introspection reflects binding state
• End-to-end tests validate successful mTLS issuance, rejection of unbound certs, and docs capture configuration/rotation guidance | +> Remark (2025-10-19, AUTHSTORAGE-MONGO-08-001): Session accessor wired through Authority pipeline; stores accept optional sessions; added replica-set election regression test for read-your-write. +> Remark (2025-10-19, AUTH-DPOP-11-001): Prerequisites reviewed—none outstanding; status moved to DOING for Wave 0 kickoff. Design blueprint recorded in `docs/dev/authority-dpop-mtls-plan.md`. +> Remark (2025-10-19, AUTH-MTLS-11-002): Prerequisites reviewed—none outstanding; status moved to DOING for Wave 0 kickoff. mTLS flow design captured in `docs/dev/authority-dpop-mtls-plan.md`. > Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic. diff --git a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs b/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs index 687efe2a..3ca1351f 100644 --- a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs +++ b/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Collections.ObjectModel; using System.IO; using System.Net.Http; using System.Security.Cryptography; @@ -319,6 +320,61 @@ public sealed class CommandHandlersTests } } + [Fact] + public async Task HandleExcititorExportAsync_DownloadsWhenOutputProvided() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + const string manifestJson = """ + { + "exportId": "exports/20251019T101530Z/abcdef1234567890", + "format": "openvex", + "createdAt": "2025-10-19T10:15:30Z", + "artifact": { "algorithm": "sha256", "digest": "abcdef1234567890" }, + "fromCache": false, + "sizeBytes": 2048, + "attestation": { + "rekor": { + "location": "https://rekor.example/api/v1/log/entries/123", + "logIndex": "123" + } + } + } + """; + + backend.ExcititorResult = new ExcititorOperationResult(true, "ok", null, JsonDocument.Parse(manifestJson).RootElement.Clone()); + var provider = BuildServiceProvider(backend); + var outputPath = Path.Combine(tempDir.Path, "export.json"); + + await CommandHandlers.HandleExcititorExportAsync( + provider, + format: "openvex", + delta: false, + scope: null, + since: null, + provider: null, + outputPath: outputPath, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Single(backend.ExportDownloads); + var request = backend.ExportDownloads[0]; + Assert.Equal("exports/20251019T101530Z/abcdef1234567890", request.ExportId); + Assert.Equal(Path.GetFullPath(outputPath), request.DestinationPath); + Assert.Equal("sha256", request.Algorithm); + Assert.Equal("abcdef1234567890", request.Digest); + } + finally + { + Environment.ExitCode = original; + } + } + [Theory] [InlineData(null)] [InlineData("default")] @@ -624,6 +680,7 @@ public sealed class CommandHandlersTests public string? LastExcititorRoute { get; private set; } public HttpMethod? LastExcititorMethod { get; private set; } public object? LastExcititorPayload { get; private set; } + public List<(string ExportId, string DestinationPath, string? Algorithm, string? Digest)> ExportDownloads { get; } = new(); public ExcititorOperationResult? 
ExcititorResult { get; set; } = new ExcititorOperationResult(true, "ok", null, null); public IReadOnlyList ProviderSummaries { get; set; } = Array.Empty(); @@ -650,8 +707,29 @@ public sealed class CommandHandlersTests return Task.FromResult(ExcititorResult ?? new ExcititorOperationResult(true, "ok", null, null)); } + public Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(destinationPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + File.WriteAllText(fullPath, "{}"); + var info = new FileInfo(fullPath); + ExportDownloads.Add((exportId, fullPath, expectedDigestAlgorithm, expectedDigest)); + return Task.FromResult(new ExcititorExportDownloadResult(fullPath, info.Length, false)); + } + public Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) => Task.FromResult(ProviderSummaries); + + public Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) + { + var empty = new ReadOnlyDictionary(new Dictionary()); + return Task.FromResult(new RuntimePolicyEvaluationResult(0, null, null, empty)); + } } private sealed class StubExecutor : IScannerExecutor diff --git a/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs b/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs index 8a95df94..aa1d27f8 100644 --- a/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs +++ b/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs @@ -1,9 +1,11 @@ using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Net.Http.Json; -using System.Security.Cryptography; +using System.Collections.ObjectModel; +using System.Globalization; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Net.Http.Json; +using System.Security.Cryptography; using System.Text; using System.Text.Json; using System.Threading; @@ -375,6 +377,107 @@ public sealed class BackendOperationsClientTests Assert.True(tokenClient.Requests > 0); } + [Fact] + public async Task EvaluateRuntimePolicyAsync_ParsesDecisionPayload() + { + var handler = new StubHttpMessageHandler((request, _) => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("/api/scanner/policy/runtime", request.RequestUri!.AbsolutePath); + + var body = request.Content!.ReadAsStringAsync().GetAwaiter().GetResult(); + using var document = JsonDocument.Parse(body); + var root = document.RootElement; + Assert.Equal("prod", root.GetProperty("namespace").GetString()); + Assert.Equal("payments", root.GetProperty("labels").GetProperty("app").GetString()); + var images = root.GetProperty("images"); + Assert.Equal(2, images.GetArrayLength()); + Assert.Equal("ghcr.io/app@sha256:abc", images[0].GetString()); + Assert.Equal("ghcr.io/api@sha256:def", images[1].GetString()); + + var responseJson = @"{ + ""ttlSeconds"": 120, + ""policyRevision"": ""rev-123"", + ""expiresAtUtc"": ""2025-10-19T12:34:56Z"", + ""results"": { + ""ghcr.io/app@sha256:abc"": { + ""policyVerdict"": ""pass"", + ""signed"": true, + ""hasSbom"": true, + ""reasons"": [], + ""rekor"": { ""uuid"": ""uuid-1"", ""url"": ""https://rekor.example/uuid-1"" }, + ""confidence"": 0.87, + ""quiet"": false, + ""metadata"": { ""note"": ""cached"" } + }, + ""ghcr.io/api@sha256:def"": { + 
""policyVerdict"": ""fail"", + ""signed"": false, + ""hasSbom"": false, + ""reasons"": [""unsigned"", ""missing sbom""] + } + } +}"; + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(responseJson, Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://scanner.example/") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://scanner.example/" + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var labels = new ReadOnlyDictionary(new Dictionary { ["app"] = "payments" }); + var imagesList = new ReadOnlyCollection(new List + { + "ghcr.io/app@sha256:abc", + "ghcr.io/app@sha256:abc", + "ghcr.io/api@sha256:def" + }); + var requestModel = new RuntimePolicyEvaluationRequest("prod", labels, imagesList); + + var result = await client.EvaluateRuntimePolicyAsync(requestModel, CancellationToken.None); + + Assert.Equal(120, result.TtlSeconds); + Assert.Equal("rev-123", result.PolicyRevision); + Assert.Equal(DateTimeOffset.Parse("2025-10-19T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal), result.ExpiresAtUtc); + Assert.Equal(2, result.Decisions.Count); + + var primary = result.Decisions["ghcr.io/app@sha256:abc"]; + Assert.Equal("pass", primary.PolicyVerdict); + Assert.True(primary.Signed); + Assert.True(primary.HasSbom); + Assert.Empty(primary.Reasons); + Assert.NotNull(primary.Rekor); + Assert.Equal("uuid-1", primary.Rekor!.Uuid); + Assert.Equal("https://rekor.example/uuid-1", primary.Rekor.Url); + Assert.Equal(0.87, Assert.IsType(primary.AdditionalProperties["confidence"]), 3); + Assert.False(Assert.IsType(primary.AdditionalProperties["quiet"])); + var metadataJson = Assert.IsType(primary.AdditionalProperties["metadata"]); + using var metadataDocument = JsonDocument.Parse(metadataJson); + Assert.Equal("cached", metadataDocument.RootElement.GetProperty("note").GetString()); + + var secondary = result.Decisions["ghcr.io/api@sha256:def"]; + Assert.Equal("fail", secondary.PolicyVerdict); + Assert.False(secondary.Signed); + Assert.False(secondary.HasSbom); + Assert.Collection(secondary.Reasons, + item => Assert.Equal("unsigned", item), + item => Assert.Equal("missing sbom", item)); + } + private sealed class StubTokenClient : IStellaOpsTokenClient { private readonly StellaOpsTokenResult _tokenResult; diff --git a/src/StellaOps.Cli/Commands/CommandFactory.cs b/src/StellaOps.Cli/Commands/CommandFactory.cs index 7f7566fb..40868b2e 100644 --- a/src/StellaOps.Cli/Commands/CommandFactory.cs +++ b/src/StellaOps.Cli/Commands/CommandFactory.cs @@ -25,6 +25,7 @@ internal static class CommandFactory root.Add(BuildScanCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildDatabaseCommand(services, verboseOption, cancellationToken)); root.Add(BuildExcititorCommand(services, verboseOption, cancellationToken)); + root.Add(BuildRuntimeCommand(services, verboseOption, cancellationToken)); root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken)); root.Add(BuildConfigCommand(options)); @@ -335,11 +336,16 @@ internal static class CommandFactory { Description = "Optional provider identifier when requesting targeted exports." 
}; + var exportOutputOption = new Option("--output") + { + Description = "Optional path to download the export artifact." + }; export.Add(formatOption); export.Add(exportDeltaOption); export.Add(exportScopeOption); export.Add(exportSinceOption); export.Add(exportProviderOption); + export.Add(exportOutputOption); export.SetAction((parseResult, _) => { var format = parseResult.GetValue(formatOption) ?? "openvex"; @@ -347,8 +353,9 @@ internal static class CommandFactory var scope = parseResult.GetValue(exportScopeOption); var since = parseResult.GetValue(exportSinceOption); var provider = parseResult.GetValue(exportProviderOption); + var output = parseResult.GetValue(exportOutputOption); var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorExportAsync(services, format, delta, scope, since, provider, verbose, cancellationToken); + return CommandHandlers.HandleExcititorExportAsync(services, format, delta, scope, since, provider, output, verbose, cancellationToken); }); var verify = new Command("verify", "Verify Excititor exports or attestations."); @@ -406,6 +413,70 @@ internal static class CommandFactory return excititor; } + private static Command BuildRuntimeCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var runtime = new Command("runtime", "Interact with runtime admission policy APIs."); + var policy = new Command("policy", "Runtime policy operations."); + + var test = new Command("test", "Evaluate runtime policy decisions for image digests."); + var namespaceOption = new Option("--namespace", new[] { "--ns" }) + { + Description = "Namespace or logical scope for the evaluation." + }; + + var imageOption = new Option("--image", new[] { "-i", "--images" }) + { + Description = "Image digests to evaluate (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + + var fileOption = new Option("--file", new[] { "-f" }) + { + Description = "Path to a file containing image digests (one per line)." + }; + + var labelOption = new Option("--label", new[] { "-l", "--labels" }) + { + Description = "Pod labels in key=value format (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + + var jsonOption = new Option("--json") + { + Description = "Emit the raw JSON response." + }; + + test.Add(namespaceOption); + test.Add(imageOption); + test.Add(fileOption); + test.Add(labelOption); + test.Add(jsonOption); + + test.SetAction((parseResult, _) => + { + var nsValue = parseResult.GetValue(namespaceOption); + var images = parseResult.GetValue(imageOption) ?? Array.Empty(); + var file = parseResult.GetValue(fileOption); + var labels = parseResult.GetValue(labelOption) ?? 
Array.Empty(); + var outputJson = parseResult.GetValue(jsonOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleRuntimePolicyTestAsync( + services, + nsValue, + images, + file, + labels, + outputJson, + verbose, + cancellationToken); + }); + + policy.Add(test); + runtime.Add(policy); + return runtime; + } + private static Command BuildAuthCommand(IServiceProvider services, StellaOpsCliOptions options, Option verboseOption, CancellationToken cancellationToken) { var auth = new Command("auth", "Manage authentication with StellaOps Authority."); diff --git a/src/StellaOps.Cli/Commands/CommandHandlers.cs b/src/StellaOps.Cli/Commands/CommandHandlers.cs index f932fba7..ee38d28d 100644 --- a/src/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/StellaOps.Cli/Commands/CommandHandlers.cs @@ -1,6 +1,7 @@ using System; using System.Buffers; using System.Collections.Generic; +using System.Collections.ObjectModel; using System.Diagnostics; using System.Globalization; using System.IO; @@ -8,6 +9,7 @@ using System.Linq; using System.Net.Http; using System.Security.Cryptography; using System.Text.Json; +using System.Text.Json.Serialization; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -512,49 +514,213 @@ internal static class CommandHandlers } } - public static Task HandleExcititorExportAsync( + public static async Task HandleExcititorExportAsync( IServiceProvider services, string format, bool delta, string? scope, DateTimeOffset? since, string? provider, + string? outputPath, bool verbose, CancellationToken cancellationToken) { - var payload = new Dictionary(StringComparer.Ordinal) - { - ["format"] = string.IsNullOrWhiteSpace(format) ? "openvex" : format.Trim(), - ["delta"] = delta - }; - + await using var scopeHandle = services.CreateAsyncScope(); + var client = scopeHandle.ServiceProvider.GetRequiredService(); + var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); + var options = scopeHandle.ServiceProvider.GetRequiredService(); + var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "excititor export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); if (!string.IsNullOrWhiteSpace(scope)) { - payload["scope"] = scope.Trim(); + activity?.SetTag("stellaops.cli.scope", scope); } if (since.HasValue) { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); } if (!string.IsNullOrWhiteSpace(provider)) { - payload["provider"] = provider.Trim(); + activity?.SetTag("stellaops.cli.provider", provider); } + if (!string.IsNullOrWhiteSpace(outputPath)) + { + activity?.SetTag("stellaops.cli.output", outputPath); + } + using var duration = CliMetrics.MeasureCommandDuration("excititor export"); - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor export", - verbose, - new Dictionary + try + { + var payload = new Dictionary(StringComparer.Ordinal) { - ["format"] = payload["format"], - ["delta"] = delta, - ["scope"] = scope, - ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), - ["provider"] = provider - }, - client => client.ExecuteExcititorOperationAsync("export", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); + ["format"] = string.IsNullOrWhiteSpace(format) ? "openvex" : format.Trim(), + ["delta"] = delta + }; + + if (!string.IsNullOrWhiteSpace(scope)) + { + payload["scope"] = scope.Trim(); + } + if (since.HasValue) + { + payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + if (!string.IsNullOrWhiteSpace(provider)) + { + payload["provider"] = provider.Trim(); + } + + var result = await client.ExecuteExcititorOperationAsync( + "export", + HttpMethod.Post, + RemoveNullValues(payload), + cancellationToken).ConfigureAwait(false); + + if (!result.Success) + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); + Environment.ExitCode = 1; + return; + } + + Environment.ExitCode = 0; + + var manifest = TryParseExportManifest(result.Payload); + if (!string.IsNullOrWhiteSpace(result.Message) + && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) + { + logger.LogInformation(result.Message); + } + + if (manifest is not null) + { + activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); + if (!string.IsNullOrWhiteSpace(manifest.Format)) + { + activity?.SetTag("stellaops.cli.export_format", manifest.Format); + } + if (manifest.FromCache.HasValue) + { + activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); + } + if (manifest.SizeBytes.HasValue) + { + activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); + } + + if (manifest.FromCache == true) + { + logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); + } + else + { + logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? 
"unknown"); + } + + if (manifest.CreatedAt.HasValue) + { + logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(manifest.Digest)) + { + var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); + if (manifest.SizeBytes.HasValue) + { + logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); + } + else + { + logger.LogInformation("Digest {Digest}.", digestDisplay); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) + { + if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) + { + logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); + } + else + { + logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) + && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) + { + logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); + } + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); + var download = await client.DownloadExcititorExportAsync( + manifest.ExportId, + resolvedPath, + manifest.Algorithm, + manifest.Digest, + cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.export_path", download.Path); + + if (download.FromCache) + { + logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + else + { + logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + } + else if (!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Download location: {Location}", result.Location); + } + } + } + else + { + if (!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Location: {Location}", result.Location); + } + } + else if (string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation("Export request accepted."); + } + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor export failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } } public static Task HandleExcititorVerifyAsync( @@ -646,6 +812,106 @@ internal static class CommandHandlers cancellationToken); } + public static async Task HandleRuntimePolicyTestAsync( + IServiceProvider services, + string? namespaceValue, + IReadOnlyList imageArguments, + string? filePath, + IReadOnlyList labelArguments, + bool outputJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "runtime policy test"); + if (!string.IsNullOrWhiteSpace(namespaceValue)) + { + activity?.SetTag("stellaops.cli.namespace", namespaceValue); + } + using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); + + try + { + IReadOnlyList images; + try + { + images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) + { + logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); + Environment.ExitCode = 9; + return; + } + + if (images.Count == 0) + { + logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); + Environment.ExitCode = 9; + return; + } + + IReadOnlyDictionary labels; + try + { + labels = ParseLabelSelectors(labelArguments); + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + Environment.ExitCode = 9; + return; + } + + activity?.SetTag("stellaops.cli.images", images.Count); + activity?.SetTag("stellaops.cli.labels", labels.Count); + + var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); + var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); + Environment.ExitCode = 0; + + if (outputJson) + { + var json = BuildRuntimePolicyJson(result, images); + Console.WriteLine(json); + return; + } + + if (result.ExpiresAtUtc.HasValue) + { + logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); + } + else + { + logger.LogInformation("Decision TTL: {TtlSeconds}s", result.TtlSeconds); + } + + if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) + { + logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); + } + + DisplayRuntimePolicyResults(logger, result, images); + } + catch (Exception ex) + { + logger.LogError(ex, "Runtime policy evaluation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + public static async Task HandleAuthLoginAsync( IServiceProvider services, StellaOpsCliOptions options, @@ -1485,6 +1751,617 @@ internal static class CommandHandlers } } + private static async Task> GatherImageDigestsAsync( + IReadOnlyList inline, + string? filePath, + CancellationToken cancellationToken) + { + var results = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + void AddCandidates(string? 
candidate) + { + foreach (var image in SplitImageCandidates(candidate)) + { + if (seen.Add(image)) + { + results.Add(image); + } + } + } + + if (inline is not null) + { + foreach (var entry in inline) + { + AddCandidates(entry); + } + } + + if (!string.IsNullOrWhiteSpace(filePath)) + { + var path = Path.GetFullPath(filePath); + if (!File.Exists(path)) + { + throw new FileNotFoundException("Input file not found.", path); + } + + foreach (var line in File.ReadLines(path)) + { + cancellationToken.ThrowIfCancellationRequested(); + AddCandidates(line); + } + } + + if (Console.IsInputRedirected) + { + while (!cancellationToken.IsCancellationRequested) + { + var line = await Console.In.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + AddCandidates(line); + } + } + + return new ReadOnlyCollection(results); + } + + private static IEnumerable SplitImageCandidates(string? raw) + { + if (string.IsNullOrWhiteSpace(raw)) + { + yield break; + } + + var candidate = raw.Trim(); + var commentIndex = candidate.IndexOf('#'); + if (commentIndex >= 0) + { + candidate = candidate[..commentIndex].Trim(); + } + + if (candidate.Length == 0) + { + yield break; + } + + var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var token in tokens) + { + var trimmed = token.Trim(); + if (trimmed.Length > 0) + { + yield return trimmed; + } + } + } + + private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) + { + if (labelArguments is null || labelArguments.Count == 0) + { + return EmptyLabelSelectors; + } + + var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var raw in labelArguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var delimiter = trimmed.IndexOf('='); + if (delimiter <= 0 || delimiter == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); + } + + var key = trimmed[..delimiter].Trim(); + var value = trimmed[(delimiter + 1)..].Trim(); + if (key.Length == 0) + { + throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); + } + + labels[key] = value; + } + + return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); + } + + private sealed record ExcititorExportManifestSummary( + string ExportId, + string? Format, + string? Algorithm, + string? Digest, + long? SizeBytes, + bool? FromCache, + DateTimeOffset? CreatedAt, + string? RekorLocation, + string? RekorIndex, + string? RekorInclusionUrl); + + private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? payload) + { + if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) + { + return null; + } + + var element = payload.Value; + var exportId = GetStringProperty(element, "exportId"); + if (string.IsNullOrWhiteSpace(exportId)) + { + return null; + } + + var format = GetStringProperty(element, "format"); + var algorithm = default(string?); + var digest = default(string?); + + if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) + { + algorithm = GetStringProperty(artifact, "algorithm"); + digest = GetStringProperty(artifact, "digest"); + } + + var sizeBytes = GetInt64Property(element, "sizeBytes"); + var fromCache = GetBooleanProperty(element, "fromCache"); + var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); + + string? 
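// Editor's note (illustrative sketch, not part of the patch): GatherImageDigestsAsync above
// accepts digests from --image, an input file, and stdin, strips '#' comments, splits tokens on
// commas/whitespace, and de-duplicates while preserving order. A standalone restatement of those
// rules, useful for eyeballing what a digest input file may contain:
using System;
using System.Collections.Generic;

static class DigestInputSketch
{
    public static IReadOnlyList<string> Normalize(IEnumerable<string> rawLines)
    {
        var seen = new HashSet<string>(StringComparer.Ordinal);
        var ordered = new List<string>();

        foreach (var raw in rawLines)
        {
            var line = raw ?? string.Empty;

            // Drop trailing '#' comments before tokenising.
            var commentIndex = line.IndexOf('#');
            if (commentIndex >= 0)
            {
                line = line[..commentIndex];
            }

            foreach (var token in line.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries))
            {
                var trimmed = token.Trim();
                if (trimmed.Length > 0 && seen.Add(trimmed))
                {
                    ordered.Add(trimmed);
                }
            }
        }

        return ordered;
    }
}

// Example: Normalize(new[] { "ghcr.io/app@sha256:abc # prod", "ghcr.io/app@sha256:abc, ghcr.io/api@sha256:def" })
// yields ["ghcr.io/app@sha256:abc", "ghcr.io/api@sha256:def"].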
rekorLocation = null; + string? rekorIndex = null; + string? rekorInclusion = null; + + if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) + { + if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) + { + rekorLocation = GetStringProperty(rekor, "location"); + rekorIndex = GetStringProperty(rekor, "logIndex"); + var inclusion = GetStringProperty(rekor, "inclusionProofUri"); + if (!string.IsNullOrWhiteSpace(inclusion)) + { + rekorInclusion = inclusion; + } + } + } + + return new ExcititorExportManifestSummary( + exportId.Trim(), + format, + algorithm, + digest, + sizeBytes, + fromCache, + createdAt, + rekorLocation, + rekorIndex, + rekorInclusion); + } + + private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) + { + if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) + { + return true; + } + + if (element.ValueKind == JsonValueKind.Object) + { + foreach (var candidate in element.EnumerateObject()) + { + if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) + { + property = candidate.Value; + return true; + } + } + } + + property = default; + return false; + } + + private static string? GetStringProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.String => property.GetString(), + JsonValueKind.Number => property.ToString(), + _ => null + }; + } + + return null; + } + + private static bool? GetBooleanProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, + _ => null + }; + } + + return null; + } + + private static long? GetInt64Property(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) + { + return value; + } + + if (property.ValueKind == JsonValueKind.String + && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + } + + return null; + } + + private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) + && property.ValueKind == JsonValueKind.String + && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) + { + return value.ToUniversalTime(); + } + + return null; + } + + private static string BuildDigestDisplay(string? 
algorithm, string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + if (digest.Contains(':', StringComparison.Ordinal)) + { + return digest; + } + + if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) + { + return $"sha256:{digest}"; + } + + return $"{algorithm}:{digest}"; + } + + private static string FormatSize(long sizeBytes) + { + if (sizeBytes < 0) + { + return $"{sizeBytes} bytes"; + } + + string[] units = { "bytes", "KB", "MB", "GB", "TB" }; + double size = sizeBytes; + var unit = 0; + + while (size >= 1024 && unit < units.Length - 1) + { + size /= 1024; + unit++; + } + + return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; + } + + private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) + { + if (string.IsNullOrWhiteSpace(outputPath)) + { + throw new ArgumentException("Output path must be provided.", nameof(outputPath)); + } + + var fullPath = Path.GetFullPath(outputPath); + if (Directory.Exists(fullPath) + || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) + || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) + { + return Path.Combine(fullPath, BuildExportFileName(manifest)); + } + + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + return fullPath; + } + + private static string BuildExportFileName(ExcititorExportManifestSummary manifest) + { + var token = !string.IsNullOrWhiteSpace(manifest.Digest) + ? manifest.Digest! + : manifest.ExportId; + + token = SanitizeToken(token); + if (token.Length > 40) + { + token = token[..40]; + } + + var extension = DetermineExportExtension(manifest.Format); + return $"stellaops-excititor-{token}{extension}"; + } + + private static string DetermineExportExtension(string? format) + { + if (string.IsNullOrWhiteSpace(format)) + { + return ".bin"; + } + + return format switch + { + not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", + not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", + _ => ".bin" + }; + } + + private static string SanitizeToken(string token) + { + var builder = new StringBuilder(token.Length); + foreach (var ch in token) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + } + + if (builder.Length == 0) + { + builder.Append("export"); + } + + return builder.ToString(); + } + + private static string? 
ResolveLocationUrl(StellaOpsCliOptions options, string location) + { + if (string.IsNullOrWhiteSpace(location)) + { + return null; + } + + if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) + { + return absolute.ToString(); + } + + if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + if (!location.StartsWith("/", StringComparison.Ordinal)) + { + location = "/" + location; + } + + return new Uri(baseUri, location).ToString(); + } + + return location; + } + + private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var results = new Dictionary(StringComparer.Ordinal); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + results[image] = BuildDecisionMap(decision); + } + } + + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["ttlSeconds"] = result.TtlSeconds, + ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), + ["policyRevision"] = result.PolicyRevision, + ["results"] = results + }; + + return JsonSerializer.Serialize(payload, options); + } + + private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) + { + var map = new Dictionary(StringComparer.Ordinal) + { + ["policyVerdict"] = decision.PolicyVerdict, + ["signed"] = decision.Signed, + ["hasSbom"] = decision.HasSbom + }; + + if (decision.Reasons.Count > 0) + { + map["reasons"] = decision.Reasons; + } + + if (decision.Rekor is not null) + { + map["rekor"] = new Dictionary(StringComparer.Ordinal) + { + ["uuid"] = decision.Rekor.Uuid, + ["url"] = decision.Rekor.Url + }; + } + + foreach (var kvp in decision.AdditionalProperties) + { + map[kvp.Key] = kvp.Value; + } + + return map; + } + + private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded).AddColumns("Image", "Verdict", "Signed", "SBOM", "Reasons", "Attestation"); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + table.AddRow( + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbom), + decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", + string.IsNullOrWhiteSpace(decision.Rekor?.Uuid) ? "-" : decision.Rekor!.Uuid!); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + table.AddRow(image, "", "-", "-", "-", "-"); + } + } + + AnsiConsole.Write(table); + } + else + { + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; + logger.LogInformation( + "{Image} -> verdict={Verdict} signed={Signed} sbom={Sbom} attestation={Attestation} reasons={Reasons}", + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbom), + string.IsNullOrWhiteSpace(decision.Rekor?.Uuid) ? "-" : decision.Rekor!.Uuid!, + reasons); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + logger.LogWarning("{Image} -> no decision returned by backend.", image); + } + } + } + + if (summary.Count > 0) + { + var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); + logger.LogInformation("Verdict summary: {Summary}", summaryText); + } + } + + private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) + { + var order = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + if (requestedImages is not null) + { + foreach (var image in requestedImages) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + } + + foreach (var image in actual) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + + return new ReadOnlyCollection(order); + } + + private static string FormatBoolean(bool? value) + => value is null ? "unknown" : value.Value ? "yes" : "no"; + + private static string FormatAdditionalValue(object? value) + { + return value switch + { + null => "null", + bool b => b ? "true" : "false", + double d => d.ToString("G17", CultureInfo.InvariantCulture), + float f => f.ToString("G9", CultureInfo.InvariantCulture), + IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), + _ => value.ToString() ?? 
string.Empty + }; + } + + private static readonly IReadOnlyDictionary EmptyLabelSelectors = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); + private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) { if (providers is null || providers.Count == 0) diff --git a/src/StellaOps.Cli/Services/BackendOperationsClient.cs b/src/StellaOps.Cli/Services/BackendOperationsClient.cs index 97c56d7f..9b69a06b 100644 --- a/src/StellaOps.Cli/Services/BackendOperationsClient.cs +++ b/src/StellaOps.Cli/Services/BackendOperationsClient.cs @@ -1,5 +1,6 @@ using System; -using System.Collections.Generic; +using System.Collections.Generic; +using System.Collections.ObjectModel; using System.IO; using System.Net; using System.Net.Http; @@ -25,6 +26,8 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient { private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); + private static readonly IReadOnlyDictionary EmptyMetadata = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); private readonly HttpClient _httpClient; private readonly StellaOpsCliOptions _options; @@ -266,6 +269,208 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return new ExcititorOperationResult(false, failure, null, null); } + public async Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(exportId)) + { + throw new ArgumentException("Export id must be provided.", nameof(exportId)); + } + + if (string.IsNullOrWhiteSpace(destinationPath)) + { + throw new ArgumentException("Destination path must be provided.", nameof(destinationPath)); + } + + var fullPath = Path.GetFullPath(destinationPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + var normalizedAlgorithm = string.IsNullOrWhiteSpace(expectedDigestAlgorithm) + ? 
null + : expectedDigestAlgorithm.Trim(); + var normalizedDigest = NormalizeExpectedDigest(expectedDigest); + + if (File.Exists(fullPath) + && string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrWhiteSpace(normalizedDigest)) + { + var existingDigest = await ComputeSha256Async(fullPath, cancellationToken).ConfigureAwait(false); + if (string.Equals(existingDigest, normalizedDigest, StringComparison.OrdinalIgnoreCase)) + { + var info = new FileInfo(fullPath); + _logger.LogDebug("Export {ExportId} already present at {Path}; digest matches.", exportId, fullPath); + return new ExcititorExportDownloadResult(fullPath, info.Length, true); + } + } + + var encodedId = Uri.EscapeDataString(exportId); + using var request = CreateRequest(HttpMethod.Get, $"excititor/export/{encodedId}/download"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + var tempPath = fullPath + ".tmp"; + if (File.Exists(tempPath)) + { + File.Delete(tempPath); + } + + using (var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false)) + { + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + await using (var fileStream = File.Create(tempPath)) + { + await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + } + } + + if (!string.IsNullOrWhiteSpace(normalizedAlgorithm) && !string.IsNullOrWhiteSpace(normalizedDigest)) + { + if (string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase)) + { + var computed = await ComputeSha256Async(tempPath, cancellationToken).ConfigureAwait(false); + if (!string.Equals(computed, normalizedDigest, StringComparison.OrdinalIgnoreCase)) + { + File.Delete(tempPath); + throw new InvalidOperationException($"Export digest mismatch. Expected sha256:{normalizedDigest}, computed sha256:{computed}."); + } + } + else + { + _logger.LogWarning("Export digest verification skipped. Unsupported algorithm {Algorithm}.", normalizedAlgorithm); + } + } + + if (File.Exists(fullPath)) + { + File.Delete(fullPath); + } + + File.Move(tempPath, fullPath); + + var downloaded = new FileInfo(fullPath); + return new ExcititorExportDownloadResult(fullPath, downloaded.Length, false); + } + + public async Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var images = NormalizeImages(request.Images); + if (images.Count == 0) + { + throw new ArgumentException("At least one image digest must be provided.", nameof(request)); + } + + var payload = new RuntimePolicyEvaluationRequestDocument + { + Namespace = string.IsNullOrWhiteSpace(request.Namespace) ? null : request.Namespace.Trim(), + Images = images + }; + + if (request.Labels.Count > 0) + { + payload.Labels = new Dictionary(StringComparer.Ordinal); + foreach (var label in request.Labels) + { + if (!string.IsNullOrWhiteSpace(label.Key)) + { + payload.Labels[label.Key] = label.Value ?? 
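// Editor's note (illustrative sketch, not part of the patch): DownloadExcititorExportAsync above
// verifies downloaded exports by recomputing a SHA-256 digest. A minimal standalone check of a
// file against an expected "sha256:<hex>" value; the file path and digest are placeholders.
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;

static class DigestCheckSketch
{
    public static async Task<bool> MatchesSha256Async(string filePath, string expectedDigest, CancellationToken cancellationToken = default)
    {
        // Accept either "sha256:<hex>" or a bare hex string.
        var expected = expectedDigest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? expectedDigest[7..]
            : expectedDigest;

        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        var computed = Convert.ToHexString(hash).ToLowerInvariant();

        return string.Equals(computed, expected, StringComparison.OrdinalIgnoreCase);
    }
}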
string.Empty; + } + } + } + + using var message = CreateRequest(HttpMethod.Post, "api/scanner/policy/runtime"); + await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); + message.Content = JsonContent.Create(payload, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + RuntimePolicyEvaluationResponseDocument? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse runtime policy response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Runtime policy response was empty."); + } + + var decisions = new Dictionary(StringComparer.Ordinal); + if (document.Results is not null) + { + foreach (var kvp in document.Results) + { + var image = kvp.Key; + var decision = kvp.Value; + if (string.IsNullOrWhiteSpace(image) || decision is null) + { + continue; + } + + var verdict = string.IsNullOrWhiteSpace(decision.PolicyVerdict) + ? "unknown" + : decision.PolicyVerdict!.Trim(); + + var reasons = ExtractReasons(decision.Reasons); + var metadata = ExtractExtensionMetadata(decision.ExtensionData); + + RuntimePolicyRekorReference? rekor = null; + if (decision.Rekor is not null && + (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) || !string.IsNullOrWhiteSpace(decision.Rekor.Url))) + { + rekor = new RuntimePolicyRekorReference( + NormalizeOptionalString(decision.Rekor.Uuid), + NormalizeOptionalString(decision.Rekor.Url)); + } + + decisions[image] = new RuntimePolicyImageDecision( + verdict, + decision.Signed, + decision.HasSbom, + reasons, + rekor, + metadata); + } + } + + var decisionsView = new ReadOnlyDictionary(decisions); + + return new RuntimePolicyEvaluationResult( + document.TtlSeconds ?? 0, + document.ExpiresAtUtc?.ToUniversalTime(), + string.IsNullOrWhiteSpace(document.PolicyRevision) ? null : document.PolicyRevision, + decisionsView); + } + public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) { EnsureBackendConfigured(); @@ -324,7 +529,96 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return list; } - + + private static List NormalizeImages(IReadOnlyList images) + { + var normalized = new List(); + if (images is null) + { + return normalized; + } + + var seen = new HashSet(StringComparer.Ordinal); + foreach (var entry in images) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + var trimmed = entry.Trim(); + if (seen.Add(trimmed)) + { + normalized.Add(trimmed); + } + } + + return normalized; + } + + private static IReadOnlyList ExtractReasons(List? reasons) + { + if (reasons is null || reasons.Count == 0) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var reason in reasons) + { + if (!string.IsNullOrWhiteSpace(reason)) + { + list.Add(reason.Trim()); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static IReadOnlyDictionary ExtractExtensionMetadata(Dictionary? 
extensionData) + { + if (extensionData is null || extensionData.Count == 0) + { + return EmptyMetadata; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var kvp in extensionData) + { + var value = ConvertJsonElementToObject(kvp.Value); + if (value is not null) + { + metadata[kvp.Key] = value; + } + } + + if (metadata.Count == 0) + { + return EmptyMetadata; + } + + return new ReadOnlyDictionary(metadata); + } + + private static object? ConvertJsonElementToObject(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Number when element.TryGetInt64(out var integer) => integer, + JsonValueKind.Number when element.TryGetDouble(out var @double) => @double, + JsonValueKind.Null or JsonValueKind.Undefined => null, + _ => element.GetRawText() + }; + } + + private static string? NormalizeOptionalString(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + } + private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) { if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri)) @@ -596,12 +890,25 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient return null; } - private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) - { - string digestHex; - await using (var stream = File.OpenRead(filePath)) - { - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + private static string? NormalizeExpectedDigest(string? digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var trimmed = digest.Trim(); + return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? trimmed[7..] + : trimmed; + } + + private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) + { + string digestHex; + await using (var stream = File.OpenRead(filePath)) + { + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); digestHex = Convert.ToHexString(hash).ToLowerInvariant(); } @@ -619,18 +926,25 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient _logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only."); } - return digestHex; - } - - private static string NormalizeDigest(string digest) - { - if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - return digest[7..]; - } - - return digest; - } + return digestHex; + } + + private static string NormalizeDigest(string digest) + { + if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + return digest[7..]; + } + + return digest; + } + + private static async Task ComputeSha256Async(string filePath, CancellationToken cancellationToken) + { + await using var stream = File.OpenRead(filePath); + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + return Convert.ToHexString(hash).ToLowerInvariant(); + } private async Task ValidateSignatureAsync(string? 
signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken) { diff --git a/src/StellaOps.Cli/Services/IBackendOperationsClient.cs b/src/StellaOps.Cli/Services/IBackendOperationsClient.cs index d3601761..1c44f132 100644 --- a/src/StellaOps.Cli/Services/IBackendOperationsClient.cs +++ b/src/StellaOps.Cli/Services/IBackendOperationsClient.cs @@ -17,5 +17,9 @@ internal interface IBackendOperationsClient Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken); + Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken); + Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken); + + Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken); } diff --git a/src/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs b/src/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs new file mode 100644 index 00000000..9b3c1cc5 --- /dev/null +++ b/src/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Cli.Services.Models; + +internal sealed record ExcititorExportDownloadResult( + string Path, + long SizeBytes, + bool FromCache); diff --git a/src/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs b/src/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs new file mode 100644 index 00000000..92d75883 --- /dev/null +++ b/src/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record RuntimePolicyEvaluationRequest( + string? Namespace, + IReadOnlyDictionary Labels, + IReadOnlyList Images); + +internal sealed record RuntimePolicyEvaluationResult( + int TtlSeconds, + DateTimeOffset? ExpiresAtUtc, + string? PolicyRevision, + IReadOnlyDictionary Decisions); + +internal sealed record RuntimePolicyImageDecision( + string PolicyVerdict, + bool? Signed, + bool? HasSbom, + IReadOnlyList Reasons, + RuntimePolicyRekorReference? Rekor, + IReadOnlyDictionary AdditionalProperties); + +internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url); diff --git a/src/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs b/src/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs new file mode 100644 index 00000000..e84cd028 --- /dev/null +++ b/src/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models.Transport; + +internal sealed class RuntimePolicyEvaluationRequestDocument +{ + [JsonPropertyName("namespace")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Namespace { get; set; } + + [JsonPropertyName("labels")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public Dictionary? Labels { get; set; } + + [JsonPropertyName("images")] + public List Images { get; set; } = new(); +} + +internal sealed class RuntimePolicyEvaluationResponseDocument +{ + [JsonPropertyName("ttlSeconds")] + public int? TtlSeconds { get; set; } + + [JsonPropertyName("expiresAtUtc")] + public DateTimeOffset? 
ExpiresAtUtc { get; set; } + + [JsonPropertyName("policyRevision")] + public string? PolicyRevision { get; set; } + + [JsonPropertyName("results")] + public Dictionary? Results { get; set; } +} + +internal sealed class RuntimePolicyEvaluationImageDocument +{ + [JsonPropertyName("policyVerdict")] + public string? PolicyVerdict { get; set; } + + [JsonPropertyName("signed")] + public bool? Signed { get; set; } + + [JsonPropertyName("hasSbom")] + public bool? HasSbom { get; set; } + + [JsonPropertyName("reasons")] + public List? Reasons { get; set; } + + [JsonPropertyName("rekor")] + public RuntimePolicyRekorDocument? Rekor { get; set; } + + [JsonExtensionData] + public Dictionary? ExtensionData { get; set; } +} + +internal sealed class RuntimePolicyRekorDocument +{ + [JsonPropertyName("uuid")] + public string? Uuid { get; set; } + + [JsonPropertyName("url")] + public string? Url { get; set; } +} diff --git a/src/StellaOps.Cli/TASKS.md b/src/StellaOps.Cli/TASKS.md index 964f6278..8131cdeb 100644 --- a/src/StellaOps.Cli/TASKS.md +++ b/src/StellaOps.Cli/TASKS.md @@ -15,8 +15,10 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |Document advanced Authority tuning|Docs/CLI|Expose auth client resilience settings|**DONE (2025-10-10)** – docs/09 and docs/10 describe retry/offline settings with env examples and point to the integration guide.| |Surface password policy diagnostics in CLI output|DevEx/CLI, Security Guild|AUTHSEC-CRYPTO-02-004|**DONE (2025-10-15)** – CLI startup runs the Authority plug-in analyzer, logs weakened password policy warnings with manifest paths, added unit tests (`dotnet test src/StellaOps.Cli.Tests`) and updated docs/09 with remediation guidance.| |EXCITITOR-CLI-01-001 – Add `excititor` command group|DevEx/CLI|EXCITITOR-WEB-01-001|DONE (2025-10-18) – Introduced `excititor` verbs (init/pull/resume/list-providers/export/verify/reconcile) with token-auth backend calls, provenance-friendly logging, and regression coverage.| -|EXCITITOR-CLI-01-002 – Export download & attestation UX|DevEx/CLI|EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001|TODO – Display export metadata (sha256, size, Rekor link), support optional artifact download path, and handle cache hits gracefully.| -|EXCITITOR-CLI-01-003 – CLI docs & examples for Excititor|Docs/CLI|EXCITITOR-CLI-01-001|TODO – Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow.| -|CLI-RUNTIME-13-005 – Runtime policy test verbs|DevEx/CLI|SCANNER-RUNTIME-12-302, ZASTAVA-WEBHOOK-12-102|TODO – Add `runtime policy test` and related verbs to query `/policy/runtime`, display verdicts/TTL/reasons, and support batch inputs.| +|EXCITITOR-CLI-01-002 – Export download & attestation UX|DevEx/CLI|EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001|DONE (2025-10-19) – CLI export prints digest/size/Rekor metadata, `--output` downloads with SHA-256 verification + cache reuse, and unit coverage validated via `dotnet test src/StellaOps.Cli.Tests`.| +|EXCITITOR-CLI-01-003 – CLI docs & examples for Excititor|Docs/CLI|EXCITITOR-CLI-01-001|**DOING (2025-10-19)** – Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow.| +|CLI-RUNTIME-13-005 – Runtime policy test verbs|DevEx/CLI|SCANNER-RUNTIME-12-302, ZASTAVA-WEBHOOK-12-102|**DONE (2025-10-19)** – Added `runtime policy test` command (stdin/file support, JSON output), backend client method + typed models, verdict 
table output, docs/tests updated (`dotnet test src/StellaOps.Cli.Tests`).|
 |CLI-OFFLINE-13-006 – Offline kit workflows|DevEx/CLI|DEVOPS-OFFLINE-14-002|TODO – Implement `offline kit pull/import/status` commands with integrity checks, resumable downloads, and doc updates.|
 |CLI-PLUGIN-13-007 – Plugin packaging|DevEx/CLI|CLI-RUNTIME-13-005, CLI-OFFLINE-13-006|TODO – Package non-core verbs as restart-time plug-ins (manifest + loader updates, tests ensuring no hot reload).|
+|CLI-RUNTIME-13-008 – Runtime policy contract sync|DevEx/CLI, Scanner WebService Guild|SCANNER-RUNTIME-12-302|TODO – Once `/api/v1/scanner/policy/runtime` exits TODO, verify CLI output against final schema (field names, metadata) and update formatter/tests if the contract moves. Capture joint review notes in docs/09 and link Scanner task sign-off.|
+|CLI-RUNTIME-13-009 – Runtime policy smoke fixture|DevEx/CLI, QA Guild|CLI-RUNTIME-13-005|TODO – Build Spectre test harness exercising `runtime policy test` against a stubbed backend to lock output shape (table + `--json`) and guard regressions. Integrate into `dotnet test` suite.|
diff --git a/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs b/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs
new file mode 100644
index 00000000..1615441b
--- /dev/null
+++ b/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs
@@ -0,0 +1,159 @@
+using StellaOps.Concelier.Connector.Common;
+
+namespace StellaOps.Concelier.Connector.Common.State;
+
+/// <summary>
+/// Describes a raw upstream document that should be persisted for a connector during seeding.
+/// </summary>
+public sealed record SourceStateSeedDocument
+{
+    /// <summary>
+    /// Absolute source URI. Must match the connector's upstream document identifier.
+    /// </summary>
+    public string Uri { get; init; } = string.Empty;
+
+    /// <summary>
+    /// Raw document payload. Required when creating or replacing a document.
+    /// </summary>
+    public byte[] Content { get; init; } = Array.Empty<byte>();
+
+    /// <summary>
+    /// Optional explicit document identifier. When provided it overrides auto-generated IDs.
+    /// </summary>
+    public Guid? DocumentId { get; init; }
+
+    /// <summary>
+    /// MIME type for the document payload.
+    /// </summary>
+    public string? ContentType { get; init; }
+
+    /// <summary>
+    /// Status assigned to the document. Defaults to <see cref="DocumentStatuses.PendingParse"/>.
+    /// </summary>
+    public string Status { get; init; } = DocumentStatuses.PendingParse;
+
+    /// <summary>
+    /// Optional HTTP-style headers persisted alongside the raw document.
+    /// </summary>
+    public IReadOnlyDictionary<string, string>? Headers { get; init; }
+
+    /// <summary>
+    /// Source metadata (connector specific) persisted alongside the raw document.
+    /// </summary>
+    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
+
+    /// <summary>
+    /// Upstream ETag value, if available.
+    /// </summary>
+    public string? Etag { get; init; }
+
+    /// <summary>
+    /// Upstream last-modified timestamp, if available.
+    /// </summary>
+    public DateTimeOffset? LastModified { get; init; }
+
+    /// <summary>
+    /// Optional document expiration. When set a TTL will purge the raw payload after the configured retention.
+    /// </summary>
+    public DateTimeOffset? ExpiresAt { get; init; }
+
+    /// <summary>
+    /// Fetch timestamp stamped onto the document. Defaults to the seed completion timestamp.
+    /// </summary>
+    public DateTimeOffset? FetchedAt { get; init; }
+
+    /// <summary>
+    /// When true, the document ID will be appended to the connector cursor's pendingDocuments set.
+    /// </summary>
+    public bool AddToPendingDocuments { get; init; } = true;
+
+    /// <summary>
+    /// When true, the document ID will be appended to the connector cursor's pendingMappings set.
+ /// + public bool AddToPendingMappings { get; init; } + + /// + /// Optional identifiers that should be recorded on the cursor to avoid duplicate ingestion. + /// + public IReadOnlyCollection? KnownIdentifiers { get; init; } +} + +/// +/// Cursor updates that should accompany seeded documents. +/// +public sealed record SourceStateSeedCursor +{ + /// + /// Optional pendingDocuments additions expressed as document IDs. + /// + public IReadOnlyCollection? PendingDocuments { get; init; } + + /// + /// Optional pendingMappings additions expressed as document IDs. + /// + public IReadOnlyCollection? PendingMappings { get; init; } + + /// + /// Optional known advisory identifiers to merge with the cursor. + /// + public IReadOnlyCollection? KnownAdvisories { get; init; } + + /// + /// Upstream window watermark tracked by connectors that rely on last-modified cursors. + /// + public DateTimeOffset? LastModifiedCursor { get; init; } + + /// + /// Optional fetch timestamp used by connectors that track the last polling instant. + /// + public DateTimeOffset? LastFetchAt { get; init; } + + /// + /// Additional cursor fields (string values) to merge. + /// + public IReadOnlyDictionary? Additional { get; init; } +} + +/// +/// Seeding specification describing the source, documents, and cursor edits to apply. +/// +public sealed record SourceStateSeedSpecification +{ + /// + /// Source/connector name (e.g. vndr.msrc). + /// + public string Source { get; init; } = string.Empty; + + /// + /// Documents that should be inserted or replaced before the cursor update. + /// + public IReadOnlyList Documents { get; init; } = Array.Empty(); + + /// + /// Cursor adjustments applied after documents are persisted. + /// + public SourceStateSeedCursor? Cursor { get; init; } + + /// + /// Connector-level known advisory identifiers to merge into the cursor. + /// + public IReadOnlyCollection? KnownAdvisories { get; init; } + + /// + /// Optional completion timestamp. Defaults to the processor's time provider. + /// + public DateTimeOffset? CompletedAt { get; init; } +} + +/// +/// Result returned after seeding completes. +/// +public sealed record SourceStateSeedResult( + int DocumentsProcessed, + int PendingDocumentsAdded, + int PendingMappingsAdded, + IReadOnlyCollection DocumentIds, + IReadOnlyCollection PendingDocumentIds, + IReadOnlyCollection PendingMappingIds, + IReadOnlyCollection KnownAdvisoriesAdded, + DateTimeOffset CompletedAt); diff --git a/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs b/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs new file mode 100644 index 00000000..ec6222eb --- /dev/null +++ b/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs @@ -0,0 +1,329 @@ +using System.Security.Cryptography; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using MongoDB.Bson; +using StellaOps.Concelier.Connector.Common.Fetch; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Documents; + +namespace StellaOps.Concelier.Connector.Common.State; + +/// +/// Persists raw documents and cursor state for connectors that require manual seeding. 
+/// +public sealed class SourceStateSeedProcessor +{ + private readonly IDocumentStore _documentStore; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly ISourceStateRepository _stateRepository; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public SourceStateSeedProcessor( + IDocumentStore documentStore, + RawDocumentStorage rawDocumentStorage, + ISourceStateRepository stateRepository, + TimeProvider? timeProvider = null, + ILogger? logger = null) + { + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? NullLogger.Instance; + } + + public async Task ProcessAsync(SourceStateSeedSpecification specification, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(specification); + ArgumentException.ThrowIfNullOrEmpty(specification.Source); + + var completedAt = specification.CompletedAt ?? _timeProvider.GetUtcNow(); + var documentIds = new List(); + var pendingDocumentIds = new HashSet(); + var pendingMappingIds = new HashSet(); + var knownAdvisories = new HashSet(StringComparer.OrdinalIgnoreCase); + + AppendRange(knownAdvisories, specification.KnownAdvisories); + + if (specification.Cursor is { } cursorSeed) + { + AppendRange(pendingDocumentIds, cursorSeed.PendingDocuments); + AppendRange(pendingMappingIds, cursorSeed.PendingMappings); + AppendRange(knownAdvisories, cursorSeed.KnownAdvisories); + } + + foreach (var document in specification.Documents ?? Array.Empty()) + { + cancellationToken.ThrowIfCancellationRequested(); + await ProcessDocumentAsync(specification.Source, document, completedAt, documentIds, pendingDocumentIds, pendingMappingIds, knownAdvisories, cancellationToken).ConfigureAwait(false); + } + + var state = await _stateRepository.TryGetAsync(specification.Source, cancellationToken).ConfigureAwait(false); + var cursor = state?.Cursor ?? new BsonDocument(); + + var newlyPendingDocuments = MergeGuidArray(cursor, "pendingDocuments", pendingDocumentIds); + var newlyPendingMappings = MergeGuidArray(cursor, "pendingMappings", pendingMappingIds); + var newlyKnownAdvisories = MergeStringArray(cursor, "knownAdvisories", knownAdvisories); + + if (specification.Cursor is { } cursorSpec) + { + if (cursorSpec.LastModifiedCursor.HasValue) + { + cursor["lastModifiedCursor"] = cursorSpec.LastModifiedCursor.Value.UtcDateTime; + } + + if (cursorSpec.LastFetchAt.HasValue) + { + cursor["lastFetchAt"] = cursorSpec.LastFetchAt.Value.UtcDateTime; + } + + if (cursorSpec.Additional is not null) + { + foreach (var kvp in cursorSpec.Additional) + { + cursor[kvp.Key] = kvp.Value; + } + } + } + + cursor["lastSeededAt"] = completedAt.UtcDateTime; + await _stateRepository.UpdateCursorAsync(specification.Source, cursor, completedAt, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Seeded {Documents} document(s) for {Source}. 
pendingDocuments+= {PendingDocuments}, pendingMappings+= {PendingMappings}, knownAdvisories+= {KnownAdvisories}", + documentIds.Count, + specification.Source, + newlyPendingDocuments.Count, + newlyPendingMappings.Count, + newlyKnownAdvisories.Count); + + return new SourceStateSeedResult( + DocumentsProcessed: documentIds.Count, + PendingDocumentsAdded: newlyPendingDocuments.Count, + PendingMappingsAdded: newlyPendingMappings.Count, + DocumentIds: documentIds.AsReadOnly(), + PendingDocumentIds: newlyPendingDocuments, + PendingMappingIds: newlyPendingMappings, + KnownAdvisoriesAdded: newlyKnownAdvisories, + CompletedAt: completedAt); + } + + private async Task ProcessDocumentAsync( + string source, + SourceStateSeedDocument document, + DateTimeOffset completedAt, + List documentIds, + HashSet pendingDocumentIds, + HashSet pendingMappingIds, + HashSet knownAdvisories, + CancellationToken cancellationToken) + { + if (document is null) + { + throw new ArgumentNullException(nameof(document)); + } + + ArgumentException.ThrowIfNullOrEmpty(document.Uri); + if (document.Content is not { Length: > 0 }) + { + throw new InvalidOperationException($"Seed entry for '{document.Uri}' is missing content bytes."); + } + + var payload = new byte[document.Content.Length]; + Buffer.BlockCopy(document.Content, 0, payload, 0, document.Content.Length); + + if (!document.Uri.Contains("://", StringComparison.Ordinal)) + { + _logger.LogWarning("Seed document URI '{Uri}' does not appear to be absolute.", document.Uri); + } + + var sha256 = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant(); + + var existing = await _documentStore.FindBySourceAndUriAsync(source, document.Uri, cancellationToken).ConfigureAwait(false); + + if (existing?.GridFsId is { } oldGridId) + { + await _rawDocumentStorage.DeleteAsync(oldGridId, cancellationToken).ConfigureAwait(false); + } + + var gridId = await _rawDocumentStorage.UploadAsync( + source, + document.Uri, + payload, + document.ContentType, + document.ExpiresAt, + cancellationToken) + .ConfigureAwait(false); + + var headers = CloneDictionary(document.Headers); + if (!string.IsNullOrWhiteSpace(document.ContentType)) + { + headers ??= new Dictionary(StringComparer.OrdinalIgnoreCase); + if (!headers.ContainsKey("content-type")) + { + headers["content-type"] = document.ContentType!; + } + } + + var metadata = CloneDictionary(document.Metadata); + + var record = new DocumentRecord( + document.DocumentId ?? existing?.Id ?? Guid.NewGuid(), + source, + document.Uri, + document.FetchedAt ?? completedAt, + sha256, + string.IsNullOrWhiteSpace(document.Status) ? DocumentStatuses.PendingParse : document.Status, + document.ContentType, + headers, + metadata, + document.Etag, + document.LastModified, + gridId, + document.ExpiresAt); + + var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + + documentIds.Add(upserted.Id); + + if (document.AddToPendingDocuments) + { + pendingDocumentIds.Add(upserted.Id); + } + + if (document.AddToPendingMappings) + { + pendingMappingIds.Add(upserted.Id); + } + + AppendRange(knownAdvisories, document.KnownIdentifiers); + } + + private static Dictionary? CloneDictionary(IReadOnlyDictionary? 
values) + { + if (values is null || values.Count == 0) + { + return null; + } + + return new Dictionary(values, StringComparer.OrdinalIgnoreCase); + } + + private static IReadOnlyCollection MergeGuidArray(BsonDocument cursor, string field, IReadOnlyCollection additions) + { + if (additions.Count == 0) + { + return Array.Empty(); + } + + var existing = cursor.TryGetValue(field, out var value) && value is BsonArray existingArray + ? existingArray.Select(AsGuid).Where(static g => g != Guid.Empty).ToHashSet() + : new HashSet(); + + var newlyAdded = new List(); + foreach (var guid in additions) + { + if (guid == Guid.Empty) + { + continue; + } + + if (existing.Add(guid)) + { + newlyAdded.Add(guid); + } + } + + if (existing.Count > 0) + { + cursor[field] = new BsonArray(existing + .Select(static g => g.ToString("D")) + .OrderBy(static s => s, StringComparer.OrdinalIgnoreCase)); + } + + return newlyAdded.AsReadOnly(); + } + + private static IReadOnlyCollection MergeStringArray(BsonDocument cursor, string field, IReadOnlyCollection additions) + { + if (additions.Count == 0) + { + return Array.Empty(); + } + + var existing = cursor.TryGetValue(field, out var value) && value is BsonArray existingArray + ? existingArray.Select(static v => v?.AsString ?? string.Empty) + .Where(static s => !string.IsNullOrWhiteSpace(s)) + .ToHashSet(StringComparer.OrdinalIgnoreCase) + : new HashSet(StringComparer.OrdinalIgnoreCase); + + var newlyAdded = new List(); + foreach (var entry in additions) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + var normalized = entry.Trim(); + if (existing.Add(normalized)) + { + newlyAdded.Add(normalized); + } + } + + if (existing.Count > 0) + { + cursor[field] = new BsonArray(existing + .OrderBy(static s => s, StringComparer.OrdinalIgnoreCase)); + } + + return newlyAdded.AsReadOnly(); + } + + private static Guid AsGuid(BsonValue value) + { + if (value is null) + { + return Guid.Empty; + } + + return Guid.TryParse(value.ToString(), out var parsed) ? parsed : Guid.Empty; + } + + private static void AppendRange(HashSet target, IReadOnlyCollection? values) + { + if (values is null) + { + return; + } + + foreach (var guid in values) + { + if (guid != Guid.Empty) + { + target.Add(guid); + } + } + } + + private static void AppendRange(HashSet target, IReadOnlyCollection? 
values) + { + if (values is null) + { + return; + } + + foreach (var value in values) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + target.Add(value.Trim()); + } + } + +} diff --git a/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj b/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj index 92600a8d..0096d321 100644 --- a/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj +++ b/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj @@ -7,8 +7,7 @@ - - + diff --git a/src/StellaOps.Concelier.Connector.Common/TASKS.md b/src/StellaOps.Concelier.Connector.Common/TASKS.md index 08e52db4..65ad7875 100644 --- a/src/StellaOps.Concelier.Connector.Common/TASKS.md +++ b/src/StellaOps.Concelier.Connector.Common/TASKS.md @@ -16,4 +16,4 @@ |Allow per-request Accept header overrides|BE-Conn-Shared|Source.Common|**DONE** – `SourceFetchRequest.AcceptHeaders` honored by `SourceFetchService` plus unit tests for overrides.| |FEEDCONN-SHARED-HTTP2-001 HTTP version fallback policy|BE-Conn-Shared, Source.Common|Source.Common|**DONE (2025-10-11)** – `AddSourceHttpClient` now honours per-connector HTTP version/ policy, exposes handler customisation, and defaults to downgrade-friendly settings; unit tests cover handler configuration hook.| |FEEDCONN-SHARED-TLS-001 Sovereign trust store support|BE-Conn-Shared, Ops|Source.Common|**DONE (2025-10-11)** – `SourceHttpClientOptions` now exposes `TrustedRootCertificates`, `ServerCertificateCustomValidation`, and `AllowInvalidServerCertificates`, and `AddSourceHttpClient` runs the shared configuration binder so connectors can pull `concelier:httpClients|sources::http` settings (incl. Offline Kit relative PEM paths via `concelier:offline:root`). Tests cover handler wiring. Ops follow-up: package RU trust roots for Offline Kit distribution.| -|FEEDCONN-SHARED-STATE-003 Source state seeding helper|Tools Guild, BE-Conn-MSRC|Tools|**TODO (2025-10-15)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands.| +|FEEDCONN-SHARED-STATE-003 Source state seeding helper|Tools Guild, BE-Conn-MSRC|Tools|**DOING (2025-10-19)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. 
Prereqs confirmed none (2025-10-19).| diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json index da219872..da779bf8 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json +++ b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json @@ -2,46 +2,39 @@ "advisoryKey": "RHSA-2025:0001", "affectedPackages": [ { + "type": "cpe", "identifier": "cpe:2.3:o:redhat:enterprise_linux:8:*:*:*:*:*:*:*", "platform": "Red Hat Enterprise Linux 8", - "provenance": [ - { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-05T00:00:00+00:00", - "source": "redhat", - "value": "8Base-RHEL-8" - } - ], + "versionRanges": [], + "normalizedVersions": [], "statuses": [ { "provenance": { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "8Base-RHEL-8" + "kind": "oval", + "value": "8Base-RHEL-8", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] }, "status": "known_affected" } ], - "type": "cpe", - "versionRanges": [] - }, - { - "identifier": "kernel-0:4.18.0-513.5.1.el8.x86_64", - "platform": "Red Hat Enterprise Linux 8", "provenance": [ { - "fieldMask": [], - "kind": "package.nevra", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "kernel-0:4.18.0-513.5.1.el8.x86_64" + "kind": "oval", + "value": "8Base-RHEL-8", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] } - ], - "statuses": [], + ] + }, + { "type": "rpm", + "identifier": "kernel-0:4.18.0-513.5.1.el8.x86_64", + "platform": "Red Hat Enterprise Linux 8", "versionRanges": [ { "fixedVersion": "kernel-0:4.18.0-513.5.1.el8.x86_64", @@ -71,15 +64,28 @@ "vendorExtensions": null }, "provenance": { - "fieldMask": [], - "kind": "package.nevra", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "kernel-0:4.18.0-513.5.1.el8.x86_64" + "kind": "package.nevra", + "value": "kernel-0:4.18.0-513.5.1.el8.x86_64", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] }, "rangeExpression": null, "rangeKind": "nevra" } + ], + "normalizedVersions": [], + "statuses": [], + "provenance": [ + { + "source": "redhat", + "kind": "package.nevra", + "value": "kernel-0:4.18.0-513.5.1.el8.x86_64", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] + } ] } ], @@ -87,31 +93,37 @@ "CVE-2025-0001", "RHSA-2025:0001" ], + "canonicalMetricId": null, + "credits": [], "cvssMetrics": [ { "baseScore": 9.8, "baseSeverity": "critical", "provenance": { - "fieldMask": [], - "kind": "cvss", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "CVE-2025-0001" + "kind": "cvss", + "value": "CVE-2025-0001", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] }, "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", "version": "3.1" } ], + "cwes": [], + "description": null, "exploitKnown": false, "language": "en", "modified": "2025-10-03T00:00:00+00:00", "provenance": [ { - "fieldMask": [], - "kind": "advisory", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "RHSA-2025:0001" + "kind": "advisory", + "value": "RHSA-2025:0001", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + 
"fieldMask": [] } ], "published": "2025-10-02T00:00:00+00:00", @@ -119,11 +131,12 @@ { "kind": "self", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "https://access.redhat.com/errata/RHSA-2025:0001" + "kind": "reference", + "value": "https://access.redhat.com/errata/RHSA-2025:0001", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "RHSA advisory", @@ -132,11 +145,12 @@ { "kind": "external", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-05T00:00:00+00:00", "source": "redhat", - "value": "https://www.cve.org/CVERecord?id=CVE-2025-0001" + "kind": "reference", + "value": "https://www.cve.org/CVERecord?id=CVE-2025-0001", + "decisionReason": null, + "recordedAt": "2025-10-05T00:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "CVE record", diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json index acdd5763..4eb97031 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json +++ b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json @@ -2,85 +2,97 @@ "advisoryKey": "RHSA-2025:0002", "affectedPackages": [ { + "type": "cpe", "identifier": "cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*", "platform": "Red Hat Enterprise Linux 9", - "provenance": [ - { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-05T12:00:00+00:00", - "source": "redhat", - "value": "9Base-RHEL-9" - } - ], + "versionRanges": [], + "normalizedVersions": [], "statuses": [ { "provenance": { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "9Base-RHEL-9" + "kind": "oval", + "value": "9Base-RHEL-9", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] }, "status": "known_not_affected" }, { "provenance": { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "9Base-RHEL-9" + "kind": "oval", + "value": "9Base-RHEL-9", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] }, "status": "under_investigation" } ], - "type": "cpe", - "versionRanges": [] - }, - { - "identifier": "kernel-0:5.14.0-400.el9.x86_64", - "platform": "Red Hat Enterprise Linux 9", "provenance": [ { - "fieldMask": [], - "kind": "package.nevra", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "kernel-0:5.14.0-400.el9.x86_64" + "kind": "oval", + "value": "9Base-RHEL-9", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] } - ], + ] + }, + { + "type": "rpm", + "identifier": "kernel-0:5.14.0-400.el9.x86_64", + "platform": "Red Hat Enterprise Linux 9", + "versionRanges": [], + "normalizedVersions": [], "statuses": [ { "provenance": { - "fieldMask": [], - "kind": "package.nevra", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "kernel-0:5.14.0-400.el9.x86_64" + "kind": "package.nevra", + "value": "kernel-0:5.14.0-400.el9.x86_64", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] }, "status": "known_not_affected" } ], - "type": "rpm", - "versionRanges": [] + "provenance": [ + 
{ + "source": "redhat", + "kind": "package.nevra", + "value": "kernel-0:5.14.0-400.el9.x86_64", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] + } + ] } ], "aliases": [ "CVE-2025-0002", "RHSA-2025:0002" ], + "canonicalMetricId": null, + "credits": [], "cvssMetrics": [], + "cwes": [], + "description": null, "exploitKnown": false, "language": "en", "modified": "2025-10-05T12:00:00+00:00", "provenance": [ { - "fieldMask": [], - "kind": "advisory", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "RHSA-2025:0002" + "kind": "advisory", + "value": "RHSA-2025:0002", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] } ], "published": "2025-10-05T12:00:00+00:00", @@ -88,11 +100,12 @@ { "kind": "self", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "https://access.redhat.com/errata/RHSA-2025:0002" + "kind": "reference", + "value": "https://access.redhat.com/errata/RHSA-2025:0002", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "RHSA advisory", @@ -101,11 +114,12 @@ { "kind": "external", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-05T12:00:00+00:00", "source": "redhat", - "value": "https://www.cve.org/CVERecord?id=CVE-2025-0002" + "kind": "reference", + "value": "https://www.cve.org/CVERecord?id=CVE-2025-0002", + "decisionReason": null, + "recordedAt": "2025-10-05T12:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "CVE record", diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json index 9649bae7..bb9a99b1 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json +++ b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json @@ -2,62 +2,71 @@ "advisoryKey": "RHSA-2025:0003", "affectedPackages": [ { + "type": "cpe", "identifier": "cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*", "platform": "Red Hat Enterprise Linux 9", - "provenance": [ - { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-06T09:00:00+00:00", - "source": "redhat", - "value": "9Base-RHEL-9" - } - ], + "versionRanges": [], + "normalizedVersions": [], "statuses": [ { "provenance": { - "fieldMask": [], - "kind": "oval", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "9Base-RHEL-9" + "kind": "oval", + "value": "9Base-RHEL-9", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "status": "known_affected" } ], - "type": "cpe", - "versionRanges": [] + "provenance": [ + { + "source": "redhat", + "kind": "oval", + "value": "9Base-RHEL-9", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] + } + ] } ], "aliases": [ "CVE-2025-0003", "RHSA-2025:0003" ], + "canonicalMetricId": null, + "credits": [], "cvssMetrics": [ { "baseScore": 7.5, "baseSeverity": "high", "provenance": { - "fieldMask": [], - "kind": "cvss", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "CVE-2025-0003" + "kind": "cvss", + "value": "CVE-2025-0003", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "vector": 
"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N", "version": "3.1" } ], + "cwes": [], + "description": null, "exploitKnown": false, "language": "en", "modified": "2025-10-06T09:00:00+00:00", "provenance": [ { - "fieldMask": [], - "kind": "advisory", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "RHSA-2025:0003" + "kind": "advisory", + "value": "RHSA-2025:0003", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] } ], "published": "2025-10-06T09:00:00+00:00", @@ -65,11 +74,12 @@ { "kind": "self", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "https://access.redhat.com/errata/RHSA-2025:0003" + "kind": "reference", + "value": "https://access.redhat.com/errata/RHSA-2025:0003", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "Primary advisory", @@ -78,11 +88,12 @@ { "kind": "mitigation", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "https://access.redhat.com/solutions/999999" + "kind": "reference", + "value": "https://access.redhat.com/solutions/999999", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "Knowledge base guidance", @@ -91,11 +102,12 @@ { "kind": "exploit", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "https://bugzilla.redhat.com/show_bug.cgi?id=2222222" + "kind": "reference", + "value": "https://bugzilla.redhat.com/show_bug.cgi?id=2222222", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "Exploit tracking", @@ -104,11 +116,12 @@ { "kind": "external", "provenance": { - "fieldMask": [], - "kind": "reference", - "recordedAt": "2025-10-06T09:00:00+00:00", "source": "redhat", - "value": "https://www.cve.org/CVERecord?id=CVE-2025-0003" + "kind": "reference", + "value": "https://www.cve.org/CVERecord?id=CVE-2025-0003", + "decisionReason": null, + "recordedAt": "2025-10-06T09:00:00+00:00", + "fieldMask": [] }, "sourceTag": null, "summary": "CVE record", diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md b/src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md index 2c431a34..96e535cc 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md +++ b/src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md @@ -13,4 +13,4 @@ |Express unaffected/investigation statuses without overloading range fields|BE-Conn-RH|Models|**DONE** – Introduced AffectedPackageStatus collection and updated mapper/tests.| |Reference dedupe & ordering in mapper|BE-Conn-RH|Models|DONE – mapper consolidates by URL, merges metadata, deterministic ordering validated in tests.| |Hydra summary fetch through SourceFetchService|BE-Conn-RH|Source.Common|DONE – summary pages now fetched via SourceFetchService with cache + conditional headers.| -|Fixture validation sweep|QA|Testing|**DOING (2025-10-10)** – Regenerate RHSA fixtures once mapper fixes land, review snapshot diffs, and update docs; blocked by outstanding range provenance patches.| +|Fixture validation sweep|QA|None|**DOING (2025-10-19)** – Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.| diff --git 
a/src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs b/src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs index 8450839d..be5977e9 100644 --- a/src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs +++ b/src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs @@ -3,7 +3,8 @@ using System.Collections.Generic; using System.Linq; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; +using Microsoft.Extensions.Options; +using MongoDB.Driver; using StellaOps.Concelier.Core.Jobs; namespace StellaOps.Concelier.Core.Tests; @@ -311,10 +312,11 @@ public sealed class JobCoordinatorTests public TaskCompletionSource Completion { get; } = new(TaskCreationOptions.RunContinuationsAsynchronously); public List CreatedRuns { get; } = new(); - public Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken) - { - var run = new JobRunSnapshot( - Guid.NewGuid(), + public Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + var run = new JobRunSnapshot( + Guid.NewGuid(), request.Kind, JobRunStatus.Pending, request.CreatedAt, @@ -331,9 +333,10 @@ public sealed class JobCoordinatorTests return Task.FromResult(run); } - public Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken) - { - if (_runs.TryGetValue(runId, out var run)) + public Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + if (_runs.TryGetValue(runId, out var run)) { var updated = run with { Status = JobRunStatus.Running, StartedAt = startedAt }; _runs[runId] = updated; @@ -343,9 +346,10 @@ public sealed class JobCoordinatorTests return Task.FromResult(null); } - public Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken) - { - if (_runs.TryGetValue(runId, out var run)) + public Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + if (_runs.TryGetValue(runId, out var run)) { var updated = run with { Status = completion.Status, CompletedAt = completion.CompletedAt, Error = completion.Error }; _runs[runId] = updated; @@ -356,15 +360,17 @@ public sealed class JobCoordinatorTests return Task.FromResult(null); } - public Task FindAsync(Guid runId, CancellationToken cancellationToken) - { - _runs.TryGetValue(runId, out var run); - return Task.FromResult(run); - } + public Task FindAsync(Guid runId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + _runs.TryGetValue(runId, out var run); + return Task.FromResult(run); + } - public Task> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken) - { - var query = _runs.Values.AsEnumerable(); + public Task> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + _ = session; + var query = _runs.Values.AsEnumerable(); if (!string.IsNullOrWhiteSpace(kind)) { query = query.Where(r => r.Kind == kind); @@ -373,23 +379,26 @@ public sealed class JobCoordinatorTests return Task.FromResult>(query.OrderByDescending(r => r.CreatedAt).Take(limit).ToArray()); } - public Task> GetActiveRunsAsync(CancellationToken cancellationToken) - { - return Task.FromResult>(_runs.Values.Where(r => r.Status is JobRunStatus.Pending or JobRunStatus.Running).ToArray()); - } + public Task> GetActiveRunsAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult>(_runs.Values.Where(r => r.Status is JobRunStatus.Pending or JobRunStatus.Running).ToArray()); + } - public Task GetLastRunAsync(string kind, CancellationToken cancellationToken) - { - var run = _runs.Values - .Where(r => r.Kind == kind) - .OrderByDescending(r => r.CreatedAt) + public Task GetLastRunAsync(string kind, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + var run = _runs.Values + .Where(r => r.Kind == kind) + .OrderByDescending(r => r.CreatedAt) .FirstOrDefault(); return Task.FromResult(run); } - public Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken) - { - var results = new Dictionary(StringComparer.Ordinal); + public Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + var results = new Dictionary(StringComparer.Ordinal); foreach (var kind in kinds.Distinct(StringComparer.Ordinal)) { var run = _runs.Values diff --git a/src/StellaOps.Concelier.Core/Jobs/IJobStore.cs b/src/StellaOps.Concelier.Core/Jobs/IJobStore.cs index c5d544f1..f5210f97 100644 --- a/src/StellaOps.Concelier.Core/Jobs/IJobStore.cs +++ b/src/StellaOps.Concelier.Core/Jobs/IJobStore.cs @@ -1,20 +1,22 @@ -namespace StellaOps.Concelier.Core.Jobs; - -public interface IJobStore -{ - Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken); - - Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken); - - Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken); - - Task FindAsync(Guid runId, CancellationToken cancellationToken); - - Task> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken); - - Task> GetActiveRunsAsync(CancellationToken cancellationToken); - - Task GetLastRunAsync(string kind, CancellationToken cancellationToken); - - Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken); -} +using MongoDB.Driver; + +namespace StellaOps.Concelier.Core.Jobs; + +public interface IJobStore +{ + Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task FindAsync(Guid runId, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task> GetActiveRunsAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null); + + Task GetLastRunAsync(string kind, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken, IClientSessionHandle? session = null); +} diff --git a/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj b/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj index a6c73baf..a85281ba 100644 --- a/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj +++ b/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj @@ -6,8 +6,9 @@ enable true - - + + + diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs b/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs index 464a5ce7..fc80b5bf 100644 --- a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs +++ b/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs @@ -1,15 +1,16 @@ using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Concelier.Core.Jobs; -using StellaOps.Concelier.Exporter.Json; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Exporting; -using StellaOps.Concelier.Models; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Concelier.Core.Jobs; +using StellaOps.Concelier.Exporter.Json; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Exporting; +using StellaOps.Concelier.Models; namespace StellaOps.Concelier.Exporter.Json.Tests; @@ -45,21 +46,31 @@ public sealed class JsonExporterDependencyInjectionRoutineTests private sealed class StubAdvisoryStore : IAdvisoryStore { - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) - => Task.FromResult>(Array.Empty()); - - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) - => Task.FromResult(null); - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) - => Task.CompletedTask; - - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) - { - return Enumerate(cancellationToken); - - static async IAsyncEnumerable Enumerate([EnumeratorCancellation] CancellationToken ct) - { + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult>(Array.Empty()); + } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult(null); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.CompletedTask; + } + + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + _ = session; + return Enumerate(cancellationToken); + + static async IAsyncEnumerable Enumerate([EnumeratorCancellation] CancellationToken ct) + { ct.ThrowIfCancellationRequested(); await Task.Yield(); yield break; diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs b/src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs index 19760ca4..836405a8 100644 --- a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs +++ b/src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs @@ -9,11 +9,12 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Concelier.Exporter.Json; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Exporting; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Concelier.Exporter.Json; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Exporting; namespace StellaOps.Concelier.Exporter.Json.Tests; @@ -259,21 +260,31 @@ public sealed class JsonFeedExporterTests : IDisposable _advisories = advisories; } - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) - => Task.FromResult(_advisories); - - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) - => Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) - => Task.CompletedTask; - - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) - { - return EnumerateAsync(cancellationToken); - - async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) - { + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult(_advisories); + } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.CompletedTask; + } + + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + _ = session; + return EnumerateAsync(cancellationToken); + + async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) + { foreach (var advisory in _advisories) { ct.ThrowIfCancellationRequested(); diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs b/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs index 977c2cab..5b3b2337 100644 --- a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs +++ b/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs @@ -11,6 +11,7 @@ using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; +using MongoDB.Driver; using StellaOps.Concelier.Exporter.Json; using StellaOps.Concelier.Exporter.TrivyDb; using StellaOps.Concelier.Models; @@ -733,17 +734,27 @@ public sealed class TrivyDbFeedExporterTests : IDisposable _advisories = advisories; } - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) - => Task.FromResult(_advisories); - - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) - => Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) - => Task.CompletedTask; - - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) { + _ = session; + return Task.FromResult(_advisories); + } + + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.FromResult(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey)); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return Task.CompletedTask; + } + + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; return EnumerateAsync(cancellationToken); async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken ct) diff --git a/src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs b/src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs index dbefc87e..2833104f 100644 --- a/src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs +++ b/src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs @@ -1,6 +1,7 @@ using System.Collections.Concurrent; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; +using MongoDB.Driver; using StellaOps.Concelier.Core; using StellaOps.Concelier.Merge.Services; using StellaOps.Concelier.Models; @@ -166,23 +167,32 @@ public sealed class AdvisoryMergeServiceTests } } - public Task FindAsync(string advisoryKey, CancellationToken cancellationToken) + public Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) { + _ = session; _advisories.TryGetValue(advisoryKey, out var advisory); return Task.FromResult(advisory); } - public Task> GetRecentAsync(int limit, CancellationToken cancellationToken) - => Task.FromResult>(Array.Empty()); - - public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) + public Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) { + _ = session; + return Task.FromResult>(Array.Empty()); + } + + public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; _advisories[advisory.AdvisoryKey] = advisory; LastUpserted = advisory; return Task.CompletedTask; } - public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken) => AsyncEnumerable.Empty(); + public IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _ = session; + return AsyncEnumerable.Empty(); + } } private sealed class InMemoryMergeEventStore : IMergeEventStore diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs index 40788198..903dd6b8 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs @@ -37,7 +37,7 @@ public sealed class AdvisoryStore : IAdvisoryStore } - public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken) + public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null) { ArgumentNullException.ThrowIfNull(advisory); @@ -67,24 +67,35 @@ public sealed class AdvisoryStore : IAdvisoryStore NormalizedVersions = normalizedVersions, }; - var options = new ReplaceOptions { IsUpsert = true }; - await _collection.ReplaceOneAsync(x => x.AdvisoryKey == advisory.AdvisoryKey, document, options, cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Upserted advisory {AdvisoryKey}", advisory.AdvisoryKey); - - var aliasEntries = BuildAliasEntries(advisory); - var updatedAt = _timeProvider.GetUtcNow(); - await _aliasStore.ReplaceAsync(advisory.AdvisoryKey, aliasEntries, updatedAt, cancellationToken).ConfigureAwait(false); - } - - public async Task FindAsync(string advisoryKey, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrEmpty(advisoryKey); - var document = await _collection.Find(x => x.AdvisoryKey == advisoryKey) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - return document is null ? null : Deserialize(document.Payload); - } + var options = new ReplaceOptions { IsUpsert = true }; + var filter = Builders.Filter.Eq(x => x.AdvisoryKey, advisory.AdvisoryKey); + if (session is null) + { + await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + _logger.LogDebug("Upserted advisory {AdvisoryKey}", advisory.AdvisoryKey); + + var aliasEntries = BuildAliasEntries(advisory); + var updatedAt = _timeProvider.GetUtcNow(); + await _aliasStore.ReplaceAsync(advisory.AdvisoryKey, aliasEntries, updatedAt, cancellationToken).ConfigureAwait(false); + } + + public async Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + ArgumentException.ThrowIfNullOrEmpty(advisoryKey); + var filter = Builders.Filter.Eq(x => x.AdvisoryKey, advisoryKey); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + + return document is null ? null : Deserialize(document.Payload); + } private static IEnumerable BuildAliasEntries(Advisory advisory) { @@ -103,29 +114,31 @@ public sealed class AdvisoryStore : IAdvisoryStore yield return new AliasEntry(AliasStoreConstants.PrimaryScheme, advisory.AdvisoryKey); } - public async Task> GetRecentAsync(int limit, CancellationToken cancellationToken) - { - var cursor = await _collection.Find(FilterDefinition.Empty) - .SortByDescending(x => x.Modified) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(static doc => Deserialize(doc.Payload)).ToArray(); - } - - public async IAsyncEnumerable StreamAsync([EnumeratorCancellation] CancellationToken cancellationToken) - { - var options = new FindOptions - { - Sort = Builders.Sort.Ascending(static doc => doc.AdvisoryKey), - }; - - using var cursor = await _collection.FindAsync( - FilterDefinition.Empty, - options, - cancellationToken) - .ConfigureAwait(false); + public async Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var filter = FilterDefinition.Empty; + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + var cursor = await query + .SortByDescending(x => x.Modified) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(static doc => Deserialize(doc.Payload)).ToArray(); + } + + public async IAsyncEnumerable StreamAsync([EnumeratorCancellation] CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var options = new FindOptions + { + Sort = Builders.Sort.Ascending(static doc => doc.AdvisoryKey), + }; + + using var cursor = session is null + ? await _collection.FindAsync(FilterDefinition.Empty, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindAsync(session, FilterDefinition.Empty, options, cancellationToken).ConfigureAwait(false); while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) { diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs index e5ff7bf0..5ada4dfe 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs @@ -1,14 +1,15 @@ -using StellaOps.Concelier.Models; - -namespace StellaOps.Concelier.Storage.Mongo.Advisories; - -public interface IAdvisoryStore -{ - Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken); - - Task FindAsync(string advisoryKey, CancellationToken cancellationToken); - - Task> GetRecentAsync(int limit, CancellationToken cancellationToken); - - IAsyncEnumerable StreamAsync(CancellationToken cancellationToken); -} +using MongoDB.Driver; +using StellaOps.Concelier.Models; + +namespace StellaOps.Concelier.Storage.Mongo.Advisories; + +public interface IAdvisoryStore +{ + Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? 
session = null); + + Task> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + IAsyncEnumerable StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs index 672ca39a..252928b4 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs @@ -15,54 +15,73 @@ public sealed class DocumentStore : IDocumentStore _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - var document = DocumentDocumentExtensions.FromRecord(record); + public async Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentNullException.ThrowIfNull(record); + + var document = DocumentDocumentExtensions.FromRecord(record); var filter = Builders.Filter.Eq(x => x.SourceName, record.SourceName) & Builders.Filter.Eq(x => x.Uri, record.Uri); - var options = new FindOneAndReplaceOptions - { - IsUpsert = true, - ReturnDocument = ReturnDocument.After, - }; - - var replaced = await _collection.FindOneAndReplaceAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Upserted document {Source}/{Uri}", record.SourceName, record.Uri); - return (replaced ?? document).ToRecord(); - } - - public async Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrEmpty(sourceName); - ArgumentException.ThrowIfNullOrEmpty(uri); - - var filter = Builders.Filter.Eq(x => x.SourceName, sourceName) - & Builders.Filter.Eq(x => x.Uri, uri); - - var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToRecord(); - } - - public async Task FindAsync(Guid id, CancellationToken cancellationToken) - { - var idValue = id.ToString(); - var document = await _collection.Find(x => x.Id == idValue).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToRecord(); - } - - public async Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrEmpty(status); - - var update = Builders.Update - .Set(x => x.Status, status) - .Set(x => x.LastModified, DateTime.UtcNow); - - var idValue = id.ToString(); - var result = await _collection.UpdateOneAsync(x => x.Id == idValue, update, cancellationToken: cancellationToken).ConfigureAwait(false); - return result.MatchedCount > 0; - } -} + var options = new FindOneAndReplaceOptions + { + IsUpsert = true, + ReturnDocument = ReturnDocument.After, + }; + + var replaced = session is null + ? await _collection.FindOneAndReplaceAsync(filter, document, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndReplaceAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + _logger.LogDebug("Upserted document {Source}/{Uri}", record.SourceName, record.Uri); + return (replaced ?? document).ToRecord(); + } + + public async Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + ArgumentException.ThrowIfNullOrEmpty(sourceName); + ArgumentException.ThrowIfNullOrEmpty(uri); + + var filter = Builders.Filter.Eq(x => x.SourceName, sourceName) + & Builders.Filter.Eq(x => x.Uri, uri); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } + + public async Task FindAsync(Guid id, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var idValue = id.ToString(); + var filter = Builders.Filter.Eq(x => x.Id, idValue); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } + + public async Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentException.ThrowIfNullOrEmpty(status); + + var update = Builders.Update + .Set(x => x.Status, status) + .Set(x => x.LastModified, DateTime.UtcNow); + + var idValue = id.ToString(); + var filter = Builders.Filter.Eq(x => x.Id, idValue); + UpdateResult result; + if (session is null) + { + result = await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await _collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + return result.MatchedCount > 0; + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs index fc4d0179..330f263c 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs @@ -1,12 +1,14 @@ -namespace StellaOps.Concelier.Storage.Mongo.Documents; - -public interface IDocumentStore -{ - Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken); - - Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken); - - Task FindAsync(Guid id, CancellationToken cancellationToken); - - Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken); -} +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Documents; + +public interface IDocumentStore +{ + Task UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task FindAsync(Guid id, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, IClientSessionHandle? session = null); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs index 6a843c20..4ecffdc0 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs @@ -15,43 +15,52 @@ public sealed class DtoStore : IDtoStore _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } - public async Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - var document = DtoDocumentExtensions.FromRecord(record); - var documentId = record.DocumentId.ToString(); + public async Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentNullException.ThrowIfNull(record); + + var document = DtoDocumentExtensions.FromRecord(record); + var documentId = record.DocumentId.ToString(); var filter = Builders.Filter.Eq(x => x.DocumentId, documentId) & Builders.Filter.Eq(x => x.SourceName, record.SourceName); var options = new FindOneAndReplaceOptions - { - IsUpsert = true, - ReturnDocument = ReturnDocument.After, - }; - - var replaced = await _collection.FindOneAndReplaceAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Upserted DTO for {Source}/{DocumentId}", record.SourceName, record.DocumentId); - return (replaced ?? document).ToRecord(); - } - - public async Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken) - { - var documentIdValue = documentId.ToString(); - var document = await _collection.Find(x => x.DocumentId == documentIdValue) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - return document?.ToRecord(); - } - - public async Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken) - { - var cursor = await _collection.Find(x => x.SourceName == sourceName) - .SortByDescending(x => x.ValidatedAt) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - + { + IsUpsert = true, + ReturnDocument = ReturnDocument.After, + }; + + var replaced = session is null + ? await _collection.FindOneAndReplaceAsync(filter, document, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndReplaceAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + _logger.LogDebug("Upserted DTO for {Source}/{DocumentId}", record.SourceName, record.DocumentId); + return (replaced ?? document).ToRecord(); + } + + public async Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var documentIdValue = documentId.ToString(); + var filter = Builders.Filter.Eq(x => x.DocumentId, documentIdValue); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } + + public async Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var filter = Builders.Filter.Eq(x => x.SourceName, sourceName); + var query = session is null + ? 
_collection.Find(filter) + : _collection.Find(session, filter); + + var cursor = await query + .SortByDescending(x => x.ValidatedAt) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + return cursor.Select(static x => x.ToRecord()).ToArray(); } } diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs b/src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs index 25c0e59d..082a5b7e 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs @@ -1,10 +1,12 @@ -namespace StellaOps.Concelier.Storage.Mongo.Dtos; - -public interface IDtoStore -{ - Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken); - - Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken); - - Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken); -} +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Dtos; + +public interface IDtoStore +{ + Task UpsertAsync(DtoRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs b/src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs index 491f36ea..5174a7d1 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs @@ -1,14 +1,15 @@ -using MongoDB.Bson; - -namespace StellaOps.Concelier.Storage.Mongo; - -public interface ISourceStateRepository -{ - Task TryGetAsync(string sourceName, CancellationToken cancellationToken); - - Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken); - - Task UpdateCursorAsync(string sourceName, BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken); - - Task MarkFailureAsync(string sourceName, DateTimeOffset failedAt, TimeSpan? backoff, string? failureReason, CancellationToken cancellationToken); -} +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo; + +public interface ISourceStateRepository +{ + Task TryGetAsync(string sourceName, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task UpdateCursorAsync(string sourceName, BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null); + + Task MarkFailureAsync(string sourceName, DateTimeOffset failedAt, TimeSpan? backoff, string? failureReason, CancellationToken cancellationToken, IClientSessionHandle? session = null); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs b/src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs index a46480f8..481dfac5 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs @@ -23,129 +23,154 @@ public sealed class MongoJobStore : IJobStore _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } - public async Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken) - { - var runId = Guid.NewGuid(); - var document = JobRunDocumentExtensions.FromRequest(request, runId); + public async Task CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var runId = Guid.NewGuid(); + var document = JobRunDocumentExtensions.FromRequest(request, runId); + + if (session is null) + { + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + _logger.LogDebug("Created job run {RunId} for {Kind} with trigger {Trigger}", runId, request.Kind, request.Trigger); + + return document.ToSnapshot(); + } - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - _logger.LogDebug("Created job run {RunId} for {Kind} with trigger {Trigger}", runId, request.Kind, request.Trigger); - - return document.ToSnapshot(); - } - - public async Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken) - { - var runIdValue = runId.ToString(); - var filter = Builders.Filter.Eq(x => x.Id, runIdValue) - & Builders.Filter.Eq(x => x.Status, PendingStatus); - - var update = Builders.Update - .Set(x => x.Status, RunningStatus) - .Set(x => x.StartedAt, startedAt.UtcDateTime); - - var result = await _collection.FindOneAndUpdateAsync( - filter, - update, - new FindOneAndUpdateOptions - { - ReturnDocument = ReturnDocument.After, - }, - cancellationToken).ConfigureAwait(false); - - if (result is null) - { - _logger.LogDebug("Failed to start job run {RunId}; status transition rejected", runId); - return null; + public async Task TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var runIdValue = runId.ToString(); + var filter = Builders.Filter.Eq(x => x.Id, runIdValue) + & Builders.Filter.Eq(x => x.Status, PendingStatus); + + var update = Builders.Update + .Set(x => x.Status, RunningStatus) + .Set(x => x.StartedAt, startedAt.UtcDateTime); + + var options = new FindOneAndUpdateOptions + { + ReturnDocument = ReturnDocument.After, + }; + + var result = session is null + ? 
await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + + if (result is null) + { + _logger.LogDebug("Failed to start job run {RunId}; status transition rejected", runId); + return null; } return result.ToSnapshot(); } - public async Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken) - { - var runIdValue = runId.ToString(); - var filter = Builders.Filter.Eq(x => x.Id, runIdValue) - & Builders.Filter.In(x => x.Status, new[] { PendingStatus, RunningStatus }); - - var update = Builders.Update - .Set(x => x.Status, completion.Status.ToString()) - .Set(x => x.CompletedAt, completion.CompletedAt.UtcDateTime) - .Set(x => x.Error, completion.Error); - - var result = await _collection.FindOneAndUpdateAsync( - filter, - update, - new FindOneAndUpdateOptions - { - ReturnDocument = ReturnDocument.After, - }, - cancellationToken).ConfigureAwait(false); - - if (result is null) - { - _logger.LogWarning("Failed to mark job run {RunId} as {Status}", runId, completion.Status); - return null; + public async Task TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var runIdValue = runId.ToString(); + var filter = Builders.Filter.Eq(x => x.Id, runIdValue) + & Builders.Filter.In(x => x.Status, new[] { PendingStatus, RunningStatus }); + + var update = Builders.Update + .Set(x => x.Status, completion.Status.ToString()) + .Set(x => x.CompletedAt, completion.CompletedAt.UtcDateTime) + .Set(x => x.Error, completion.Error); + + var options = new FindOneAndUpdateOptions + { + ReturnDocument = ReturnDocument.After, + }; + + var result = session is null + ? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + + if (result is null) + { + _logger.LogWarning("Failed to mark job run {RunId} as {Status}", runId, completion.Status); + return null; } return result.ToSnapshot(); } - public async Task FindAsync(Guid runId, CancellationToken cancellationToken) - { - var cursor = await _collection.FindAsync(x => x.Id == runId.ToString(), cancellationToken: cancellationToken).ConfigureAwait(false); - var document = await cursor.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToSnapshot(); - } - - public async Task> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken) - { - if (limit <= 0) - { - return Array.Empty(); - } - - var filter = string.IsNullOrWhiteSpace(kind) - ? Builders.Filter.Empty - : Builders.Filter.Eq(x => x.Kind, kind); - - var cursor = await _collection.Find(filter) - .SortByDescending(x => x.CreatedAt) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); + public async Task FindAsync(Guid runId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var filter = Builders.Filter.Eq(x => x.Id, runId.ToString()); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToSnapshot(); + } + public async Task> GetRecentRunsAsync(string? 
kind, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + if (limit <= 0) + { + return Array.Empty(); + } + + var filter = string.IsNullOrWhiteSpace(kind) + ? Builders.Filter.Empty + : Builders.Filter.Eq(x => x.Kind, kind); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var cursor = await query + .SortByDescending(x => x.CreatedAt) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + return cursor.Select(static doc => doc.ToSnapshot()).ToArray(); } - public async Task> GetActiveRunsAsync(CancellationToken cancellationToken) - { - var filter = Builders.Filter.In(x => x.Status, new[] { PendingStatus, RunningStatus }); - var cursor = await _collection.Find(filter) - .SortByDescending(x => x.CreatedAt) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(static doc => doc.ToSnapshot()).ToArray(); + public async Task> GetActiveRunsAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var filter = Builders.Filter.In(x => x.Status, new[] { PendingStatus, RunningStatus }); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var cursor = await query + .SortByDescending(x => x.CreatedAt) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(static doc => doc.ToSnapshot()).ToArray(); } - public async Task GetLastRunAsync(string kind, CancellationToken cancellationToken) - { - var cursor = await _collection.Find(x => x.Kind == kind) - .SortByDescending(x => x.CreatedAt) - .Limit(1) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - + public async Task GetLastRunAsync(string kind, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var filter = Builders.Filter.Eq(x => x.Kind, kind); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var cursor = await query + .SortByDescending(x => x.CreatedAt) + .Limit(1) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + return cursor.FirstOrDefault()?.ToSnapshot(); } - public async Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken) - { - if (kinds is null) - { - throw new ArgumentNullException(nameof(kinds)); - } + public async Task> GetLastRunsAsync(IEnumerable kinds, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + if (kinds is null) + { + throw new ArgumentNullException(nameof(kinds)); + } var kindList = kinds .Where(static kind => !string.IsNullOrWhiteSpace(kind)) @@ -168,13 +193,17 @@ public sealed class MongoJobStore : IJobStore var pipeline = new[] { matchStage, sortStage, groupStage }; - var aggregate = await _collection.Aggregate(pipeline) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - var results = new Dictionary(StringComparer.Ordinal); - foreach (var element in aggregate) - { + var aggregateFluent = session is null + ? 
_collection.Aggregate(pipeline) + : _collection.Aggregate(session, pipeline); + + var aggregate = await aggregateFluent + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var results = new Dictionary(StringComparer.Ordinal); + foreach (var element in aggregate) + { if (!element.TryGetValue("_id", out var idValue) || idValue.BsonType != BsonType.String) { continue; diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs b/src/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs new file mode 100644 index 00000000..e4e400ef --- /dev/null +++ b/src/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs @@ -0,0 +1,34 @@ +using Microsoft.Extensions.Options; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo; + +public interface IMongoSessionProvider +{ + Task StartSessionAsync(CancellationToken cancellationToken = default); +} + +internal sealed class MongoSessionProvider : IMongoSessionProvider +{ + private readonly IMongoClient _client; + private readonly MongoStorageOptions _options; + + public MongoSessionProvider(IMongoClient client, IOptions options) + { + _client = client ?? throw new ArgumentNullException(nameof(client)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + } + + public Task StartSessionAsync(CancellationToken cancellationToken = default) + { + var sessionOptions = new ClientSessionOptions + { + DefaultTransactionOptions = new TransactionOptions( + readPreference: ReadPreference.Primary, + readConcern: ReadConcern.Majority, + writeConcern: WriteConcern.WMajority.With(wTimeout: _options.CommandTimeout)) + }; + + return _client.StartSessionAsync(sessionOptions, cancellationToken); + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs b/src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs index 21635e15..93bae1cf 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs @@ -18,32 +18,41 @@ public sealed class MongoSourceStateRepository : ISourceStateRepository _logger = logger ?? throw new ArgumentNullException(nameof(logger)); } - public async Task TryGetAsync(string sourceName, CancellationToken cancellationToken) - { - var cursor = await _collection.FindAsync(x => x.SourceName == sourceName, cancellationToken: cancellationToken).ConfigureAwait(false); - var document = await cursor.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document?.ToRecord(); - } - - public async Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken) - { - var document = SourceStateDocumentExtensions.FromRecord(record with { UpdatedAt = DateTimeOffset.UtcNow }); - await _collection.ReplaceOneAsync( - x => x.SourceName == record.SourceName, - document, - new ReplaceOptions { IsUpsert = true }, - cancellationToken).ConfigureAwait(false); - - _logger.LogDebug("Upserted source state for {Source}", record.SourceName); - return document.ToRecord(); - } - - public async Task UpdateCursorAsync(string sourceName, BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrEmpty(sourceName); - var update = Builders.Update - .Set(x => x.Cursor, cursor ?? new BsonDocument()) - .Set(x => x.LastSuccess, completedAt.UtcDateTime) + public async Task TryGetAsync(string sourceName, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + var filter = Builders.Filter.Eq(x => x.SourceName, sourceName); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } + + public async Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var document = SourceStateDocumentExtensions.FromRecord(record with { UpdatedAt = DateTimeOffset.UtcNow }); + var filter = Builders.Filter.Eq(x => x.SourceName, record.SourceName); + var options = new ReplaceOptions { IsUpsert = true }; + + if (session is null) + { + await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + + _logger.LogDebug("Upserted source state for {Source}", record.SourceName); + return document.ToRecord(); + } + + public async Task UpdateCursorAsync(string sourceName, BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentException.ThrowIfNullOrEmpty(sourceName); + var update = Builders.Update + .Set(x => x.Cursor, cursor ?? new BsonDocument()) + .Set(x => x.LastSuccess, completedAt.UtcDateTime) .Set(x => x.FailCount, 0) .Set(x => x.BackoffUntil, (DateTime?)null) .Set(x => x.LastFailureReason, null) @@ -52,26 +61,23 @@ public sealed class MongoSourceStateRepository : ISourceStateRepository var options = new FindOneAndUpdateOptions { - ReturnDocument = ReturnDocument.After, - IsUpsert = true, - }; - - var document = await _collection - .FindOneAndUpdateAsync( - x => x.SourceName == sourceName, - update, - options, - cancellationToken) - .ConfigureAwait(false); - return document?.ToRecord(); - } - - public async Task MarkFailureAsync(string sourceName, DateTimeOffset failedAt, TimeSpan? backoff, string? failureReason, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrEmpty(sourceName); - var reasonValue = NormalizeFailureReason(failureReason); - var update = Builders.Update - .Inc(x => x.FailCount, 1) + ReturnDocument = ReturnDocument.After, + IsUpsert = true, + }; + + var filter = Builders.Filter.Eq(x => x.SourceName, sourceName); + var document = session is null + ? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } + + public async Task MarkFailureAsync(string sourceName, DateTimeOffset failedAt, TimeSpan? backoff, string? failureReason, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentException.ThrowIfNullOrEmpty(sourceName); + var reasonValue = NormalizeFailureReason(failureReason); + var update = Builders.Update + .Inc(x => x.FailCount, 1) .Set(x => x.LastFailure, failedAt.UtcDateTime) .Set(x => x.BackoffUntil, backoff.HasValue ? 
failedAt.UtcDateTime.Add(backoff.Value) : null) .Set(x => x.LastFailureReason, reasonValue) @@ -80,19 +86,16 @@ public sealed class MongoSourceStateRepository : ISourceStateRepository var options = new FindOneAndUpdateOptions { - ReturnDocument = ReturnDocument.After, - IsUpsert = true, - }; - - var document = await _collection - .FindOneAndUpdateAsync( - x => x.SourceName == sourceName, - update, - options, - cancellationToken) - .ConfigureAwait(false); - return document?.ToRecord(); - } + ReturnDocument = ReturnDocument.After, + IsUpsert = true, + }; + + var filter = Builders.Filter.Eq(x => x.SourceName, sourceName); + var document = session is null + ? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + return document?.ToRecord(); + } private static string? NormalizeFailureReason(string? reason) { diff --git a/src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs b/src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs index 6a33a527..31cfbe6d 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs @@ -36,11 +36,11 @@ public static class ServiceCollectionExtensions return new MongoClient(options.ConnectionString); }); - services.AddSingleton(static sp => - { - var options = sp.GetRequiredService>().Value; - var client = sp.GetRequiredService(); - var settings = new MongoDatabaseSettings + services.AddSingleton(static sp => + { + var options = sp.GetRequiredService>().Value; + var client = sp.GetRequiredService(); + var settings = new MongoDatabaseSettings { ReadConcern = ReadConcern.Majority, WriteConcern = WriteConcern.WMajority, @@ -49,11 +49,13 @@ public static class ServiceCollectionExtensions var database = client.GetDatabase(options.GetDatabaseName(), settings); var writeConcern = database.Settings.WriteConcern.With(wTimeout: options.CommandTimeout); - return database.WithWriteConcern(writeConcern); - }); - - services.AddSingleton(); - services.AddSingleton(); + return database.WithWriteConcern(writeConcern); + }); + + services.AddScoped(); + + services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); diff --git a/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj b/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj index 004494a9..57200b41 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj +++ b/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj @@ -7,8 +7,7 @@ true - - + diff --git a/src/StellaOps.Concelier.Storage.Mongo/TASKS.md b/src/StellaOps.Concelier.Storage.Mongo/TASKS.md index ceaa3636..2c6fb6d0 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/TASKS.md +++ b/src/StellaOps.Concelier.Storage.Mongo/TASKS.md @@ -20,5 +20,5 @@ |FEEDSTORAGE-DATA-02-002 Provenance decision persistence|BE-Storage|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-12)** – Normalized documents carry decision reasons/source/timestamps with regression coverage verifying SemVer notes + provenance fallbacks.| |FEEDSTORAGE-DATA-02-003 Normalized versions index creation|BE-Storage|Normalization, Mongo bootstrapper|**DONE (2025-10-12)** – Bootstrapper seeds `normalizedVersions.*` indexes when SemVer style is 
enabled; docs/tests confirm index presence.| |FEEDSTORAGE-DATA-04-001 Advisory payload parity (description/CWEs/canonical metric)|BE-Storage|Models, Core|DONE (2025-10-15) – Mongo payloads round-trip new advisory fields; serializer/tests updated, no migration required beyond optional backfill.| -|FEEDSTORAGE-MONGO-08-001 Causal-consistent session plumbing|BE-Storage|Concelier Core DI|TODO – Introduce scoped MongoDB session provider enabling causal consistency + majority read/write concerns in `AddMongoStorage`; flow optional `IClientSessionHandle` through job/advisory/source state/document stores; add integration test simulating primary election to prove read-your-write + monotonic reads.| +|FEEDSTORAGE-MONGO-08-001 Causal-consistent session plumbing|BE-Storage|Concelier Core DI|**DONE (2025-10-19)** – Scoped session provider registered via `AddMongoStorage`, storage repositories accept optional `IClientSessionHandle`, and `MongoSessionConsistencyTests` exercises read-your-write + monotonic reads across forced primary step-down.| |FEEDSTORAGE-DATA-07-001 Advisory statement & conflict collections|Team Normalization & Storage Backbone|FEEDMERGE-ENGINE-07-001|TODO – Create `advisory_statements` (immutable) and `advisory_conflicts` collections, define `asOf`/`vulnerabilityKey` indexes, and document migration/rollback steps for event-sourced merge.| diff --git a/src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs b/src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs index 5414ff9b..0baec328 100644 --- a/src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs +++ b/src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs @@ -184,10 +184,16 @@ public sealed class AuthoritySecurityOptions /// public PasswordHashOptions PasswordHashing { get; } = new(); + /// + /// Sender-constraint configuration (DPoP, mTLS). 
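Taken together, the Mongo storage changes above (optional `IClientSessionHandle` parameters on every store method plus the scoped `IMongoSessionProvider` registered in `AddMongoStorage`) let a caller pin related reads and writes to one causally consistent session. A minimal sketch, assuming the provider returns `Task<IClientSessionHandle>` and using a hypothetical consumer class; only the store and provider signatures come from the diff above:

```csharp
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;

// Hypothetical consumer: not part of the diff, shown only to illustrate how
// the new optional session parameter is expected to flow through the stores.
public sealed class DocumentStatusFlow
{
    private readonly IMongoSessionProvider _sessions;
    private readonly IDocumentStore _documents;

    public DocumentStatusFlow(IMongoSessionProvider sessions, IDocumentStore documents)
    {
        _sessions = sessions;
        _documents = documents;
    }

    public async Task MarkProcessedAsync(Guid documentId, CancellationToken cancellationToken)
    {
        // Causally consistent session with majority read/write concerns,
        // as configured by MongoSessionProvider.
        using IClientSessionHandle session =
            await _sessions.StartSessionAsync(cancellationToken).ConfigureAwait(false);

        // Both calls share the session, so the follow-up read observes the
        // status update (read-your-write) even across a primary step-down.
        // The "processed" status value is illustrative.
        await _documents.UpdateStatusAsync(documentId, "processed", cancellationToken, session).ConfigureAwait(false);
        var record = await _documents.FindAsync(documentId, cancellationToken, session).ConfigureAwait(false);
        // record is null when the document does not exist.
    }
}
```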
+ /// + public AuthoritySenderConstraintOptions SenderConstraints { get; } = new(); + internal void Validate() { RateLimiting.Validate(); PasswordHashing.Validate(); + SenderConstraints.Validate(); } } @@ -241,6 +247,168 @@ public sealed class AuthorityRateLimitingOptions } } +public sealed class AuthoritySenderConstraintOptions +{ + public AuthoritySenderConstraintOptions() + { + Dpop = new AuthorityDpopOptions(); + Mtls = new AuthorityMtlsOptions(); + } + + public AuthorityDpopOptions Dpop { get; } + + public AuthorityMtlsOptions Mtls { get; } + + internal void Validate() + { + Dpop.Validate(); + Mtls.Validate(); + } +} + +public sealed class AuthorityDpopOptions +{ + private readonly HashSet allowedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + { + "ES256", + "ES384" + }; + + public bool Enabled { get; set; } = true; + + public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2); + + public TimeSpan AllowedClockSkew { get; set; } = TimeSpan.FromSeconds(30); + + public TimeSpan ReplayWindow { get; set; } = TimeSpan.FromMinutes(5); + + public ISet AllowedAlgorithms => allowedAlgorithms; + + public IReadOnlySet NormalizedAlgorithms { get; private set; } = new HashSet(StringComparer.Ordinal); + + public AuthorityDpopNonceOptions Nonce { get; } = new(); + + internal void Validate() + { + if (ProofLifetime <= TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.ProofLifetime must be greater than zero."); + } + + if (AllowedClockSkew < TimeSpan.Zero || AllowedClockSkew > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException("Dpop.AllowedClockSkew must be between 0 and 5 minutes."); + } + + if (ReplayWindow < TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.ReplayWindow must be greater than or equal to zero."); + } + + if (allowedAlgorithms.Count == 0) + { + throw new InvalidOperationException("At least one DPoP algorithm must be configured."); + } + + NormalizedAlgorithms = allowedAlgorithms + .Select(static alg => alg.Trim().ToUpperInvariant()) + .Where(static alg => alg.Length > 0) + .ToHashSet(StringComparer.Ordinal); + + if (NormalizedAlgorithms.Count == 0) + { + throw new InvalidOperationException("Allowed DPoP algorithms cannot be empty after normalization."); + } + + Nonce.Validate(); + } +} + +public sealed class AuthorityDpopNonceOptions +{ + private readonly HashSet requiredAudiences = new(StringComparer.OrdinalIgnoreCase) + { + "signer", + "attestor" + }; + + public bool Enabled { get; set; } = true; + + public TimeSpan Ttl { get; set; } = TimeSpan.FromMinutes(10); + + public int MaxIssuancePerMinute { get; set; } = 120; + + public ISet RequiredAudiences => requiredAudiences; + + public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + internal void Validate() + { + if (Ttl <= TimeSpan.Zero) + { + throw new InvalidOperationException("Dpop.Nonce.Ttl must be greater than zero."); + } + + if (MaxIssuancePerMinute < 1) + { + throw new InvalidOperationException("Dpop.Nonce.MaxIssuancePerMinute must be at least 1."); + } + + NormalizedAudiences = requiredAudiences + .Select(static aud => aud.Trim()) + .Where(static aud => aud.Length > 0) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + if (NormalizedAudiences.Count == 0) + { + throw new InvalidOperationException("Dpop.Nonce.RequiredAudiences must include at least one audience."); + } + } +} + +public sealed class AuthorityMtlsOptions +{ + private readonly HashSet enforceForAudiences = new(StringComparer.OrdinalIgnoreCase) + { 
+ "signer" + }; + + public bool Enabled { get; set; } + + public bool RequireChainValidation { get; set; } = true; + + public TimeSpan RotationGrace { get; set; } = TimeSpan.FromMinutes(15); + + public ISet EnforceForAudiences => enforceForAudiences; + + public IReadOnlySet NormalizedAudiences { get; private set; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + public IList AllowedCertificateAuthorities { get; } = new List(); + + internal void Validate() + { + if (RotationGrace < TimeSpan.Zero) + { + throw new InvalidOperationException("Mtls.RotationGrace must be non-negative."); + } + + NormalizedAudiences = enforceForAudiences + .Select(static aud => aud.Trim()) + .Where(static aud => aud.Length > 0) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + if (Enabled && NormalizedAudiences.Count == 0) + { + throw new InvalidOperationException("Mtls.EnforceForAudiences must include at least one audience when enabled."); + } + + if (AllowedCertificateAuthorities.Any(static path => string.IsNullOrWhiteSpace(path))) + { + throw new InvalidOperationException("Mtls.AllowedCertificateAuthorities entries must not be empty."); + } + } +} + public sealed class AuthorityEndpointRateLimitOptions { /// diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs b/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs new file mode 100644 index 00000000..2d3328ff --- /dev/null +++ b/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs @@ -0,0 +1,21 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Plugin.BouncyCastle; + +/// +/// Dependency injection helpers for registering the BouncyCastle Ed25519 crypto provider. +/// +public static class BouncyCastleCryptoServiceCollectionExtensions +{ + public static IServiceCollection AddBouncyCastleEd25519Provider(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + + return services; + } +} diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs b/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs new file mode 100644 index 00000000..6ea216ae --- /dev/null +++ b/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs @@ -0,0 +1,211 @@ +using System.Collections.Concurrent; +using Microsoft.IdentityModel.Tokens; +using Org.BouncyCastle.Crypto.Parameters; +using Org.BouncyCastle.Crypto.Signers; +using StellaOps.Cryptography; + +namespace StellaOps.Cryptography.Plugin.BouncyCastle; + +/// +/// Ed25519 signing provider backed by BouncyCastle primitives. 
+/// +public sealed class BouncyCastleEd25519CryptoProvider : ICryptoProvider +{ + private static readonly HashSet SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + { + SignatureAlgorithms.Ed25519, + SignatureAlgorithms.EdDsa + }; + + private static readonly string[] DefaultKeyOps = { "sign", "verify" }; + + private readonly ConcurrentDictionary signingKeys = new(StringComparer.Ordinal); + + public string Name => "bouncycastle.ed25519"; + + public bool Supports(CryptoCapability capability, string algorithmId) + { + if (string.IsNullOrWhiteSpace(algorithmId)) + { + return false; + } + + return capability switch + { + CryptoCapability.Signing or CryptoCapability.Verification => SupportedAlgorithms.Contains(algorithmId), + _ => false + }; + } + + public IPasswordHasher GetPasswordHasher(string algorithmId) + => throw new NotSupportedException("BouncyCastle provider does not expose password hashing capabilities."); + + public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference) + { + ArgumentException.ThrowIfNullOrWhiteSpace(algorithmId); + ArgumentNullException.ThrowIfNull(keyReference); + + if (!signingKeys.TryGetValue(keyReference.KeyId, out var entry)) + { + throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'."); + } + + EnsureAlgorithmSupported(algorithmId); + var normalized = NormalizeAlgorithm(algorithmId); + if (!string.Equals(entry.Descriptor.AlgorithmId, normalized, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException( + $"Signing key '{keyReference.KeyId}' is registered for algorithm '{entry.Descriptor.AlgorithmId}', not '{algorithmId}'."); + } + + return new Ed25519SignerWrapper(entry); + } + + public void UpsertSigningKey(CryptoSigningKey signingKey) + { + ArgumentNullException.ThrowIfNull(signingKey); + EnsureAlgorithmSupported(signingKey.AlgorithmId); + + if (signingKey.Kind != CryptoSigningKeyKind.Raw) + { + throw new InvalidOperationException($"Provider '{Name}' requires raw Ed25519 private key material."); + } + + var privateKey = NormalizePrivateKey(signingKey.PrivateKey); + var publicKey = NormalizePublicKey(signingKey.PublicKey, privateKey); + + var privateKeyParameters = new Ed25519PrivateKeyParameters(privateKey, 0); + var publicKeyParameters = new Ed25519PublicKeyParameters(publicKey, 0); + + var descriptor = new CryptoSigningKey( + signingKey.Reference, + NormalizeAlgorithm(signingKey.AlgorithmId), + privateKey, + signingKey.CreatedAt, + signingKey.ExpiresAt, + publicKey, + signingKey.Metadata); + + signingKeys.AddOrUpdate( + signingKey.Reference.KeyId, + _ => new KeyEntry(descriptor, privateKeyParameters, publicKeyParameters), + (_, _) => new KeyEntry(descriptor, privateKeyParameters, publicKeyParameters)); + } + + public bool RemoveSigningKey(string keyId) + { + if (string.IsNullOrWhiteSpace(keyId)) + { + return false; + } + + return signingKeys.TryRemove(keyId, out _); + } + + public IReadOnlyCollection GetSigningKeys() + => signingKeys.Values.Select(static entry => entry.Descriptor).ToArray(); + + private static void EnsureAlgorithmSupported(string algorithmId) + { + if (!SupportedAlgorithms.Contains(algorithmId)) + { + throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider 'bouncycastle.ed25519'."); + } + } + + private static string NormalizeAlgorithm(string algorithmId) + => string.Equals(algorithmId, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase) + ? 
SignatureAlgorithms.Ed25519 + : SignatureAlgorithms.Ed25519; + + private static byte[] NormalizePrivateKey(ReadOnlyMemory privateKey) + { + var span = privateKey.Span; + return span.Length switch + { + 32 => span.ToArray(), + 64 => span[..32].ToArray(), + _ => throw new InvalidOperationException("Ed25519 private key must be 32 or 64 bytes.") + }; + } + + private static byte[] NormalizePublicKey(ReadOnlyMemory publicKey, byte[] privateKey) + { + if (publicKey.IsEmpty) + { + var privateParams = new Ed25519PrivateKeyParameters(privateKey, 0); + return privateParams.GeneratePublicKey().GetEncoded(); + } + + if (publicKey.Span.Length != 32) + { + throw new InvalidOperationException("Ed25519 public key must be 32 bytes."); + } + + return publicKey.ToArray(); + } + + private sealed record KeyEntry( + CryptoSigningKey Descriptor, + Ed25519PrivateKeyParameters PrivateKey, + Ed25519PublicKeyParameters PublicKey); + + private sealed class Ed25519SignerWrapper : ICryptoSigner + { + private readonly KeyEntry entry; + + public Ed25519SignerWrapper(KeyEntry entry) + { + this.entry = entry ?? throw new ArgumentNullException(nameof(entry)); + } + + public string KeyId => entry.Descriptor.Reference.KeyId; + + public string AlgorithmId => entry.Descriptor.AlgorithmId; + + public ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var signer = new Ed25519Signer(); + var buffer = data.ToArray(); + signer.Init(true, entry.PrivateKey); + signer.BlockUpdate(buffer, 0, buffer.Length); + var signature = signer.GenerateSignature(); + return ValueTask.FromResult(signature); + } + + public ValueTask VerifyAsync(ReadOnlyMemory data, ReadOnlyMemory signature, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var verifier = new Ed25519Signer(); + var buffer = data.ToArray(); + verifier.Init(false, entry.PublicKey); + verifier.BlockUpdate(buffer, 0, buffer.Length); + var verified = verifier.VerifySignature(signature.ToArray()); + return ValueTask.FromResult(verified); + } + + public JsonWebKey ExportPublicJsonWebKey() + { + var jwk = new JsonWebKey + { + Kid = entry.Descriptor.Reference.KeyId, + Alg = SignatureAlgorithms.EdDsa, + Kty = "OKP", + Use = JsonWebKeyUseNames.Sig, + Crv = "Ed25519" + }; + + foreach (var op in DefaultKeyOps) + { + jwk.KeyOps.Add(op); + } + + jwk.X = Base64UrlEncoder.Encode(entry.PublicKey.GetEncoded()); + + return jwk; + } + } +} diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj b/src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj new file mode 100644 index 00000000..cb52e219 --- /dev/null +++ b/src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + diff --git a/src/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs b/src/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs new file mode 100644 index 00000000..204dca55 --- /dev/null +++ b/src/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs @@ -0,0 +1,52 @@ +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Cryptography.Plugin.BouncyCastle; +using Xunit; + +namespace StellaOps.Cryptography.Tests; + +public sealed 
class BouncyCastleEd25519CryptoProviderTests +{ + [Fact] + public async Task SignAndVerify_WithBouncyCastleProvider_Succeeds() + { + var services = new ServiceCollection(); + services.AddStellaOpsCrypto(); + services.AddBouncyCastleEd25519Provider(); + + using var provider = services.BuildServiceProvider(); + var registry = provider.GetRequiredService(); + var bcProvider = provider.GetServices() + .OfType() + .Single(); + + var keyId = "ed25519-unit-test"; + var privateKeyBytes = Enumerable.Range(0, 32).Select(i => (byte)(i + 1)).ToArray(); + var keyReference = new CryptoKeyReference(keyId, bcProvider.Name); + var signingKey = new CryptoSigningKey( + keyReference, + SignatureAlgorithms.Ed25519, + privateKeyBytes, + createdAt: DateTimeOffset.UtcNow); + + bcProvider.UpsertSigningKey(signingKey); + + var resolution = registry.ResolveSigner( + CryptoCapability.Signing, + SignatureAlgorithms.Ed25519, + keyReference, + bcProvider.Name); + + var payload = new byte[] { 0x01, 0x02, 0x03, 0x04 }; + var signature = await resolution.Signer.SignAsync(payload); + + Assert.True(await resolution.Signer.VerifyAsync(payload, signature)); + + var jwk = resolution.Signer.ExportPublicJsonWebKey(); + Assert.Equal("OKP", jwk.Kty); + Assert.Equal("Ed25519", jwk.Crv); + Assert.Equal(SignatureAlgorithms.EdDsa, jwk.Alg); + Assert.Equal(keyId, jwk.Kid); + } +} diff --git a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj b/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj index 67024af6..15ccd5ef 100644 --- a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj +++ b/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj @@ -10,5 +10,7 @@ + + diff --git a/src/StellaOps.Cryptography/AGENTS.md b/src/StellaOps.Cryptography/AGENTS.md index b1124feb..ac24d2f2 100644 --- a/src/StellaOps.Cryptography/AGENTS.md +++ b/src/StellaOps.Cryptography/AGENTS.md @@ -18,4 +18,5 @@ Team 8 owns the end-to-end security posture for StellaOps Authority and its cons - Rate-limit `/token` and bootstrap endpoints once CORE8 hooks are available. - Deliver offline revocation bundles signed with detached JWS and provide a verification script. - Maintain `docs/security/authority-threat-model.md` and ensure mitigations are tracked. -- All crypto consumption flows through `StellaOps.Cryptography` abstractions to enable sovereign crypto providers. +- All crypto consumption flows through `StellaOps.Cryptography` abstractions to enable sovereign crypto providers. +- Every new cryptographic algorithm, dependency, or acceleration path ships as an `ICryptoProvider` plug-in under `StellaOps.Cryptography.*`; feature code must never bind directly to third-party crypto libraries. diff --git a/src/StellaOps.Cryptography/CryptoSigningKey.cs b/src/StellaOps.Cryptography/CryptoSigningKey.cs index 13d3fe27..b3745f8b 100644 --- a/src/StellaOps.Cryptography/CryptoSigningKey.cs +++ b/src/StellaOps.Cryptography/CryptoSigningKey.cs @@ -6,6 +6,15 @@ using System.Security.Cryptography; namespace StellaOps.Cryptography; +/// +/// Describes the underlying key material for a . +/// +public enum CryptoSigningKeyKind +{ + Ec, + Raw +} + /// /// Represents asymmetric signing key material managed by a crypto provider. 
/// @@ -13,6 +22,10 @@ public sealed class CryptoSigningKey { private static readonly ReadOnlyDictionary EmptyMetadata = new(new Dictionary(StringComparer.OrdinalIgnoreCase)); + private static readonly byte[] EmptyKey = Array.Empty(); + + private readonly byte[] privateKeyBytes; + private readonly byte[] publicKeyBytes; public CryptoSigningKey( CryptoKeyReference reference, @@ -37,6 +50,10 @@ public sealed class CryptoSigningKey AlgorithmId = algorithmId; CreatedAt = createdAt; ExpiresAt = expiresAt; + Kind = CryptoSigningKeyKind.Ec; + + privateKeyBytes = EmptyKey; + publicKeyBytes = EmptyKey; PrivateParameters = CloneParameters(privateParameters, includePrivate: true); PublicParameters = CloneParameters(privateParameters, includePrivate: false); @@ -48,6 +65,45 @@ public sealed class CryptoSigningKey StringComparer.OrdinalIgnoreCase)); } + public CryptoSigningKey( + CryptoKeyReference reference, + string algorithmId, + ReadOnlyMemory privateKey, + DateTimeOffset createdAt, + DateTimeOffset? expiresAt = null, + ReadOnlyMemory publicKey = default, + IReadOnlyDictionary? metadata = null) + { + Reference = reference ?? throw new ArgumentNullException(nameof(reference)); + + if (string.IsNullOrWhiteSpace(algorithmId)) + { + throw new ArgumentException("Algorithm identifier is required.", nameof(algorithmId)); + } + + if (privateKey.IsEmpty) + { + throw new ArgumentException("Private key material must be provided.", nameof(privateKey)); + } + + AlgorithmId = algorithmId; + CreatedAt = createdAt; + ExpiresAt = expiresAt; + Kind = CryptoSigningKeyKind.Raw; + + privateKeyBytes = privateKey.ToArray(); + publicKeyBytes = publicKey.IsEmpty ? EmptyKey : publicKey.ToArray(); + + PrivateParameters = default; + PublicParameters = default; + Metadata = metadata is null + ? EmptyMetadata + : new ReadOnlyDictionary(metadata.ToDictionary( + static pair => pair.Key, + static pair => pair.Value, + StringComparer.OrdinalIgnoreCase)); + } + /// /// Gets the key reference (id + provider hint). /// @@ -68,6 +124,21 @@ public sealed class CryptoSigningKey /// public ECParameters PublicParameters { get; } + /// + /// Indicates the underlying key material representation. + /// + public CryptoSigningKeyKind Kind { get; } + + /// + /// Gets the raw private key bytes when available (empty for EC-backed keys). + /// + public ReadOnlyMemory PrivateKey => privateKeyBytes; + + /// + /// Gets the raw public key bytes when available (empty for EC-backed keys or when not supplied). + /// + public ReadOnlyMemory PublicKey => publicKeyBytes; + /// /// Gets the timestamp when the key was created/imported. 
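With the raw-key constructor and `CryptoSigningKeyKind` in place, the BouncyCastle provider can also be driven directly, outside the registry path exercised by the test above. A minimal sketch using dummy key material; the types and members are from this diff, but the seed bytes and key id are placeholders:

```csharp
using System;
using System.Linq;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Plugin.BouncyCastle;

var provider = new BouncyCastleEd25519CryptoProvider();

// 32-byte raw Ed25519 seed (dummy, test-only material).
var seed = Enumerable.Range(0, 32).Select(static i => (byte)(i + 7)).ToArray();
var reference = new CryptoKeyReference("report-signing", provider.Name);

// Raw-key CryptoSigningKey constructor added in this change; the provider
// derives the public key from the seed when none is supplied.
provider.UpsertSigningKey(new CryptoSigningKey(
    reference,
    SignatureAlgorithms.Ed25519,
    seed,
    createdAt: DateTimeOffset.UtcNow));

var signer = provider.GetSigner(SignatureAlgorithms.Ed25519, reference);

var payload = new byte[] { 0x01, 0x02, 0x03 };
var signature = await signer.SignAsync(payload);
var verified = await signer.VerifyAsync(payload, signature); // true on a round trip

var jwk = signer.ExportPublicJsonWebKey(); // OKP / Ed25519 JWK for distribution
```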
/// diff --git a/src/StellaOps.Cryptography/DefaultCryptoProvider.cs b/src/StellaOps.Cryptography/DefaultCryptoProvider.cs index 68066062..76e94eda 100644 --- a/src/StellaOps.Cryptography/DefaultCryptoProvider.cs +++ b/src/StellaOps.Cryptography/DefaultCryptoProvider.cs @@ -86,6 +86,10 @@ public sealed class DefaultCryptoProvider : ICryptoProvider { ArgumentNullException.ThrowIfNull(signingKey); EnsureSigningSupported(signingKey.AlgorithmId); + if (signingKey.Kind != CryptoSigningKeyKind.Ec) + { + throw new InvalidOperationException($"Provider '{Name}' only accepts EC signing keys."); + } ValidateSigningKey(signingKey); signingKeys.AddOrUpdate(signingKey.Reference.KeyId, signingKey, (_, _) => signingKey); diff --git a/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs b/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs index 9670c6f8..87df8bee 100644 --- a/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs +++ b/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs @@ -64,6 +64,10 @@ public sealed class LibsodiumCryptoProvider : ICryptoProvider { ArgumentNullException.ThrowIfNull(signingKey); EnsureAlgorithmSupported(signingKey.AlgorithmId); + if (signingKey.Kind != CryptoSigningKeyKind.Ec) + { + throw new InvalidOperationException($"Provider '{Name}' only accepts EC signing keys."); + } signingKeys.AddOrUpdate(signingKey.Reference.KeyId, signingKey, (_, _) => signingKey); } diff --git a/src/StellaOps.Cryptography/SignatureAlgorithms.cs b/src/StellaOps.Cryptography/SignatureAlgorithms.cs index 0299f503..37857588 100644 --- a/src/StellaOps.Cryptography/SignatureAlgorithms.cs +++ b/src/StellaOps.Cryptography/SignatureAlgorithms.cs @@ -8,4 +8,6 @@ public static class SignatureAlgorithms public const string Es256 = "ES256"; public const string Es384 = "ES384"; public const string Es512 = "ES512"; + public const string Ed25519 = "ED25519"; + public const string EdDsa = "EdDSA"; } diff --git a/src/StellaOps.Cryptography/TASKS.md b/src/StellaOps.Cryptography/TASKS.md index 89bc0dec..e8a1f3c7 100644 --- a/src/StellaOps.Cryptography/TASKS.md +++ b/src/StellaOps.Cryptography/TASKS.md @@ -25,6 +25,7 @@ > Remark (2025-10-14): Offline kit docs include manifest verification workflow; attestation artifacts referenced. | SEC5.H | DONE (2025-10-13) | Security Guild + Authority Core | Ensure `/token` denials persist audit records with correlation IDs. | SEC2.A, SEC2.B | ✅ Audit store captures denials; ✅ Tests cover success/failure/lockout; ✅ Threat model review updated. | | D5.A | DONE (2025-10-12) | Security Guild | Flesh out `StellaOps.Cryptography` provider registry, policy, and DI helpers enabling sovereign crypto selection. | SEC1.A, SEC4.B | ✅ `ICryptoProviderRegistry` implementation with provider selection rules; ✅ `StellaOps.Cryptography.DependencyInjection` extensions; ✅ Tests covering fallback ordering. | +| SEC6.A | DONE (2025-10-19) | Security Guild | Ship BouncyCastle-backed Ed25519 signing as a `StellaOps.Cryptography` plug-in and migrate Scanner WebService signing to consume the provider registry; codify the plug-in rule in AGENTS.
2025-10-19: Added `StellaOps.Cryptography.Plugin.BouncyCastle`, updated DI and ReportSigner, captured provider tests (`BouncyCastleEd25519CryptoProviderTests`). | D5.A | ✅ Plug-in registered via DI (`AddStellaOpsCrypto` + `AddBouncyCastleEd25519Provider`); ✅ Report signer resolves keys through registry; ✅ Unit tests cover Ed25519 sign/verify via provider. | > Remark (2025-10-13, SEC2.B): Coordinated with Authority Core — audit sinks now receive `/token` success/failure events; awaiting host test suite once signing fixture lands. > diff --git a/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj b/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj index d7972603..3bac05d1 100644 --- a/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj +++ b/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj @@ -8,8 +8,8 @@ - - + + diff --git a/src/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md b/src/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md new file mode 100644 index 00000000..083877aa --- /dev/null +++ b/src/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md @@ -0,0 +1,149 @@ +# EXCITITOR-ATTEST-01-003 - Verification & Observability Plan + +- **Date:** 2025-10-19 +- **Status:** Draft +- **Owner:** Team Excititor Attestation +- **Related tasks:** EXCITITOR-ATTEST-01-003 (Wave 0), EXCITITOR-WEB-01-003/004, EXCITITOR-WORKER-01-003 +- **Prerequisites satisfied:** EXCITITOR-ATTEST-01-002 (Rekor v2 client integration) + +## 1. Objectives + +1. Provide deterministic attestation verification helpers consumable by Excititor WebService (`/excititor/verify`, `/excititor/export*`) and Worker re-verification loops. +2. Surface structured diagnostics for success, soft failures, and hard failures (signature mismatch, Rekor gaps, artifact digest drift). +3. Emit observability signals (logs, metrics, optional tracing) that can run offline and degrade gracefully when transparency services are unreachable. +4. Add regression tests (unit + integration) covering positive path, negative path, and offline fallback scenarios. + +## 2. Deliverables + +- `IVexAttestationVerifier` abstraction + `VexAttestationVerifier` implementation inside `StellaOps.Excititor.Attestation`, encapsulating DSSE validation, predicate checks, artifact digest confirmation, Rekor inclusion verification, and deterministic diagnostics. +- DI wiring (extension method) for registering verifier + instrumentation dependencies alongside the existing signer/rekor client. +- Shared `VexAttestationDiagnostics` record describing normalized diagnostic keys consumed by Worker/WebService logging. +- Metrics utility (`AttestationMetrics`) exposing counters/histograms via `System.Diagnostics.Metrics`, exported under `StellaOps.Excititor.Attestation` meter. +- Activity source (`AttestationActivitySource`) for optional tracing spans around sign/verify operations. +- Documentation updates (`EXCITITOR-ATTEST-01-003-plan.md`, `TASKS.md` notes) describing instrumentation + test expectations. +- Test coverage in `StellaOps.Excititor.Attestation.Tests` (unit) and scaffolding notes for WebService/Worker integration tests. + +## 3. Verification Flow + +### 3.1 Inputs + +- `VexAttestationRequest` from Core (contains export identifiers, artifact digest, metadata, source providers). +- Optional Rekor reference from previous signing (`VexAttestationMetadata.Rekor`). 
+- Configured policies (tolerated clock skew, Rekor verification toggle, offline mode flag, maximum metadata drift). + +### 3.2 Steps + +1. **Envelope decode** - retrieve DSSE envelope + predicate from storage (Worker) or request payload (WebService), canonicalize JSON, compute digest, compare with metadata `envelopeDigest`. +2. **Subject validation** - ensure subject digest matches exported artifact digest (algorithm & value) and export identifier matches `request.ExportId`. +3. **Signature verification** - delegate to signer/verifier abstraction (cosign/x509) using configured trust anchors; record `signature_state` diagnostic (verified, skipped_offline, failed). +4. **Provenance checks** - confirm predicate type (`https://stella-ops.org/attestations/vex-export`) and metadata shape; enforce deterministic timestamp tolerance. +5. **Transparency log** - if Rekor reference present and verification enabled, call `ITransparencyLogClient.VerifyAsync` with retry/backoff budget; support offline bypass with diagnostic `rekor_state=unreachable`. +6. **Result aggregation** - produce `VexAttestationVerification` containing `IsValid` flag and diagnostics map (includes `failure_reason` when invalid). + +### 3.3 Failure Categories & Handling + +| Category | Detection | Handling | +|---|---|---| +| Signature mismatch | Signer verification failure or subject digest mismatch | Mark invalid, emit warning log, increment `verify.failed` counter with `reason=signature_mismatch`. | +| Rekor absence/stale | Rekor verify returns false | Mark invalid unless offline mode configured; log with correlation ID; `reason=rekor_missing`. | +| Predicate schema drift | Predicate type or required fields missing | Mark invalid, include `reason=predicate_invalid`. | +| Time skew | `signedAt` older than policy threshold | Mark invalid (hard) or warn (soft) per options; include `reason=stale_attestation`. | +| Unexpected metadata | Unknown export format, provider mismatch | Mark invalid; `reason=metadata_mismatch`. | +| Offline Rekor | HTTP client throws | Mark soft failure if `AllowOfflineTransparency` true; degrade metrics with `rekor_state=offline`. | + +## 4. Observability + +### 4.1 Metrics (Meter name: `StellaOps.Excititor.Attestation`) + +| Metric | Type | Dimensions | Description | +|---|---|---|---| +| `stellaops.excititor.attestation.verify.total` | Counter | `result` (`success`/`failure`/`soft_failure`), `component` (`webservice`/`worker`), `reverify` (`true`/`false`) | Counts verification attempts. | +| `stellaops.excititor.attestation.verify.duration.ms` | Histogram | `component`, `result` | Measures end-to-end verification latency. | +| `stellaops.excititor.attestation.verify.rekor.calls` | Counter | `result` (`verified`/`unreachable`/`skipped`) | Rekor verification outcomes. | +| `stellaops.excititor.attestation.verify.cache.hit` | Counter | `hit` (`true`/`false`) | Tracks reuse of cached verification results (Worker loop). | + +Metrics must register via static helper using `Meter` and support offline operation (no exporter dependency). Histogram records double milliseconds; use `Stopwatch.GetElapsedTime` for monotonic timing. + +### 4.2 Logging + +- Use structured logs (`ILogger`) with event IDs: `AttestationVerified` (Information), `AttestationVerificationFailed` (Warning), `AttestationVerificationError` (Error). +- Include correlation ID (`request.QuerySignature.Value`), `exportId`, `envelopeDigest`, `rekorLocation`, `reason`, and `durationMs`. 
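To make the instrument names in the §4.1 table concrete, here is a sketch of what the planned `AttestationMetrics` helper could look like on `System.Diagnostics.Metrics`; the meter and instrument names follow the table above, while the class shape, method names, and tag plumbing are assumptions of this plan rather than implemented code:

```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;

// Sketch only: instrument names match the §4.1 table; everything else is an
// assumption about how the helper might be laid out.
internal static class AttestationMetrics
{
    private static readonly Meter Meter = new("StellaOps.Excititor.Attestation");

    private static readonly Counter<long> VerifyTotal =
        Meter.CreateCounter<long>("stellaops.excititor.attestation.verify.total");

    private static readonly Histogram<double> VerifyDuration =
        Meter.CreateHistogram<double>("stellaops.excititor.attestation.verify.duration.ms");

    private static readonly Counter<long> RekorCalls =
        Meter.CreateCounter<long>("stellaops.excititor.attestation.verify.rekor.calls");

    public static void RecordVerification(string component, string result, bool reverify, TimeSpan duration)
    {
        VerifyTotal.Add(1,
            new KeyValuePair<string, object?>("result", result),
            new KeyValuePair<string, object?>("component", component),
            new KeyValuePair<string, object?>("reverify", reverify ? "true" : "false"));

        VerifyDuration.Record(duration.TotalMilliseconds,
            new KeyValuePair<string, object?>("component", component),
            new KeyValuePair<string, object?>("result", result));
    }

    public static void RecordRekorOutcome(string result)
        => RekorCalls.Add(1, new KeyValuePair<string, object?>("result", result));
}
```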
+- Avoid logging private keys or full envelope; log envelope digest only. For debug builds, gate optional envelope JSON behind `LogLevel.Trace` and configuration flag.
+
+### 4.3 Tracing
+
+- Activity source name `StellaOps.Excititor.Attestation` with spans `attestation.verify` (parent from WebService request or Worker job) including tags: `stellaops.export_id`, `stellaops.result`, `stellaops.rekor.state`.
+- Propagate Activity through Rekor client via `HttpClient` instrumentation (auto instrumentation available).
+
+## 5. Integration Points
+
+### 5.1 WebService
+
+- Inject `IVexAttestationVerifier` into export endpoints and `/excititor/verify` handler.
+- Persist verification result diagnostics alongside response payload for deterministic clients.
+- Return HTTP 200 with `{ valid: true }` when verified; 409 for invalid attestation with diagnostics JSON; 503 when Rekor unreachable and offline override disabled.
+- Add caching for idempotent verification (e.g., by envelope digest) to reduce Rekor calls and surface via metrics.
+
+### 5.2 Worker
+
+- Schedule background job (`EXCITITOR-WORKER-01-003`) to re-verify stored attestations on TTL (default 12h) using new verifier; on failure, flag export for re-sign and notify via event bus (future task).
+- Emit logs/metrics with `component=worker`; include job IDs and next scheduled run.
+- Provide cancellation-aware loops (respect `CancellationToken`) and deterministic order (sorted by export id).
+
+### 5.3 Storage / Cache Hooks
+
+- Store latest verification status and diagnostics in attestation metadata collection (Mongo) keyed by `envelopeDigest` + `artifactDigest` to avoid duplicate work.
+- Expose read API (via WebService) for clients to fetch last verification timestamp + result.
+
+## 6. Test Strategy
+
+### 6.1 Unit Tests (`StellaOps.Excititor.Attestation.Tests`)
+
+- `VexAttestationVerifierTests.VerifyAsync_Succeeds_WhenSignatureAndRekorValid` - uses fake signer/verifier + in-memory Rekor client returning success.
+- `...ReturnsSoftFailure_WhenRekorOfflineAndAllowed` - ensure `IsValid=true`, diagnostic `rekor_state=offline`, metric increments `result=soft_failure`.
+- `...Fails_WhenDigestMismatch` - ensures invalid result, log entry recorded, metrics increment `result=failure` with `reason=signature_mismatch`.
+- `...Fails_WhenPredicateTypeUnexpected` - invalid with `reason=predicate_invalid`.
+- `...RespectsCancellation` - cancellation token triggered before Rekor call results in `OperationCanceledException` and no metrics increments beyond started attempt.
+
+### 6.2 WebService Integration Tests (`StellaOps.Excititor.WebService.Tests`)
+
+- `VerifyEndpoint_Returns200_OnValidAttestation` - mocks verifier to return success, asserts response payload, metrics stub invoked.
+- `VerifyEndpoint_Returns409_OnInvalid` - invalid diag forwarded, ensures logging occurs.
+- `ExportEndpoint_IncludesVerificationDiagnostics` - ensures signed export responses include last verification metadata.
+
+### 6.3 Worker Tests (`StellaOps.Excititor.Worker.Tests`)
+
+- `ReverificationJob_RequeuesOnFailure` - invalid result triggers requeue/backoff.
+- `ReverificationJob_PersistsStatusAndMetrics` - success path updates repository & metrics.
+
+### 6.4 Determinism/Regression
+
+- Golden test verifying that identical inputs produce identical diagnostics dictionaries (sorted keys).
+- Ensure metrics dimensions remain stable via snapshot test (e.g., capturing tags in fake meter listener).
+
+## 7. Implementation Sequencing
+
+1. Introduce verifier abstraction + implementation with basic tests (signature + Rekor success/failure).
+2. Add observability helpers (metrics, activity, logging) and wire into verifier; extend tests to assert instrumentation (using in-memory listener/log sink).
+3. Update WebService DI/service layer to use verifier; craft endpoint integration tests.
+4. Update Worker scheduling code to call verifier & emit metrics.
+5. Wire persistence/caching and document configuration knobs (retry, offline, TTL).
+6. Finalize documentation (architecture updates, runbook entries) before closing task.
+
+## 8. Configuration Defaults
+
+- `AttestationVerificationOptions` (new): `RequireRekor=true`, `AllowOfflineTransparency=false`, `MaxClockSkew=PT5M`, `ReverifyInterval=PT12H`, `CacheWindow=PT1H`.
+- Options bind from configuration section `Excititor:Attestation` across WebService/Worker; offline kit ships defaults.
+
+## 9. Open Questions
+
+- Should verification gracefully accept legacy predicate types (pre-1.0) or hard fail? (Proposed: allow via allowlist with warning diagnostics.)
+- Do we need cross-module eventing when verification fails (e.g., notify Export module) or is logging sufficient in Wave 0? (Proposed: log + metrics now, escalate in later wave.)
+- Confirm whether Worker re-verification writes to Mongo or triggers Export module to re-sign artifacts automatically; placeholder: record status + timestamp only.
+
+## 10. Acceptance Criteria
+
+- Plan approved by Attestation + WebService + Worker leads.
+- Metrics/logging names peer-reviewed to avoid collisions.
+- Test backlog items entered into respective `TASKS.md` once implementation starts.
+- Documentation (this plan) linked from `TASKS.md` notes for discoverability.
diff --git a/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj b/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj
index 6f83f7b7..f1aafa2d 100644
--- a/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj
+++ b/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj
@@ -7,9 +7,9 @@ true - - - + + +
diff --git a/src/StellaOps.Excititor.Attestation/TASKS.md b/src/StellaOps.Excititor.Attestation/TASKS.md
index ff79ef32..747585b8 100644
--- a/src/StellaOps.Excititor.Attestation/TASKS.md
+++ b/src/StellaOps.Excititor.Attestation/TASKS.md
@@ -4,4 +4,4 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md
 |---|---|---|---|
 |EXCITITOR-ATTEST-01-001 – In-toto predicate & DSSE builder|Team Excititor Attestation|EXCITITOR-CORE-01-001|**DONE (2025-10-16)** – Added deterministic in-toto predicate/statement models, DSSE envelope builder wired to signer abstraction, and attestation client producing metadata + diagnostics.|
 |EXCITITOR-ATTEST-01-002 – Rekor v2 client integration|Team Excititor Attestation|EXCITITOR-ATTEST-01-001|**DONE (2025-10-16)** – Implemented Rekor HTTP client with retry/backoff, transparency log abstraction, DI helpers, and attestation client integration capturing Rekor metadata + diagnostics.|
-|EXCITITOR-ATTEST-01-003 – Verification suite & observability|Team Excititor Attestation|EXCITITOR-ATTEST-01-002|TODO – Add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression tests.|
+|EXCITITOR-ATTEST-01-003 – Verification suite & observability|Team Excititor Attestation|EXCITITOR-ATTEST-01-002|DOING (2025-10-19) – Add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression
tests. Draft plan logged in `EXCITITOR-ATTEST-01-003-plan.md` (2025-10-19).| diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs b/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs index 91859afe..1a431d5d 100644 --- a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs +++ b/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs @@ -23,7 +23,7 @@ public abstract class VexConnectorBase : IVexConnector /// public VexProviderKind Kind => Descriptor.Kind; - protected VexConnectorDescriptor Descriptor { get; } + public VexConnectorDescriptor Descriptor { get; } protected ILogger Logger { get; } diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs index 11c1d028..2ec899e4 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs +++ b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs @@ -129,10 +129,20 @@ public sealed class CiscoCsafConnector : VexConnectorBase if (stateChanged) { - var newState = new VexConnectorState( + var baseState = state ?? new VexConnectorState( Descriptor.Id, - latestTimestamp == DateTimeOffset.MinValue ? state?.LastUpdated : latestTimestamp, - digestList.ToImmutableArray()); + null, + ImmutableArray.Empty, + ImmutableDictionary.Empty, + null, + 0, + null, + null); + var newState = baseState with + { + LastUpdated = latestTimestamp == DateTimeOffset.MinValue ? state?.LastUpdated : latestTimestamp, + DocumentDigests = digestList.ToImmutableArray(), + }; await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false); } } diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj index c9ee7659..e0ccf67c 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj +++ b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj @@ -12,9 +12,9 @@ - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md index 833d1535..43abdbb3 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md +++ b/src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md @@ -4,4 +4,4 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |---|---|---|---| |EXCITITOR-CONN-CISCO-01-001 – Endpoint discovery & auth plumbing|Team Excititor Connectors – Cisco|EXCITITOR-CONN-ABS-01-001|**DONE (2025-10-17)** – Added `CiscoProviderMetadataLoader` with bearer token support, offline snapshot fallback, DI helpers, and tests covering network/offline discovery to unblock subsequent fetch work.| |EXCITITOR-CONN-CISCO-01-002 – CSAF pull loop & pagination|Team Excititor Connectors – Cisco|EXCITITOR-CONN-CISCO-01-001, EXCITITOR-STORAGE-01-003|**DONE (2025-10-17)** – Implemented paginated advisory fetch using provider directories, raw document persistence with dedupe/state tracking, offline resiliency, and unit coverage.| -|EXCITITOR-CONN-CISCO-01-003 – Provider trust metadata|Team Excititor Connectors – Cisco|EXCITITOR-CONN-CISCO-01-002, EXCITITOR-POLICY-01-001|TODO – Emit cosign/PGP trust metadata and advisory provenance hints for policy weighting.| +|EXCITITOR-CONN-CISCO-01-003 – Provider trust metadata|Team Excititor Connectors – 
Cisco|EXCITITOR-CONN-CISCO-01-002, EXCITITOR-POLICY-01-001|**DOING (2025-10-19)** – Prereqs confirmed (both DONE); implementing cosign/PGP trust metadata emission and advisory provenance hints for policy weighting.| diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs b/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs new file mode 100644 index 00000000..b6e5de3b --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs @@ -0,0 +1,363 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO.Compression; +using System.Net; +using System.Net.Http; +using System.Text; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.MSRC.CSAF; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using Xunit; + +namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.Connectors; + +public sealed class MsrcCsafConnectorTests +{ + private static readonly VexConnectorDescriptor Descriptor = new("excititor:msrc", VexProviderKind.Vendor, "MSRC CSAF"); + + [Fact] + public async Task FetchAsync_EmitsDocumentAndPersistsState() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0001", + "vulnerabilityId": "ADV-0001", + "severity": "Critical", + "releaseDate": "2025-10-17T00:00:00Z", + "lastModifiedDate": "2025-10-18T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0001.json" + } + ] + } + """; + + var csaf = """{"document":{"title":"Example"}}"""; + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + var emitted = documents[0]; + emitted.SourceUri.Should().Be(new Uri("https://example.com/csaf/ADV-0001.json")); + emitted.Metadata["msrc.vulnerabilityId"].Should().Be("ADV-0001"); + emitted.Metadata["msrc.csaf.format"].Should().Be("json"); + emitted.Metadata.Should().NotContainKey("excititor.quarantine.reason"); + + stateRepository.State.Should().NotBeNull(); + 
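+        // The saved connector state should advance its cursor and record the stored document digest.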
stateRepository.State!.LastUpdated.Should().Be(new DateTimeOffset(2025, 10, 18, 0, 0, 0, TimeSpan.Zero)); + stateRepository.State.DocumentDigests.Should().HaveCount(1); + } + + [Fact] + public async Task FetchAsync_SkipsDocumentsWithExistingDigest() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0001", + "vulnerabilityId": "ADV-0001", + "lastModifiedDate": "2025-10-18T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0001.json" + } + ] + } + """; + + var csaf = """{"document":{"title":"Example"}}"""; + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider()); + + var firstPass = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + firstPass.Add(document); + } + + firstPass.Should().HaveCount(1); + stateRepository.State.Should().NotBeNull(); + var persistedState = stateRepository.State!; + + handler.Reset( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + sink.Documents.Clear(); + var secondPass = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + secondPass.Add(document); + } + + secondPass.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().Equal(persistedState.DocumentDigests); + } + + [Fact] + public async Task FetchAsync_QuarantinesInvalidCsafPayload() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0002", + "vulnerabilityId": "ADV-0002", + "lastModifiedDate": "2025-10-19T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0002.zip" + } + ] + } + """; + + var csafZip = CreateZip("document.json", "{ invalid json "); + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csafZip, "application/zip")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new 
DateTimeOffset(2025, 10, 17, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().HaveCount(1); + sink.Documents[0].Metadata["excititor.quarantine.reason"].Should().Contain("JSON parse failed"); + sink.Documents[0].Metadata["msrc.csaf.format"].Should().Be("zip"); + + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().HaveCount(1); + } + + private static HttpResponseMessage Response(HttpStatusCode statusCode, string content, string contentType) + => new(statusCode) + { + Content = new StringContent(content, Encoding.UTF8, contentType), + }; + + private static HttpResponseMessage Response(HttpStatusCode statusCode, byte[] content, string contentType) + { + var response = new HttpResponseMessage(statusCode); + response.Content = new ByteArrayContent(content); + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(contentType); + return response; + } + + private static MsrcConnectorOptions CreateOptions() + => new() + { + BaseUri = new Uri("https://example.com/", UriKind.Absolute), + TenantId = Guid.NewGuid().ToString(), + ClientId = "client-id", + ClientSecret = "secret", + Scope = MsrcConnectorOptions.DefaultScope, + PageSize = 5, + MaxAdvisoriesPerFetch = 5, + RequestDelay = TimeSpan.Zero, + RetryBaseDelay = TimeSpan.FromMilliseconds(10), + MaxRetryAttempts = 2, + }; + + private static byte[] CreateZip(string entryName, string content) + { + using var buffer = new MemoryStream(); + using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true)) + { + var entry = archive.CreateEntry(entryName); + using var writer = new StreamWriter(entry.Open(), Encoding.UTF8); + writer.Write(content); + } + + return buffer.ToArray(); + } + + private sealed class StubTokenProvider : IMsrcTokenProvider + { + public ValueTask GetAccessTokenAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(new MsrcAccessToken("token", "Bearer", DateTimeOffset.MaxValue)); + } + + private sealed class CapturingRawSink : IVexRawDocumentSink + { + public List Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray.Empty, ImmutableDictionary.Empty)); + } + + private sealed class SingleClientHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleClientHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? 
State { get; private set; } + + public ValueTask GetAsync(string connectorId, CancellationToken cancellationToken) + => ValueTask.FromResult(State); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken) + { + State = state; + return ValueTask.CompletedTask; + } + } + + private sealed class TestHttpMessageHandler : HttpMessageHandler + { + private readonly Queue> _responders; + + private TestHttpMessageHandler(IEnumerable> responders) + { + _responders = new Queue>(responders); + } + + public static TestHttpMessageHandler Create(params Func[] responders) + => new(responders); + + public void Reset(params Func[] responders) + { + _responders.Clear(); + foreach (var responder in responders) + { + _responders.Enqueue(responder); + } + } + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (_responders.Count == 0) + { + throw new InvalidOperationException("No responder configured for MSRC connector test request."); + } + + var responder = _responders.Count > 1 ? _responders.Dequeue() : _responders.Peek(); + var response = responder(request); + response.RequestMessage = request; + return Task.FromResult(response); + } + } +} diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj b/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj index fb2791b8..1030e682 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs index 4b65f94a..d2e21051 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs @@ -1,6 +1,8 @@ using System; +using System.Globalization; using System.IO; using System.IO.Abstractions; +using System.Linq; namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; @@ -8,6 +10,10 @@ public sealed class MsrcConnectorOptions { public const string TokenClientName = "excititor.connector.msrc.token"; public const string DefaultScope = "https://api.msrc.microsoft.com/.default"; + public const string ApiClientName = "excititor.connector.msrc.api"; + public const string DefaultBaseUri = "https://api.msrc.microsoft.com/sug/v2.0/"; + public const string DefaultLocale = "en-US"; + public const string DefaultApiVersion = "2024-08-01"; /// /// Azure AD tenant identifier (GUID or domain). @@ -45,6 +51,61 @@ public sealed class MsrcConnectorOptions /// public int ExpiryLeewaySeconds { get; set; } = 60; + /// + /// Base URI for MSRC Security Update Guide API. + /// + public Uri BaseUri { get; set; } = new(DefaultBaseUri, UriKind.Absolute); + + /// + /// Locale requested when fetching summaries. + /// + public string Locale { get; set; } = DefaultLocale; + + /// + /// API version appended to MSRC requests. + /// + public string ApiVersion { get; set; } = DefaultApiVersion; + + /// + /// Page size used while enumerating summaries. + /// + public int PageSize { get; set; } = 100; + + /// + /// Maximum CSAF advisories fetched per connector run. 
+ /// + public int MaxAdvisoriesPerFetch { get; set; } = 200; + + /// + /// Overlap window applied when resuming from the last modified cursor. + /// + public TimeSpan CursorOverlap { get; set; } = TimeSpan.FromMinutes(10); + + /// + /// Delay between CSAF downloads to respect rate limits. + /// + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + /// + /// Maximum retry attempts for summary/detail fetch operations. + /// + public int MaxRetryAttempts { get; set; } = 3; + + /// + /// Base delay applied between retries (jitter handled by connector). + /// + public TimeSpan RetryBaseDelay { get; set; } = TimeSpan.FromSeconds(2); + + /// + /// Optional lower bound for initial synchronisation when no cursor is stored. + /// + public DateTimeOffset? InitialLastModified { get; set; } = DateTimeOffset.UtcNow.AddDays(-30); + + /// + /// Maximum number of document digests persisted for deduplication. + /// + public int MaxTrackedDigests { get; set; } = 2048; + public void Validate(IFileSystem? fileSystem = null) { if (PreferOfflineToken) @@ -82,6 +143,61 @@ public sealed class MsrcConnectorOptions ExpiryLeewaySeconds = 10; } + if (BaseUri is null || !BaseUri.IsAbsoluteUri) + { + throw new InvalidOperationException("BaseUri must be an absolute URI."); + } + + if (string.IsNullOrWhiteSpace(Locale)) + { + throw new InvalidOperationException("Locale must be provided."); + } + + if (!CultureInfo.GetCultures(CultureTypes.AllCultures).Any(c => string.Equals(c.Name, Locale, StringComparison.OrdinalIgnoreCase))) + { + throw new InvalidOperationException($"Locale '{Locale}' is not recognised."); + } + + if (string.IsNullOrWhiteSpace(ApiVersion)) + { + throw new InvalidOperationException("ApiVersion must be provided."); + } + + if (PageSize <= 0 || PageSize > 500) + { + throw new InvalidOperationException($"{nameof(PageSize)} must be between 1 and 500."); + } + + if (MaxAdvisoriesPerFetch <= 0) + { + throw new InvalidOperationException($"{nameof(MaxAdvisoriesPerFetch)} must be greater than zero."); + } + + if (CursorOverlap < TimeSpan.Zero || CursorOverlap > TimeSpan.FromHours(6)) + { + throw new InvalidOperationException($"{nameof(CursorOverlap)} must be within 0-6 hours."); + } + + if (RequestDelay < TimeSpan.Zero || RequestDelay > TimeSpan.FromSeconds(10)) + { + throw new InvalidOperationException($"{nameof(RequestDelay)} must be between 0 and 10 seconds."); + } + + if (MaxRetryAttempts <= 0 || MaxRetryAttempts > 10) + { + throw new InvalidOperationException($"{nameof(MaxRetryAttempts)} must be between 1 and 10."); + } + + if (RetryBaseDelay < TimeSpan.Zero || RetryBaseDelay > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException($"{nameof(RetryBaseDelay)} must be between 0 and 5 minutes."); + } + + if (MaxTrackedDigests <= 0 || MaxTrackedDigests > 10000) + { + throw new InvalidOperationException($"{nameof(MaxTrackedDigests)} must be between 1 and 10000."); + } + if (!string.IsNullOrWhiteSpace(OfflineTokenPath)) { var fs = fileSystem ?? 
new FileSystem(); diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs index 565718e0..3a3025e8 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs @@ -4,9 +4,12 @@ using System.Net.Http; using Microsoft.Extensions.Caching.Memory; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Connectors.Abstractions; using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication; using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; using System.IO.Abstractions; +using StellaOps.Excititor.Core; namespace StellaOps.Excititor.Connectors.MSRC.CSAF.DependencyInjection; @@ -33,7 +36,22 @@ public static class MsrcConnectorServiceCollectionExtensions AutomaticDecompression = DecompressionMethods.All, }); + services.AddHttpClient(MsrcConnectorOptions.ApiClientName) + .ConfigureHttpClient((provider, client) => + { + var options = provider.GetRequiredService>().Value; + client.BaseAddress = options.BaseUri; + client.Timeout = TimeSpan.FromSeconds(60); + client.DefaultRequestHeaders.UserAgent.ParseAdd("StellaOps.Excititor.Connectors.MSRC.CSAF/1.0"); + client.DefaultRequestHeaders.Accept.ParseAdd("application/json"); + }) + .ConfigurePrimaryHttpMessageHandler(static () => new HttpClientHandler + { + AutomaticDecompression = DecompressionMethods.All, + }); + services.AddSingleton(); + services.AddSingleton(); return services; } diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs new file mode 100644 index 00000000..b0c3613b --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs @@ -0,0 +1,581 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.Connectors.MSRC.CSAF; + +public sealed class MsrcCsafConnector : VexConnectorBase +{ + private const string QuarantineMetadataKey = "excititor.quarantine.reason"; + private const string FormatMetadataKey = "msrc.csaf.format"; + private const string VulnerabilityMetadataKey = "msrc.vulnerabilityId"; + private const string AdvisoryIdMetadataKey = "msrc.advisoryId"; + private const string LastModifiedMetadataKey = "msrc.lastModified"; + private const string ReleaseDateMetadataKey = "msrc.releaseDate"; + private const string CvssSeverityMetadataKey = "msrc.severity"; + private const string CvrfUrlMetadataKey = "msrc.cvrfUrl"; + + private static readonly VexConnectorDescriptor DescriptorInstance = new( + id: "excititor:msrc", + kind: VexProviderKind.Vendor, + 
displayName: "Microsoft MSRC CSAF") + { + Description = "Authenticated connector for Microsoft Security Response Center CSAF advisories.", + SupportedFormats = ImmutableArray.Create(VexDocumentFormat.Csaf), + Tags = ImmutableArray.Create("microsoft", "csaf", "vendor"), + }; + + private readonly IHttpClientFactory _httpClientFactory; + private readonly IMsrcTokenProvider _tokenProvider; + private readonly IVexConnectorStateRepository _stateRepository; + private readonly IOptions _options; + private readonly ILogger _logger; + private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + }; + + private MsrcConnectorOptions? _validatedOptions; + + public MsrcCsafConnector( + IHttpClientFactory httpClientFactory, + IMsrcTokenProvider tokenProvider, + IVexConnectorStateRepository stateRepository, + IOptions options, + ILogger logger, + TimeProvider timeProvider) + : base(DescriptorInstance, logger, timeProvider) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public override ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken) + { + var options = _options.Value ?? throw new InvalidOperationException("MSRC connector options were not registered."); + options.Validate(); + _validatedOptions = options; + + LogConnectorEvent( + LogLevel.Information, + "validate", + "Validated MSRC CSAF connector options.", + new Dictionary + { + ["baseUri"] = options.BaseUri.ToString(), + ["locale"] = options.Locale, + ["apiVersion"] = options.ApiVersion, + ["pageSize"] = options.PageSize, + ["maxAdvisories"] = options.MaxAdvisoriesPerFetch, + }); + + return ValueTask.CompletedTask; + } + + public override async IAsyncEnumerable FetchAsync( + VexConnectorContext context, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var options = EnsureOptionsValidated(); + var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false); + var (from, to) = CalculateWindow(context.Since, state, options); + + LogConnectorEvent( + LogLevel.Information, + "fetch.window", + $"Fetching MSRC CSAF advisories updated between {from:O} and {to:O}.", + new Dictionary + { + ["from"] = from, + ["to"] = to, + ["cursorOverlapSeconds"] = options.CursorOverlap.TotalSeconds, + }); + + var client = await CreateAuthenticatedClientAsync(options, cancellationToken).ConfigureAwait(false); + + var knownDigests = state?.DocumentDigests ?? ImmutableArray.Empty; + var digestSet = new HashSet(knownDigests, StringComparer.OrdinalIgnoreCase); + var digestList = new List(knownDigests); + var latest = state?.LastUpdated ?? 
from; + var fetched = 0; + var stateChanged = false; + + await foreach (var summary in EnumerateSummariesAsync(client, options, from, to, cancellationToken).ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (fetched >= options.MaxAdvisoriesPerFetch) + { + break; + } + + if (string.IsNullOrWhiteSpace(summary.CvrfUrl)) + { + LogConnectorEvent(LogLevel.Debug, "skip.no-cvrf", $"Skipping MSRC advisory {summary.Id} because no CSAF URL was provided."); + continue; + } + + var documentUri = ResolveCvrfUri(options.BaseUri, summary.CvrfUrl); + + VexRawDocument? rawDocument = null; + try + { + rawDocument = await DownloadCsafAsync(client, summary, documentUri, options, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + LogConnectorEvent(LogLevel.Warning, "fetch.error", $"Failed to download MSRC CSAF package {documentUri}.", new Dictionary + { + ["advisoryId"] = summary.Id, + ["vulnerabilityId"] = summary.VulnerabilityId ?? summary.Id, + }, ex); + + await Task.Delay(GetRetryDelay(options, 1), cancellationToken).ConfigureAwait(false); + continue; + } + + if (!digestSet.Add(rawDocument.Digest)) + { + LogConnectorEvent(LogLevel.Debug, "skip.duplicate", $"Skipping MSRC CSAF package {documentUri} because it was already processed."); + continue; + } + + await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false); + digestList.Add(rawDocument.Digest); + stateChanged = true; + fetched++; + + latest = DetermineLatest(summary, latest) ?? latest; + + var quarantineReason = rawDocument.Metadata.TryGetValue(QuarantineMetadataKey, out var reason) ? reason : null; + if (quarantineReason is not null) + { + LogConnectorEvent(LogLevel.Warning, "quarantine", $"Quarantined MSRC CSAF package {documentUri} ({quarantineReason})."); + continue; + } + + yield return rawDocument; + + if (options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + + if (stateChanged) + { + if (digestList.Count > options.MaxTrackedDigests) + { + var trimmed = digestList.Count - options.MaxTrackedDigests; + digestList.RemoveRange(0, trimmed); + } + + var baseState = state ?? new VexConnectorState( + Descriptor.Id, + null, + ImmutableArray.Empty, + ImmutableDictionary.Empty, + null, + 0, + null, + null); + var newState = baseState with + { + LastUpdated = latest == DateTimeOffset.MinValue ? 
state?.LastUpdated : latest, + DocumentDigests = digestList.ToImmutableArray(), + }; + + await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false); + } + + LogConnectorEvent( + LogLevel.Information, + "fetch.completed", + $"MSRC CSAF fetch completed with {fetched} new documents.", + new Dictionary + { + ["fetched"] = fetched, + ["stateChanged"] = stateChanged, + ["lastUpdated"] = latest, + }); + } + + public override ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => throw new NotSupportedException("MSRC CSAF connector relies on CSAF normalizers for document processing."); + + private async Task DownloadCsafAsync( + HttpClient client, + MsrcVulnerabilitySummary summary, + Uri documentUri, + MsrcConnectorOptions options, + CancellationToken cancellationToken) + { + using var response = await SendWithRetryAsync( + client, + () => new HttpRequestMessage(HttpMethod.Get, documentUri), + options, + cancellationToken).ConfigureAwait(false); + + var payload = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + + var validation = ValidateCsafPayload(payload); + var metadata = BuildMetadata(builder => + { + builder.Add(AdvisoryIdMetadataKey, summary.Id); + builder.Add(VulnerabilityMetadataKey, summary.VulnerabilityId ?? summary.Id); + builder.Add(CvrfUrlMetadataKey, documentUri.ToString()); + builder.Add(FormatMetadataKey, validation.Format); + + if (!string.IsNullOrWhiteSpace(summary.Severity)) + { + builder.Add(CvssSeverityMetadataKey, summary.Severity); + } + + if (summary.LastModifiedDate is not null) + { + builder.Add(LastModifiedMetadataKey, summary.LastModifiedDate.Value.ToString("O")); + } + + if (summary.ReleaseDate is not null) + { + builder.Add(ReleaseDateMetadataKey, summary.ReleaseDate.Value.ToString("O")); + } + + if (!string.IsNullOrWhiteSpace(validation.QuarantineReason)) + { + builder.Add(QuarantineMetadataKey, validation.QuarantineReason); + } + + if (response.Headers.ETag is not null) + { + builder.Add("http.etag", response.Headers.ETag.Tag); + } + + if (response.Content.Headers.LastModified is { } lastModified) + { + builder.Add("http.lastModified", lastModified.ToString("O")); + } + }); + + return CreateRawDocument(VexDocumentFormat.Csaf, documentUri, payload, metadata); + } + + private async Task CreateAuthenticatedClientAsync(MsrcConnectorOptions options, CancellationToken cancellationToken) + { + var token = await _tokenProvider.GetAccessTokenAsync(cancellationToken).ConfigureAwait(false); + var client = _httpClientFactory.CreateClient(MsrcConnectorOptions.ApiClientName); + + client.DefaultRequestHeaders.Remove("Authorization"); + client.DefaultRequestHeaders.Add("Authorization", $"{token.Type} {token.Value}"); + client.DefaultRequestHeaders.Remove("Accept-Language"); + client.DefaultRequestHeaders.Add("Accept-Language", options.Locale); + client.DefaultRequestHeaders.Remove("api-version"); + client.DefaultRequestHeaders.Add("api-version", options.ApiVersion); + client.DefaultRequestHeaders.Remove("Accept"); + client.DefaultRequestHeaders.Add("Accept", "application/json"); + + return client; + } + + private async Task SendWithRetryAsync( + HttpClient client, + Func requestFactory, + MsrcConnectorOptions options, + CancellationToken cancellationToken) + { + Exception? lastError = null; + HttpResponseMessage? 
response = null; + + for (var attempt = 1; attempt <= options.MaxRetryAttempts; attempt++) + { + response?.Dispose(); + using var request = requestFactory(); + try + { + response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + + if (response.IsSuccessStatusCode) + { + return response; + } + + if (!ShouldRetry(response.StatusCode) || attempt == options.MaxRetryAttempts) + { + response.EnsureSuccessStatusCode(); + } + } + catch (Exception ex) when (IsTransient(ex) && attempt < options.MaxRetryAttempts) + { + lastError = ex; + LogConnectorEvent(LogLevel.Warning, "retry", $"Retrying MSRC request (attempt {attempt}/{options.MaxRetryAttempts}).", exception: ex); + } + catch (Exception ex) + { + response?.Dispose(); + throw; + } + + await Task.Delay(GetRetryDelay(options, attempt), cancellationToken).ConfigureAwait(false); + } + + response?.Dispose(); + throw lastError ?? new InvalidOperationException("MSRC request retries exhausted."); + } + + private TimeSpan GetRetryDelay(MsrcConnectorOptions options, int attempt) + { + var baseDelay = options.RetryBaseDelay.TotalMilliseconds; + var multiplier = Math.Pow(2, Math.Max(0, attempt - 1)); + var jitter = Random.Shared.NextDouble() * baseDelay * 0.25; + var delayMs = Math.Min(baseDelay * multiplier + jitter, TimeSpan.FromMinutes(5).TotalMilliseconds); + return TimeSpan.FromMilliseconds(delayMs); + } + + private async IAsyncEnumerable EnumerateSummariesAsync( + HttpClient client, + MsrcConnectorOptions options, + DateTimeOffset from, + DateTimeOffset to, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + var fetched = 0; + var requestUri = BuildSummaryUri(options, from, to); + + while (requestUri is not null && fetched < options.MaxAdvisoriesPerFetch) + { + using var response = await SendWithRetryAsync( + client, + () => new HttpRequestMessage(HttpMethod.Get, requestUri), + options, + cancellationToken).ConfigureAwait(false); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var payload = await JsonSerializer.DeserializeAsync(stream, _serializerOptions, cancellationToken).ConfigureAwait(false) + ?? 
new MsrcSummaryResponse(); + + foreach (var summary in payload.Value) + { + if (string.IsNullOrWhiteSpace(summary.CvrfUrl)) + { + continue; + } + + yield return summary; + fetched++; + + if (fetched >= options.MaxAdvisoriesPerFetch) + { + yield break; + } + } + + if (string.IsNullOrWhiteSpace(payload.NextLink)) + { + break; + } + + if (!Uri.TryCreate(payload.NextLink, UriKind.Absolute, out requestUri)) + { + LogConnectorEvent(LogLevel.Warning, "pagination.invalid", $"MSRC pagination returned invalid next link '{payload.NextLink}'."); + break; + } + } + } + + private static Uri BuildSummaryUri(MsrcConnectorOptions options, DateTimeOffset from, DateTimeOffset to) + { + var baseText = options.BaseUri.ToString().TrimEnd('/'); + var builder = new StringBuilder(baseText.Length + 128); + builder.Append(baseText); + if (!baseText.EndsWith("/vulnerabilities", StringComparison.OrdinalIgnoreCase)) + { + builder.Append("/vulnerabilities"); + } + + builder.Append("?"); + builder.Append("$top=").Append(options.PageSize); + builder.Append("&lastModifiedStartDateTime=").Append(Uri.EscapeDataString(from.ToUniversalTime().ToString("O"))); + builder.Append("&lastModifiedEndDateTime=").Append(Uri.EscapeDataString(to.ToUniversalTime().ToString("O"))); + builder.Append("&$orderby=lastModifiedDate"); + builder.Append("&locale=").Append(Uri.EscapeDataString(options.Locale)); + builder.Append("&api-version=").Append(Uri.EscapeDataString(options.ApiVersion)); + + return new Uri(builder.ToString(), UriKind.Absolute); + } + + private (DateTimeOffset From, DateTimeOffset To) CalculateWindow( + DateTimeOffset? contextSince, + VexConnectorState? state, + MsrcConnectorOptions options) + { + var now = UtcNow(); + var since = contextSince ?? state?.LastUpdated ?? options.InitialLastModified ?? now.AddDays(-30); + + if (state?.LastUpdated is { } persisted && persisted > since) + { + since = persisted; + } + + if (options.CursorOverlap > TimeSpan.Zero) + { + since = since.Add(-options.CursorOverlap); + } + + if (since < now.AddYears(-20)) + { + since = now.AddYears(-20); + } + + return (since, now); + } + + private static bool ShouldRetry(HttpStatusCode statusCode) + => statusCode == HttpStatusCode.TooManyRequests || + (int)statusCode >= 500; + + private static bool IsTransient(Exception exception) + => exception is HttpRequestException or IOException or TaskCanceledException; + + private static Uri ResolveCvrfUri(Uri baseUri, string cvrfUrl) + => Uri.TryCreate(cvrfUrl, UriKind.Absolute, out var absolute) + ? absolute + : new Uri(baseUri, cvrfUrl); + + private static CsafValidationResult ValidateCsafPayload(ReadOnlyMemory payload) + { + try + { + if (IsZip(payload.Span)) + { + using var zipStream = new MemoryStream(payload.ToArray(), writable: false); + using var archive = new ZipArchive(zipStream, ZipArchiveMode.Read, leaveOpen: true); + var entry = archive.Entries.FirstOrDefault(e => e.Name.EndsWith(".json", StringComparison.OrdinalIgnoreCase)) + ?? 
archive.Entries.FirstOrDefault(); + if (entry is null) + { + return new CsafValidationResult("zip", "Zip archive did not contain any entries."); + } + + using var entryStream = entry.Open(); + using var reader = new StreamReader(entryStream, Encoding.UTF8); + using var json = JsonDocument.Parse(reader.ReadToEnd()); + return CsafValidationResult.Valid("zip"); + } + + if (IsGzip(payload.Span)) + { + using var input = new MemoryStream(payload.ToArray(), writable: false); + using var gzip = new GZipStream(input, CompressionMode.Decompress); + using var reader = new StreamReader(gzip, Encoding.UTF8); + using var json = JsonDocument.Parse(reader.ReadToEnd()); + return CsafValidationResult.Valid("gzip"); + } + + using var jsonDocument = JsonDocument.Parse(payload.Span); + return CsafValidationResult.Valid("json"); + } + catch (JsonException ex) + { + return new CsafValidationResult("json", $"JSON parse failed: {ex.Message}"); + } + catch (InvalidDataException ex) + { + return new CsafValidationResult("invalid", ex.Message); + } + catch (EndOfStreamException ex) + { + return new CsafValidationResult("invalid", ex.Message); + } + } + + private static bool IsZip(ReadOnlySpan content) + => content.Length > 3 && content[0] == 0x50 && content[1] == 0x4B; + + private static bool IsGzip(ReadOnlySpan content) + => content.Length > 2 && content[0] == 0x1F && content[1] == 0x8B; + + private static DateTimeOffset? DetermineLatest(MsrcVulnerabilitySummary summary, DateTimeOffset? current) + { + var candidate = summary.LastModifiedDate ?? summary.ReleaseDate; + if (candidate is null) + { + return current; + } + + if (current is null || candidate > current) + { + return candidate; + } + + return current; + } + + private MsrcConnectorOptions EnsureOptionsValidated() + { + if (_validatedOptions is not null) + { + return _validatedOptions; + } + + var options = _options.Value ?? throw new InvalidOperationException("MSRC connector options were not registered."); + options.Validate(); + _validatedOptions = options; + return options; + } + + private sealed record CsafValidationResult(string Format, string? QuarantineReason) + { + public static CsafValidationResult Valid(string format) => new(format, null); + } +} + +internal sealed record MsrcSummaryResponse +{ + [JsonPropertyName("value")] + public List Value { get; init; } = new(); + + [JsonPropertyName("@odata.nextLink")] + public string? NextLink { get; init; } +} + +internal sealed record MsrcVulnerabilitySummary +{ + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + [JsonPropertyName("vulnerabilityId")] + public string? VulnerabilityId { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("releaseDate")] + public DateTimeOffset? ReleaseDate { get; init; } + + [JsonPropertyName("lastModifiedDate")] + public DateTimeOffset? LastModifiedDate { get; init; } + + [JsonPropertyName("cvrfUrl")] + public string? 
CvrfUrl { get; init; } +} diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj index f6b57b36..00e40cff 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj @@ -8,11 +8,12 @@ + - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md index 1b74c7e3..298284d0 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md +++ b/src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md @@ -3,5 +3,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-CONN-MS-01-001 – AAD onboarding & token cache|Team Excititor Connectors – MSRC|EXCITITOR-CONN-ABS-01-001|**DONE (2025-10-17)** – Added MSRC connector project with configurable AAD options, token provider (offline/online modes), DI wiring, and unit tests covering caching and fallback scenarios.| -|EXCITITOR-CONN-MS-01-002 – CSAF download pipeline|Team Excititor Connectors – MSRC|EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003|TODO – Fetch CSAF packages with retry/backoff, checksum verification, and raw document persistence plus quarantine for schema failures.| +|EXCITITOR-CONN-MS-01-002 – CSAF download pipeline|Team Excititor Connectors – MSRC|EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003|**DOING (2025-10-19)** – Prereqs verified (EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003); drafting fetch/retry plan and storage wiring before implementation of CSAF package download, checksum validation, and quarantine flows.| |EXCITITOR-CONN-MS-01-003 – Trust metadata & provenance hints|Team Excititor Connectors – MSRC|EXCITITOR-CONN-MS-01-002, EXCITITOR-POLICY-01-001|TODO – Emit cosign/AAD issuer metadata, attach provenance details, and document policy integration.| diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj b/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj index 947627bf..e610bcf4 100644 --- a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj +++ b/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj @@ -12,9 +12,9 @@
- - - + + + diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs b/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs new file mode 100644 index 00000000..fa99caba --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs @@ -0,0 +1,311 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Net; +using System.Net.Http; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.Oracle.CSAF; +using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration; +using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using System.IO.Abstractions.TestingHelpers; +using Xunit; + +namespace StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.Connectors; + +public sealed class OracleCsafConnectorTests +{ + [Fact] + public async Task FetchAsync_NewEntry_PersistsDocumentAndUpdatesState() + { + var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); + var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var payloadDigest = ComputeDigest(payload); + var snapshotPath = "/snapshots/oracle-catalog.json"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, payloadDigest, "2025-10-15T00:00:00Z"))); + + var handler = new StubHttpMessageHandler(new Dictionary + { + [documentUri] = CreateResponse(payload), + }); + var httpClient = new HttpClient(handler); + var httpFactory = new SingleHttpClientFactory(httpClient); + var loader = new OracleCatalogLoader( + httpFactory, + new MemoryCache(new MemoryCacheOptions()), + fileSystem, + NullLogger.Instance, + TimeProvider.System); + + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new OracleCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { new OracleConnectorOptionsValidator(fileSystem) }, + NullLogger.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary.Empty + .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") + .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) + .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); + var settings = new VexConnectorSettings(settingsValues); + + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext( + Since: null, + Settings: settings, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + documents[0].Digest.Should().Be(payloadDigest); + documents[0].Metadata["oracle.csaf.entryId"].Should().Be("CPU2025Oct"); + 
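+        // The catalogue-declared SHA-256 should also be surfaced in metadata so provenance can be audited downstream.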
documents[0].Metadata["oracle.csaf.sha256"].Should().Be(payloadDigest); + + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().ContainSingle().Which.Should().Be(payloadDigest); + + handler.GetCallCount(documentUri).Should().Be(1); + + // second run should short-circuit without downloading again + sink.Documents.Clear(); + documents.Clear(); + + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + handler.GetCallCount(documentUri).Should().Be(1); + } + + [Fact] + public async Task FetchAsync_ChecksumMismatch_SkipsDocument() + { + var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); + var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var snapshotPath = "/snapshots/oracle-catalog.json"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, "deadbeef", "2025-10-15T00:00:00Z"))); + + var handler = new StubHttpMessageHandler(new Dictionary + { + [documentUri] = CreateResponse(payload), + }); + var httpClient = new HttpClient(handler); + var httpFactory = new SingleHttpClientFactory(httpClient); + var loader = new OracleCatalogLoader( + httpFactory, + new MemoryCache(new MemoryCacheOptions()), + fileSystem, + NullLogger.Instance, + TimeProvider.System); + + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new OracleCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { new OracleConnectorOptionsValidator(fileSystem) }, + NullLogger.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary.Empty + .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") + .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) + .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); + var settings = new VexConnectorSettings(settingsValues); + + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext( + Since: null, + Settings: settings, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.State.Should().BeNull(); + handler.GetCallCount(documentUri).Should().Be(1); + } + + private static HttpResponseMessage CreateResponse(byte[] payload) + => new(HttpStatusCode.OK) + { + Content = new ByteArrayContent(payload) + { + Headers = + { + ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"), + } + } + }; + + private static string ComputeDigest(byte[] payload) + { + Span buffer = stackalloc byte[32]; + SHA256.HashData(payload, buffer); + return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private static string BuildOfflineSnapshot(Uri documentUri, string sha256, string publishedAt) + { + var snapshot = new + { + metadata = new + { + generatedAt = "2025-10-14T12:00:00Z", + entries = new[] + { + new + { + id = "CPU2025Oct", + title = "Oracle Critical Patch Update Advisory - October 2025", + documentUri = documentUri.ToString(), 
+ publishedAt, + revision = publishedAt, + sha256, + size = 1024, + products = new[] { "Oracle Database" } + } + }, + cpuSchedule = Array.Empty() + }, + fetchedAt = "2025-10-14T12:00:00Z" + }; + + return JsonSerializer.Serialize(snapshot, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + } + + private sealed class StubHttpMessageHandler : HttpMessageHandler + { + private readonly Dictionary _responses; + private readonly Dictionary _callCounts = new(); + + public StubHttpMessageHandler(Dictionary responses) + { + _responses = responses; + } + + public int GetCallCount(Uri uri) => _callCounts.TryGetValue(uri, out var count) ? count : 0; + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri is null || !_responses.TryGetValue(request.RequestUri, out var response)) + { + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)); + } + + _callCounts.TryGetValue(request.RequestUri, out var count); + _callCounts[request.RequestUri] = count + 1; + return Task.FromResult(response.Clone()); + } + } + + private sealed class SingleHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? State { get; private set; } + + public ValueTask GetAsync(string connectorId, CancellationToken cancellationToken) + => ValueTask.FromResult(State); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken) + { + State = state; + return ValueTask.CompletedTask; + } + } + + private sealed class InMemoryRawSink : IVexRawDocumentSink + { + public List Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray.Empty, ImmutableDictionary.Empty)); + } +} + +internal static class HttpResponseMessageExtensions +{ + public static HttpResponseMessage Clone(this HttpResponseMessage response) + { + var clone = new HttpResponseMessage(response.StatusCode); + foreach (var header in response.Headers) + { + clone.Headers.TryAddWithoutValidation(header.Key, header.Value); + } + + if (response.Content is not null) + { + var payload = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult(); + var mediaType = response.Content.Headers.ContentType?.MediaType ?? 
"application/json"; + clone.Content = new ByteArrayContent(payload); + clone.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(mediaType); + } + + return clone; + } +} diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj b/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj index a93d8780..e9083487 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj +++ b/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs index 757c5beb..8401fbe3 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs +++ b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs @@ -1,11 +1,19 @@ using System.Collections.Generic; using System.Collections.Immutable; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.Logging; using StellaOps.Excititor.Connectors.Abstractions; using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration; using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata; using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; namespace StellaOps.Excititor.Connectors.Oracle.CSAF; @@ -20,6 +28,8 @@ public sealed class OracleCsafConnector : VexConnectorBase }; private readonly OracleCatalogLoader _catalogLoader; + private readonly IHttpClientFactory _httpClientFactory; + private readonly IVexConnectorStateRepository _stateRepository; private readonly IEnumerable> _validators; private OracleConnectorOptions? _options; @@ -27,12 +37,16 @@ public sealed class OracleCsafConnector : VexConnectorBase public OracleCsafConnector( OracleCatalogLoader catalogLoader, + IHttpClientFactory httpClientFactory, + IVexConnectorStateRepository stateRepository, IEnumerable> validators, ILogger logger, TimeProvider timeProvider) : base(DescriptorInstance, logger, timeProvider) { _catalogLoader = catalogLoader ?? throw new ArgumentNullException(nameof(catalogLoader)); + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); _validators = validators ?? Array.Empty>(); } @@ -61,21 +75,286 @@ public sealed class OracleCsafConnector : VexConnectorBase throw new InvalidOperationException("Connector must be validated before fetch operations."); } - if (_catalog is null) - { - _catalog = await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false); - } + _catalog ??= await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false); - LogConnectorEvent(LogLevel.Debug, "fetch", "Oracle CSAF discovery ready; document ingestion handled by follow-up task.", new Dictionary + var entries = _catalog.Metadata.Entries + .OrderBy(static entry => entry.PublishedAt == default ? 
DateTimeOffset.MinValue : entry.PublishedAt) + .ToImmutableArray(); + + var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false); + var since = ResolveSince(context.Since, state?.LastUpdated); + var knownDigests = state?.DocumentDigests ?? ImmutableArray.Empty; + var digestSet = new HashSet(knownDigests, StringComparer.OrdinalIgnoreCase); + var digestList = new List(knownDigests); + var latestPublished = state?.LastUpdated ?? since ?? DateTimeOffset.MinValue; + var stateChanged = false; + + var client = _httpClientFactory.CreateClient(OracleConnectorOptions.HttpClientName); + + LogConnectorEvent(LogLevel.Information, "fetch.begin", "Starting Oracle CSAF catalogue iteration.", new Dictionary { - ["since"] = context.Since?.ToString("O"), + ["since"] = since?.ToString("O"), + ["entryCount"] = entries.Length, }); - yield break; + foreach (var entry in entries) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (ShouldSkipEntry(entry, since)) + { + continue; + } + + var expectedDigest = NormalizeDigest(entry.Sha256); + if (expectedDigest is not null && digestSet.Contains(expectedDigest)) + { + latestPublished = UpdateLatest(latestPublished, entry.PublishedAt); + LogConnectorEvent(LogLevel.Debug, "fetch.skip.cached", "Skipping Oracle CSAF entry because digest already processed.", new Dictionary + { + ["entryId"] = entry.Id, + ["digest"] = expectedDigest, + }); + continue; + } + + var rawDocument = await DownloadEntryAsync(client, entry, cancellationToken).ConfigureAwait(false); + if (rawDocument is null) + { + continue; + } + + if (expectedDigest is not null && !string.Equals(rawDocument.Digest, expectedDigest, StringComparison.OrdinalIgnoreCase)) + { + LogConnectorEvent(LogLevel.Warning, "fetch.checksum_mismatch", "Oracle CSAF document checksum mismatch; document skipped.", new Dictionary + { + ["entryId"] = entry.Id, + ["expected"] = expectedDigest, + ["actual"] = rawDocument.Digest, + ["documentUri"] = entry.DocumentUri.ToString(), + }); + continue; + } + + if (!digestSet.Add(rawDocument.Digest)) + { + LogConnectorEvent(LogLevel.Debug, "fetch.skip.duplicate", "Oracle CSAF document digest already ingested.", new Dictionary + { + ["entryId"] = entry.Id, + ["digest"] = rawDocument.Digest, + }); + continue; + } + + await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false); + digestList.Add(rawDocument.Digest); + stateChanged = true; + latestPublished = UpdateLatest(latestPublished, entry.PublishedAt); + + LogConnectorEvent(LogLevel.Information, "fetch.document_ingested", "Oracle CSAF document stored.", new Dictionary + { + ["entryId"] = entry.Id, + ["digest"] = rawDocument.Digest, + ["documentUri"] = entry.DocumentUri.ToString(), + ["publishedAt"] = entry.PublishedAt.ToString("O"), + }); + + yield return rawDocument; + + if (_options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + + if (stateChanged) + { + var baseState = state ?? new VexConnectorState( + Descriptor.Id, + null, + ImmutableArray.Empty, + ImmutableDictionary.Empty, + null, + 0, + null, + null); + var newState = baseState with + { + LastUpdated = latestPublished == DateTimeOffset.MinValue ? 
baseState.LastUpdated : latestPublished, + DocumentDigests = digestList.ToImmutableArray(), + }; + + await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false); + } + + var ingestedCount = digestList.Count - knownDigests.Length; + LogConnectorEvent(LogLevel.Information, "fetch.complete", "Oracle CSAF fetch completed.", new Dictionary + { + ["stateChanged"] = stateChanged, + ["documentsProcessed"] = ingestedCount, + ["latestPublished"] = latestPublished == DateTimeOffset.MinValue ? null : latestPublished.ToString("O"), + }); } public override ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) => throw new NotSupportedException("OracleCsafConnector relies on dedicated CSAF normalizers."); public OracleCatalogResult? GetCachedCatalog() => _catalog; + + private static DateTimeOffset? ResolveSince(DateTimeOffset? contextSince, DateTimeOffset? stateSince) + { + if (contextSince is null) + { + return stateSince; + } + + if (stateSince is null) + { + return contextSince; + } + + return stateSince > contextSince ? stateSince : contextSince; + } + + private static bool ShouldSkipEntry(OracleCatalogEntry entry, DateTimeOffset? since) + { + if (since is null) + { + return false; + } + + if (entry.PublishedAt == default) + { + return false; + } + + return entry.PublishedAt <= since; + } + + private async Task DownloadEntryAsync(HttpClient client, OracleCatalogEntry entry, CancellationToken cancellationToken) + { + if (entry.DocumentUri is null) + { + LogConnectorEvent(LogLevel.Warning, "fetch.skip.missing_uri", "Oracle CSAF entry missing document URI; skipping.", new Dictionary + { + ["entryId"] = entry.Id, + }); + return null; + } + + var payload = await DownloadWithRetryAsync(client, entry.DocumentUri, cancellationToken).ConfigureAwait(false); + if (payload is null) + { + return null; + } + + var metadata = BuildMetadata(builder => + { + builder.Add("oracle.csaf.entryId", entry.Id); + builder.Add("oracle.csaf.title", entry.Title); + builder.Add("oracle.csaf.revision", entry.Revision); + if (entry.PublishedAt != default) + { + builder.Add("oracle.csaf.published", entry.PublishedAt.ToString("O")); + } + + builder.Add("oracle.csaf.sha256", NormalizeDigest(entry.Sha256)); + builder.Add("oracle.csaf.size", entry.Size?.ToString(CultureInfo.InvariantCulture)); + if (!entry.Products.IsDefaultOrEmpty) + { + builder.Add("oracle.csaf.products", string.Join(",", entry.Products)); + } + }); + + return CreateRawDocument(VexDocumentFormat.Csaf, entry.DocumentUri, payload.AsMemory(), metadata); + } + + private async Task DownloadWithRetryAsync(HttpClient client, Uri uri, CancellationToken cancellationToken) + { + const int maxAttempts = 3; + var delay = TimeSpan.FromSeconds(1); + + for (var attempt = 1; attempt <= maxAttempts; attempt++) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + using var response = await client.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + if (IsTransient(response.StatusCode) && attempt < maxAttempts) + { + LogConnectorEvent(LogLevel.Warning, "fetch.retry.status", "Oracle CSAF document request returned transient status; retrying.", new Dictionary + { + ["status"] = (int)response.StatusCode, + ["attempt"] = attempt, + ["uri"] = uri.ToString(), + }); + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + delay = delay + delay; + continue; + } + + response.EnsureSuccessStatusCode(); + } + + var bytes = 
await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + return bytes; + } + catch (Exception ex) when (IsTransient(ex) && attempt < maxAttempts) + { + LogConnectorEvent(LogLevel.Warning, "fetch.retry.exception", "Oracle CSAF document request failed; retrying.", new Dictionary + { + ["attempt"] = attempt, + ["uri"] = uri.ToString(), + ["exception"] = ex.GetType().Name, + }); + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + delay = delay + delay; + } + } + + LogConnectorEvent(LogLevel.Error, "fetch.failed", "Oracle CSAF document could not be retrieved after retries.", new Dictionary + { + ["uri"] = uri.ToString(), + }); + + return null; + } + + private static bool IsTransient(Exception exception) + => exception is HttpRequestException or IOException or TaskCanceledException; + + private static bool IsTransient(HttpStatusCode statusCode) + { + var status = (int)statusCode; + return status is >= 500 or 408 or 429; + } + + private static string? NormalizeDigest(string? digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var trimmed = digest.Trim(); + if (!trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + trimmed = "sha256:" + trimmed; + } + + return trimmed.ToLowerInvariant(); + } + + private static DateTimeOffset UpdateLatest(DateTimeOffset current, DateTimeOffset published) + { + if (published == default) + { + return current; + } + + return published > current ? published : current; + } } diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj index f6b57b36..e0ccf67c 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj +++ b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj @@ -8,11 +8,13 @@ + + - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md index dee7319f..50573a4f 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md +++ b/src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md @@ -2,6 +2,6 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md # TASKS | Task | Owner(s) | Depends on | Notes | |---|---|---|---| -|EXCITITOR-CONN-ORACLE-01-001 – Oracle CSAF catalogue discovery|Team Excititor Connectors – Oracle|EXCITITOR-CONN-ABS-01-001|DOING (2025-10-17) – Implement catalogue discovery, CPU calendar awareness, and offline snapshot import for Oracle CSAF feeds.| -|EXCITITOR-CONN-ORACLE-01-002 – CSAF download & dedupe pipeline|Team Excititor Connectors – Oracle|EXCITITOR-CONN-ORACLE-01-001, EXCITITOR-STORAGE-01-003|TODO – Fetch CSAF documents with retry/backoff, checksum validation, revision deduplication, and raw persistence.| +|EXCITITOR-CONN-ORACLE-01-001 – Oracle CSAF catalogue discovery|Team Excititor Connectors – Oracle|EXCITITOR-CONN-ABS-01-001|**DONE (2025-10-19)** – Implemented cached Oracle CSAF catalog loader with CPU calendar merge, offline snapshot ingest/persist, options validation + DI wiring, and regression tests; prerequisite EXCITITOR-CONN-ABS-01-001 verified DONE per Sprint 5 log (2025-10-19).| +|EXCITITOR-CONN-ORACLE-01-002 – CSAF download & dedupe pipeline|Team Excititor Connectors – Oracle|EXCITITOR-CONN-ORACLE-01-001, EXCITITOR-STORAGE-01-003|**DONE (2025-10-19)** – Added Oracle CSAF 
fetch loop with retry/backoff, checksum validation, resume-aware state persistence, digest dedupe, configurable throttling, and raw storage wiring; regression tests cover new ingestion and mismatch handling.| |EXCITITOR-CONN-ORACLE-01-003 – Trust metadata + provenance|Team Excititor Connectors – Oracle|EXCITITOR-CONN-ORACLE-01-002, EXCITITOR-POLICY-01-001|TODO – Emit Oracle signing metadata (PGP/cosign) and provenance hints for consensus weighting.| diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs b/src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs index 8b7fc08c..8045f5cd 100644 --- a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs +++ b/src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs @@ -100,10 +100,20 @@ public sealed class RedHatCsafConnector : VexConnectorBase if (stateChanged) { var newLastUpdated = latestUpdated == DateTimeOffset.MinValue ? state?.LastUpdated : latestUpdated; - var updatedState = new VexConnectorState( + var baseState = state ?? new VexConnectorState( Descriptor.Id, - newLastUpdated, - digestList.ToImmutableArray()); + null, + ImmutableArray.Empty, + ImmutableDictionary.Empty, + null, + 0, + null, + null); + var updatedState = baseState with + { + LastUpdated = newLastUpdated, + DocumentDigests = digestList.ToImmutableArray(), + }; await _stateRepository.SaveAsync(updatedState, cancellationToken).ConfigureAwait(false); } diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj b/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj index afd08612..00e40cff 100644 --- a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj +++ b/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj @@ -11,9 +11,9 @@ - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md new file mode 100644 index 00000000..afa199c5 --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md @@ -0,0 +1,127 @@ +# EXCITITOR-CONN-SUSE-01-002 — Checkpointed Event Ingestion (Design) + +**Status:** draft • **Updated:** 2025-10-19 +**Scope:** StellaOps.Excititor.Connectors.SUSE.RancherVEXHub + +## Goals + +- Stream Rancher VEX Hub events deterministically, supporting cold start and incremental resumes. +- Persist checkpoints so subsequent runs (worker/manual CLI) resume where the previous execution stopped. +- Deduplicate hub payloads using cryptographic digests while keeping a short history (≤ 200 entries) to align with `IVexConnectorStateRepository` constraints. +- Quarantine malformed/unfetchable events without blocking healthy ones, making failures observable for operators. +- Remain offline-friendly: work from discovery metadata snapshots and cached checkpoints without live network calls when configured. + +## Assumed Event Model + +Discovery metadata supplies `subscription.eventsUri` and (optionally) `subscription.checkpointUri`. 
Rancher emits JSON event batches over HTTP(S):
+
+```json
+{
+  "cursor": "opaque-offset-123",
+  "events": [
+    {
+      "id": "evt-2025-10-19T12:42:30Z-001",
+      "type": "vex.statement.published",
+      "channel": "rancher/rke2",
+      "publishedAt": "2025-10-19T12:42:30Z",
+      "document": {
+        "uri": "https://hub.suse.example/events/evt-.../statement.json",
+        "sha256": "ab12...",
+        "format": "csaf"
+      }
+    }
+  ]
+}
+```
+
+Key properties assumed per discovery schema validation:
+
+- `cursor` advances monotonically and can be replayed via `?cursor=` or a POST to `checkpointUri`.
+- Events carry a `document.uri` (absolute HTTP(S) URI) and an optional digest (`document.sha256`). When absent, a digest is computed after download.
+- `publishedAt` is UTC and stable; it is used as `VexConnectorState.LastUpdated` fallback when no checkpoint is provided.
+- Optional `channels` allow filtering (`channels=foo,bar`) to minimise payloads.
+
+The connector must tolerate missing fields by quarantining the raw envelope.
+
+## Flow Overview
+
+1. **Load connector state** from `IVexConnectorStateRepository` keyed by `Descriptor.Id`.
+   - `LastUpdated` stores the last successfully processed `publishedAt`.
+   - `DocumentDigests` stores:
+     - Last checkpoint token entry prefixed `checkpoint:` (only most recent kept).
+     - Recent raw document digests for deduping.
+2. **Resolve resume parameters**:
+   - Start cursor: explicit CLI `context.Since` overrides persisted checkpoint.
+   - If checkpoint exists, call `eventsUri?cursor=`; else pass `since=` (from `state.LastUpdated` or `context.Since`).
+   - Limit channels if discovery enumerated them and options specify `RancherHubConnectorOptions.EnabledChannels` (future option).
+3. **Fetch batches** in a deterministic, cancellation-aware loop:
+   - Send GETs with `pageSize` cap (default 200) and follow `nextCursor`/pagination until exhaustion.
+   - For each batch log metrics (`eventCount`, `cursor`, `fromOffline` flag).
+4. **Process events**:
+   - Validate minimal shape (id, document uri). Missing/invalid fields => log warning + quarantine JSON payload.
+   - Fetch document content via shared HTTP client. Respect optional digests (compare after download).
+   - Build raw metadata: event ID, channel, publishedAt, checkpoint cursor (if provided), offline flag.
+   - Deduplicate using `HashSet<string>` seeded with persisted digests; skip duplicates without re-writing state.
+   - Push valid documents to `context.RawSink.StoreAsync` and yield them downstream.
+   - Capture latest `publishedAt` and `cursor` for state update.
+5. **Quarantine path**:
+   - Serialize offending envelope into UTF-8 JSON (`application/vnd.stella.quarantine+json` metadata flag).
+   - Persist via `context.RawSink.StoreAsync` using format `VexDocumentFormat.Json` and metadata `{"rancher.event.quarantine":"true"}` to allow downstream filtering/reporting.
+6. **Persist state** once the batch completes or on graceful cancellation:
+   - Update `LastUpdated` with max `publishedAt` processed.
+   - Rebuild digest window (most recent ≤ 200).
+   - Store latest checkpoint token (if hub supplied one) as first digest entry `checkpoint:` for quick retrieval.
+
+## Key Types & Components
+
+```csharp
+internal sealed record RancherHubEventEnvelope(
+    string Id,
+    string? Type,
+    string Channel,
+    DateTimeOffset PublishedAt,
+    Uri DocumentUri,
+    string? DocumentDigest,
+    string? DocumentFormat);
+
+internal sealed record RancherHubCheckpointState(
+    string? Cursor,
+    DateTimeOffset? LatestPublishedAt,
+    ImmutableArray<string> Digests);
+```
+
+- `RancherHubEventClient` (new) encapsulates HTTP paging, cursor handling, and offline replay (reading bundled snapshot JSON when `PreferOfflineSnapshot` enabled).
+- `RancherHubCheckpointManager` (new) reads/writes `VexConnectorState`, encoding the checkpoint token under the `checkpoint:` prefix and trimming digest history.
+
+## Deduplication Strategy
+
+- Primary key: document SHA-256 digest (hub-provided or computed). Fallback: `event.Id` when digest missing (encoded as `event:` entry).
+- Persist dedupe keys via `DocumentDigests` to short-circuit duplicates on next run. Keep insertion order for deterministic state updates.
+- When offline snapshot is replayed, skip dedupe reset; reused digests still apply.
+
+## Quarantine Semantics
+
+- Trigger conditions:
+  - JSON envelope missing required fields.
+  - Document fetch returns non-success HTTP code.
+  - Digest mismatch between declared `document.sha256` and computed value.
+- Action: create `VexRawDocument` with metadata:
+  - `rancher.event.id`, `rancher.event.channel`, `rancher.event.type`, `rancher.event.error`.
+  - `rancher.event.quarantine=true` flag for downstream routing.
+  - Content: original envelope JSON (or error stub when fetch failed).
+- Quarantine entries count toward dedupe history using a synthetic digest `quarantine:` to prevent repeated attempts until manual intervention.
+
+## Cancellation & Determinism
+
+- Each HTTP call honours `CancellationToken`.
+- Loop checkpoints after each processed batch; if cancellation occurs mid-batch, state updates only include successfully handled documents to preserve deterministic replays.
+- Sorting: events processed in ascending `publishedAt` (or server-provided order). Within batch, maintain original order to avoid digest reshuffling.
+
+## Open Questions / Follow-ups
+
+- Confirm exact Rancher event schema (pending coordination with SUSE PSIRT) and adjust parser accordingly.
+- Validate whether `checkpointUri` requires POST with body `{ "cursor": "..." }` or simple GET.
+- Decide on channel filtering surface area (option flag vs. discovery default).
+- Establish metrics contract once observability task (future) starts.
+
+Until those are resolved, the implementation will keep the parser tolerant, with detailed logging and quarantine coverage, so future adjustments are low risk.
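+
+## Appendix: Checkpoint Encoding Sketch
+
+A minimal sketch, assuming the `checkpoint:` prefix and the ≤ 200-entry digest window described above, of how `RancherHubCheckpointManager` could pack the cursor and dedupe history into `VexConnectorState.DocumentDigests`. The helper names (`EncodeEntries`, `DecodeEntries`, `MaxDigestHistory`) are illustrative and not part of the connector contract.
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.Linq;
+
+internal static class CheckpointEncodingSketch
+{
+    private const string CheckpointPrefix = "checkpoint:";
+    private const int MaxDigestHistory = 200; // assumption taken from the flow overview above
+
+    // Cursor (if any) goes first; then the most recent document digests, oldest dropped.
+    public static ImmutableArray<string> EncodeEntries(string? cursor, IEnumerable<string> digests)
+    {
+        var builder = ImmutableArray.CreateBuilder<string>();
+        if (!string.IsNullOrWhiteSpace(cursor))
+        {
+            builder.Add(CheckpointPrefix + cursor);
+        }
+
+        builder.AddRange(digests
+            .Where(d => !string.IsNullOrWhiteSpace(d) && !d.StartsWith(CheckpointPrefix, StringComparison.Ordinal))
+            .TakeLast(MaxDigestHistory));
+
+        return builder.ToImmutable();
+    }
+
+    // Splits persisted entries back into the cursor and the plain digest history.
+    public static (string? Cursor, ImmutableArray<string> Digests) DecodeEntries(ImmutableArray<string> entries)
+    {
+        string? cursor = null;
+        var digests = ImmutableArray.CreateBuilder<string>();
+
+        foreach (var entry in entries)
+        {
+            if (cursor is null && entry.StartsWith(CheckpointPrefix, StringComparison.Ordinal))
+            {
+                cursor = entry[CheckpointPrefix.Length..];
+                continue;
+            }
+
+            digests.Add(entry);
+        }
+
+        return (cursor, digests.ToImmutable());
+    }
+}
+```
+
+Keeping the checkpoint entry first makes the cursor cheap to recover on resume without scanning the full digest history.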
diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs new file mode 100644 index 00000000..3923d976 --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs @@ -0,0 +1,311 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO.Abstractions; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Authentication; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Configuration; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Metadata; + +namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Events; + +internal sealed class RancherHubEventClient +{ + private readonly IHttpClientFactory _httpClientFactory; + private readonly RancherHubTokenProvider _tokenProvider; + private readonly IFileSystem _fileSystem; + private readonly ILogger _logger; + private readonly JsonDocumentOptions _documentOptions = new() + { + CommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }; + + private const string CheckpointPrefix = "checkpoint"; + + public RancherHubEventClient( + IHttpClientFactory httpClientFactory, + RancherHubTokenProvider tokenProvider, + IFileSystem fileSystem, + ILogger logger) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + _fileSystem = fileSystem ?? throw new ArgumentNullException(nameof(fileSystem)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async IAsyncEnumerable FetchEventBatchesAsync( + RancherHubConnectorOptions options, + RancherHubMetadata metadata, + string? cursor, + DateTimeOffset? since, + ImmutableArray channels, + [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(metadata); + + if (options.PreferOfflineSnapshot && metadata.OfflineSnapshot is not null) + { + var offline = await LoadOfflineSnapshotAsync(metadata.OfflineSnapshot, cancellationToken).ConfigureAwait(false); + if (offline is not null) + { + yield return offline; + } + + yield break; + } + + var client = _httpClientFactory.CreateClient(RancherHubConnectorOptions.HttpClientName); + var currentCursor = cursor; + var currentSince = since; + var firstRequest = true; + + while (true) + { + cancellationToken.ThrowIfCancellationRequested(); + + var requestUri = BuildRequestUri(metadata.Subscription.EventsUri, currentCursor, currentSince, channels); + using var request = await CreateRequestAsync(options, metadata, requestUri, cancellationToken).ConfigureAwait(false); + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Rancher hub events request failed ({(int)response.StatusCode} {response.StatusCode}). 
Payload: {payload}"); + } + + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var batch = ParseBatch(json, fromOfflineSnapshot: false); + + yield return batch; + + if (string.IsNullOrWhiteSpace(batch.NextCursor)) + { + break; + } + + if (!firstRequest && string.Equals(batch.NextCursor, currentCursor, StringComparison.Ordinal)) + { + _logger.LogWarning("Detected stable cursor {Cursor}; stopping to avoid loop.", batch.NextCursor); + break; + } + + currentCursor = batch.NextCursor; + currentSince = null; // cursor supersedes since parameter + firstRequest = false; + } + } + + private async Task LoadOfflineSnapshotAsync(RancherHubOfflineSnapshotMetadata offline, CancellationToken cancellationToken) + { + try + { + string payload; + if (offline.SnapshotUri.Scheme.Equals("file", StringComparison.OrdinalIgnoreCase)) + { + var path = offline.SnapshotUri.LocalPath; + payload = await _fileSystem.File.ReadAllTextAsync(path, Encoding.UTF8, cancellationToken).ConfigureAwait(false); + } + else + { + var client = _httpClientFactory.CreateClient(RancherHubConnectorOptions.HttpClientName); + using var response = await client.GetAsync(offline.SnapshotUri, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(offline.Sha256)) + { + var computed = ComputeSha256(payload); + if (!string.Equals(computed, offline.Sha256, StringComparison.OrdinalIgnoreCase)) + { + _logger.LogWarning( + "Offline snapshot digest mismatch (expected {Expected}, computed {Computed}); proceeding anyway.", + offline.Sha256, + computed); + } + } + + return ParseBatch(payload, fromOfflineSnapshot: true); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + _logger.LogWarning(ex, "Failed to load Rancher hub offline snapshot from {Uri}.", offline.SnapshotUri); + return null; + } + } + + private async Task CreateRequestAsync(RancherHubConnectorOptions options, RancherHubMetadata metadata, Uri requestUri, CancellationToken cancellationToken) + { + var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + request.Headers.Accept.ParseAdd("application/json"); + + if (metadata.Subscription.RequiresAuthentication) + { + var token = await _tokenProvider.GetAccessTokenAsync(options, cancellationToken).ConfigureAwait(false); + if (token is not null) + { + var scheme = string.IsNullOrWhiteSpace(token.TokenType) ? "Bearer" : token.TokenType; + request.Headers.Authorization = new AuthenticationHeaderValue(scheme, token.Value); + } + } + + return request; + } + + private RancherHubEventBatch ParseBatch(string payload, bool fromOfflineSnapshot) + { + using var document = JsonDocument.Parse(payload, _documentOptions); + var root = document.RootElement; + + var cursor = ReadString(root, "cursor", "currentCursor", "checkpoint"); + var nextCursor = ReadString(root, "nextCursor", "next", "continuation", "continuationToken"); + var eventsElement = TryGetProperty(root, "events", "items", "data") ?? 
default; + var events = ImmutableArray.CreateBuilder(); + + if (eventsElement.ValueKind == JsonValueKind.Array) + { + foreach (var item in eventsElement.EnumerateArray()) + { + events.Add(ParseEvent(item)); + } + } + + return new RancherHubEventBatch(cursor, nextCursor, events.ToImmutable(), fromOfflineSnapshot, payload); + } + + private RancherHubEventRecord ParseEvent(JsonElement element) + { + var rawJson = element.GetRawText(); + var id = ReadString(element, "id", "eventId", "uuid"); + var type = ReadString(element, "type", "eventType"); + var channel = ReadString(element, "channel", "product", "stream"); + var publishedAt = ParseDate(ReadString(element, "publishedAt", "timestamp", "createdAt")); + + Uri? documentUri = null; + string? documentDigest = null; + string? documentFormat = null; + + var documentElement = TryGetProperty(element, "document", "payload", "statement"); + if (documentElement.HasValue) + { + documentUri = ParseUri(ReadString(documentElement.Value, "uri", "url", "href")); + documentDigest = ReadString(documentElement.Value, "sha256", "digest", "checksum"); + documentFormat = ReadString(documentElement.Value, "format", "kind", "type"); + } + else + { + documentUri = ParseUri(ReadString(element, "documentUri", "uri", "url")); + documentDigest = ReadString(element, "documentSha256", "sha256", "digest"); + documentFormat = ReadString(element, "documentFormat", "format"); + } + + return new RancherHubEventRecord(rawJson, id, type, channel, publishedAt, documentUri, documentDigest, documentFormat); + } + + private static Uri? ParseUri(string? value) + => string.IsNullOrWhiteSpace(value) ? null : Uri.TryCreate(value, UriKind.Absolute, out var uri) ? uri : null; + + private static DateTimeOffset? ParseDate(string? value) + => string.IsNullOrWhiteSpace(value) ? null : DateTimeOffset.TryParse(value, out var parsed) ? parsed : null; + + private static string? ReadString(JsonElement element, params string[] propertyNames) + { + var property = TryGetProperty(element, propertyNames); + if (!property.HasValue || property.Value.ValueKind is not JsonValueKind.String) + { + return null; + } + + var value = property.Value.GetString(); + return string.IsNullOrWhiteSpace(value) ? null : value; + } + + private static JsonElement? TryGetProperty(JsonElement element, params string[] propertyNames) + { + foreach (var propertyName in propertyNames) + { + if (element.TryGetProperty(propertyName, out var property) && property.ValueKind is not JsonValueKind.Null and not JsonValueKind.Undefined) + { + return property; + } + } + + return null; + } + + private static string BuildQueryString(Dictionary parameters) + { + if (parameters.Count == 0) + { + return string.Empty; + } + + var builder = new StringBuilder(); + var first = true; + foreach (var kvp in parameters) + { + if (string.IsNullOrEmpty(kvp.Value)) + { + continue; + } + + if (!first) + { + builder.Append('&'); + } + builder.Append(Uri.EscapeDataString(kvp.Key)); + builder.Append('='); + builder.Append(Uri.EscapeDataString(kvp.Value)); + first = false; + } + + return builder.ToString(); + } + + private static Uri BuildRequestUri(Uri baseUri, string? cursor, DateTimeOffset? 
since, ImmutableArray channels) + { + var builder = new UriBuilder(baseUri); + var parameters = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (!string.IsNullOrWhiteSpace(cursor)) + { + parameters["cursor"] = cursor; + } + else if (since is not null) + { + parameters["since"] = since.Value.ToUniversalTime().ToString("O"); + } + + if (!channels.IsDefaultOrEmpty && channels.Length > 0) + { + parameters["channels"] = string.Join(',', channels); + } + + var query = BuildQueryString(parameters); + builder.Query = string.IsNullOrEmpty(query) ? null : query; + return builder.Uri; + } + + private static string ComputeSha256(string payload) + { + var bytes = Encoding.UTF8.GetBytes(payload); + Span hash = stackalloc byte[32]; + if (SHA256.TryHashData(bytes, hash, out _)) + { + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + using var sha = SHA256.Create(); + return Convert.ToHexString(sha.ComputeHash(bytes)).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs new file mode 100644 index 00000000..69c650d6 --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs @@ -0,0 +1,21 @@ +using System; +using System.Collections.Immutable; + +namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Events; + +internal sealed record RancherHubEventRecord( + string RawJson, + string? Id, + string? Type, + string? Channel, + DateTimeOffset? PublishedAt, + Uri? DocumentUri, + string? DocumentDigest, + string? DocumentFormat); + +internal sealed record RancherHubEventBatch( + string? Cursor, + string? NextCursor, + ImmutableArray Events, + bool FromOfflineSnapshot, + string RawPayload); diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs index 9f8bcb1a..013d1869 100644 --- a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs @@ -1,11 +1,20 @@ using System; using System.Collections.Generic; using System.Collections.Immutable; -using System.Runtime.CompilerServices; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; using Microsoft.Extensions.Logging; using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Authentication; using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Configuration; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Events; using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Metadata; +using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State; using StellaOps.Excititor.Core; namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub; @@ -21,6 +30,10 @@ public sealed class RancherHubConnector : VexConnectorBase }; private readonly RancherHubMetadataLoader _metadataLoader; + private readonly RancherHubEventClient _eventClient; + private readonly RancherHubCheckpointManager _checkpointManager; + private readonly RancherHubTokenProvider _tokenProvider; + private readonly IHttpClientFactory _httpClientFactory; private readonly IEnumerable> _validators; private RancherHubConnectorOptions? 
_options; @@ -28,12 +41,20 @@ public sealed class RancherHubConnector : VexConnectorBase public RancherHubConnector( RancherHubMetadataLoader metadataLoader, + RancherHubEventClient eventClient, + RancherHubCheckpointManager checkpointManager, + RancherHubTokenProvider tokenProvider, + IHttpClientFactory httpClientFactory, ILogger logger, TimeProvider timeProvider, IEnumerable>? validators = null) : base(StaticDescriptor, logger, timeProvider) { _metadataLoader = metadataLoader ?? throw new ArgumentNullException(nameof(metadataLoader)); + _eventClient = eventClient ?? throw new ArgumentNullException(nameof(eventClient)); + _checkpointManager = checkpointManager ?? throw new ArgumentNullException(nameof(checkpointManager)); + _tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); _validators = validators ?? Array.Empty>(); } @@ -69,17 +90,255 @@ public sealed class RancherHubConnector : VexConnectorBase _metadata = await _metadataLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false); } - LogConnectorEvent(LogLevel.Debug, "fetch", "Rancher hub connector discovery ready; event ingestion will be implemented in EXCITITOR-CONN-SUSE-01-002.", new Dictionary + var checkpoint = await _checkpointManager.LoadAsync(Descriptor.Id, context, cancellationToken).ConfigureAwait(false); + var digestHistory = checkpoint.Digests.ToList(); + var dedupeSet = new HashSet(checkpoint.Digests, StringComparer.OrdinalIgnoreCase); + var latestCursor = checkpoint.Cursor; + var latestPublishedAt = checkpoint.LastPublishedAt ?? checkpoint.EffectiveSince; + var stateChanged = false; + + LogConnectorEvent(LogLevel.Information, "fetch_start", "Starting Rancher hub event ingestion.", new Dictionary { - ["since"] = context.Since?.ToString("O"), + ["since"] = checkpoint.EffectiveSince?.ToString("O"), + ["cursor"] = checkpoint.Cursor, ["subscriptionUri"] = _metadata.Metadata.Subscription.EventsUri.ToString(), + ["offline"] = checkpoint.Cursor is null && _options.PreferOfflineSnapshot, }); - yield break; + await foreach (var batch in _eventClient.FetchEventBatchesAsync( + _options, + _metadata.Metadata, + checkpoint.Cursor, + checkpoint.EffectiveSince, + _metadata.Metadata.Subscription.Channels, + cancellationToken).ConfigureAwait(false)) + { + LogConnectorEvent(LogLevel.Debug, "batch", "Processing Rancher hub batch.", new Dictionary + { + ["cursor"] = batch.Cursor, + ["nextCursor"] = batch.NextCursor, + ["count"] = batch.Events.Length, + ["offline"] = batch.FromOfflineSnapshot, + }); + + if (!string.IsNullOrWhiteSpace(batch.NextCursor) && !string.Equals(batch.NextCursor, latestCursor, StringComparison.Ordinal)) + { + latestCursor = batch.NextCursor; + stateChanged = true; + } + else if (string.IsNullOrWhiteSpace(latestCursor) && !string.IsNullOrWhiteSpace(batch.Cursor)) + { + latestCursor = batch.Cursor; + } + + foreach (var record in batch.Events) + { + cancellationToken.ThrowIfCancellationRequested(); + + var result = await ProcessEventAsync(record, batch, context, dedupeSet, digestHistory, cancellationToken).ConfigureAwait(false); + if (result.ProcessedDocument is not null) + { + yield return result.ProcessedDocument; + stateChanged = true; + if (result.PublishedAt is { } published && (latestPublishedAt is null || published > latestPublishedAt)) + { + latestPublishedAt = published; + } + } + else if (result.Quarantined) + { + stateChanged = true; + } + } + } + + if 
(stateChanged || !string.Equals(latestCursor, checkpoint.Cursor, StringComparison.Ordinal) || latestPublishedAt != checkpoint.LastPublishedAt) + { + await _checkpointManager.SaveAsync( + Descriptor.Id, + latestCursor, + latestPublishedAt, + digestHistory.ToImmutableArray(), + cancellationToken).ConfigureAwait(false); + } } public override ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) => throw new NotSupportedException("RancherHubConnector relies on format-specific normalizers for CSAF/OpenVEX payloads."); public RancherHubMetadata? GetCachedMetadata() => _metadata?.Metadata; + + private async Task ProcessEventAsync( + RancherHubEventRecord record, + RancherHubEventBatch batch, + VexConnectorContext context, + HashSet dedupeSet, + List digestHistory, + CancellationToken cancellationToken) + { + var quarantineKey = BuildQuarantineKey(record); + if (dedupeSet.Contains(quarantineKey)) + { + return EventProcessingResult.QuarantinedOnly; + } + + if (record.DocumentUri is null || string.IsNullOrWhiteSpace(record.Id)) + { + await QuarantineAsync(record, batch, "missing documentUri or id", context, cancellationToken).ConfigureAwait(false); + AddQuarantineDigest(quarantineKey, dedupeSet, digestHistory); + return EventProcessingResult.QuarantinedOnly; + } + + var client = _httpClientFactory.CreateClient(RancherHubConnectorOptions.HttpClientName); + using var request = await CreateDocumentRequestAsync(record.DocumentUri, cancellationToken).ConfigureAwait(false); + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + await QuarantineAsync(record, batch, $"document fetch failed ({(int)response.StatusCode} {response.StatusCode})", context, cancellationToken).ConfigureAwait(false); + AddQuarantineDigest(quarantineKey, dedupeSet, digestHistory); + return EventProcessingResult.QuarantinedOnly; + } + + var contentBytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + var publishedAt = record.PublishedAt ?? UtcNow(); + var metadata = BuildMetadata(builder => builder + .Add("rancher.event.id", record.Id) + .Add("rancher.event.type", record.Type) + .Add("rancher.event.channel", record.Channel) + .Add("rancher.event.published", publishedAt) + .Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor) + .Add("rancher.event.offline", batch.FromOfflineSnapshot ? 
"true" : "false") + .Add("rancher.event.declaredDigest", record.DocumentDigest)); + + var format = ResolveFormat(record.DocumentFormat); + var document = CreateRawDocument(format, record.DocumentUri, contentBytes, metadata); + + if (!string.IsNullOrWhiteSpace(record.DocumentDigest)) + { + var declared = NormalizeDigest(record.DocumentDigest); + var computed = NormalizeDigest(document.Digest); + if (!string.Equals(declared, computed, StringComparison.OrdinalIgnoreCase)) + { + await QuarantineAsync(record, batch, $"digest mismatch (declared {record.DocumentDigest}, computed {document.Digest})", context, cancellationToken).ConfigureAwait(false); + AddQuarantineDigest(quarantineKey, dedupeSet, digestHistory); + return EventProcessingResult.QuarantinedOnly; + } + } + + if (!dedupeSet.Add(document.Digest)) + { + return EventProcessingResult.Skipped; + } + + digestHistory.Add(document.Digest); + await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + return new EventProcessingResult(document, false, publishedAt); + } + + private async Task CreateDocumentRequestAsync(Uri documentUri, CancellationToken cancellationToken) + { + var request = new HttpRequestMessage(HttpMethod.Get, documentUri); + if (_metadata?.Metadata.Subscription.RequiresAuthentication ?? false) + { + var token = await _tokenProvider.GetAccessTokenAsync(_options!, cancellationToken).ConfigureAwait(false); + if (token is not null) + { + var scheme = string.IsNullOrWhiteSpace(token.TokenType) ? "Bearer" : token.TokenType; + request.Headers.Authorization = new AuthenticationHeaderValue(scheme, token.Value); + } + } + + return request; + } + + private async Task QuarantineAsync( + RancherHubEventRecord record, + RancherHubEventBatch batch, + string reason, + VexConnectorContext context, + CancellationToken cancellationToken) + { + var metadata = BuildMetadata(builder => builder + .Add("rancher.event.id", record.Id) + .Add("rancher.event.type", record.Type) + .Add("rancher.event.channel", record.Channel) + .Add("rancher.event.quarantine", "true") + .Add("rancher.event.error", reason) + .Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor) + .Add("rancher.event.offline", batch.FromOfflineSnapshot ? "true" : "false")); + + var sourceUri = record.DocumentUri ?? _metadata?.Metadata.Subscription.EventsUri ?? _options!.DiscoveryUri; + var payload = Encoding.UTF8.GetBytes(record.RawJson); + var document = CreateRawDocument(VexDocumentFormat.Csaf, sourceUri, payload, metadata); + await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + + LogConnectorEvent(LogLevel.Warning, "quarantine", "Rancher hub event moved to quarantine.", new Dictionary + { + ["eventId"] = record.Id ?? 
"(missing)", + ["reason"] = reason, + }); + } + + private static void AddQuarantineDigest(string key, HashSet dedupeSet, List digestHistory) + { + if (dedupeSet.Add(key)) + { + digestHistory.Add(key); + } + } + + private static string BuildQuarantineKey(RancherHubEventRecord record) + { + if (!string.IsNullOrWhiteSpace(record.Id)) + { + return $"quarantine:{record.Id}"; + } + + Span hash = stackalloc byte[32]; + var bytes = Encoding.UTF8.GetBytes(record.RawJson); + if (!SHA256.TryHashData(bytes, hash, out _)) + { + using var sha = SHA256.Create(); + hash = sha.ComputeHash(bytes); + } + + return $"quarantine:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + private static string NormalizeDigest(string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return digest; + } + + var trimmed = digest.Trim(); + return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? trimmed.ToLowerInvariant() + : $"sha256:{trimmed.ToLowerInvariant()}"; + } + + private static VexDocumentFormat ResolveFormat(string? format) + { + if (string.IsNullOrWhiteSpace(format)) + { + return VexDocumentFormat.Csaf; + } + + return format.ToLowerInvariant() switch + { + "csaf" or "csaf_json" or "json" => VexDocumentFormat.Csaf, + "cyclonedx" or "cyclonedx_vex" => VexDocumentFormat.CycloneDx, + "openvex" => VexDocumentFormat.OpenVex, + "oci" or "oci_attestation" or "attestation" => VexDocumentFormat.OciAttestation, + _ => VexDocumentFormat.Csaf, + }; + } + + private sealed record EventProcessingResult(VexRawDocument? ProcessedDocument, bool Quarantined, DateTimeOffset? PublishedAt) + { + public static EventProcessingResult QuarantinedOnly { get; } = new(null, true, null); + + public static EventProcessingResult Skipped { get; } = new(null, false, null); + } } diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs new file mode 100644 index 00000000..4a3c3456 --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs @@ -0,0 +1,98 @@ +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State; + +internal sealed record RancherHubCheckpointState( + string? Cursor, + DateTimeOffset? LastPublishedAt, + DateTimeOffset? EffectiveSince, + ImmutableArray Digests); + +internal sealed class RancherHubCheckpointManager +{ + private const string CheckpointPrefix = "checkpoint:"; + private readonly IVexConnectorStateRepository _repository; + + public RancherHubCheckpointManager(IVexConnectorStateRepository repository) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + } + + public async ValueTask LoadAsync(string connectorId, VexConnectorContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var state = await _repository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false); + var cursor = ExtractCursor(state?.DocumentDigests ?? ImmutableArray.Empty); + var digests = ExtractDigests(state?.DocumentDigests ?? 
ImmutableArray.Empty); + var lastPublishedAt = state?.LastUpdated; + var effectiveSince = context.Since; + + if (context.Settings.Values.TryGetValue("checkpoint", out var checkpointOverride) && !string.IsNullOrWhiteSpace(checkpointOverride)) + { + cursor = checkpointOverride; + digests = ImmutableArray.Empty; + } + + if (effectiveSince is null && lastPublishedAt is not null) + { + effectiveSince = lastPublishedAt; + } + + if (effectiveSince is not null && lastPublishedAt is not null && effectiveSince < lastPublishedAt) + { + digests = ImmutableArray.Empty; + } + + return new RancherHubCheckpointState(cursor, lastPublishedAt, effectiveSince, digests); + } + + public ValueTask SaveAsync(string connectorId, string? cursor, DateTimeOffset? lastPublishedAt, ImmutableArray digests, CancellationToken cancellationToken) + { + var entries = ImmutableArray.CreateBuilder(); + if (!string.IsNullOrWhiteSpace(cursor)) + { + entries.Add($"{CheckpointPrefix}{cursor}"); + } + + foreach (var digest in digests) + { + if (string.IsNullOrWhiteSpace(digest)) + { + continue; + } + + if (digest.StartsWith(CheckpointPrefix, StringComparison.Ordinal)) + { + continue; + } + + entries.Add(digest); + } + + var state = new VexConnectorState(connectorId, lastPublishedAt, entries.ToImmutable()); + return _repository.SaveAsync(state, cancellationToken); + } + + private static string? ExtractCursor(ImmutableArray digests) + { + foreach (var entry in digests) + { + if (entry.StartsWith(CheckpointPrefix, StringComparison.Ordinal)) + { + return entry[CheckpointPrefix.Length..]; + } + } + + return null; + } + + private static ImmutableArray ExtractDigests(ImmutableArray digests) + => digests.Where(d => !d.StartsWith(CheckpointPrefix, StringComparison.Ordinal)).ToImmutableArray(); +} diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj index afd08612..00e40cff 100644 --- a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj @@ -11,9 +11,9 @@ - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md index 324d9386..8a410b84 100644 --- a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md +++ b/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md @@ -3,5 +3,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-CONN-SUSE-01-001 – Rancher hub discovery & auth|Team Excititor Connectors – SUSE|EXCITITOR-CONN-ABS-01-001|**DONE (2025-10-17)** – Added Rancher hub options/token provider, discovery metadata loader with offline snapshots + caching, connector shell, DI wiring, and unit tests covering network/offline paths.| -|EXCITITOR-CONN-SUSE-01-002 – Checkpointed event ingestion|Team Excititor Connectors – SUSE|EXCITITOR-CONN-SUSE-01-001, EXCITITOR-STORAGE-01-003|TODO – Process hub events with resume checkpoints, deduplication, and quarantine path for malformed payloads.| +|EXCITITOR-CONN-SUSE-01-002 – Checkpointed event ingestion|Team Excititor Connectors – SUSE|EXCITITOR-CONN-SUSE-01-001, EXCITITOR-STORAGE-01-003|**DOING (2025-10-19)** – Process hub events with resume 
checkpoints, deduplication, and quarantine path for malformed payloads.
2025-10-19: Prereqs EXCITITOR-CONN-SUSE-01-001 & EXCITITOR-STORAGE-01-003 confirmed complete; initiating checkpoint/resume implementation plan.| |EXCITITOR-CONN-SUSE-01-003 – Trust metadata & policy hints|Team Excititor Connectors – SUSE|EXCITITOR-CONN-SUSE-01-002, EXCITITOR-POLICY-01-001|TODO – Emit provider trust configuration (signers, weight overrides) and attach provenance hints for consensus engine.| diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs new file mode 100644 index 00000000..c61acaa2 --- /dev/null +++ b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs @@ -0,0 +1,309 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Configuration; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Metadata; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using System.IO.Abstractions.TestingHelpers; +using Xunit; + +namespace StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.Connectors; + +public sealed class UbuntuCsafConnectorTests +{ + [Fact] + public async Task FetchAsync_IngestsNewDocument_UpdatesStateAndUsesEtag() + { + var baseUri = new Uri("https://ubuntu.test/security/csaf/"); + var indexUri = new Uri(baseUri, "index.json"); + var catalogUri = new Uri(baseUri, "stable/catalog.json"); + var advisoryUri = new Uri(baseUri, "stable/USN-2025-0001.json"); + + var manifest = CreateTestManifest(advisoryUri, "USN-2025-0001", "2025-10-18T00:00:00Z"); + var documentPayload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var documentSha = ComputeSha256(documentPayload); + + var indexJson = manifest.IndexJson; + var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", documentSha, StringComparison.Ordinal); + var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, documentPayload, expectedEtag: "etag-123"); + + var httpClient = new HttpClient(handler); + var httpFactory = new SingleClientFactory(httpClient); + var cache = new MemoryCache(new MemoryCacheOptions()); + var fileSystem = new MockFileSystem(); + var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger.Instance, TimeProvider.System); + + var optionsValidator = new UbuntuConnectorOptionsValidator(fileSystem); + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new UbuntuCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { optionsValidator }, + NullLogger.Instance, + TimeProvider.System); + + var settings = new VexConnectorSettings(ImmutableDictionary.Empty); + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await 
foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + var stored = sink.Documents.Single(); + stored.Digest.Should().Be($"sha256:{documentSha}"); + stored.Metadata.TryGetValue("ubuntu.etag", out var storedEtag).Should().BeTrue(); + storedEtag.Should().Be("etag-123"); + + stateRepository.CurrentState.Should().NotBeNull(); + stateRepository.CurrentState!.DocumentDigests.Should().Contain($"sha256:{documentSha}"); + stateRepository.CurrentState.DocumentDigests.Should().Contain($"etag:{advisoryUri}|etag-123"); + stateRepository.CurrentState.LastUpdated.Should().Be(DateTimeOffset.Parse("2025-10-18T00:00:00Z")); + + handler.DocumentRequestCount.Should().Be(1); + + // Second run: Expect connector to send If-None-Match and skip download via 304. + sink.Documents.Clear(); + documents.Clear(); + + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + handler.DocumentRequestCount.Should().Be(2); + handler.SeenIfNoneMatch.Should().Contain("\"etag-123\""); + } + + [Fact] + public async Task FetchAsync_SkipsWhenChecksumMismatch() + { + var baseUri = new Uri("https://ubuntu.test/security/csaf/"); + var indexUri = new Uri(baseUri, "index.json"); + var catalogUri = new Uri(baseUri, "stable/catalog.json"); + var advisoryUri = new Uri(baseUri, "stable/USN-2025-0002.json"); + + var manifest = CreateTestManifest(advisoryUri, "USN-2025-0002", "2025-10-18T00:00:00Z"); + var indexJson = manifest.IndexJson; + var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", new string('a', 64), StringComparison.Ordinal); + var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"), expectedEtag: "etag-999"); + + var httpClient = new HttpClient(handler); + var httpFactory = new SingleClientFactory(httpClient); + var cache = new MemoryCache(new MemoryCacheOptions()); + var fileSystem = new MockFileSystem(); + var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger.Instance, TimeProvider.System); + var optionsValidator = new UbuntuConnectorOptionsValidator(fileSystem); + var stateRepository = new InMemoryConnectorStateRepository(); + + var connector = new UbuntuCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { optionsValidator }, + NullLogger.Instance, + TimeProvider.System); + + await connector.ValidateAsync(new VexConnectorSettings(ImmutableDictionary.Empty), CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider()); + + var documents = new List(); + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.CurrentState.Should().NotBeNull(); + stateRepository.CurrentState!.DocumentDigests.Should().BeEmpty(); + handler.DocumentRequestCount.Should().Be(1); + } + + private static (string IndexJson, string CatalogJson) CreateTestManifest(Uri advisoryUri, string advisoryId, string timestamp) + { + var indexJson = $$""" + { + "generated": "2025-10-18T00:00:00Z", + "channels": [ + { + "name": "stable", + 
"catalogUrl": "{{advisoryUri.GetLeftPart(UriPartial.Authority)}}/security/csaf/stable/catalog.json", + "sha256": "ignore" + } + ] + } + """; + + var catalogJson = $$""" + { + "resources": [ + { + "id": "{{advisoryId}}", + "type": "csaf", + "url": "{{advisoryUri}}", + "last_modified": "{{timestamp}}", + "hashes": { + "sha256": "{{SHA256}}" + }, + "etag": "\"etag-123\"", + "title": "{{advisoryId}}" + } + ] + } + """; + + return (indexJson, catalogJson); + } + + private static string ComputeSha256(ReadOnlySpan payload) + { + Span buffer = stackalloc byte[32]; + SHA256.HashData(payload, buffer); + return Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private sealed class SingleClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class UbuntuTestHttpHandler : HttpMessageHandler + { + private readonly Uri _indexUri; + private readonly string _indexPayload; + private readonly Uri _catalogUri; + private readonly string _catalogPayload; + private readonly Uri _documentUri; + private readonly byte[] _documentPayload; + private readonly string _expectedEtag; + + public int DocumentRequestCount { get; private set; } + public List SeenIfNoneMatch { get; } = new(); + + public UbuntuTestHttpHandler(Uri indexUri, string indexPayload, Uri catalogUri, string catalogPayload, Uri documentUri, byte[] documentPayload, string expectedEtag) + { + _indexUri = indexUri; + _indexPayload = indexPayload; + _catalogUri = catalogUri; + _catalogPayload = catalogPayload; + _documentUri = documentUri; + _documentPayload = documentPayload; + _expectedEtag = expectedEtag; + } + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri == _indexUri) + { + return Task.FromResult(CreateJsonResponse(_indexPayload)); + } + + if (request.RequestUri == _catalogUri) + { + return Task.FromResult(CreateJsonResponse(_catalogPayload)); + } + + if (request.RequestUri == _documentUri) + { + DocumentRequestCount++; + if (request.Headers.IfNoneMatch is { Count: > 0 }) + { + var header = request.Headers.IfNoneMatch.First().ToString(); + SeenIfNoneMatch.Add(header); + if (header.Trim('"') == _expectedEtag || header == $"\"{_expectedEtag}\"") + { + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotModified)); + } + } + + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(_documentPayload), + }; + response.Headers.ETag = new EntityTagHeaderValue($"\"{_expectedEtag}\""); + response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + return Task.FromResult(response); + } + + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"No response configured for {request.RequestUri}"), + }); + } + + private static HttpResponseMessage CreateJsonResponse(string payload) + => new(HttpStatusCode.OK) + { + Content = new StringContent(payload, Encoding.UTF8, "application/json"), + }; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? 
CurrentState { get; private set; } + + public ValueTask GetAsync(string connectorId, CancellationToken cancellationToken) + => ValueTask.FromResult(CurrentState); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken) + { + CurrentState = state; + return ValueTask.CompletedTask; + } + } + + private sealed class InMemoryRawSink : IVexRawDocumentSink + { + public List Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray.Empty, ImmutableDictionary.Empty)); + } +} diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj index 57fe463d..e80c8012 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj +++ b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj index f6b57b36..e0ccf67c 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj +++ b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj @@ -8,11 +8,13 @@ + + - - - + + + diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md index e85b2818..52a12bce 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md +++ b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md @@ -3,5 +3,6 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-CONN-UBUNTU-01-001 – Ubuntu CSAF discovery & channels|Team Excititor Connectors – Ubuntu|EXCITITOR-CONN-ABS-01-001|**DONE (2025-10-17)** – Added Ubuntu connector project with configurable channel options, catalog loader (network/offline), DI wiring, and discovery unit tests.| -|EXCITITOR-CONN-UBUNTU-01-002 – Incremental fetch & deduplication|Team Excititor Connectors – Ubuntu|EXCITITOR-CONN-UBUNTU-01-001, EXCITITOR-STORAGE-01-003|TODO – Fetch CSAF bundles with ETag handling, checksum validation, deduplication, and raw persistence.| +|EXCITITOR-CONN-UBUNTU-01-002 – Incremental fetch & deduplication|Team Excititor Connectors – Ubuntu|EXCITITOR-CONN-UBUNTU-01-001, EXCITITOR-STORAGE-01-003|**DOING (2025-10-19)** – Fetch CSAF bundles with ETag handling, checksum validation, deduplication, and raw persistence.| |EXCITITOR-CONN-UBUNTU-01-003 – Trust metadata & provenance|Team Excititor Connectors – Ubuntu|EXCITITOR-CONN-UBUNTU-01-002, EXCITITOR-POLICY-01-001|TODO – Emit Ubuntu signing metadata (GPG 
fingerprints) plus provenance hints for policy weighting and diagnostics.| +> Remark (2025-10-19, EXCITITOR-CONN-UBUNTU-01-002): Prerequisites EXCITITOR-CONN-UBUNTU-01-001 and EXCITITOR-STORAGE-01-003 verified as **DONE**; advancing to DOING per Wave 0 kickoff. diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs index 85645843..c369f668 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs +++ b/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs @@ -1,16 +1,24 @@ using System.Collections.Generic; using System.Collections.Immutable; +using System.Globalization; +using System.Net; +using System.Net.Http; using System.Runtime.CompilerServices; +using System.Security.Cryptography; +using System.Text.Json; using Microsoft.Extensions.Logging; using StellaOps.Excititor.Connectors.Abstractions; using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Configuration; using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Metadata; using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; namespace StellaOps.Excititor.Connectors.Ubuntu.CSAF; public sealed class UbuntuCsafConnector : VexConnectorBase { + private const string EtagTokenPrefix = "etag:"; + private static readonly VexConnectorDescriptor DescriptorInstance = new( id: "excititor:ubuntu", kind: VexProviderKind.Distro, @@ -20,6 +28,8 @@ public sealed class UbuntuCsafConnector : VexConnectorBase }; private readonly UbuntuCatalogLoader _catalogLoader; + private readonly IHttpClientFactory _httpClientFactory; + private readonly IVexConnectorStateRepository _stateRepository; private readonly IEnumerable> _validators; private UbuntuConnectorOptions? _options; @@ -27,12 +37,16 @@ public sealed class UbuntuCsafConnector : VexConnectorBase public UbuntuCsafConnector( UbuntuCatalogLoader catalogLoader, + IHttpClientFactory httpClientFactory, + IVexConnectorStateRepository stateRepository, IEnumerable> validators, ILogger logger, TimeProvider timeProvider) : base(DescriptorInstance, logger, timeProvider) { _catalogLoader = catalogLoader ?? throw new ArgumentNullException(nameof(catalogLoader)); + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); _validators = validators ?? Array.Empty>(); } @@ -65,16 +79,424 @@ public sealed class UbuntuCsafConnector : VexConnectorBase _catalog = await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false); } - LogConnectorEvent(LogLevel.Debug, "fetch", "Ubuntu CSAF discovery ready; channel catalogs handled in subsequent task.", new Dictionary - { - ["since"] = context.Since?.ToString("O"), - }); + var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false); + var knownTokens = state?.DocumentDigests ?? ImmutableArray.Empty; + var digestSet = new HashSet(StringComparer.OrdinalIgnoreCase); + var tokenSet = new HashSet(StringComparer.Ordinal); + var tokenList = new List(knownTokens.Length + 16); + var etagMap = new Dictionary(StringComparer.OrdinalIgnoreCase); - yield break; + foreach (var token in knownTokens) + { + tokenSet.Add(token); + tokenList.Add(token); + if (TryParseEtagToken(token, out var uri, out var etag)) + { + etagMap[uri] = etag; + } + else + { + digestSet.Add(token); + } + } + + var since = context.Since ?? state?.LastUpdated ?? 
DateTimeOffset.MinValue; + var latestTimestamp = state?.LastUpdated ?? since; + var stateChanged = false; + + foreach (var channel in _catalog.Metadata.Channels) + { + await foreach (var entry in EnumerateChannelResourcesAsync(channel, cancellationToken).ConfigureAwait(false)) + { + var entryTimestamp = entry.LastModified ?? channel.LastUpdated ?? _catalog.Metadata.GeneratedAt; + if (entryTimestamp <= since) + { + if (entryTimestamp > latestTimestamp) + { + latestTimestamp = entryTimestamp; + } + + continue; + } + + var expectedDigest = entry.Sha256 is null ? null : NormalizeDigest(entry.Sha256); + if (expectedDigest is not null && digestSet.Contains(expectedDigest)) + { + if (entryTimestamp > latestTimestamp) + { + latestTimestamp = entryTimestamp; + } + + continue; + } + + etagMap.TryGetValue(entry.DocumentUri.ToString(), out var knownEtag); + + var download = await DownloadDocumentAsync(entry, knownEtag, cancellationToken).ConfigureAwait(false); + if (download is null) + { + if (entryTimestamp > latestTimestamp) + { + latestTimestamp = entryTimestamp; + } + + continue; + } + + var document = download.Document; + if (!digestSet.Add(document.Digest)) + { + if (entryTimestamp > latestTimestamp) + { + latestTimestamp = entryTimestamp; + } + + continue; + } + + await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + if (tokenSet.Add(document.Digest)) + { + tokenList.Add(document.Digest); + } + + if (!string.IsNullOrWhiteSpace(download.ETag)) + { + var etagValue = download.ETag!; + etagMap[entry.DocumentUri.ToString()] = etagValue; + var etagToken = CreateEtagToken(entry.DocumentUri, etagValue); + if (tokenSet.Add(etagToken)) + { + tokenList.Add(etagToken); + } + } + + stateChanged = true; + if (entryTimestamp > latestTimestamp) + { + latestTimestamp = entryTimestamp; + } + + yield return document; + } + } + + if (stateChanged || latestTimestamp > (state?.LastUpdated ?? DateTimeOffset.MinValue)) + { + var newState = new VexConnectorState( + Descriptor.Id, + latestTimestamp == DateTimeOffset.MinValue ? state?.LastUpdated : latestTimestamp, + tokenList.ToImmutableArray()); + + await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false); + } } public override ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) => throw new NotSupportedException("UbuntuCsafConnector relies on CSAF normalizers for document processing."); public UbuntuCatalogResult? GetCachedCatalog() => _catalog; + + private async IAsyncEnumerable EnumerateChannelResourcesAsync(UbuChannelCatalog channel, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient(UbuntuConnectorOptions.HttpClientName); + HttpResponseMessage? 
response = null; + try + { + response = await client.GetAsync(channel.CatalogUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + + if (!document.RootElement.TryGetProperty("resources", out var resourcesElement) || resourcesElement.ValueKind != JsonValueKind.Array) + { + LogConnectorEvent(LogLevel.Warning, "fetch.catalog.empty", "Ubuntu CSAF channel catalog missing 'resources' array.", new Dictionary + { + ["channel"] = channel.Name, + ["catalog"] = channel.CatalogUri.ToString(), + }); + yield break; + } + + foreach (var resource in resourcesElement.EnumerateArray()) + { + var type = GetString(resource, "type"); + if (type is not null && !type.Equals("csaf", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var uriText = GetString(resource, "url") + ?? GetString(resource, "canonical") + ?? GetString(resource, "download") + ?? GetString(resource, "uri"); + + if (uriText is null || !Uri.TryCreate(uriText, UriKind.Absolute, out var documentUri)) + { + continue; + } + + var sha256 = TryGetHash(resource); + var etag = GetString(resource, "etag"); + var lastModified = ParseDate(resource, "last_modified") + ?? ParseDate(resource, "published") + ?? ParseDate(resource, "released") + ?? channel.LastUpdated; + var title = GetString(resource, "title"); + var version = GetString(resource, "version"); + var advisoryId = GetString(resource, "id") ?? ExtractAdvisoryId(documentUri, title); + + yield return new UbuntuCatalogEntry( + channel.Name, + advisoryId, + documentUri, + sha256, + etag, + lastModified, + title, + version); + } + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + LogConnectorEvent(LogLevel.Warning, "fetch.catalog.failure", "Failed to enumerate Ubuntu CSAF channel catalog.", new Dictionary + { + ["channel"] = channel.Name, + ["catalog"] = channel.CatalogUri.ToString(), + }, ex); + } + finally + { + response?.Dispose(); + } + } + + private async Task DownloadDocumentAsync(UbuntuCatalogEntry entry, string? knownEtag, CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient(UbuntuConnectorOptions.HttpClientName); + using var request = new HttpRequestMessage(HttpMethod.Get, entry.DocumentUri); + if (!string.IsNullOrWhiteSpace(knownEtag)) + { + request.Headers.IfNoneMatch.TryParseAdd(EnsureQuoted(knownEtag)); + } + + HttpResponseMessage? 
response = null; + try + { + response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + + if (response.StatusCode == HttpStatusCode.NotModified) + { + LogConnectorEvent(LogLevel.Debug, "fetch.document.not_modified", "Ubuntu CSAF document not modified per ETag.", new Dictionary + { + ["uri"] = entry.DocumentUri.ToString(), + ["etag"] = knownEtag, + }); + return null; + } + + response.EnsureSuccessStatusCode(); + + var payload = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + if (entry.Sha256 is not null) + { + var expected = NormalizeDigest(entry.Sha256); + var actual = "sha256:" + ComputeSha256Hex(payload); + if (!string.Equals(expected, actual, StringComparison.OrdinalIgnoreCase)) + { + LogConnectorEvent(LogLevel.Warning, "fetch.document.checksum_mismatch", "Ubuntu CSAF document checksum mismatch; skipping document.", new Dictionary + { + ["uri"] = entry.DocumentUri.ToString(), + ["expected"] = expected, + ["actual"] = actual, + }); + return null; + } + } + + var etagHeader = response.Headers.ETag?.Tag; + var etagValue = !string.IsNullOrWhiteSpace(etagHeader) + ? Unquote(etagHeader!) + : entry.ETag is null ? null : Unquote(entry.ETag); + + var metadata = BuildMetadata(builder => + { + builder.Add("ubuntu.channel", entry.Channel); + builder.Add("ubuntu.uri", entry.DocumentUri.ToString()); + if (!string.IsNullOrWhiteSpace(entry.AdvisoryId)) + { + builder.Add("ubuntu.advisoryId", entry.AdvisoryId); + } + + if (!string.IsNullOrWhiteSpace(entry.Title)) + { + builder.Add("ubuntu.title", entry.Title!); + } + + if (!string.IsNullOrWhiteSpace(entry.Version)) + { + builder.Add("ubuntu.version", entry.Version!); + } + + if (entry.LastModified is { } modified) + { + builder.Add("ubuntu.lastModified", modified.ToString("O")); + } + + if (entry.Sha256 is not null) + { + builder.Add("ubuntu.sha256", NormalizeDigest(entry.Sha256)); + } + + if (!string.IsNullOrWhiteSpace(etagValue)) + { + builder.Add("ubuntu.etag", etagValue!); + } + }); + + var document = CreateRawDocument(VexDocumentFormat.Csaf, entry.DocumentUri, payload, metadata); + return new DownloadResult(document, etagValue); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + LogConnectorEvent(LogLevel.Warning, "fetch.document.failure", "Failed to download Ubuntu CSAF document.", new Dictionary + { + ["uri"] = entry.DocumentUri.ToString(), + }, ex); + return null; + } + finally + { + response?.Dispose(); + } + } + + private static string NormalizeDigest(string value) + { + var trimmed = value.Trim(); + if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + trimmed = trimmed[7..]; + } + + return "sha256:" + trimmed.Replace(" ", string.Empty, StringComparison.Ordinal).ToLowerInvariant(); + } + + private static string ComputeSha256Hex(ReadOnlySpan payload) + { + Span buffer = stackalloc byte[32]; + SHA256.HashData(payload, buffer); + return Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private static string? GetString(JsonElement element, string propertyName) + { + if (element.TryGetProperty(propertyName, out var property) && property.ValueKind == JsonValueKind.String) + { + var value = property.GetString(); + return string.IsNullOrWhiteSpace(value) ? null : value; + } + + return null; + } + + private static string? 
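// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the connector diff above): the conditional-GET
// plus checksum pattern that DownloadDocumentAsync implements, reduced to a single
// hypothetical helper. Only BCL APIs are used (HttpClient, SHA256); the name
// FetchIfChangedAsync and the standard System.* usings are assumptions.
// ---------------------------------------------------------------------------
static async Task<byte[]?> FetchIfChangedAsync(
    HttpClient client, Uri uri, string? knownEtag, string? expectedSha256Hex, CancellationToken ct)
{
    using var request = new HttpRequestMessage(HttpMethod.Get, uri);
    if (!string.IsNullOrWhiteSpace(knownEtag))
    {
        // Replay the stored ETag so an unchanged advisory answers 304 with no body.
        request.Headers.IfNoneMatch.Add(
            new System.Net.Http.Headers.EntityTagHeaderValue($"\"{knownEtag.Trim('"')}\""));
    }

    using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, ct);
    if (response.StatusCode == System.Net.HttpStatusCode.NotModified)
    {
        return null; // already ingested; caller keeps its existing state tokens
    }

    response.EnsureSuccessStatusCode();
    var payload = await response.Content.ReadAsByteArrayAsync(ct);

    if (expectedSha256Hex is not null)
    {
        var actual = Convert.ToHexString(
            System.Security.Cryptography.SHA256.HashData(payload)).ToLowerInvariant();
        if (!string.Equals(actual, expectedSha256Hex.ToLowerInvariant(), StringComparison.Ordinal))
        {
            return null; // checksum mismatch: skip instead of persisting a corrupted document
        }
    }

    return payload;
}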
TryGetHash(JsonElement resource) + { + if (resource.TryGetProperty("hashes", out var hashesElement) && hashesElement.ValueKind == JsonValueKind.Object) + { + if (hashesElement.TryGetProperty("sha256", out var hash) && hash.ValueKind == JsonValueKind.String) + { + var value = hash.GetString(); + if (!string.IsNullOrWhiteSpace(value)) + { + return value; + } + } + } + + return GetString(resource, "sha256"); + } + + private static DateTimeOffset? ParseDate(JsonElement element, string propertyName) + { + var text = GetString(element, propertyName); + if (text is null) + { + return null; + } + + return DateTimeOffset.TryParse(text, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal | DateTimeStyles.AssumeUniversal, out var value) + ? value + : (DateTimeOffset?)null; + } + + private static string ExtractAdvisoryId(Uri uri, string? title) + { + if (!string.IsNullOrWhiteSpace(title)) + { + return title!; + } + + var segments = uri.Segments; + if (segments.Length > 0) + { + var candidate = segments[^1].Trim('/'); + if (candidate.EndsWith(".json", StringComparison.OrdinalIgnoreCase)) + { + candidate = candidate[..^5]; + } + + if (!string.IsNullOrWhiteSpace(candidate)) + { + return candidate; + } + } + + return uri.AbsolutePath.Trim('/'); + } + + private static string EnsureQuoted(string value) + { + var trimmed = value.Trim(); + return trimmed.StartsWith('"') ? trimmed : $"\"{trimmed}\""; + } + + private static string Unquote(string value) + => value.Trim().Trim('"'); + + private static string CreateEtagToken(Uri uri, string etag) + => $"{EtagTokenPrefix}{uri}|{etag}"; + + private static bool TryParseEtagToken(string token, out string uri, out string etag) + { + uri = string.Empty; + etag = string.Empty; + if (!token.StartsWith(EtagTokenPrefix, StringComparison.Ordinal)) + { + return false; + } + + var separatorIndex = token.IndexOf('|', EtagTokenPrefix.Length); + if (separatorIndex < 0 || separatorIndex == EtagTokenPrefix.Length) + { + return false; + } + + uri = token[EtagTokenPrefix.Length..separatorIndex]; + etag = token[(separatorIndex + 1)..]; + return !string.IsNullOrWhiteSpace(uri) && !string.IsNullOrWhiteSpace(etag); + } + + private sealed record UbuntuCatalogEntry( + string Channel, + string? AdvisoryId, + Uri DocumentUri, + string? Sha256, + string? ETag, + DateTimeOffset? LastModified, + string? Title, + string? Version); + + private sealed record DownloadResult(VexRawDocument Document, string? 
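// Illustrative round-trip of the "etag:{uri}|{etag}" state token defined above, using the
// same fixture host as the connector tests; the concrete values are examples only.
var token = CreateEtagToken(
    new Uri("https://ubuntu.test/security/csaf/stable/USN-2025-0001.json"), "etag-123");
// token == "etag:https://ubuntu.test/security/csaf/stable/USN-2025-0001.json|etag-123"
if (TryParseEtagToken(token, out var documentUri, out var storedEtag))
{
    // documentUri and storedEtag are recovered intact, so ETag hints survive connector
    // restarts alongside the sha256:* digests kept in VexConnectorState.DocumentDigests.
}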
ETag); } diff --git a/src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs b/src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs index cb98aa66..7fe6aef7 100644 --- a/src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs +++ b/src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs @@ -41,6 +41,10 @@ public sealed class VexCanonicalJsonSerializerTests justification: VexJustification.ComponentNotPresent, detail: "Package not shipped in this channel.", confidence: new VexConfidence("high", 0.95, "policy/default"), + signals: new VexSignalSnapshot( + new VexSeveritySignal("CVSS:3.1", 7.5, label: "high", vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H"), + kev: true, + epss: 0.42), additionalMetadata: ImmutableDictionary.Empty .Add("source", "csaf") .Add("revision", "2024-09-15")); @@ -48,7 +52,38 @@ public sealed class VexCanonicalJsonSerializerTests var json = VexCanonicalJsonSerializer.Serialize(claim); Assert.Equal( - "{\"vulnerabilityId\":\"CVE-2025-12345\",\"providerId\":\"redhat\",\"product\":{\"key\":\"pkg:redhat/demo\",\"name\":\"Demo App\",\"version\":\"1.2.3\",\"purl\":\"pkg:rpm/redhat/demo@1.2.3\",\"cpe\":\"cpe:2.3:a:redhat:demo:1.2.3\",\"componentIdentifiers\":[\"componentA\",\"componentB\"]},\"status\":\"not_affected\",\"justification\":\"component_not_present\",\"detail\":\"Package not shipped in this channel.\",\"document\":{\"format\":\"csaf\",\"digest\":\"sha256:6d5a\",\"sourceUri\":\"https://example.org/vex/csaf.json\",\"revision\":\"2024-09-15\",\"signature\":{\"type\":\"pgp\",\"subject\":\"CN=Red Hat\",\"issuer\":\"CN=Red Hat Root\",\"keyId\":\"0xABCD\",\"verifiedAt\":\"2025-10-14T09:30:00+00:00\",\"transparencyLogReference\":null}},\"firstSeen\":\"2025-10-10T12:00:00+00:00\",\"lastSeen\":\"2025-10-11T12:00:00+00:00\",\"confidence\":{\"level\":\"high\",\"score\":0.95,\"method\":\"policy/default\"},\"additionalMetadata\":{\"revision\":\"2024-09-15\",\"source\":\"csaf\"}}", + "{\"vulnerabilityId\":\"CVE-2025-12345\",\"providerId\":\"redhat\",\"product\":{\"key\":\"pkg:redhat/demo\",\"name\":\"Demo App\",\"version\":\"1.2.3\",\"purl\":\"pkg:rpm/redhat/demo@1.2.3\",\"cpe\":\"cpe:2.3:a:redhat:demo:1.2.3\",\"componentIdentifiers\":[\"componentA\",\"componentB\"]},\"status\":\"not_affected\",\"justification\":\"component_not_present\",\"detail\":\"Package not shipped in this channel.\",\"signals\":{\"severity\":{\"scheme\":\"CVSS:3.1\",\"score\":7.5,\"label\":\"high\",\"vector\":\"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H\"},\"kev\":true,\"epss\":0.42},\"document\":{\"format\":\"csaf\",\"digest\":\"sha256:6d5a\",\"sourceUri\":\"https://example.org/vex/csaf.json\",\"revision\":\"2024-09-15\",\"signature\":{\"type\":\"pgp\",\"subject\":\"CN=Red Hat\",\"issuer\":\"CN=Red Hat Root\",\"keyId\":\"0xABCD\",\"verifiedAt\":\"2025-10-14T09:30:00+00:00\",\"transparencyLogReference\":null}},\"firstSeen\":\"2025-10-10T12:00:00+00:00\",\"lastSeen\":\"2025-10-11T12:00:00+00:00\",\"confidence\":{\"level\":\"high\",\"score\":0.95,\"method\":\"policy/default\"},\"additionalMetadata\":{\"revision\":\"2024-09-15\",\"source\":\"csaf\"}}", + json); + } + + [Fact] + public void SerializeConsensus_IncludesSignalsInOrder() + { + var product = new VexProduct("pkg:demo/app", "Demo App"); + var sources = new[] + { + new VexConsensusSource("redhat", VexClaimStatus.Affected, "sha256:redhat", 1.0), + }; + + var consensus = new VexConsensus( + "CVE-2025-9999", + product, + VexConsensusStatus.Affected, + new DateTimeOffset(2025, 10, 15, 
12, 0, 0, TimeSpan.Zero), + sources, + signals: new VexSignalSnapshot( + new VexSeveritySignal("stellaops:v1", score: 9.1, label: "critical"), + kev: false, + epss: 0.67), + policyVersion: "baseline/v1", + summary: "Affected due to vendor advisory.", + policyRevisionId: "rev-1", + policyDigest: "sha256:abcd"); + + var json = VexCanonicalJsonSerializer.Serialize(consensus); + + Assert.Equal( + "{\"vulnerabilityId\":\"CVE-2025-9999\",\"product\":{\"key\":\"pkg:demo/app\",\"name\":\"Demo App\",\"version\":null,\"purl\":null,\"cpe\":null,\"componentIdentifiers\":[]},\"status\":\"affected\",\"calculatedAt\":\"2025-10-15T12:00:00+00:00\",\"sources\":[{\"providerId\":\"redhat\",\"status\":\"affected\",\"documentDigest\":\"sha256:redhat\",\"weight\":1,\"justification\":null,\"detail\":null,\"confidence\":null}],\"conflicts\":[],\"signals\":{\"severity\":{\"scheme\":\"stellaops:v1\",\"score\":9.1,\"label\":\"critical\",\"vector\":null},\"kev\":false,\"epss\":0.67},\"policyVersion\":\"baseline/v1\",\"summary\":\"Affected due to vendor advisory.\",\"policyDigest\":\"sha256:abcd\",\"policyRevisionId\":\"rev-1\"}", json); } diff --git a/src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs b/src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs index 07e738e3..8b5764e4 100644 --- a/src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs +++ b/src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs @@ -166,6 +166,33 @@ public sealed class VexConsensusResolverTests Assert.Contains("No majority consensus", result.Consensus.Summary!, StringComparison.Ordinal); } + [Fact] + public void Resolve_RespectsRaisedWeightCeiling() + { + var provider = CreateProvider("vendor", VexProviderKind.Vendor); + var claim = CreateClaim( + "CVE-2025-0100", + provider.Id, + VexClaimStatus.Affected, + documentDigest: "sha256:vendor"); + + var policy = new BaselineVexConsensusPolicy(new VexConsensusPolicyOptions( + vendorWeight: 1.4, + weightCeiling: 2.0)); + var resolver = new VexConsensusResolver(policy); + + var result = resolver.Resolve(new VexConsensusRequest( + claim.VulnerabilityId, + DemoProduct, + new[] { claim }, + new Dictionary { [provider.Id] = provider }, + DateTimeOffset.Parse("2025-10-15T12:00:00Z"), + WeightCeiling: 2.0)); + + var source = Assert.Single(result.Consensus.Sources); + Assert.Equal(1.4, source.Weight); + } + private static VexProvider CreateProvider(string id, VexProviderKind kind) => new( id, diff --git a/src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs b/src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs index 8cfd2571..d48fc4df 100644 --- a/src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs +++ b/src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs @@ -11,11 +11,16 @@ public sealed class VexPolicyBinderTests { "version": "custom/v2", "weights": { - "vendor": 0.95, - "distro": 0.85 + "vendor": 1.3, + "distro": 0.85, + "ceiling": 2.0 + }, + "scoring": { + "alpha": 0.35, + "beta": 0.75 }, "providerOverrides": { - "provider.example": 0.5 + "provider.example": 1.8 } } """; @@ -41,6 +46,12 @@ public sealed class VexPolicyBinderTests Assert.NotNull(result.NormalizedOptions); Assert.Equal("custom/v2", result.Options!.Version); Assert.Equal("custom/v2", result.NormalizedOptions!.Version); + Assert.Equal(1.3, result.NormalizedOptions.VendorWeight); + Assert.Equal(0.85, result.NormalizedOptions.DistroWeight); + Assert.Equal(2.0, result.NormalizedOptions.WeightCeiling); + Assert.Equal(0.35, result.NormalizedOptions.Alpha); + 
Assert.Equal(0.75, result.NormalizedOptions.Beta); + Assert.Equal(1.8, result.NormalizedOptions.ProviderOverrides["provider.example"]); Assert.Empty(result.Issues); } @@ -80,4 +91,40 @@ public sealed class VexPolicyBinderTests Assert.True(result.Success); Assert.NotNull(result.Options); } + + [Fact] + public void Bind_InvalidWeightsAndScoring_EmitsWarningsAndClamps() + { + const string policy = """ + { + "weights": { + "vendor": 3.5, + "ceiling": 0.8 + }, + "scoring": { + "alpha": -0.1, + "beta": 10.0 + }, + "providerOverrides": { + "bad": 4.0 + } + } + """; + + var result = VexPolicyBinder.Bind(policy, VexPolicyDocumentFormat.Json); + + Assert.True(result.Success); + Assert.NotNull(result.NormalizedOptions); + var consensus = result.NormalizedOptions!; + Assert.Equal(1.0, consensus.WeightCeiling); + Assert.Equal(1.0, consensus.VendorWeight); + Assert.Equal(1.0, consensus.ProviderOverrides["bad"]); + Assert.Equal(VexConsensusPolicyOptions.DefaultAlpha, consensus.Alpha); + Assert.Equal(VexConsensusPolicyOptions.MaxSupportedCoefficient, consensus.Beta); + Assert.Contains(result.Issues, issue => issue.Code == "weights.ceiling.minimum"); + Assert.Contains(result.Issues, issue => issue.Code == "weights.vendor.range"); + Assert.Contains(result.Issues, issue => issue.Code == "weights.overrides.bad.range"); + Assert.Contains(result.Issues, issue => issue.Code == "scoring.alpha.range"); + Assert.Contains(result.Issues, issue => issue.Code == "scoring.beta.maximum"); + } } diff --git a/src/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs b/src/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs new file mode 100644 index 00000000..9435d1bd --- /dev/null +++ b/src/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs @@ -0,0 +1,35 @@ +using System; +using Xunit; + +namespace StellaOps.Excititor.Core.Tests; + +public sealed class VexSignalSnapshotTests +{ + [Theory] + [InlineData(-0.01)] + [InlineData(1.01)] + [InlineData(double.NaN)] + [InlineData(double.PositiveInfinity)] + public void Constructor_InvalidEpss_Throws(double value) + { + Assert.Throws(() => new VexSignalSnapshot(epss: value)); + } + + [Theory] + [InlineData("")] + [InlineData(" ")] + [InlineData(null)] + public void VexSeveritySignal_InvalidScheme_Throws(string? scheme) + { + Assert.Throws(() => new VexSeveritySignal(scheme!)); + } + + [Theory] + [InlineData(-0.1)] + [InlineData(double.NaN)] + [InlineData(double.NegativeInfinity)] + public void VexSeveritySignal_InvalidScore_Throws(double value) + { + Assert.Throws(() => new VexSeveritySignal("cvss", value)); + } +} diff --git a/src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs b/src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs index 47ba82b7..7fa9f0bf 100644 --- a/src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs +++ b/src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs @@ -11,7 +11,7 @@ public interface IVexConsensusPolicy string Version { get; } /// - /// Returns the effective weight (0-1) to apply for the provided VEX source. + /// Returns the effective weight (bounded by the policy ceiling) to apply for the provided VEX source. 
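// Illustrative only: how the configurable ceiling interacts with provider weights, given
// the VexConsensusPolicyOptions normalization further down in this diff. Values are examples.
var raised = new VexConsensusPolicyOptions(vendorWeight: 1.4, weightCeiling: 2.0);
// raised.WeightCeiling == 2.0 and raised.VendorWeight stays 1.4 (below the ceiling).
var defaulted = new VexConsensusPolicyOptions(vendorWeight: 1.4);
// defaulted.VendorWeight is clamped to 1.0 because the default ceiling is 1.0; ceilings
// below 1 are raised to 1 and anything above MaxSupportedCeiling (5.0) is capped.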
/// double GetProviderWeight(VexProvider provider); diff --git a/src/StellaOps.Excititor.Core/TASKS.md b/src/StellaOps.Excititor.Core/TASKS.md index 54f4688f..456fc524 100644 --- a/src/StellaOps.Excititor.Core/TASKS.md +++ b/src/StellaOps.Excititor.Core/TASKS.md @@ -5,5 +5,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |EXCITITOR-CORE-01-001 – Canonical VEX domain records|Team Excititor Core & Policy|docs/ARCHITECTURE_EXCITITOR.md|DONE (2025-10-15) – Introduced `VexClaim`, `VexConsensus`, provider metadata, export manifest records, and deterministic JSON serialization with tests covering canonical ordering and query signatures.| |EXCITITOR-CORE-01-002 – Trust-weighted consensus resolver|Team Excititor Core & Policy|EXCITITOR-CORE-01-001|DONE (2025-10-15) – Added consensus resolver, baseline policy (tier weights + justification gate), telemetry output, and tests covering acceptance, conflict ties, and determinism.| |EXCITITOR-CORE-01-003 – Shared contracts & query signatures|Team Excititor Core & Policy|EXCITITOR-CORE-01-001|DONE (2025-10-15) – Published connector/normalizer/exporter/attestation abstractions and expanded deterministic `VexQuerySignature`/hash utilities with test coverage.| -|EXCITITOR-CORE-02-001 – Context signal schema prep|Team Excititor Core & Policy|EXCITITOR-POLICY-02-001|TODO – Extend `VexClaim`/`VexConsensus` with optional severity/KEV/EPSS payloads, update canonical serializer/hashes, and coordinate migration notes with Storage.| +|EXCITITOR-CORE-02-001 – Context signal schema prep|Team Excititor Core & Policy|EXCITITOR-POLICY-02-001|DONE (2025-10-19) – Added `VexSignalSnapshot` (severity/KEV/EPSS) to claims/consensus, updated canonical serializer + resolver plumbing, documented storage follow-up, and validated via `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| |EXCITITOR-CORE-02-002 – Deterministic risk scoring engine|Team Excititor Core & Policy|EXCITITOR-CORE-02-001, EXCITITOR-POLICY-02-001|BACKLOG – Introduce the scoring calculator invoked by consensus, persist score envelopes with audit trails, and add regression fixtures covering gate/boost behaviour before enabling exports.| diff --git a/src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs b/src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs index 1fa78c1f..241a4218 100644 --- a/src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs +++ b/src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs @@ -63,6 +63,7 @@ public static class VexCanonicalJsonSerializer "status", "justification", "detail", + "signals", "document", "firstSeen", "lastSeen", @@ -124,6 +125,7 @@ public static class VexCanonicalJsonSerializer "calculatedAt", "sources", "conflicts", + "signals", "policyVersion", "summary", } @@ -195,6 +197,25 @@ public static class VexCanonicalJsonSerializer "diagnostics", } }, + { + typeof(VexSignalSnapshot), + new[] + { + "severity", + "kev", + "epss", + } + }, + { + typeof(VexSeveritySignal), + new[] + { + "scheme", + "score", + "label", + "vector", + } + }, { typeof(VexExportManifest), new[] @@ -208,10 +229,39 @@ public static class VexCanonicalJsonSerializer "fromCache", "sourceProviders", "consensusRevision", + "policyRevisionId", + "policyDigest", + "consensusDigest", + "scoreDigest", "attestation", "sizeBytes", } }, + { + typeof(VexScoreEnvelope), + new[] + { + "generatedAt", + "policyRevisionId", + "policyDigest", + "alpha", + "beta", + "weightCeiling", + "entries", + } + }, + { + typeof(VexScoreEntry), 
+ new[] + { + "vulnerabilityId", + "productKey", + "status", + "calculatedAt", + "signals", + "score", + } + }, { typeof(VexContentAddress), new[] diff --git a/src/StellaOps.Excititor.Core/VexClaim.cs b/src/StellaOps.Excititor.Core/VexClaim.cs index 385c9bf0..ffa49eb1 100644 --- a/src/StellaOps.Excititor.Core/VexClaim.cs +++ b/src/StellaOps.Excititor.Core/VexClaim.cs @@ -16,6 +16,7 @@ public sealed record VexClaim VexJustification? justification = null, string? detail = null, VexConfidence? confidence = null, + VexSignalSnapshot? signals = null, ImmutableDictionary? additionalMetadata = null) { if (string.IsNullOrWhiteSpace(vulnerabilityId)) @@ -43,6 +44,7 @@ public sealed record VexClaim Justification = justification; Detail = string.IsNullOrWhiteSpace(detail) ? null : detail.Trim(); Confidence = confidence; + Signals = signals; AdditionalMetadata = NormalizeMetadata(additionalMetadata); } @@ -66,6 +68,8 @@ public sealed record VexClaim public VexConfidence? Confidence { get; } + public VexSignalSnapshot? Signals { get; } + public ImmutableSortedDictionary AdditionalMetadata { get; } private static ImmutableSortedDictionary NormalizeMetadata( diff --git a/src/StellaOps.Excititor.Core/VexConsensus.cs b/src/StellaOps.Excititor.Core/VexConsensus.cs index ad9e9fd2..8990cd8b 100644 --- a/src/StellaOps.Excititor.Core/VexConsensus.cs +++ b/src/StellaOps.Excititor.Core/VexConsensus.cs @@ -12,6 +12,7 @@ public sealed record VexConsensus DateTimeOffset calculatedAt, IEnumerable sources, IEnumerable? conflicts = null, + VexSignalSnapshot? signals = null, string? policyVersion = null, string? summary = null, string? policyRevisionId = null, @@ -28,6 +29,7 @@ public sealed record VexConsensus CalculatedAt = calculatedAt; Sources = NormalizeSources(sources); Conflicts = NormalizeConflicts(conflicts); + Signals = signals; PolicyVersion = string.IsNullOrWhiteSpace(policyVersion) ? null : policyVersion.Trim(); Summary = string.IsNullOrWhiteSpace(summary) ? null : summary.Trim(); PolicyRevisionId = string.IsNullOrWhiteSpace(policyRevisionId) ? null : policyRevisionId.Trim(); @@ -46,6 +48,8 @@ public sealed record VexConsensus public ImmutableArray Conflicts { get; } + public VexSignalSnapshot? Signals { get; } + public string? PolicyVersion { get; } public string? Summary { get; } diff --git a/src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs b/src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs index c953a763..fe1b99a8 100644 --- a/src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs +++ b/src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs @@ -6,6 +6,12 @@ public sealed record VexConsensusPolicyOptions { public const string BaselineVersion = "baseline/v1"; + public const double DefaultWeightCeiling = 1.0; + public const double DefaultAlpha = 0.25; + public const double DefaultBeta = 0.5; + public const double MaxSupportedCeiling = 5.0; + public const double MaxSupportedCoefficient = 5.0; + public VexConsensusPolicyOptions( string? version = null, double vendorWeight = 1.0, @@ -13,15 +19,21 @@ public sealed record VexConsensusPolicyOptions double platformWeight = 0.7, double hubWeight = 0.5, double attestationWeight = 0.6, - IEnumerable>? providerOverrides = null) + IEnumerable>? providerOverrides = null, + double weightCeiling = DefaultWeightCeiling, + double alpha = DefaultAlpha, + double beta = DefaultBeta) { Version = string.IsNullOrWhiteSpace(version) ? 
BaselineVersion : version.Trim(); - VendorWeight = NormalizeWeight(vendorWeight); - DistroWeight = NormalizeWeight(distroWeight); - PlatformWeight = NormalizeWeight(platformWeight); - HubWeight = NormalizeWeight(hubWeight); - AttestationWeight = NormalizeWeight(attestationWeight); - ProviderOverrides = NormalizeOverrides(providerOverrides); + WeightCeiling = NormalizeWeightCeiling(weightCeiling); + VendorWeight = NormalizeWeight(vendorWeight, WeightCeiling); + DistroWeight = NormalizeWeight(distroWeight, WeightCeiling); + PlatformWeight = NormalizeWeight(platformWeight, WeightCeiling); + HubWeight = NormalizeWeight(hubWeight, WeightCeiling); + AttestationWeight = NormalizeWeight(attestationWeight, WeightCeiling); + ProviderOverrides = NormalizeOverrides(providerOverrides, WeightCeiling); + Alpha = NormalizeCoefficient(alpha, nameof(alpha)); + Beta = NormalizeCoefficient(beta, nameof(beta)); } public string Version { get; } @@ -36,9 +48,15 @@ public sealed record VexConsensusPolicyOptions public double AttestationWeight { get; } + public double WeightCeiling { get; } + + public double Alpha { get; } + + public double Beta { get; } + public ImmutableDictionary ProviderOverrides { get; } - private static double NormalizeWeight(double weight) + private static double NormalizeWeight(double weight, double ceiling) { if (double.IsNaN(weight) || double.IsInfinity(weight)) { @@ -50,16 +68,17 @@ public sealed record VexConsensusPolicyOptions return 0; } - if (weight >= 1) + if (weight >= ceiling) { - return 1; + return ceiling; } return weight; } private static ImmutableDictionary NormalizeOverrides( - IEnumerable>? overrides) + IEnumerable>? overrides, + double ceiling) { if (overrides is null) { @@ -74,9 +93,54 @@ public sealed record VexConsensusPolicyOptions continue; } - builder[key.Trim()] = NormalizeWeight(weight); + builder[key.Trim()] = NormalizeWeight(weight, ceiling); } return builder.ToImmutable(); } + + private static double NormalizeWeightCeiling(double value) + { + if (double.IsNaN(value) || double.IsInfinity(value)) + { + throw new ArgumentOutOfRangeException(nameof(value), "Weight ceiling must be a finite number."); + } + + if (value <= 0) + { + throw new ArgumentOutOfRangeException(nameof(value), "Weight ceiling must be greater than zero."); + } + + if (value < 1) + { + return 1; + } + + if (value > MaxSupportedCeiling) + { + return MaxSupportedCeiling; + } + + return value; + } + + private static double NormalizeCoefficient(double value, string name) + { + if (double.IsNaN(value) || double.IsInfinity(value)) + { + throw new ArgumentOutOfRangeException(name, "Coefficient must be a finite number."); + } + + if (value < 0) + { + throw new ArgumentOutOfRangeException(name, "Coefficient must be non-negative."); + } + + if (value > MaxSupportedCoefficient) + { + return MaxSupportedCoefficient; + } + + return value; + } } diff --git a/src/StellaOps.Excititor.Core/VexConsensusResolver.cs b/src/StellaOps.Excititor.Core/VexConsensusResolver.cs index 5ec9268a..742cc45e 100644 --- a/src/StellaOps.Excititor.Core/VexConsensusResolver.cs +++ b/src/StellaOps.Excititor.Core/VexConsensusResolver.cs @@ -39,18 +39,21 @@ public sealed class VexConsensusResolver double weight = 0; var included = false; - if (provider is null) - { - rejectionReason = "provider_not_registered"; - } - else - { - weight = NormalizeWeight(_policy.GetProviderWeight(provider)); - if (weight <= 0) + if (provider is null) { - rejectionReason = "weight_not_positive"; + rejectionReason = "provider_not_registered"; } - else if 
(!_policy.IsClaimEligible(claim, provider, out rejectionReason)) + else + { + var ceiling = request.WeightCeiling <= 0 || double.IsNaN(request.WeightCeiling) || double.IsInfinity(request.WeightCeiling) + ? VexConsensusPolicyOptions.DefaultWeightCeiling + : Math.Clamp(request.WeightCeiling, 0.1, VexConsensusPolicyOptions.MaxSupportedCeiling); + weight = NormalizeWeight(_policy.GetProviderWeight(provider), ceiling); + if (weight <= 0) + { + rejectionReason = "weight_not_positive"; + } + else if (!_policy.IsClaimEligible(claim, provider, out rejectionReason)) { rejectionReason ??= "rejected_by_policy"; } @@ -105,6 +108,7 @@ public sealed class VexConsensusResolver request.CalculatedAt, acceptedSources, AttachConflictDetails(conflicts, acceptedSources, consensusStatus, conflictKeys), + request.Signals, _policy.Version, summary, request.PolicyRevisionId, @@ -130,16 +134,16 @@ public sealed class VexConsensusResolver return accumulator; } - private static double NormalizeWeight(double weight) + private static double NormalizeWeight(double weight, double ceiling) { if (double.IsNaN(weight) || double.IsInfinity(weight) || weight <= 0) { return 0; } - if (weight >= 1) + if (weight >= ceiling) { - return 1; + return ceiling; } return weight; @@ -275,6 +279,8 @@ public sealed record VexConsensusRequest( IReadOnlyList Claims, IReadOnlyDictionary Providers, DateTimeOffset CalculatedAt, + double WeightCeiling = VexConsensusPolicyOptions.DefaultWeightCeiling, + VexSignalSnapshot? Signals = null, string? PolicyRevisionId = null, string? PolicyDigest = null); diff --git a/src/StellaOps.Excititor.Core/VexExportManifest.cs b/src/StellaOps.Excititor.Core/VexExportManifest.cs index b0da8fe8..dcfc00fc 100644 --- a/src/StellaOps.Excititor.Core/VexExportManifest.cs +++ b/src/StellaOps.Excititor.Core/VexExportManifest.cs @@ -16,6 +16,10 @@ public sealed record VexExportManifest IEnumerable sourceProviders, bool fromCache = false, string? consensusRevision = null, + string? policyRevisionId = null, + string? policyDigest = null, + VexContentAddress? consensusDigest = null, + VexContentAddress? scoreDigest = null, VexAttestationMetadata? attestation = null, long sizeBytes = 0) { @@ -43,6 +47,10 @@ public sealed record VexExportManifest FromCache = fromCache; SourceProviders = NormalizeProviders(sourceProviders); ConsensusRevision = string.IsNullOrWhiteSpace(consensusRevision) ? null : consensusRevision.Trim(); + PolicyRevisionId = string.IsNullOrWhiteSpace(policyRevisionId) ? null : policyRevisionId.Trim(); + PolicyDigest = string.IsNullOrWhiteSpace(policyDigest) ? null : policyDigest.Trim(); + ConsensusDigest = consensusDigest; + ScoreDigest = scoreDigest; Attestation = attestation; SizeBytes = sizeBytes; } @@ -65,6 +73,14 @@ public sealed record VexExportManifest public string? ConsensusRevision { get; } + public string? PolicyRevisionId { get; } + + public string? PolicyDigest { get; } + + public VexContentAddress? ConsensusDigest { get; } + + public VexContentAddress? ScoreDigest { get; } + public VexAttestationMetadata? 
Attestation { get; } public long SizeBytes { get; } diff --git a/src/StellaOps.Excititor.Core/VexScoreEnvelope.cs b/src/StellaOps.Excititor.Core/VexScoreEnvelope.cs new file mode 100644 index 00000000..98e19253 --- /dev/null +++ b/src/StellaOps.Excititor.Core/VexScoreEnvelope.cs @@ -0,0 +1,187 @@ +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Excititor.Core; + +public sealed record VexScoreEnvelope( + DateTimeOffset GeneratedAt, + string PolicyRevisionId, + string? PolicyDigest, + double Alpha, + double Beta, + double WeightCeiling, + ImmutableArray Entries) +{ + public VexScoreEnvelope( + DateTimeOffset GeneratedAt, + string PolicyRevisionId, + string? PolicyDigest, + double Alpha, + double Beta, + double WeightCeiling, + IEnumerable Entries) + : this( + GeneratedAt, + PolicyRevisionId, + PolicyDigest, + Alpha, + Beta, + WeightCeiling, + NormalizeEntries(Entries)) + { + } + + private VexScoreEnvelope( + DateTimeOffset generatedAt, + string policyRevisionId, + string? policyDigest, + double alpha, + double beta, + double weightCeiling, + ImmutableArray entries) + { + if (string.IsNullOrWhiteSpace(policyRevisionId)) + { + throw new ArgumentException("Policy revision id must be provided.", nameof(policyRevisionId)); + } + + if (double.IsNaN(alpha) || double.IsInfinity(alpha) || alpha < 0) + { + throw new ArgumentOutOfRangeException(nameof(alpha), "Alpha must be a finite, non-negative number."); + } + + if (double.IsNaN(beta) || double.IsInfinity(beta) || beta < 0) + { + throw new ArgumentOutOfRangeException(nameof(beta), "Beta must be a finite, non-negative number."); + } + + if (double.IsNaN(weightCeiling) || double.IsInfinity(weightCeiling) || weightCeiling <= 0) + { + throw new ArgumentOutOfRangeException(nameof(weightCeiling), "Weight ceiling must be a finite number greater than zero."); + } + + this.GeneratedAt = generatedAt; + this.PolicyRevisionId = policyRevisionId.Trim(); + this.PolicyDigest = string.IsNullOrWhiteSpace(policyDigest) ? null : policyDigest.Trim(); + this.Alpha = alpha; + this.Beta = beta; + this.WeightCeiling = weightCeiling; + this.Entries = entries; + } + + public DateTimeOffset GeneratedAt { get; } + + public string PolicyRevisionId { get; } + + public string? PolicyDigest { get; } + + public double Alpha { get; } + + public double Beta { get; } + + public double WeightCeiling { get; } + + public ImmutableArray Entries { get; } + + private static ImmutableArray NormalizeEntries(IEnumerable entries) + { + if (entries is null) + { + throw new ArgumentNullException(nameof(entries)); + } + + return entries + .OrderBy(static entry => entry.VulnerabilityId, StringComparer.Ordinal) + .ThenBy(static entry => entry.ProductKey, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +public sealed record VexScoreEntry( + string VulnerabilityId, + string ProductKey, + VexConsensusStatus Status, + DateTimeOffset CalculatedAt, + VexSignalSnapshot? Signals, + double? Score) +{ + public VexScoreEntry( + string VulnerabilityId, + string ProductKey, + VexConsensusStatus Status, + DateTimeOffset CalculatedAt, + VexSignalSnapshot? Signals, + double? Score) + : this( + ValidateVulnerability(VulnerabilityId), + ValidateProduct(ProductKey), + Status, + CalculatedAt, + Signals, + ValidateScore(Score)) + { + } + + private VexScoreEntry( + string vulnerabilityId, + string productKey, + VexConsensusStatus status, + DateTimeOffset calculatedAt, + VexSignalSnapshot? signals, + double? 
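// Illustrative sketch of the canonical entry ordering performed by NormalizeEntries above,
// assuming the IEnumerable-accepting constructor shown in this diff; identifiers and scores
// are example values, and alpha/beta/ceiling use the policy defaults.
var calculatedAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero);
var envelope = new VexScoreEnvelope(
    calculatedAt,
    "rev-1",
    null,
    0.25,   // alpha (DefaultAlpha)
    0.5,    // beta (DefaultBeta)
    1.0,    // weight ceiling
    new[]
    {
        new VexScoreEntry("CVE-2025-0002", "pkg:demo/b", VexConsensusStatus.Affected, calculatedAt, null, 4.2),
        new VexScoreEntry("CVE-2025-0001", "pkg:demo/a", VexConsensusStatus.Affected, calculatedAt, null, null),
    });
// envelope.Entries[0].VulnerabilityId == "CVE-2025-0001": ordering is by vulnerability id,
// then product key, so equal input sets always serialize to the same canonical JSON.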
score) + { + VulnerabilityId = vulnerabilityId; + ProductKey = productKey; + Status = status; + CalculatedAt = calculatedAt; + Signals = signals; + Score = score; + } + + public string VulnerabilityId { get; } + + public string ProductKey { get; } + + public VexConsensusStatus Status { get; } + + public DateTimeOffset CalculatedAt { get; } + + public VexSignalSnapshot? Signals { get; } + + public double? Score { get; } + + private static string ValidateVulnerability(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Vulnerability id must be provided.", nameof(value)); + } + + return value.Trim(); + } + + private static string ValidateProduct(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Product key must be provided.", nameof(value)); + } + + return value.Trim(); + } + + private static double? ValidateScore(double? score) + { + if (score is null) + { + return null; + } + + if (double.IsNaN(score.Value) || double.IsInfinity(score.Value) || score.Value < 0) + { + throw new ArgumentOutOfRangeException(nameof(score), "Score must be a finite, non-negative number."); + } + + return score; + } +} diff --git a/src/StellaOps.Excititor.Core/VexSignals.cs b/src/StellaOps.Excititor.Core/VexSignals.cs new file mode 100644 index 00000000..dc6b3061 --- /dev/null +++ b/src/StellaOps.Excititor.Core/VexSignals.cs @@ -0,0 +1,64 @@ +namespace StellaOps.Excititor.Core; + +public sealed record VexSignalSnapshot +{ + public VexSignalSnapshot( + VexSeveritySignal? severity = null, + bool? kev = null, + double? epss = null) + { + if (epss is { } epssValue) + { + if (double.IsNaN(epssValue) || double.IsInfinity(epssValue) || epssValue < 0 || epssValue > 1) + { + throw new ArgumentOutOfRangeException(nameof(epss), "EPSS probability must be between 0 and 1."); + } + } + + Severity = severity; + Kev = kev; + Epss = epss; + } + + public VexSeveritySignal? Severity { get; } + + public bool? Kev { get; } + + public double? Epss { get; } +} + +public sealed record VexSeveritySignal +{ + public VexSeveritySignal( + string scheme, + double? score = null, + string? label = null, + string? vector = null) + { + if (string.IsNullOrWhiteSpace(scheme)) + { + throw new ArgumentException("Severity scheme must be provided.", nameof(scheme)); + } + + if (score is { } scoreValue) + { + if (double.IsNaN(scoreValue) || double.IsInfinity(scoreValue) || scoreValue < 0) + { + throw new ArgumentOutOfRangeException(nameof(score), "Severity score must be a finite, non-negative number."); + } + } + + Scheme = scheme.Trim(); + Score = score; + Label = string.IsNullOrWhiteSpace(label) ? null : label.Trim(); + Vector = string.IsNullOrWhiteSpace(vector) ? null : vector.Trim(); + } + + public string Scheme { get; } + + public double? Score { get; } + + public string? Label { get; } + + public string? Vector { get; } +} diff --git a/src/StellaOps.Excititor.Core/VexSignatureVerifiers.cs b/src/StellaOps.Excititor.Core/VexSignatureVerifiers.cs new file mode 100644 index 00000000..6a64377d --- /dev/null +++ b/src/StellaOps.Excititor.Core/VexSignatureVerifiers.cs @@ -0,0 +1,17 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Excititor.Core; + +/// +/// Signature verifier implementation that trusts ingress sources without performing verification. +/// Useful for offline development flows and ingestion pipelines that perform verification upstream. 
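// Illustrative usage of the context-signal records defined above; the values mirror the
// serializer test fixtures. Construction validates ranges up front, so a bad EPSS value
// or an empty severity scheme fails fast instead of flowing into consensus/export payloads.
var signals = new VexSignalSnapshot(
    severity: new VexSeveritySignal("CVSS:3.1", score: 7.5, label: "high"),
    kev: true,
    epss: 0.42);
// new VexSignalSnapshot(epss: 1.2)  -> ArgumentOutOfRangeException (EPSS must be within [0, 1])
// new VexSeveritySignal("  ")       -> ArgumentException (severity scheme must be provided)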
+/// +public sealed class NoopVexSignatureVerifier : IVexSignatureVerifier +{ + public ValueTask VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + return ValueTask.FromResult(null); + } +} diff --git a/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj b/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj index 8c99255e..6e7c6a60 100644 --- a/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj +++ b/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj @@ -7,8 +7,8 @@ true - - + + diff --git a/src/StellaOps.Excititor.Export/TASKS.md b/src/StellaOps.Excititor.Export/TASKS.md index acb2dc03..d4daf795 100644 --- a/src/StellaOps.Excititor.Export/TASKS.md +++ b/src/StellaOps.Excititor.Export/TASKS.md @@ -6,6 +6,6 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |EXCITITOR-EXPORT-01-002 – Cache index & eviction hooks|Team Excititor Export|EXCITITOR-EXPORT-01-001, EXCITITOR-STORAGE-01-003|**DONE (2025-10-16)** – Export engine now invalidates cache entries on force refresh, cache services expose prune/invalidate APIs, and storage maintenance trims expired/dangling records with Mongo2Go coverage.| |EXCITITOR-EXPORT-01-003 – Artifact store adapters|Team Excititor Export|EXCITITOR-EXPORT-01-001|**DONE (2025-10-16)** – Implemented multi-store pipeline with filesystem, S3-compatible, and offline bundle adapters (hash verification + manifest/zip output) plus unit coverage and DI hooks.| |EXCITITOR-EXPORT-01-004 – Attestation handoff integration|Team Excititor Export|EXCITITOR-EXPORT-01-001, EXCITITOR-ATTEST-01-001|**DONE (2025-10-17)** – Export engine now invokes attestation client, logs diagnostics, and persists Rekor/envelope metadata on manifests; regression coverage added in `ExportEngineTests.ExportAsync_AttachesAttestationMetadata`.| -|EXCITITOR-EXPORT-01-005 – Score & resolve envelope surfaces|Team Excititor Export|EXCITITOR-EXPORT-01-004, EXCITITOR-CORE-02-001|TODO – Emit consensus+score envelopes in export manifests, include policy/scoring digests, and update offline bundle/ORAS layouts to carry signed VEX responses.| +|EXCITITOR-EXPORT-01-005 – Score & resolve envelope surfaces|Team Excititor Export|EXCITITOR-EXPORT-01-004, EXCITITOR-CORE-02-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-EXPORT-01-004 and EXCITITOR-CORE-02-001 confirmed DONE; planning export updates to emit consensus+score envelopes, include policy/scoring digests, and extend offline bundle/ORAS layouts for signed VEX responses.| |EXCITITOR-EXPORT-01-006 – Quiet provenance packaging|Team Excititor Export|EXCITITOR-EXPORT-01-005, POLICY-CORE-09-005|TODO – Attach `quietedBy` statement IDs, signers, and justification codes to exports/offline bundles, mirror metadata into attested manifest, and add regression fixtures.| |EXCITITOR-EXPORT-01-007 – Mirror bundle + domain manifest|Team Excititor Export|EXCITITOR-EXPORT-01-006|TODO – Create per-domain mirror bundles with consensus/score artifacts, publish signed index for downstream Excititor sync, and ensure deterministic digests + fixtures.| diff --git a/src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs b/src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs index 29f9d0a0..1b66cf6a 100644 --- a/src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs +++ b/src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs @@ -20,6 +20,47 @@ public sealed class CsafNormalizer : IVexNormalizer [VexClaimStatus.Fixed] = 3, 
}.ToImmutableDictionary(); + private static readonly ImmutableDictionary StatusMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["known_affected"] = VexClaimStatus.Affected, + ["first_affected"] = VexClaimStatus.Affected, + ["last_affected"] = VexClaimStatus.Affected, + ["affected"] = VexClaimStatus.Affected, + ["fixed_after_release"] = VexClaimStatus.Fixed, + ["fixed"] = VexClaimStatus.Fixed, + ["first_fixed"] = VexClaimStatus.Fixed, + ["last_fixed"] = VexClaimStatus.Fixed, + ["recommended"] = VexClaimStatus.Fixed, + ["known_not_affected"] = VexClaimStatus.NotAffected, + ["first_not_affected"] = VexClaimStatus.NotAffected, + ["last_not_affected"] = VexClaimStatus.NotAffected, + ["not_affected"] = VexClaimStatus.NotAffected, + ["under_investigation"] = VexClaimStatus.UnderInvestigation, + ["investigating"] = VexClaimStatus.UnderInvestigation, + ["in_investigation"] = VexClaimStatus.UnderInvestigation, + ["in_triage"] = VexClaimStatus.UnderInvestigation, + ["unknown"] = VexClaimStatus.UnderInvestigation, + }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase); + + private static readonly ImmutableDictionary JustificationMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["component_not_present"] = VexJustification.ComponentNotPresent, + ["component_not_configured"] = VexJustification.ComponentNotConfigured, + ["vulnerable_code_not_present"] = VexJustification.VulnerableCodeNotPresent, + ["vulnerable_code_not_in_execute_path"] = VexJustification.VulnerableCodeNotInExecutePath, + ["vulnerable_code_cannot_be_controlled_by_adversary"] = VexJustification.VulnerableCodeCannotBeControlledByAdversary, + ["inline_mitigations_already_exist"] = VexJustification.InlineMitigationsAlreadyExist, + ["protected_by_mitigating_control"] = VexJustification.ProtectedByMitigatingControl, + ["protected_by_compensating_control"] = VexJustification.ProtectedByCompensatingControl, + ["protected_at_runtime"] = VexJustification.ProtectedAtRuntime, + ["protected_at_perimeter"] = VexJustification.ProtectedAtPerimeter, + ["code_not_present"] = VexJustification.CodeNotPresent, + ["code_not_reachable"] = VexJustification.CodeNotReachable, + ["requires_configuration"] = VexJustification.RequiresConfiguration, + ["requires_dependency"] = VexJustification.RequiresDependency, + ["requires_environment"] = VexJustification.RequiresEnvironment, + }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase); + private readonly ILogger _logger; public CsafNormalizer(ILogger logger) @@ -59,6 +100,17 @@ public sealed class CsafNormalizer : IVexNormalizer result.Revision, signature: null); + var metadata = result.Metadata; + if (!string.IsNullOrWhiteSpace(entry.RawStatus)) + { + metadata = metadata.SetItem("csaf.product_status.raw", entry.RawStatus); + } + + if (!string.IsNullOrWhiteSpace(entry.RawJustification)) + { + metadata = metadata.SetItem("csaf.justification.label", entry.RawJustification); + } + var claim = new VexClaim( entry.VulnerabilityId, provider.Id, @@ -67,10 +119,10 @@ public sealed class CsafNormalizer : IVexNormalizer claimDocument, result.FirstRelease, result.LastRelease, - justification: null, + entry.Justification, detail: entry.Detail, confidence: null, - additionalMetadata: result.Metadata); + additionalMetadata: metadata); claims.Add(claim); } @@ -86,10 +138,27 @@ public sealed class CsafNormalizer : IVexNormalizer document.SourceUri, orderedClaims.Length); - return ValueTask.FromResult(new VexClaimBatch( - document, - orderedClaims, - ImmutableDictionary.Empty)); + var 
diagnosticsBuilder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + if (!result.UnsupportedStatuses.IsDefaultOrEmpty && result.UnsupportedStatuses.Length > 0) + { + diagnosticsBuilder["csaf.unsupported_statuses"] = string.Join(",", result.UnsupportedStatuses); + } + + if (!result.UnsupportedJustifications.IsDefaultOrEmpty && result.UnsupportedJustifications.Length > 0) + { + diagnosticsBuilder["csaf.unsupported_justifications"] = string.Join(",", result.UnsupportedJustifications); + } + + if (!result.ConflictingJustifications.IsDefaultOrEmpty && result.ConflictingJustifications.Length > 0) + { + diagnosticsBuilder["csaf.justification_conflicts"] = string.Join(",", result.ConflictingJustifications); + } + + var diagnostics = diagnosticsBuilder.Count == 0 + ? ImmutableDictionary.Empty + : diagnosticsBuilder.ToImmutable(); + + return ValueTask.FromResult(new VexClaimBatch(document, orderedClaims, diagnostics)); } catch (JsonException ex) { @@ -128,6 +197,12 @@ public sealed class CsafNormalizer : IVexNormalizer var revision = TryGetString(tracking, "revision"); var productCatalog = CollectProducts(root); + var productGroups = CollectProductGroups(root); + + var unsupportedStatuses = new HashSet(StringComparer.OrdinalIgnoreCase); + var unsupportedJustifications = new HashSet(StringComparer.OrdinalIgnoreCase); + var conflictingJustifications = new HashSet(StringComparer.OrdinalIgnoreCase); + var claimsBuilder = ImmutableArray.CreateBuilder(); if (root.TryGetProperty("vulnerabilities", out var vulnerabilitiesElement) && @@ -142,11 +217,20 @@ public sealed class CsafNormalizer : IVexNormalizer } var detail = ResolveDetail(vulnerability); + var justifications = CollectJustifications( + vulnerability, + productCatalog, + productGroups, + unsupportedJustifications, + conflictingJustifications); + var productClaims = BuildClaimsForVulnerability( vulnerabilityId, vulnerability, productCatalog, - detail); + justifications, + detail, + unsupportedStatuses); claimsBuilder.AddRange(productClaims); } @@ -157,14 +241,19 @@ public sealed class CsafNormalizer : IVexNormalizer lastRelease, revision, metadataBuilder.ToImmutable(), - claimsBuilder.ToImmutable()); + claimsBuilder.ToImmutable(), + unsupportedStatuses.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase).ToImmutableArray(), + unsupportedJustifications.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase).ToImmutableArray(), + conflictingJustifications.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase).ToImmutableArray()); } private static IReadOnlyList BuildClaimsForVulnerability( string vulnerabilityId, JsonElement vulnerability, IReadOnlyDictionary productCatalog, - string? detail) + ImmutableDictionary justifications, + string? 
detail, + ISet unsupportedStatuses) { if (!vulnerability.TryGetProperty("product_status", out var statusElement) || statusElement.ValueKind != JsonValueKind.Object) @@ -176,7 +265,7 @@ public sealed class CsafNormalizer : IVexNormalizer foreach (var statusProperty in statusElement.EnumerateObject()) { - var status = MapStatus(statusProperty.Name); + var status = MapStatus(statusProperty.Name, unsupportedStatuses); if (status is null) { continue; @@ -195,8 +284,11 @@ public sealed class CsafNormalizer : IVexNormalizer continue; } - var product = ResolveProduct(productCatalog, productId); - UpdateClaim(claims, product, status.Value, detail); + var trimmedProductId = productId.Trim(); + var product = ResolveProduct(productCatalog, trimmedProductId); + justifications.TryGetValue(trimmedProductId, out var justificationInfo); + + UpdateClaim(claims, product, status.Value, statusProperty.Name, detail, justificationInfo); } } @@ -210,7 +302,10 @@ public sealed class CsafNormalizer : IVexNormalizer vulnerabilityId, builder.Product, builder.Status, - builder.Detail)) + builder.RawStatus, + builder.Detail, + builder.Justification, + builder.RawJustification)) .ToArray(); } @@ -218,13 +313,60 @@ public sealed class CsafNormalizer : IVexNormalizer IDictionary claims, CsafProductInfo product, VexClaimStatus status, - string? detail) + string rawStatus, + string? detail, + CsafJustificationInfo? justification) { if (!claims.TryGetValue(product.ProductId, out var existing) || StatusPrecedence[status] > StatusPrecedence[existing.Status]) { - claims[product.ProductId] = new CsafClaimEntryBuilder(product, status, detail); + claims[product.ProductId] = new CsafClaimEntryBuilder( + product, + status, + NormalizeRaw(rawStatus), + detail, + justification?.Normalized, + justification?.RawValue); + return; } + + if (StatusPrecedence[status] < StatusPrecedence[existing.Status]) + { + return; + } + + var updated = existing; + + if (string.IsNullOrWhiteSpace(existing.RawStatus)) + { + updated = updated with { RawStatus = NormalizeRaw(rawStatus) }; + } + + if (existing.Detail is null && detail is not null) + { + updated = updated with { Detail = detail }; + } + + if (justification is not null) + { + if (existing.Justification is null && justification.Normalized is not null) + { + updated = updated with + { + Justification = justification.Normalized, + RawJustification = justification.RawValue + }; + } + else if (existing.Justification is null && + justification.Normalized is null && + string.IsNullOrWhiteSpace(existing.RawJustification) && + !string.IsNullOrWhiteSpace(justification.RawValue)) + { + updated = updated with { RawJustification = justification.RawValue }; + } + } + + claims[product.ProductId] = updated; } private static CsafProductInfo ResolveProduct( @@ -375,6 +517,198 @@ public sealed class CsafNormalizer : IVexNormalizer } } + private static ImmutableDictionary> CollectProductGroups(JsonElement root) + { + if (!root.TryGetProperty("product_tree", out var productTree) || + productTree.ValueKind != JsonValueKind.Object || + !productTree.TryGetProperty("product_groups", out var groupsElement) || + groupsElement.ValueKind != JsonValueKind.Array) + { + return ImmutableDictionary>.Empty; + } + + var groups = new Dictionary>(StringComparer.OrdinalIgnoreCase); + + foreach (var group in groupsElement.EnumerateArray()) + { + if (group.ValueKind != JsonValueKind.Object) + { + continue; + } + + var groupId = TryGetString(group, "group_id"); + if (string.IsNullOrWhiteSpace(groupId)) + { + continue; + } + + if 
(!group.TryGetProperty("product_ids", out var productIdsElement) || + productIdsElement.ValueKind != JsonValueKind.Array) + { + continue; + } + + var members = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var productIdElement in productIdsElement.EnumerateArray()) + { + var productId = productIdElement.GetString(); + if (string.IsNullOrWhiteSpace(productId)) + { + continue; + } + + members.Add(productId.Trim()); + } + + if (members.Count == 0) + { + continue; + } + + groups[groupId.Trim()] = members + .OrderBy(static id => id, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + } + + return groups.Count == 0 + ? ImmutableDictionary>.Empty + : groups.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase); + } + + private static ImmutableDictionary CollectJustifications( + JsonElement vulnerability, + IReadOnlyDictionary productCatalog, + ImmutableDictionary> productGroups, + ISet unsupportedJustifications, + ISet conflictingJustifications) + { + if (!vulnerability.TryGetProperty("flags", out var flagsElement) || + flagsElement.ValueKind != JsonValueKind.Array) + { + return ImmutableDictionary.Empty; + } + + var map = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var flag in flagsElement.EnumerateArray()) + { + if (flag.ValueKind != JsonValueKind.Object) + { + continue; + } + + var label = TryGetString(flag, "label"); + if (string.IsNullOrWhiteSpace(label)) + { + continue; + } + + var rawLabel = NormalizeRaw(label); + var normalized = MapJustification(rawLabel, unsupportedJustifications); + + var targetIds = ExpandFlagProducts(flag, productGroups); + foreach (var productId in targetIds) + { + if (!productCatalog.ContainsKey(productId)) + { + continue; + } + + var info = new CsafJustificationInfo(rawLabel, normalized); + if (map.TryGetValue(productId, out var existing)) + { + if (existing.Normalized is null && normalized is not null) + { + map[productId] = info; + } + else if (existing.Normalized is not null && normalized is not null && existing.Normalized != normalized) + { + conflictingJustifications.Add(productId); + } + else if (existing.Normalized is null && + normalized is null && + string.IsNullOrWhiteSpace(existing.RawValue) && + !string.IsNullOrWhiteSpace(rawLabel)) + { + map[productId] = info; + } + } + else + { + map[productId] = info; + } + } + } + + return map.Count == 0 + ? ImmutableDictionary.Empty + : map.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase); + } + + private static IEnumerable ExpandFlagProducts( + JsonElement flag, + ImmutableDictionary> productGroups) + { + var productIds = new HashSet(StringComparer.OrdinalIgnoreCase); + + if (flag.TryGetProperty("product_ids", out var productIdsElement) && + productIdsElement.ValueKind == JsonValueKind.Array) + { + foreach (var idElement in productIdsElement.EnumerateArray()) + { + var id = idElement.GetString(); + if (string.IsNullOrWhiteSpace(id)) + { + continue; + } + + productIds.Add(id.Trim()); + } + } + + if (flag.TryGetProperty("group_ids", out var groupIdsElement) && + groupIdsElement.ValueKind == JsonValueKind.Array) + { + foreach (var groupIdElement in groupIdsElement.EnumerateArray()) + { + var groupId = groupIdElement.GetString(); + if (string.IsNullOrWhiteSpace(groupId)) + { + continue; + } + + if (productGroups.TryGetValue(groupId.Trim(), out var members)) + { + foreach (var member in members) + { + productIds.Add(member); + } + } + } + } + + return productIds; + } + + private static VexJustification? 
MapJustification(string justification, ISet unsupportedJustifications) + { + if (string.IsNullOrWhiteSpace(justification)) + { + return null; + } + + if (JustificationMap.TryGetValue(justification, out var mapped)) + { + return mapped; + } + + unsupportedJustifications.Add(justification); + return null; + } + + private static string NormalizeRaw(string value) + => string.IsNullOrWhiteSpace(value) ? string.Empty : value.Trim(); + private static CsafProductInfo? ParseProduct(JsonElement element, string? parentBranchName) { if (element.ValueKind != JsonValueKind.Object) @@ -418,21 +752,21 @@ public sealed class CsafNormalizer : IVexNormalizer return new CsafProductInfo(productId.Trim(), name.Trim(), version?.Trim(), purl?.Trim(), cpe?.Trim()); } - private static VexClaimStatus? MapStatus(string statusName) + private static VexClaimStatus? MapStatus(string statusName, ISet unsupportedStatuses) { if (string.IsNullOrWhiteSpace(statusName)) { return null; } - return statusName switch + var normalized = statusName.Trim(); + if (StatusMap.TryGetValue(normalized, out var mapped)) { - "known_affected" or "fixed_after_release" or "first_affected" or "last_affected" => VexClaimStatus.Affected, - "known_not_affected" or "last_not_affected" or "first_not_affected" => VexClaimStatus.NotAffected, - "fixed" or "first_fixed" or "last_fixed" => VexClaimStatus.Fixed, - "under_investigation" or "investigating" => VexClaimStatus.UnderInvestigation, - _ => null, - }; + return mapped; + } + + unsupportedStatuses.Add(normalized); + return null; } private static DateTimeOffset? ParseDate(JsonElement element, string propertyName) @@ -507,7 +841,10 @@ public sealed class CsafNormalizer : IVexNormalizer private readonly record struct CsafClaimEntryBuilder( CsafProductInfo Product, VexClaimStatus Status, - string? Detail); + string RawStatus, + string? Detail, + VexJustification? Justification, + string? RawJustification); } private sealed record CsafParseResult( @@ -515,13 +852,19 @@ public sealed class CsafNormalizer : IVexNormalizer DateTimeOffset LastRelease, string? Revision, ImmutableDictionary Metadata, - ImmutableArray Claims); + ImmutableArray Claims, + ImmutableArray UnsupportedStatuses, + ImmutableArray UnsupportedJustifications, + ImmutableArray ConflictingJustifications); private sealed record CsafClaimEntry( string VulnerabilityId, CsafProductInfo Product, VexClaimStatus Status, - string? Detail); + string RawStatus, + string? Detail, + VexJustification? Justification, + string? RawJustification); private sealed record CsafProductInfo( string ProductId, diff --git a/src/StellaOps.Excititor.Formats.CSAF/TASKS.md b/src/StellaOps.Excititor.Formats.CSAF/TASKS.md index cf869d50..018a6c73 100644 --- a/src/StellaOps.Excititor.Formats.CSAF/TASKS.md +++ b/src/StellaOps.Excititor.Formats.CSAF/TASKS.md @@ -3,5 +3,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-FMT-CSAF-01-001 – CSAF normalizer foundation|Team Excititor Formats|EXCITITOR-CORE-01-001|**DONE (2025-10-17)** – Implemented CSAF normalizer + DI hook, parsing tracking metadata, product tree branches/full names, and mapping product statuses into canonical `VexClaim`s with baseline precedence. 
Regression added in `CsafNormalizerTests`.| -|EXCITITOR-FMT-CSAF-01-002 – Status/justification mapping|Team Excititor Formats|EXCITITOR-FMT-CSAF-01-001, EXCITITOR-POLICY-01-001|TODO – Normalize CSAF `product_status` + `justification` values into policy-aware enums with audit diagnostics for unsupported codes.| -|EXCITITOR-FMT-CSAF-01-003 – CSAF export adapter|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CSAF-01-001|TODO – Provide CSAF export writer producing deterministic documents (per vuln/product) and manifest metadata for attestation.| +|EXCITITOR-FMT-CSAF-01-002 – Status/justification mapping|Team Excititor Formats|EXCITITOR-FMT-CSAF-01-001, EXCITITOR-POLICY-01-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-FMT-CSAF-01-001 & EXCITITOR-POLICY-01-001 verified DONE; starting normalization of `product_status`/`justification` values with policy-aligned diagnostics.| +|EXCITITOR-FMT-CSAF-01-003 – CSAF export adapter|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CSAF-01-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-EXPORT-01-001 & EXCITITOR-FMT-CSAF-01-001 confirmed DONE; drafting deterministic CSAF exporter and manifest metadata flow.| diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md b/src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md index 0df3ecac..7895c98d 100644 --- a/src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md +++ b/src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md @@ -3,5 +3,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-FMT-CYCLONE-01-001 – CycloneDX VEX normalizer|Team Excititor Formats|EXCITITOR-CORE-01-001|**DONE (2025-10-17)** – CycloneDX normalizer parses `analysis` data, resolves component references, and emits canonical `VexClaim`s; regression lives in `CycloneDxNormalizerTests`.| -|EXCITITOR-FMT-CYCLONE-01-002 – Component reference reconciliation|Team Excititor Formats|EXCITITOR-FMT-CYCLONE-01-001|TODO – Implement helpers to reconcile component/service references against policy expectations and emit diagnostics for missing SBOM links.| -|EXCITITOR-FMT-CYCLONE-01-003 – CycloneDX export serializer|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CYCLONE-01-001|TODO – Provide exporters producing CycloneDX VEX output with canonical ordering and hash-stable manifests.| +|EXCITITOR-FMT-CYCLONE-01-002 – Component reference reconciliation|Team Excititor Formats|EXCITITOR-FMT-CYCLONE-01-001|**DOING (2025-10-19)** – Prereq EXCITITOR-FMT-CYCLONE-01-001 confirmed DONE; proceeding with reference reconciliation helpers and diagnostics for missing SBOM links.| +|EXCITITOR-FMT-CYCLONE-01-003 – CycloneDX export serializer|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CYCLONE-01-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-EXPORT-01-001 & EXCITITOR-FMT-CYCLONE-01-001 verified DONE; initiating deterministic CycloneDX VEX exporter work.| diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md b/src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md index 22c363e7..3cfa3e47 100644 --- a/src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md +++ b/src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md @@ -3,5 +3,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |EXCITITOR-FMT-OPENVEX-01-001 – OpenVEX normalizer|Team Excititor Formats|EXCITITOR-CORE-01-001|**DONE (2025-10-17)** – OpenVEX normalizer parses 
statements/products, maps status/justification, and surfaces provenance metadata; coverage in `OpenVexNormalizerTests`.| -|EXCITITOR-FMT-OPENVEX-01-002 – Statement merge utilities|Team Excititor Formats|EXCITITOR-FMT-OPENVEX-01-001|TODO – Add reducers merging multiple OpenVEX statements, resolving conflicts deterministically, and emitting policy diagnostics.| -|EXCITITOR-FMT-OPENVEX-01-003 – OpenVEX export writer|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-OPENVEX-01-001|TODO – Provide export serializer generating canonical OpenVEX documents with optional SBOM references and hash-stable ordering.| +|EXCITITOR-FMT-OPENVEX-01-002 – Statement merge utilities|Team Excititor Formats|EXCITITOR-FMT-OPENVEX-01-001|**DOING (2025-10-19)** – Prereq EXCITITOR-FMT-OPENVEX-01-001 confirmed DONE; building deterministic merge reducers with policy diagnostics.| +|EXCITITOR-FMT-OPENVEX-01-003 – OpenVEX export writer|Team Excititor Formats|EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-OPENVEX-01-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-EXPORT-01-001 & EXCITITOR-FMT-OPENVEX-01-001 verified DONE; starting canonical OpenVEX exporter with stable ordering/SBOM references.| diff --git a/src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs b/src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs index fce555fd..e788095c 100644 --- a/src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs +++ b/src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs @@ -21,6 +21,9 @@ public sealed class VexPolicyProviderTests Assert.Equal(VexConsensusPolicyOptions.BaselineVersion, snapshot.Version); Assert.Empty(snapshot.Issues); + Assert.Equal(VexConsensusPolicyOptions.DefaultWeightCeiling, snapshot.ConsensusOptions.WeightCeiling); + Assert.Equal(VexConsensusPolicyOptions.DefaultAlpha, snapshot.ConsensusOptions.Alpha); + Assert.Equal(VexConsensusPolicyOptions.DefaultBeta, snapshot.ConsensusOptions.Beta); var evaluator = new VexPolicyEvaluator(provider); var consensusProvider = new VexProvider("vendor", "Vendor", VexProviderKind.Vendor); @@ -63,6 +66,9 @@ public sealed class VexPolicyProviderTests Assert.Equal("custom/v1", snapshot.Version); Assert.NotEmpty(snapshot.Issues); Assert.Equal(0.95, snapshot.ConsensusOptions.ProviderOverrides["vendor"]); + Assert.Contains(snapshot.Issues, issue => issue.Code == "weights.vendor.range"); + Assert.Equal(VexConsensusPolicyOptions.DefaultWeightCeiling, snapshot.ConsensusOptions.WeightCeiling); + Assert.Equal(1.0, snapshot.ConsensusOptions.VendorWeight); var evaluator = new VexPolicyEvaluator(provider); var vendor = new VexProvider("vendor", "Vendor", VexProviderKind.Vendor); diff --git a/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj b/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj index 76da4d64..6cbeefa1 100644 --- a/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj +++ b/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj @@ -7,8 +7,8 @@ true - - + + diff --git a/src/StellaOps.Excititor.Policy/TASKS.md b/src/StellaOps.Excititor.Policy/TASKS.md index 0fc8c033..c2902fba 100644 --- a/src/StellaOps.Excititor.Policy/TASKS.md +++ b/src/StellaOps.Excititor.Policy/TASKS.md @@ -7,5 +7,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |EXCITITOR-POLICY-01-003 – Operator diagnostics & docs|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Surface structured diagnostics (CLI/WebService) and author policy upgrade guidance in 
docs/ARCHITECTURE_EXCITITOR.md appendix.
2025-10-16: Added `IVexPolicyDiagnostics`/`VexPolicyDiagnosticsReport`, sorted issue ordering, recommendations, and appendix guidance. Tests: `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| |EXCITITOR-POLICY-01-004 – Policy schema validation & YAML binding|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Added strongly-typed YAML/JSON binding, schema validation, and deterministic diagnostics for operator-supplied policy bundles.| |EXCITITOR-POLICY-01-005 – Policy change tracking & telemetry|Team Excititor Policy|EXCITITOR-POLICY-01-002|**DONE (2025-10-16)** – Emit revision history, expose snapshot digests via CLI/WebService, and add structured logging/metrics for policy reloads.
2025-10-16: `VexPolicySnapshot` now carries revision/digest, provider logs reloads, `vex.policy.reloads` metric emitted, binder/diagnostics expose digest metadata. Tests: `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| -|EXCITITOR-POLICY-02-001 – Scoring coefficients & weight ceilings|Team Excititor Policy|EXCITITOR-POLICY-01-004|TODO – Extend `VexPolicyOptions` with α/β boosters and optional >1.0 weight ceilings, validate ranges, and document operator guidance in `docs/ARCHITECTURE_EXCITITOR.md`/`docs/EXCITITOR_SCORRING.md`.| +|EXCITITOR-POLICY-02-001 – Scoring coefficients & weight ceilings|Team Excititor Policy|EXCITITOR-POLICY-01-004|DONE (2025-10-19) – Added `weights.ceiling` + `scoring.{alpha,beta}` options with normalization warnings, extended consensus policy/digest, refreshed docs (`docs/ARCHITECTURE_EXCITITOR.md`, `docs/EXCITITOR_SCORRING.md`), and validated via `dotnet test` for core/policy suites.| |EXCITITOR-POLICY-02-002 – Diagnostics for scoring signals|Team Excititor Policy|EXCITITOR-POLICY-02-001|BACKLOG – Update diagnostics reports to surface missing severity/KEV/EPSS mappings, coefficient overrides, and provide actionable recommendations for policy tuning.| diff --git a/src/StellaOps.Excititor.Policy/VexPolicyDigest.cs b/src/StellaOps.Excititor.Policy/VexPolicyDigest.cs index 9d633b6e..4db82937 100644 --- a/src/StellaOps.Excititor.Policy/VexPolicyDigest.cs +++ b/src/StellaOps.Excititor.Policy/VexPolicyDigest.cs @@ -17,7 +17,10 @@ internal static class VexPolicyDigest .Append(options.DistroWeight.ToString("F6", CultureInfo.InvariantCulture)).Append('|') .Append(options.PlatformWeight.ToString("F6", CultureInfo.InvariantCulture)).Append('|') .Append(options.HubWeight.ToString("F6", CultureInfo.InvariantCulture)).Append('|') - .Append(options.AttestationWeight.ToString("F6", CultureInfo.InvariantCulture)); + .Append(options.AttestationWeight.ToString("F6", CultureInfo.InvariantCulture)).Append('|') + .Append(options.WeightCeiling.ToString("F6", CultureInfo.InvariantCulture)).Append('|') + .Append(options.Alpha.ToString("F6", CultureInfo.InvariantCulture)).Append('|') + .Append(options.Beta.ToString("F6", CultureInfo.InvariantCulture)); foreach (var kvp in options.ProviderOverrides .OrderBy(static pair => pair.Key, StringComparer.Ordinal)) diff --git a/src/StellaOps.Excititor.Policy/VexPolicyOptions.cs b/src/StellaOps.Excititor.Policy/VexPolicyOptions.cs index e2a91d0c..24692d7d 100644 --- a/src/StellaOps.Excititor.Policy/VexPolicyOptions.cs +++ b/src/StellaOps.Excititor.Policy/VexPolicyOptions.cs @@ -8,6 +8,8 @@ public sealed class VexPolicyOptions public VexPolicyWeightOptions Weights { get; set; } = new(); + public VexPolicyScoringOptions Scoring { get; set; } = new(); + public IDictionary? ProviderOverrides { get; set; } } @@ -22,4 +24,13 @@ public sealed class VexPolicyWeightOptions public double? Hub { get; set; } public double? Attestation { get; set; } + + public double? Ceiling { get; set; } +} + +public sealed class VexPolicyScoringOptions +{ + public double? Alpha { get; set; } + + public double? 
Beta { get; set; } } diff --git a/src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs b/src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs index 3939ecf3..fc90ecfb 100644 --- a/src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs +++ b/src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs @@ -6,34 +6,33 @@ namespace StellaOps.Excititor.Policy; internal static class VexPolicyProcessing { + private const double DefaultVendorWeight = 1.0; + private const double DefaultDistroWeight = 0.9; + private const double DefaultPlatformWeight = 0.7; + private const double DefaultHubWeight = 0.5; + private const double DefaultAttestationWeight = 0.6; + public static VexPolicyNormalizationResult Normalize(VexPolicyOptions? options) { var issues = ImmutableArray.CreateBuilder(); var policyOptions = options ?? new VexPolicyOptions(); - if (!TryNormalizeWeights( - policyOptions.Weights, - out var normalizedWeights, - issues)) - { - issues.Add(new VexPolicyIssue( - "weights.invalid", - "Weight configuration is invalid; falling back to defaults.", - VexPolicyIssueSeverity.Warning)); - normalizedWeights = new VexConsensusPolicyOptions(); - } - - var overrides = NormalizeOverrides(policyOptions.ProviderOverrides, issues); + var normalizedWeights = NormalizeWeights(policyOptions.Weights, issues); + var overrides = NormalizeOverrides(policyOptions.ProviderOverrides, normalizedWeights.Ceiling, issues); + var scoring = NormalizeScoring(policyOptions.Scoring, issues); var consensusOptions = new VexConsensusPolicyOptions( policyOptions.Version ?? VexConsensusPolicyOptions.BaselineVersion, - normalizedWeights.VendorWeight, - normalizedWeights.DistroWeight, - normalizedWeights.PlatformWeight, - normalizedWeights.HubWeight, - normalizedWeights.AttestationWeight, - overrides); + normalizedWeights.Vendor, + normalizedWeights.Distro, + normalizedWeights.Platform, + normalizedWeights.Hub, + normalizedWeights.Attestation, + overrides, + normalizedWeights.Ceiling, + scoring.Alpha, + scoring.Beta); var orderedIssues = issues.ToImmutable().Sort(IssueComparer); @@ -43,76 +42,119 @@ internal static class VexPolicyProcessing public static ImmutableArray SortIssues(IEnumerable issues) => issues.ToImmutableArray().Sort(IssueComparer); - private static bool TryNormalizeWeights( + private static WeightNormalizationResult NormalizeWeights( VexPolicyWeightOptions? 
options, - out VexConsensusPolicyOptions normalized, ImmutableArray.Builder issues) { - if (options is null) - { - normalized = new VexConsensusPolicyOptions(); - return true; - } + var ceiling = NormalizeWeightCeiling(options?.Ceiling, issues); - var hasAny = - options.Vendor.HasValue || - options.Distro.HasValue || - options.Platform.HasValue || - options.Hub.HasValue || - options.Attestation.HasValue; + var vendor = NormalizeWeightValue( + options?.Vendor, + "vendor", + DefaultVendorWeight, + ceiling, + issues); + var distro = NormalizeWeightValue( + options?.Distro, + "distro", + DefaultDistroWeight, + ceiling, + issues); + var platform = NormalizeWeightValue( + options?.Platform, + "platform", + DefaultPlatformWeight, + ceiling, + issues); + var hub = NormalizeWeightValue( + options?.Hub, + "hub", + DefaultHubWeight, + ceiling, + issues); + var attestation = NormalizeWeightValue( + options?.Attestation, + "attestation", + DefaultAttestationWeight, + ceiling, + issues); - if (!hasAny) - { - normalized = new VexConsensusPolicyOptions(); - return true; - } - - var vendor = Clamp(options.Vendor, nameof(options.Vendor), issues); - var distro = Clamp(options.Distro, nameof(options.Distro), issues); - var platform = Clamp(options.Platform, nameof(options.Platform), issues); - var hub = Clamp(options.Hub, nameof(options.Hub), issues); - var attestation = Clamp(options.Attestation, nameof(options.Attestation), issues); - - normalized = new VexConsensusPolicyOptions( - VexConsensusPolicyOptions.BaselineVersion, - vendor ?? 1.0, - distro ?? 0.9, - platform ?? 0.7, - hub ?? 0.5, - attestation ?? 0.6); - return true; + return new WeightNormalizationResult(vendor, distro, platform, hub, attestation, ceiling); } - private static double? Clamp(double? value, string fieldName, ImmutableArray.Builder issues) + private static double NormalizeWeightValue( + double? value, + string fieldName, + double defaultValue, + double ceiling, + ImmutableArray.Builder issues) { if (value is null) { - return null; + return defaultValue; } if (double.IsNaN(value.Value) || double.IsInfinity(value.Value)) { issues.Add(new VexPolicyIssue( $"weights.{fieldName}.invalid", - $"{fieldName} must be a finite number.", + $"{fieldName} must be a finite number; default {defaultValue.ToString(CultureInfo.InvariantCulture)} applied.", VexPolicyIssueSeverity.Warning)); - return null; + return defaultValue; } - if (value.Value < 0 || value.Value > 1) + if (value.Value < 0 || value.Value > ceiling) { issues.Add(new VexPolicyIssue( $"weights.{fieldName}.range", - $"{fieldName} must be between 0 and 1; value {value.Value.ToString(CultureInfo.InvariantCulture)} was clamped.", + $"{fieldName} must be between 0 and {ceiling.ToString(CultureInfo.InvariantCulture)}; value {value.Value.ToString(CultureInfo.InvariantCulture)} was clamped.", VexPolicyIssueSeverity.Warning)); - return Math.Clamp(value.Value, 0, 1); + return Math.Clamp(value.Value, 0, ceiling); } return value.Value; } + private static double NormalizeWeightCeiling(double? 
ceiling, ImmutableArray.Builder issues) + { + if (ceiling is null) + { + return VexConsensusPolicyOptions.DefaultWeightCeiling; + } + + if (double.IsNaN(ceiling.Value) || double.IsInfinity(ceiling.Value) || ceiling.Value <= 0) + { + issues.Add(new VexPolicyIssue( + "weights.ceiling.invalid", + "weights.ceiling must be a positive, finite number; default ceiling applied.", + VexPolicyIssueSeverity.Warning)); + return VexConsensusPolicyOptions.DefaultWeightCeiling; + } + + if (ceiling.Value < 1) + { + issues.Add(new VexPolicyIssue( + "weights.ceiling.minimum", + "weights.ceiling below 1 falls back to 1 to preserve baseline behaviour.", + VexPolicyIssueSeverity.Warning)); + return 1; + } + + if (ceiling.Value > VexConsensusPolicyOptions.MaxSupportedCeiling) + { + issues.Add(new VexPolicyIssue( + "weights.ceiling.maximum", + $"weights.ceiling exceeded supported range; value {ceiling.Value.ToString(CultureInfo.InvariantCulture)} was clamped to {VexConsensusPolicyOptions.MaxSupportedCeiling.ToString(CultureInfo.InvariantCulture)}.", + VexPolicyIssueSeverity.Warning)); + return VexConsensusPolicyOptions.MaxSupportedCeiling; + } + + return ceiling.Value; + } + private static ImmutableDictionary NormalizeOverrides( IDictionary? overrides, + double ceiling, ImmutableArray.Builder issues) { if (overrides is null || overrides.Count == 0) @@ -128,17 +170,81 @@ internal static class VexPolicyProcessing issues.Add(new VexPolicyIssue( "overrides.key.missing", "Encountered provider override with empty key; ignoring entry.", - VexPolicyIssueSeverity.Warning)); + VexPolicyIssueSeverity.Warning)); continue; } - var weight = Clamp(kvp.Value, $"overrides.{kvp.Key}", issues) ?? kvp.Value; - builder[kvp.Key.Trim()] = weight; + var key = kvp.Key.Trim(); + var weight = NormalizeWeightValue( + kvp.Value, + $"overrides.{key}", + DefaultVendorWeight, + ceiling, + issues); + builder[key] = weight; } return builder.ToImmutable(); } + private static ScoringNormalizationResult NormalizeScoring( + VexPolicyScoringOptions? options, + ImmutableArray.Builder issues) + { + var alpha = NormalizeCoefficient( + options?.Alpha, + "alpha", + VexConsensusPolicyOptions.DefaultAlpha, + issues); + var beta = NormalizeCoefficient( + options?.Beta, + "beta", + VexConsensusPolicyOptions.DefaultBeta, + issues); + return new ScoringNormalizationResult(alpha, beta); + } + + private static double NormalizeCoefficient( + double? 
value, + string fieldName, + double defaultValue, + ImmutableArray.Builder issues) + { + if (value is null) + { + return defaultValue; + } + + if (double.IsNaN(value.Value) || double.IsInfinity(value.Value)) + { + issues.Add(new VexPolicyIssue( + $"scoring.{fieldName}.invalid", + $"{fieldName} coefficient must be a finite number; default {defaultValue.ToString(CultureInfo.InvariantCulture)} applied.", + VexPolicyIssueSeverity.Warning)); + return defaultValue; + } + + if (value.Value < 0) + { + issues.Add(new VexPolicyIssue( + $"scoring.{fieldName}.range", + $"{fieldName} cannot be negative; default {defaultValue.ToString(CultureInfo.InvariantCulture)} applied.", + VexPolicyIssueSeverity.Warning)); + return defaultValue; + } + + if (value.Value > VexConsensusPolicyOptions.MaxSupportedCoefficient) + { + issues.Add(new VexPolicyIssue( + $"scoring.{fieldName}.maximum", + $"{fieldName} exceeded supported range; value {value.Value.ToString(CultureInfo.InvariantCulture)} was clamped to {VexConsensusPolicyOptions.MaxSupportedCoefficient.ToString(CultureInfo.InvariantCulture)}.", + VexPolicyIssueSeverity.Warning)); + return VexConsensusPolicyOptions.MaxSupportedCoefficient; + } + + return value.Value; + } + private static int CompareIssues(VexPolicyIssue left, VexPolicyIssue right) { var severityCompare = GetSeverityRank(left.Severity).CompareTo(GetSeverityRank(right.Severity)); @@ -163,4 +269,14 @@ internal static class VexPolicyProcessing internal sealed record VexPolicyNormalizationResult( VexConsensusPolicyOptions ConsensusOptions, ImmutableArray Issues); + + private sealed record WeightNormalizationResult( + double Vendor, + double Distro, + double Platform, + double Hub, + double Attestation, + double Ceiling); + + private sealed record ScoringNormalizationResult(double Alpha, double Beta); } diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs b/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs index 522b0623..3e4abdc1 100644 --- a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs +++ b/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs @@ -1,5 +1,6 @@ using System.Collections.Immutable; using System.Globalization; +using System.Linq; using System.Text; using Microsoft.Extensions.Options; using Mongo2Go; @@ -181,6 +182,78 @@ public sealed class MongoVexRepositoryTests : IAsyncLifetime Assert.Null(remaining); } + [Fact] + public async Task ClaimStore_AppendsAndQueriesStatements() + { + var database = _client.GetDatabase($"vex-claims-{Guid.NewGuid():N}"); + var store = new MongoVexClaimStore(database); + + var product = new VexProduct("pkg:demo/app", "Demo App", version: "1.0.0", purl: "pkg:demo/app@1.0.0"); + var document = new VexClaimDocument( + VexDocumentFormat.Csaf, + "sha256:claim-1", + new Uri("https://example.org/vex/claim-1.json"), + revision: "2025-10-19"); + + var initialClaim = new VexClaim( + vulnerabilityId: "CVE-2025-0101", + providerId: "redhat", + product: product, + status: VexClaimStatus.NotAffected, + document: document, + firstSeen: DateTimeOffset.UtcNow.AddMinutes(-30), + lastSeen: DateTimeOffset.UtcNow.AddMinutes(-10), + justification: VexJustification.ComponentNotPresent, + detail: "Package not shipped in this channel.", + confidence: new VexConfidence("high", 0.9, "policy/default"), + signals: new VexSignalSnapshot( + new VexSeveritySignal("CVSS:3.1", 5.8, "medium", "CVSS:3.1/..."), + kev: false, + epss: 0.21), + additionalMetadata: ImmutableDictionary.Empty.Add("source", 
"csaf")); + + await store.AppendAsync(new[] { initialClaim }, DateTimeOffset.UtcNow.AddMinutes(-5), CancellationToken.None); + + var secondDocument = new VexClaimDocument( + VexDocumentFormat.Csaf, + "sha256:claim-2", + new Uri("https://example.org/vex/claim-2.json"), + revision: "2025-10-19.1"); + + var secondClaim = new VexClaim( + vulnerabilityId: initialClaim.VulnerabilityId, + providerId: initialClaim.ProviderId, + product: initialClaim.Product, + status: initialClaim.Status, + document: secondDocument, + firstSeen: initialClaim.FirstSeen, + lastSeen: DateTimeOffset.UtcNow, + justification: initialClaim.Justification, + detail: initialClaim.Detail, + confidence: initialClaim.Confidence, + signals: new VexSignalSnapshot( + new VexSeveritySignal("CVSS:3.1", 7.2, "high"), + kev: true, + epss: 0.43), + additionalMetadata: initialClaim.AdditionalMetadata.ToImmutableDictionary(kvp => kvp.Key, kvp => kvp.Value)); + + await store.AppendAsync(new[] { secondClaim }, DateTimeOffset.UtcNow, CancellationToken.None); + + var all = await store.FindAsync("CVE-2025-0101", product.Key, since: null, CancellationToken.None); + var allList = all.ToList(); + Assert.Equal(2, allList.Count); + Assert.Equal("sha256:claim-2", allList[0].Document.Digest); + Assert.True(allList[0].Signals?.Kev); + Assert.Equal(0.43, allList[0].Signals?.Epss); + Assert.Equal("sha256:claim-1", allList[1].Document.Digest); + Assert.Equal("csaf", allList[1].AdditionalMetadata["source"]); + + var recentOnly = await store.FindAsync("CVE-2025-0101", product.Key, DateTimeOffset.UtcNow.AddMinutes(-2), CancellationToken.None); + var recentList = recentOnly.ToList(); + Assert.Single(recentList); + Assert.Equal("sha256:claim-2", recentList[0].Document.Digest); + } + private MongoVexRawStore CreateRawStore(IMongoDatabase database, int thresholdBytes) { var options = Options.Create(new VexMongoStorageOptions diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs b/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs index 7277becc..4db758a6 100644 --- a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs +++ b/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs @@ -150,10 +150,29 @@ public sealed class MongoVexStoreMappingTests : IAsyncLifetime }, } }, + { + "Signals", + new BsonDocument + { + { + "Severity", + new BsonDocument + { + { "Scheme", "CVSS:3.1" }, + { "Score", 7.5 }, + { "Label", "high" }, + { "Vector", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" }, + } + }, + { "Kev", true }, + { "Epss", 0.42 }, + } + }, { "PolicyVersion", "2025.10" }, { "PolicyRevisionId", "rev-1" }, { "PolicyDigest", "sha256:abc" }, { "Summary", "Vendor confirms not affected." 
}, + { "GeneratedAt", DateTime.UtcNow }, { "Unexpected", new BsonDocument { { "foo", "bar" } } }, }; @@ -186,6 +205,12 @@ public sealed class MongoVexStoreMappingTests : IAsyncLifetime Assert.Equal("policy_override", conflict.Reason); Assert.Equal("Vendor confirms not affected.", result.Summary); Assert.Equal("2025.10", result.PolicyVersion); + Assert.NotNull(result.Signals); + Assert.True(result.Signals!.Kev); + Assert.Equal(0.42, result.Signals.Epss); + Assert.NotNull(result.Signals.Severity); + Assert.Equal("CVSS:3.1", result.Signals.Severity!.Scheme); + Assert.Equal(7.5, result.Signals.Severity.Score); } [Fact] diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs b/src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs index 08e98dec..28f61de1 100644 --- a/src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs +++ b/src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs @@ -5,6 +5,7 @@ using Microsoft.Extensions.Logging.Abstractions; using Mongo2Go; using MongoDB.Driver; using StellaOps.Excititor.Storage.Mongo.Migrations; +using StellaOps.Excititor.Storage.Mongo; namespace StellaOps.Excititor.Storage.Mongo.Tests; @@ -23,24 +24,32 @@ public sealed class VexMongoMigrationRunnerTests : IAsyncLifetime [Fact] public async Task RunAsync_AppliesInitialIndexesOnce() { - var migration = new VexInitialIndexMigration(); - var runner = new VexMongoMigrationRunner(_database, new[] { migration }, NullLogger.Instance); + var migrations = new IVexMongoMigration[] + { + new VexInitialIndexMigration(), + new VexConsensusSignalsMigration(), + }; + var runner = new VexMongoMigrationRunner(_database, migrations, NullLogger.Instance); await runner.RunAsync(CancellationToken.None); await runner.RunAsync(CancellationToken.None); var appliedCollection = _database.GetCollection(VexMongoCollectionNames.Migrations); var applied = await appliedCollection.Find(FilterDefinition.Empty).ToListAsync(); - Assert.Single(applied); - Assert.Equal(migration.Id, applied[0].Id); + Assert.Equal(2, applied.Count); + Assert.Equal(migrations.Select(m => m.Id).OrderBy(id => id, StringComparer.Ordinal), applied.Select(record => record.Id).OrderBy(id => id, StringComparer.Ordinal)); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Raw), "ProviderId_1_Format_1_RetrievedAt_1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Providers), "Kind_1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Consensus), "VulnerabilityId_1_Product.Key_1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Consensus), "PolicyRevisionId_1_PolicyDigest_1")); + Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Consensus), "PolicyRevisionId_1_CalculatedAt_-1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Exports), "QuerySignature_1_Format_1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Cache), "QuerySignature_1_Format_1")); Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Cache), "ExpiresAt_1")); + Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Statements), "VulnerabilityId_1_Product.Key_1_InsertedAt_-1")); + Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Statements), "ProviderId_1_InsertedAt_-1")); + Assert.True(HasIndex(_database.GetCollection(VexMongoCollectionNames.Statements), "Document.Digest_1")); } private static bool 
HasIndex(IMongoCollection collection, string name) diff --git a/src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs b/src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs index b3258aef..e69de29b 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs @@ -1,17 +0,0 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Storage.Mongo; - -public interface IVexRawStore : IVexRawDocumentSink -{ - ValueTask FindByDigestAsync(string digest, CancellationToken cancellationToken); -} - -public interface IVexExportStore -{ - ValueTask FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken); - - ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken); -} diff --git a/src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs b/src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs index 58a61257..ae60d3c8 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs @@ -25,10 +25,22 @@ public interface IVexConsensusStore ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken); } +public interface IVexClaimStore +{ + ValueTask AppendAsync(IEnumerable claims, DateTimeOffset observedAt, CancellationToken cancellationToken); + + ValueTask> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken); +} + public sealed record VexConnectorState( string ConnectorId, DateTimeOffset? LastUpdated, - ImmutableArray DocumentDigests); + ImmutableArray DocumentDigests, + ImmutableDictionary ResumeTokens, + DateTimeOffset? LastSuccessAt, + int FailureCount, + DateTimeOffset? NextEligibleRun, + string? 
LastFailureReason); public interface IVexConnectorStateRepository { diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs b/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs new file mode 100644 index 00000000..c1c75da7 --- /dev/null +++ b/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs @@ -0,0 +1,52 @@ +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Storage.Mongo.Migrations; + +internal sealed class VexConsensusSignalsMigration : IVexMongoMigration +{ + public string Id => "20251019-consensus-signals-statements"; + + public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + await EnsureConsensusIndexesAsync(database, cancellationToken).ConfigureAwait(false); + await EnsureStatementIndexesAsync(database, cancellationToken).ConfigureAwait(false); + } + + private static Task EnsureConsensusIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var collection = database.GetCollection(VexMongoCollectionNames.Consensus); + var revisionGeneratedIndex = Builders.IndexKeys + .Ascending(x => x.PolicyRevisionId) + .Descending(x => x.CalculatedAt); + + return collection.Indexes.CreateOneAsync( + new CreateIndexModel(revisionGeneratedIndex), + cancellationToken: cancellationToken); + } + + private static Task EnsureStatementIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var collection = database.GetCollection(VexMongoCollectionNames.Statements); + + var vulnProductInsertedIndex = Builders.IndexKeys + .Ascending(x => x.VulnerabilityId) + .Ascending(x => x.Product.Key) + .Descending(x => x.InsertedAt); + + var providerInsertedIndex = Builders.IndexKeys + .Ascending(x => x.ProviderId) + .Descending(x => x.InsertedAt); + + var digestIndex = Builders.IndexKeys + .Ascending(x => x.Document.Digest); + + return Task.WhenAll( + collection.Indexes.CreateOneAsync(new CreateIndexModel(vulnProductInsertedIndex), cancellationToken: cancellationToken), + collection.Indexes.CreateOneAsync(new CreateIndexModel(providerInsertedIndex), cancellationToken: cancellationToken), + collection.Indexes.CreateOneAsync(new CreateIndexModel(digestIndex), cancellationToken: cancellationToken)); + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs b/src/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs new file mode 100644 index 00000000..8d125434 --- /dev/null +++ b/src/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Storage.Mongo; + +public sealed class MongoVexClaimStore : IVexClaimStore +{ + private readonly IMongoCollection _collection; + + public MongoVexClaimStore(IMongoDatabase database) + { + ArgumentNullException.ThrowIfNull(database); + VexMongoMappingRegistry.Register(); + _collection = database.GetCollection(VexMongoCollectionNames.Statements); + } + + public async ValueTask AppendAsync(IEnumerable claims, DateTimeOffset observedAt, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(claims); + var records = claims + .Select(claim => VexStatementRecord.FromDomain(claim, observedAt)) + .ToList(); + + if (records.Count == 0) + 
{ + return; + } + + await _collection.InsertManyAsync(records, new InsertManyOptions { IsOrdered = false }, cancellationToken).ConfigureAwait(false); + } + + public async ValueTask> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId); + ArgumentException.ThrowIfNullOrWhiteSpace(productKey); + + var filter = Builders.Filter.Eq(x => x.VulnerabilityId, vulnerabilityId.Trim()) & + Builders.Filter.Eq(x => x.Product.Key, productKey.Trim()); + + if (since is { } sinceValue) + { + filter &= Builders.Filter.Gte(x => x.InsertedAt, sinceValue.UtcDateTime); + } + + var records = await _collection + .Find(filter) + .SortByDescending(x => x.InsertedAt) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return records.ConvertAll(static record => record.ToDomain()); + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs b/src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs index 687cac2b..4700c148 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs @@ -1,5 +1,9 @@ +using System.ComponentModel.DataAnnotations; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Excititor.Core; using StellaOps.Excititor.Storage.Mongo.Migrations; namespace StellaOps.Excititor.Storage.Mongo; @@ -10,14 +14,49 @@ public static class VexMongoServiceCollectionExtensions { services.AddOptions(); + services.TryAddSingleton(static provider => + { + var options = provider.GetRequiredService>().Value; + Validator.ValidateObject(options, new ValidationContext(options), validateAllProperties: true); + + var mongoUrl = MongoUrl.Create(options.ConnectionString); + var settings = MongoClientSettings.FromUrl(mongoUrl); + settings.ReadConcern = ReadConcern.Majority; + settings.ReadPreference = ReadPreference.Primary; + settings.WriteConcern = WriteConcern.WMajority.With(wTimeout: options.CommandTimeout); + settings.RetryReads = true; + settings.RetryWrites = true; + return new MongoClient(settings); + }); + + services.TryAddSingleton(static provider => + { + var options = provider.GetRequiredService>().Value; + var client = provider.GetRequiredService(); + + var settings = new MongoDatabaseSettings + { + ReadConcern = ReadConcern.Majority, + ReadPreference = ReadPreference.PrimaryPreferred, + WriteConcern = WriteConcern.WMajority.With(wTimeout: options.CommandTimeout), + }; + + return client.GetDatabase(options.GetDatabaseName(), settings); + }); + + services.AddScoped(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); services.AddHostedService(); return services; diff --git a/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj b/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj index 9008505a..ac19bf54 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj +++ b/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj @@ -7,11 +7,10 @@ true - - - - - + + 
+ + diff --git a/src/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs b/src/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs new file mode 100644 index 00000000..d2ea123b --- /dev/null +++ b/src/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs @@ -0,0 +1,50 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Storage.Mongo; + +/// +/// Normalizer router that resolves providers from Mongo storage before invoking the format-specific normalizer. +/// +public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter +{ + private readonly VexNormalizerRegistry _registry; + private readonly IVexProviderStore _providerStore; + private readonly ILogger _logger; + + public StorageBackedVexNormalizerRouter( + IEnumerable normalizers, + IVexProviderStore providerStore, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(normalizers); + _providerStore = providerStore ?? throw new ArgumentNullException(nameof(providerStore)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + _registry = new VexNormalizerRegistry(normalizers.ToImmutableArray()); + } + + public async ValueTask NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var normalizer = _registry.Resolve(document); + if (normalizer is null) + { + _logger.LogWarning("No normalizer registered for VEX document format {Format}. Skipping normalization for {Digest}.", document.Format, document.Digest); + return new VexClaimBatch( + document, + ImmutableArray.Empty, + ImmutableDictionary.Empty); + } + + var provider = await _providerStore.FindAsync(document.ProviderId, cancellationToken).ConfigureAwait(false) + ?? new VexProvider(document.ProviderId, document.ProviderId, VexProviderKind.Vendor); + + return await normalizer.NormalizeAsync(document, provider, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/TASKS.md b/src/StellaOps.Excititor.Storage.Mongo/TASKS.md index 96311557..d10350ec 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/TASKS.md +++ b/src/StellaOps.Excititor.Storage.Mongo/TASKS.md @@ -6,5 +6,6 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md |EXCITITOR-STORAGE-01-002 – Migrations & indices bootstrap|Team Excititor Storage|EXCITITOR-STORAGE-01-001|**DONE (2025-10-16)** – Add bootstrapper creating indices (claims by vulnId/product, exports by querySignature, etc.) and migrations for existing deployments.
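Editor's note: for teams layering further migrations on top of this bootstrap, a minimal sketch of the runner contract follows. `VexExampleIndexMigration`, its `Id`, and the chosen index keys are hypothetical, but the member shape mirrors the `VexConsensusSignalsMigration` added in this diff, and the runner records each `Id` in `vex.migrations` so a migration executes at most once.

```csharp
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

namespace StellaOps.Excititor.Storage.Mongo.Migrations;

// Hypothetical follow-up migration; the member shape matches VexConsensusSignalsMigration.
internal sealed class VexExampleIndexMigration : IVexMongoMigration
{
    // The runner persists this Id in vex.migrations, so ExecuteAsync runs at most once.
    public string Id => "20251101-example-statement-status-index";

    public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var statements = database.GetCollection<VexStatementRecord>(VexMongoCollectionNames.Statements);

        // Illustrative secondary index only; pick keys to match the queries you actually run.
        var keys = Builders<VexStatementRecord>.IndexKeys
            .Ascending(x => x.Status)
            .Descending(x => x.InsertedAt);

        await statements.Indexes
            .CreateOneAsync(new CreateIndexModel<VexStatementRecord>(keys), cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }
}
```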
2025-10-16: Introduced migration runner + hosted service, initial index migration covers raw/providers/consensus/exports/cache, and tests use Mongo2Go to verify execution.| |EXCITITOR-STORAGE-01-003 – Repository layer & transactional flows|Team Excititor Storage|EXCITITOR-STORAGE-01-001|**DONE (2025-10-16)** – Added GridFS-backed raw store with transactional upserts (including fallback for non-replicaset Mongo), export/cache repository coordination, and coverage verifying cache TTL + GridFS round-trips.| |EXCITITOR-STORAGE-01-004 – Provider/consensus/cache mappings|Team Excititor Storage|EXCITITOR-STORAGE-01-001|**DONE (2025-10-16)** – Registered MongoDB class maps for provider/consensus/cache records with forward-compatible field handling and added coverage ensuring GridFS-linked cache entries round-trip cleanly.| -|EXCITITOR-STORAGE-02-001 – Statement events & scoring signals|Team Excititor Storage|EXCITITOR-CORE-02-001|TODO – Add immutable `vex.statements` collection, extend consensus documents with severity/KEV/EPSS fields, build indices for `policyRevisionId`/`generatedAt`, and script migrations/backfill guidance for Phase 1 rollout.| -|EXCITITOR-STORAGE-MONGO-08-001 – Session + causal consistency hardening|Team Excititor Storage|EXCITITOR-STORAGE-01-003|TODO – Register Mongo client/database with majority read/write concerns, expose scoped session helper enabling causal consistency, thread session handles through raw/export/consensus/cache stores (including GridFS reads), and extend integration tests to verify read-your-write semantics during replica-set failover.| +|EXCITITOR-STORAGE-02-001 – Statement events & scoring signals|Team Excititor Storage|EXCITITOR-CORE-02-001|DONE (2025-10-19) – Added immutable `vex.statements` collection + claim store, extended consensus persistence with severity/KEV/EPSS signals, shipped migration `20251019-consensus-signals-statements`, and updated docs. Tests: `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj` & `dotnet test src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj`; worker/web suites pending due to NU1903 (`Microsoft.Extensions.Caching.Memory`) advisory.| +|EXCITITOR-STORAGE-03-001 – Statement backfill tooling|Team Excititor Storage|EXCITITOR-STORAGE-02-001|DOING (2025-10-19) – Provide CLI/scripted tooling to replay historical statements into `vex.statements` (leveraging `/excititor/statements`), document operational runbook, and add smoke test verifying replayed data includes severity/KEV/EPSS signals.
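Editor's note: a minimal sketch of the replay loop this backfill task describes, under stated assumptions: the caller already enumerates the historical `VexRawDocument`s, `VexClaimBatch` exposes its normalized claims as `Claims`, `VexRawDocument` carries its original `RetrievedAt` timestamp, and `VexStatementBackfill` is a hypothetical helper name rather than shipped code.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;

// Hypothetical backfill helper: re-normalizes historical raw documents and appends the
// resulting claims into vex.statements via IVexClaimStore.
public sealed class VexStatementBackfill
{
    private readonly IVexNormalizerRouter _normalizer;
    private readonly IVexClaimStore _claims;

    public VexStatementBackfill(IVexNormalizerRouter normalizer, IVexClaimStore claims)
    {
        _normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer));
        _claims = claims ?? throw new ArgumentNullException(nameof(claims));
    }

    public async Task ReplayAsync(IEnumerable<VexRawDocument> rawDocuments, CancellationToken cancellationToken)
    {
        foreach (var document in rawDocuments)
        {
            // Re-run normalization so severity/KEV/EPSS signals land on the replayed statements.
            var batch = await _normalizer.NormalizeAsync(document, cancellationToken).ConfigureAwait(false);

            // Use the original retrieval time as observedAt so InsertedAt ordering in
            // vex.statements matches the historical ingest order (assumes RetrievedAt exists).
            await _claims.AppendAsync(batch.Claims, document.RetrievedAt, cancellationToken).ConfigureAwait(false);
        }
    }
}
```

Passing the historical timestamp matters because `MongoVexClaimStore.AppendAsync` stamps each record's `InsertedAt` from `observedAt`, which is what the planned smoke test on severity/KEV/EPSS signals would query against.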
2025-10-19: Prerequisite EXCITITOR-STORAGE-02-001 verified complete; Wave 0 kickoff acknowledged per EXECPLAN.|
+|EXCITITOR-STORAGE-MONGO-08-001 – Session + causal consistency hardening|Team Excititor Storage|EXCITITOR-STORAGE-01-003|**DOING (2025-10-19)** – Register Mongo client/database with majority read/write concerns, expose scoped session helper enabling causal consistency, thread session handles through raw/export/consensus/cache stores (including GridFS reads), and extend integration tests to verify read-your-write semantics during replica-set failover.
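Editor's note: a minimal sketch of the scoped session helper this task calls for, assuming the `IMongoClient` registered in `ServiceCollectionExtensions` (majority read/write concerns) is injected. `VexMongoSessionProvider` is a hypothetical name, and threading the handle through the raw/export/consensus/cache stores remains the open part of the task; only stock MongoDB.Driver session APIs are used here.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

namespace StellaOps.Excititor.Storage.Mongo;

// Hypothetical scoped session helper; relies only on the stock MongoDB.Driver session API.
public sealed class VexMongoSessionProvider
{
    private readonly IMongoClient _client;

    public VexMongoSessionProvider(IMongoClient client)
        => _client = client ?? throw new ArgumentNullException(nameof(client));

    public Task<IClientSessionHandle> StartSessionAsync(CancellationToken cancellationToken)
        => _client.StartSessionAsync(
            new ClientSessionOptions { CausalConsistency = true },
            cancellationToken);
}

// Sketch of how a store would thread the handle through writes and reads so a read that
// follows a write observes it, even across replica-set failover:
//
//   using var session = await sessionProvider.StartSessionAsync(ct);
//   await collection.InsertOneAsync(session, record, cancellationToken: ct);
//   var readBack = await collection.Find(session, filter).FirstOrDefaultAsync(ct);
```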
2025-10-19: Prerequisite EXCITITOR-STORAGE-01-003 confirmed complete; prerequisites satisfied for Wave 0 hardening effort.| diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs b/src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs index 1f92595a..563f19e4 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs @@ -38,6 +38,11 @@ public static class VexMongoMappingRegistry RegisterClassMap(); RegisterClassMap(); RegisterClassMap(); + RegisterClassMap(); + RegisterClassMap(); + RegisterClassMap(); + RegisterClassMap(); + RegisterClassMap(); RegisterClassMap(); RegisterClassMap(); } @@ -63,7 +68,8 @@ public static class VexMongoCollectionNames public const string Migrations = "vex.migrations"; public const string Providers = "vex.providers"; public const string Raw = "vex.raw"; - public const string Claims = "vex.claims"; + public const string Statements = "vex.statements"; + public const string Claims = Statements; public const string Consensus = "vex.consensus"; public const string Exports = "vex.exports"; public const string Cache = "vex.cache"; diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs b/src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs index f6ac4ec7..55258eb9 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs @@ -90,6 +90,18 @@ internal sealed class VexExportManifestRecord public string? ConsensusRevision { get; set; } = null; + public string? PolicyRevisionId { get; set; } + = null; + + public string? PolicyDigest { get; set; } + = null; + + public string? ConsensusDigest { get; set; } + = null; + + public string? ScoreDigest { get; set; } + = null; + public string? PredicateType { get; set; } = null; @@ -127,6 +139,10 @@ internal sealed class VexExportManifestRecord FromCache = manifest.FromCache, SourceProviders = manifest.SourceProviders.ToList(), ConsensusRevision = manifest.ConsensusRevision, + PolicyRevisionId = manifest.PolicyRevisionId, + PolicyDigest = manifest.PolicyDigest, + ConsensusDigest = manifest.ConsensusDigest?.ToUri(), + ScoreDigest = manifest.ScoreDigest?.ToUri(), PredicateType = manifest.Attestation?.PredicateType, RekorApiVersion = manifest.Attestation?.Rekor?.ApiVersion, RekorLocation = manifest.Attestation?.Rekor?.Location, @@ -167,10 +183,40 @@ internal sealed class VexExportManifestRecord SourceProviders, FromCache, ConsensusRevision, + PolicyRevisionId, + PolicyDigest, + ParseContentAddress(ConsensusDigest), + ParseContentAddress(ScoreDigest), attestation, SizeBytes); } + private static VexContentAddress? ParseContentAddress(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var separatorIndex = value.IndexOf(':'); + if (separatorIndex <= 0 || separatorIndex >= value.Length - 1) + { + return null; + } + + var algorithm = value.Substring(0, separatorIndex); + var digest = value.Substring(separatorIndex + 1); + + try + { + return new VexContentAddress(algorithm, digest); + } + catch + { + return null; + } + } + public static string CreateId(VexQuerySignature signature, VexExportFormat format) => string.Format(CultureInfo.InvariantCulture, "{0}|{1}", signature.Value, format.ToString().ToLowerInvariant()); } @@ -311,6 +357,9 @@ internal sealed class VexConsensusRecord public List Conflicts { get; set; } = new(); + public VexSignalDocument? Signals { get; set; } + = null; + public string? 
PolicyVersion { get; set; } = null; @@ -323,6 +372,9 @@ internal sealed class VexConsensusRecord public string? Summary { get; set; } = null; + public DateTime GeneratedAt { get; set; } + = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc); + public static string CreateId(string vulnerabilityId, string productKey) => string.Format(CultureInfo.InvariantCulture, "{0}|{1}", vulnerabilityId.Trim(), productKey.Trim()); @@ -336,10 +388,12 @@ internal sealed class VexConsensusRecord CalculatedAt = consensus.CalculatedAt.UtcDateTime, Sources = consensus.Sources.Select(VexConsensusSourceDocument.FromDomain).ToList(), Conflicts = consensus.Conflicts.Select(VexConsensusConflictDocument.FromDomain).ToList(), + Signals = VexSignalDocument.FromDomain(consensus.Signals), PolicyVersion = consensus.PolicyVersion, PolicyRevisionId = consensus.PolicyRevisionId, PolicyDigest = consensus.PolicyDigest, Summary = consensus.Summary, + GeneratedAt = consensus.CalculatedAt.UtcDateTime, }; public VexConsensus ToDomain() @@ -350,6 +404,7 @@ internal sealed class VexConsensusRecord new DateTimeOffset(CalculatedAt, TimeSpan.Zero), Sources.Select(static source => source.ToDomain()), Conflicts.Select(static conflict => conflict.ToDomain()), + Signals?.ToDomain(), PolicyVersion, Summary, PolicyRevisionId, @@ -479,6 +534,245 @@ internal sealed class VexConsensusConflictDocument Reason); } +[BsonIgnoreExtraElements] +internal sealed class VexSignalDocument +{ + public VexSeveritySignalDocument? Severity { get; set; } + = null; + + public bool? Kev { get; set; } + = null; + + public double? Epss { get; set; } + = null; + + public static VexSignalDocument? FromDomain(VexSignalSnapshot? signals) + { + if (signals is null) + { + return null; + } + + return new VexSignalDocument + { + Severity = VexSeveritySignalDocument.FromDomain(signals.Severity), + Kev = signals.Kev, + Epss = signals.Epss, + }; + } + + public VexSignalSnapshot ToDomain() + => new( + Severity?.ToDomain(), + Kev, + Epss); +} + +[BsonIgnoreExtraElements] +internal sealed class VexSeveritySignalDocument +{ + public string Scheme { get; set; } = default!; + + public double? Score { get; set; } + = null; + + public string? Label { get; set; } + = null; + + public string? Vector { get; set; } + = null; + + public static VexSeveritySignalDocument? FromDomain(VexSeveritySignal? signal) + { + if (signal is null) + { + return null; + } + + return new VexSeveritySignalDocument + { + Scheme = signal.Scheme, + Score = signal.Score, + Label = signal.Label, + Vector = signal.Vector, + }; + } + + public VexSeveritySignal ToDomain() + => new( + Scheme, + Score, + Label, + Vector); +} + +[BsonIgnoreExtraElements] +internal sealed class VexStatementRecord +{ + [BsonId] + public ObjectId Id { get; set; } + = ObjectId.GenerateNewId(); + + public string VulnerabilityId { get; set; } = default!; + + public string ProviderId { get; set; } = default!; + + public VexProductDocument Product { get; set; } = default!; + + public string Status { get; set; } = default!; + + public string? Justification { get; set; } + = null; + + public string? Detail { get; set; } + = null; + + public VexClaimDocumentRecord Document { get; set; } = default!; + + public DateTime FirstSeen { get; set; } + = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc); + + public DateTime LastSeen { get; set; } + = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc); + + public VexConfidenceDocument? 
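// --- Reviewer sketch (illustrative only, not part of this patch) ---
// Timestamp fields in these records follow one convention: persist DateTimeOffset values as
// UTC DateTime (.UtcDateTime) and, on the way back out, pin the kind to UTC before rebuilding
// the offset, so a round trip through BSON never reinterprets the value as local time.
static DateTimeOffset RoundTripTimestamp(DateTimeOffset value)
{
    DateTime stored = value.UtcDateTime;                                        // written to Mongo

    return new DateTimeOffset(DateTime.SpecifyKind(stored, DateTimeKind.Utc));  // read back as UTC
}
// --- end sketch ---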
Confidence { get; set; } + = null; + + public Dictionary AdditionalMetadata { get; set; } = new(StringComparer.Ordinal); + + public VexSignalDocument? Signals { get; set; } + = null; + + public DateTime InsertedAt { get; set; } + = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc); + + public static VexStatementRecord FromDomain(VexClaim claim, DateTimeOffset observedAt) + => new() + { + VulnerabilityId = claim.VulnerabilityId, + ProviderId = claim.ProviderId, + Product = VexProductDocument.FromDomain(claim.Product), + Status = claim.Status.ToString().ToLowerInvariant(), + Justification = claim.Justification?.ToString().ToLowerInvariant(), + Detail = claim.Detail, + Document = VexClaimDocumentRecord.FromDomain(claim.Document), + FirstSeen = claim.FirstSeen.UtcDateTime, + LastSeen = claim.LastSeen.UtcDateTime, + Confidence = claim.Confidence is null ? null : VexConfidenceDocument.FromDomain(claim.Confidence), + AdditionalMetadata = claim.AdditionalMetadata.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal), + Signals = VexSignalDocument.FromDomain(claim.Signals), + InsertedAt = observedAt.UtcDateTime, + }; + + public VexClaim ToDomain() + { + VexJustification? justification = string.IsNullOrWhiteSpace(Justification) + ? null + : Enum.Parse(Justification, ignoreCase: true); + + var metadata = (AdditionalMetadata ?? new Dictionary(StringComparer.Ordinal)) + .ToImmutableDictionary(StringComparer.Ordinal); + + return new VexClaim( + VulnerabilityId, + ProviderId, + Product.ToDomain(), + Enum.Parse(Status, ignoreCase: true), + Document.ToDomain(), + new DateTimeOffset(FirstSeen, TimeSpan.Zero), + new DateTimeOffset(LastSeen, TimeSpan.Zero), + justification, + Detail, + Confidence?.ToDomain(), + Signals?.ToDomain(), + metadata); + } +} + +[BsonIgnoreExtraElements] +internal sealed class VexClaimDocumentRecord +{ + public string Format { get; set; } = default!; + + public string Digest { get; set; } = default!; + + public string SourceUri { get; set; } = default!; + + public string? Revision { get; set; } + = null; + + public VexSignatureMetadataDocument? Signature { get; set; } + = null; + + public static VexClaimDocumentRecord FromDomain(VexClaimDocument document) + => new() + { + Format = document.Format.ToString().ToLowerInvariant(), + Digest = document.Digest, + SourceUri = document.SourceUri.ToString(), + Revision = document.Revision, + Signature = document.Signature is null ? null : VexSignatureMetadataDocument.FromDomain(document.Signature), + }; + + public VexClaimDocument ToDomain() + { + var format = Enum.Parse(Format, ignoreCase: true); + return new VexClaimDocument( + format, + Digest, + new Uri(SourceUri), + Revision, + Signature?.ToDomain()); + } +} + +[BsonIgnoreExtraElements] +internal sealed class VexSignatureMetadataDocument +{ + public string Type { get; set; } = default!; + + public string? Subject { get; set; } + = null; + + public string? Issuer { get; set; } + = null; + + public string? KeyId { get; set; } + = null; + + public DateTime? VerifiedAt { get; set; } + = null; + + public string? 
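// --- Reviewer sketch (illustrative only, not part of this patch) ---
// Enum-typed fields (statement Status, Justification, document Format) are stored as lowercase
// strings and parsed back case-insensitively, keeping the Mongo representation stable while the
// domain keeps strongly typed enums. Round trip, using the VexClaimStatus enum referenced
// elsewhere in this patch:
static VexClaimStatus RoundTripStatus(VexClaimStatus status)
{
    string stored = status.ToString().ToLowerInvariant();            // e.g. "notaffected"
    return Enum.Parse<VexClaimStatus>(stored, ignoreCase: true);
}
// --- end sketch ---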
TransparencyLogReference { get; set; } + = null; + + public static VexSignatureMetadataDocument FromDomain(VexSignatureMetadata metadata) + => new() + { + Type = metadata.Type, + Subject = metadata.Subject, + Issuer = metadata.Issuer, + KeyId = metadata.KeyId, + VerifiedAt = metadata.VerifiedAt?.UtcDateTime, + TransparencyLogReference = metadata.TransparencyLogReference, + }; + + public VexSignatureMetadata ToDomain() + { + var verifiedAt = VerifiedAt.HasValue + ? new DateTimeOffset(DateTime.SpecifyKind(VerifiedAt.Value, DateTimeKind.Utc)) + : (DateTimeOffset?)null; + + return new VexSignatureMetadata( + Type, + Subject, + Issuer, + KeyId, + verifiedAt, + TransparencyLogReference); + } +} + [BsonIgnoreExtraElements] internal sealed class VexConfidenceDocument { @@ -582,12 +876,31 @@ internal sealed class VexConnectorStateDocument public List DocumentDigests { get; set; } = new(); + public Dictionary ResumeTokens { get; set; } = new(StringComparer.Ordinal); + + public DateTime? LastSuccessAt { get; set; } + = null; + + public int FailureCount { get; set; } + = 0; + + public DateTime? NextEligibleRun { get; set; } + = null; + + public string? LastFailureReason { get; set; } + = null; + public static VexConnectorStateDocument FromRecord(VexConnectorState state) => new() { ConnectorId = state.ConnectorId, LastUpdated = state.LastUpdated?.UtcDateTime, DocumentDigests = state.DocumentDigests.ToList(), + ResumeTokens = state.ResumeTokens.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal), + LastSuccessAt = state.LastSuccessAt?.UtcDateTime, + FailureCount = state.FailureCount, + NextEligibleRun = state.NextEligibleRun?.UtcDateTime, + LastFailureReason = state.LastFailureReason, }; public VexConnectorState ToRecord() @@ -599,6 +912,11 @@ internal sealed class VexConnectorStateDocument return new VexConnectorState( ConnectorId, lastUpdated, - DocumentDigests.ToImmutableArray()); + DocumentDigests.ToImmutableArray(), + ResumeTokens?.ToImmutableDictionary(StringComparer.Ordinal) ?? ImmutableDictionary.Empty, + LastSuccessAt.HasValue ? new DateTimeOffset(DateTime.SpecifyKind(LastSuccessAt.Value, DateTimeKind.Utc)) : null, + FailureCount, + NextEligibleRun.HasValue ? new DateTimeOffset(DateTime.SpecifyKind(NextEligibleRun.Value, DateTimeKind.Utc)) : null, + string.IsNullOrWhiteSpace(LastFailureReason) ? null : LastFailureReason.Trim()); } } diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs b/src/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs new file mode 100644 index 00000000..c16e06ac --- /dev/null +++ b/src/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs @@ -0,0 +1,36 @@ +using MongoDB.Driver; +using Microsoft.Extensions.Options; + +namespace StellaOps.Excititor.Storage.Mongo; + +public interface IVexMongoSessionProvider +{ + Task StartSessionAsync(CancellationToken cancellationToken = default); +} + +internal sealed class VexMongoSessionProvider : IVexMongoSessionProvider +{ + private readonly IMongoClient _client; + private readonly IOptions _options; + + public VexMongoSessionProvider(IMongoClient client, IOptions options) + { + _client = client ?? throw new ArgumentNullException(nameof(client)); + _options = options ?? 
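// --- Reviewer sketch (illustrative only, not part of this patch) ---
// VexConnectorState now carries failure bookkeeping (FailureCount, NextEligibleRun,
// LastFailureReason) alongside resume tokens. How a scheduler consumes those fields is not
// shown in this diff; a hypothetical exponential backoff, purely as an illustration:
static DateTimeOffset NextEligibleRun(DateTimeOffset failedAt, int failureCount)
{
    // 1, 2, 4, ... minutes, capped at 6 hours (policy values are assumptions, not from the patch)
    var delayMinutes = Math.Min(Math.Pow(2, Math.Max(failureCount - 1, 0)), 360);
    return failedAt.AddMinutes(delayMinutes);
}
// --- end sketch ---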
throw new ArgumentNullException(nameof(options)); + } + + public Task StartSessionAsync(CancellationToken cancellationToken = default) + { + var options = _options.Value; + var sessionOptions = new ClientSessionOptions + { + CausalConsistency = true, + DefaultTransactionOptions = new TransactionOptions( + readPreference: ReadPreference.Primary, + readConcern: ReadConcern.Majority, + writeConcern: WriteConcern.WMajority.With(wTimeout: options.CommandTimeout)) + }; + + return _client.StartSessionAsync(sessionOptions, cancellationToken); + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs b/src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs index 4f13affe..6ceaea22 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs +++ b/src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; +using MongoDB.Driver; namespace StellaOps.Excititor.Storage.Mongo; @@ -11,6 +12,22 @@ public sealed class VexMongoStorageOptions : IValidatableObject { private const int DefaultInlineThreshold = 256 * 1024; private static readonly TimeSpan DefaultCacheTtl = TimeSpan.FromHours(12); + private static readonly TimeSpan DefaultCommandTimeout = TimeSpan.FromSeconds(30); + + /// + /// MongoDB connection string for Excititor storage. + /// + public string ConnectionString { get; set; } = "mongodb://localhost:27017"; + + /// + /// Overrides the database name extracted from . + /// + public string? DatabaseName { get; set; } + + /// + /// Timeout applied to write operations to ensure majority acknowledgement completes promptly. + /// + public TimeSpan CommandTimeout { get; set; } = DefaultCommandTimeout; /// /// Name of the GridFS bucket used for raw VEX payloads that exceed . @@ -27,8 +44,40 @@ public sealed class VexMongoStorageOptions : IValidatableObject /// public TimeSpan ExportCacheTtl { get; set; } = DefaultCacheTtl; + /// + /// Resolve the Mongo database name using the explicit override or connection string. 
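// --- Reviewer sketch (illustrative only, not part of this patch) ---
// Using the session provider above for read-your-write semantics: start one causally
// consistent session and pass it to both the write and the subsequent read, so a read issued
// after a majority-acknowledged write in the same session observes that write. Assumes
// StartSessionAsync returns the driver's IClientSessionHandle.
async Task<BsonDocument?> WriteThenReadAsync(
    IVexMongoSessionProvider sessions,
    IMongoCollection<BsonDocument> collection,
    BsonDocument document,
    CancellationToken ct)
{
    using var session = await sessions.StartSessionAsync(ct);

    await collection.InsertOneAsync(session, document, cancellationToken: ct);

    return await collection
        .Find(session, Builders<BsonDocument>.Filter.Eq("_id", document["_id"]))
        .FirstOrDefaultAsync(ct);
}
// --- end sketch ---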
+ /// + public string GetDatabaseName() + { + if (!string.IsNullOrWhiteSpace(DatabaseName)) + { + return DatabaseName.Trim(); + } + + if (!string.IsNullOrWhiteSpace(ConnectionString)) + { + var url = MongoUrl.Create(ConnectionString); + if (!string.IsNullOrWhiteSpace(url.DatabaseName)) + { + return url.DatabaseName; + } + } + + return "excititor"; + } + public IEnumerable Validate(ValidationContext validationContext) { + if (string.IsNullOrWhiteSpace(ConnectionString)) + { + yield return new ValidationResult("Mongo connection string must be provided.", new[] { nameof(ConnectionString) }); + } + + if (CommandTimeout <= TimeSpan.Zero) + { + yield return new ValidationResult("Command timeout must be greater than zero.", new[] { nameof(CommandTimeout) }); + } + if (string.IsNullOrWhiteSpace(RawBucketName)) { yield return new ValidationResult("Raw bucket name must be provided.", new[] { nameof(RawBucketName) }); @@ -43,5 +92,7 @@ public sealed class VexMongoStorageOptions : IValidatableObject { yield return new ValidationResult("Export cache TTL must be greater than zero.", new[] { nameof(ExportCacheTtl) }); } + + _ = GetDatabaseName(); } } diff --git a/src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs b/src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs new file mode 100644 index 00000000..ccb9f58e --- /dev/null +++ b/src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs @@ -0,0 +1,225 @@ +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Net; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using MongoDB.Driver; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.WebService.Options; + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class MirrorEndpointsTests : IClassFixture>, IDisposable +{ + private readonly WebApplicationFactory _factory; + private readonly Mongo2Go.MongoDbRunner _runner; + + public MirrorEndpointsTests(WebApplicationFactory factory) + { + _runner = Mongo2Go.MongoDbRunner.Start(); + _factory = factory.WithWebHostBuilder(builder => + { + builder.ConfigureAppConfiguration((_, configuration) => + { + var data = new Dictionary + { + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Id"] = "primary", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:DisplayName"] = "Primary Mirror", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxIndexRequestsPerHour"] = "1000", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxDownloadRequestsPerHour"] = "1000", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Key"] = "consensus", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Format"] = "json", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:vulnId"] = "CVE-2025-0001", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:productKey"] = "pkg:test/demo", + }; + + configuration.AddInMemoryCollection(data!); + }); + + builder.ConfigureServices(services => + { + services.RemoveAll(); + services.AddSingleton(_ => new MongoClient(_runner.ConnectionString)); + services.RemoveAll(); + services.AddSingleton(provider => 
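// --- Reviewer sketch (illustrative only, not part of this patch) ---
// The test host replaces the production Mongo registrations with a throwaway Mongo2Go
// instance per test class. A condensed view of the swap (service types inferred from the
// factory delegates):
//
//     services.RemoveAll<IMongoClient>();
//     services.AddSingleton<IMongoClient>(_ => new MongoClient(_runner.ConnectionString));
//     services.RemoveAll<IMongoDatabase>();
//     services.AddSingleton<IMongoDatabase>(provider =>
//         provider.GetRequiredService<IMongoClient>().GetDatabase("mirror-tests"));
// --- end sketch ---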
provider.GetRequiredService().GetDatabase("mirror-tests")); + + services.RemoveAll(); + services.AddSingleton(provider => + { + var timeProvider = provider.GetRequiredService(); + return new FakeExportStore(timeProvider); + }); + + services.RemoveAll(); + services.AddSingleton(_ => new FakeArtifactStore()); + services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + }); + }); + } + + [Fact] + public async Task ListDomains_ReturnsConfiguredDomain() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains"); + response.EnsureSuccessStatusCode(); + + using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var domains = document.RootElement.GetProperty("domains"); + Assert.Equal(1, domains.GetArrayLength()); + Assert.Equal("primary", domains[0].GetProperty("id").GetString()); + } + + [Fact] + public async Task DomainIndex_ReturnsManifestMetadata() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains/primary/index"); + response.EnsureSuccessStatusCode(); + + using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var exports = document.RootElement.GetProperty("exports"); + Assert.Equal(1, exports.GetArrayLength()); + var entry = exports[0]; + Assert.Equal("consensus", entry.GetProperty("exportKey").GetString()); + Assert.Equal("exports/20251019T000000000Z/abcdef", entry.GetProperty("exportId").GetString()); + var artifact = entry.GetProperty("artifact"); + Assert.Equal("sha256", artifact.GetProperty("algorithm").GetString()); + Assert.Equal("deadbeef", artifact.GetProperty("digest").GetString()); + } + + [Fact] + public async Task Download_ReturnsArtifactContent() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains/primary/exports/consensus/download"); + response.EnsureSuccessStatusCode(); + Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType); + var payload = await response.Content.ReadAsStringAsync(); + Assert.Equal("{\"status\":\"ok\"}", payload); + } + + public void Dispose() + { + _runner.Dispose(); + } + + private sealed class FakeExportStore : IVexExportStore + { + private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _manifests = new(); + + public FakeExportStore(TimeProvider timeProvider) + { + var filters = new[] + { + new VexQueryFilter("vulnId", "CVE-2025-0001"), + new VexQueryFilter("productKey", "pkg:test/demo"), + }; + + var query = VexQuery.Create(filters, Enumerable.Empty()); + var signature = VexQuerySignature.FromQuery(query); + var createdAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero); + + var manifest = new VexExportManifest( + "exports/20251019T000000000Z/abcdef", + signature, + VexExportFormat.Json, + createdAt, + new VexContentAddress("sha256", "deadbeef"), + 1, + new[] { "primary" }, + fromCache: false, + consensusRevision: "rev-1", + attestation: new VexAttestationMetadata("https://stella-ops.org/attestations/vex-export"), + sizeBytes: 16); + + _manifests.TryAdd((signature.Value, VexExportFormat.Json), manifest); + + // Seed artifact content for download test. 
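// --- Reviewer sketch (illustrative only, not part of this patch) ---
// FakeArtifactStore keeps its content dictionary static so that bytes seeded here, inside the
// FakeExportStore constructor, are visible to the separate FakeArtifactStore instance
// registered in DI that serves the download endpoint. Seed-and-read-back, in isolation:
//
//     var address = new VexContentAddress("sha256", "deadbeef");
//     FakeArtifactStore.Seed(address, "{\"status\":\"ok\"}");
//     var stream = await new FakeArtifactStore().OpenReadAsync(address, CancellationToken.None);
//     // stream replays the seeded JSON payload keyed by the same content address
// --- end sketch ---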
+ FakeArtifactStore.Seed(manifest.Artifact, "{\"status\":\"ok\"}"); + } + + public ValueTask FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken) + { + _manifests.TryGetValue((signature.Value, format), out var manifest); + return ValueTask.FromResult(manifest); + } + + public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + } + + private sealed class FakeArtifactStore : IVexArtifactStore + { + private static readonly ConcurrentDictionary Content = new(); + + public static void Seed(VexContentAddress contentAddress, string payload) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(payload); + Content[contentAddress] = bytes; + } + + public ValueTask SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken) + { + Content[artifact.ContentAddress] = artifact.Content.ToArray(); + return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory://artifact", artifact.Content.Length, artifact.Metadata)); + } + + public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) + { + Content.TryRemove(contentAddress, out _); + return ValueTask.CompletedTask; + } + + public ValueTask OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) + { + if (!Content.TryGetValue(contentAddress, out var bytes)) + { + return ValueTask.FromResult(null); + } + + return ValueTask.FromResult(new MemoryStream(bytes, writable: false)); + } + } + + private sealed class FakeSigner : StellaOps.Excititor.Attestation.Signing.IVexSigner + { + public ValueTask SignAsync(ReadOnlyMemory payload, CancellationToken cancellationToken) + => ValueTask.FromResult(new StellaOps.Excititor.Attestation.Signing.VexSignedPayload("signature", "key")); + } + + private sealed class FakePolicyEvaluator : StellaOps.Excititor.Policy.IVexPolicyEvaluator + { + public string Version => "test"; + + public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; + + public double GetProviderWeight(VexProvider provider) => 1.0; + + public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? 
rejectionReason) + { + rejectionReason = null; + return true; + } + } + + private sealed class FakeExportDataSource : IVexExportDataSource + { + public ValueTask FetchAsync(VexQuery query, CancellationToken cancellationToken) + { + var dataset = new VexExportDataSet(ImmutableArray.Empty, ImmutableArray.Empty, ImmutableArray.Empty); + return ValueTask.FromResult(dataset); + } + } +} diff --git a/src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs b/src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs new file mode 100644 index 00000000..8c415dd3 --- /dev/null +++ b/src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs @@ -0,0 +1,342 @@ +using System.Collections.Immutable; +using System.Net; +using System.Net.Http.Json; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Mongo2Go; +using MongoDB.Driver; +using StellaOps.Excititor.Attestation.Signing; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class ResolveEndpointTests : IClassFixture>, IDisposable +{ + private readonly WebApplicationFactory _factory; + private readonly MongoDbRunner _runner; + + public ResolveEndpointTests(WebApplicationFactory factory) + { + _runner = MongoDbRunner.Start(); + _factory = factory.WithWebHostBuilder(builder => + { + builder.ConfigureAppConfiguration((_, config) => + { + var rootPath = Path.Combine(Path.GetTempPath(), "excititor-resolve-tests"); + Directory.CreateDirectory(rootPath); + var settings = new Dictionary + { + ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw", + ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256", + ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath, + }; + config.AddInMemoryCollection(settings!); + }); + + builder.ConfigureServices(services => + { + services.AddSingleton(_ => new MongoClient(_runner.ConnectionString)); + services.AddSingleton(provider => provider.GetRequiredService().GetDatabase("excititor-resolve-tests")); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); + }); + }); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsBadRequest_WhenInputsMissing() + { + var client = _factory.CreateClient(); + var response = await client.PostAsJsonAsync("/excititor/resolve", new { vulnerabilityIds = new[] { "CVE-2025-0001" } }); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task ResolveEndpoint_ComputesConsensusAndAttestation() + { + const string vulnerabilityId = "CVE-2025-2222"; + const string productKey = "pkg:nuget/StellaOps.Demo@1.0.0"; + const string providerId = "redhat"; + + await SeedProviderAsync(providerId); + await SeedClaimAsync(vulnerabilityId, productKey, providerId); + + var client = _factory.CreateClient(); + var request = new ResolveRequest( + new[] { productKey }, + null, + new[] { vulnerabilityId }, + null); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + response.EnsureSuccessStatusCode(); + + var payload = await response.Content.ReadFromJsonAsync(); + Assert.NotNull(payload); + Assert.NotNull(payload!.Policy); + + var result = Assert.Single(payload.Results); + 
Assert.Equal(vulnerabilityId, result.VulnerabilityId); + Assert.Equal(productKey, result.ProductKey); + Assert.Equal("not_affected", result.Status); + Assert.NotNull(result.Envelope); + Assert.Equal("signature", result.Envelope!.ContentSignature!.Value); + Assert.Equal("key", result.Envelope.ContentSignature.KeyId); + Assert.NotEqual(default, result.CalculatedAt); + + Assert.NotNull(result.Signals); + Assert.True(result.Signals!.Kev); + Assert.NotNull(result.Envelope.AttestationSignature); + Assert.False(string.IsNullOrWhiteSpace(result.Envelope.AttestationEnvelope)); + Assert.Equal(payload.Policy.ActiveRevisionId, result.PolicyRevisionId); + Assert.Equal(payload.Policy.Version, result.PolicyVersion); + Assert.Equal(payload.Policy.Digest, result.PolicyDigest); + + var decision = Assert.Single(result.Decisions); + Assert.True(decision.Included); + Assert.Equal(providerId, decision.ProviderId); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsConflict_WhenPolicyRevisionMismatch() + { + const string vulnerabilityId = "CVE-2025-3333"; + const string productKey = "pkg:docker/demo@sha256:abcd"; + + var client = _factory.CreateClient(); + var request = new ResolveRequest( + new[] { productKey }, + null, + new[] { vulnerabilityId }, + "rev-0"); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + Assert.Equal(HttpStatusCode.Conflict, response.StatusCode); + } + + private async Task SeedProviderAsync(string providerId) + { + await using var scope = _factory.Services.CreateAsyncScope(); + var store = scope.ServiceProvider.GetRequiredService(); + var provider = new VexProvider(providerId, "Red Hat", VexProviderKind.Distro); + await store.SaveAsync(provider, CancellationToken.None); + } + + private async Task SeedClaimAsync(string vulnerabilityId, string productKey, string providerId) + { + await using var scope = _factory.Services.CreateAsyncScope(); + var store = scope.ServiceProvider.GetRequiredService(); + var timeProvider = scope.ServiceProvider.GetRequiredService(); + var observedAt = timeProvider.GetUtcNow(); + + var claim = new VexClaim( + vulnerabilityId, + providerId, + new VexProduct(productKey, "Demo Component", version: "1.0.0", purl: productKey), + VexClaimStatus.NotAffected, + new VexClaimDocument(VexDocumentFormat.Csaf, "sha256:deadbeef", new Uri("https://example.org/vex/csaf.json")), + observedAt.AddDays(-1), + observedAt, + VexJustification.ProtectedByMitigatingControl, + detail: "Test justification", + confidence: new VexConfidence("high", 0.9, "unit-test"), + signals: new VexSignalSnapshot( + new VexSeveritySignal("cvss:v3.1", 5.5, "medium"), + kev: true, + epss: 0.25)); + + await store.AppendAsync(new[] { claim }, observedAt, CancellationToken.None); + } + + public void Dispose() + { + _runner.Dispose(); + } + + private sealed class ResolveRequest + { + public ResolveRequest( + IReadOnlyList? productKeys, + IReadOnlyList? purls, + IReadOnlyList? vulnerabilityIds, + string? policyRevisionId) + { + ProductKeys = productKeys; + Purls = purls; + VulnerabilityIds = vulnerabilityIds; + PolicyRevisionId = policyRevisionId; + } + + public IReadOnlyList? ProductKeys { get; } + + public IReadOnlyList? Purls { get; } + + public IReadOnlyList? VulnerabilityIds { get; } + + public string? 
PolicyRevisionId { get; } + } + + private sealed class ResolveResponse + { + public required DateTimeOffset ResolvedAt { get; init; } + + public required ResolvePolicy Policy { get; init; } + + public required List Results { get; init; } + } + + private sealed class ResolvePolicy + { + public required string ActiveRevisionId { get; init; } + + public required string Version { get; init; } + + public required string Digest { get; init; } + + public string? RequestedRevisionId { get; init; } + } + + private sealed class ResolveResult + { + public required string VulnerabilityId { get; init; } + + public required string ProductKey { get; init; } + + public required string Status { get; init; } + + public required DateTimeOffset CalculatedAt { get; init; } + + public required List Sources { get; init; } + + public required List Conflicts { get; init; } + + public ResolveSignals? Signals { get; init; } + + public string? Summary { get; init; } + + public required string PolicyRevisionId { get; init; } + + public required string PolicyVersion { get; init; } + + public required string PolicyDigest { get; init; } + + public required List Decisions { get; init; } + + public ResolveEnvelope? Envelope { get; init; } + } + + private sealed class ResolveSource + { + public required string ProviderId { get; init; } + } + + private sealed class ResolveConflict + { + public string? ProviderId { get; init; } + } + + private sealed class ResolveSignals + { + public ResolveSeverity? Severity { get; init; } + + public bool? Kev { get; init; } + + public double? Epss { get; init; } + } + + private sealed class ResolveSeverity + { + public string? Scheme { get; init; } + + public double? Score { get; init; } + } + + private sealed class ResolveDecision + { + public required string ProviderId { get; init; } + + public required bool Included { get; init; } + + public string? Reason { get; init; } + } + + private sealed class ResolveEnvelope + { + public required ResolveArtifact Artifact { get; init; } + + public ResolveSignature? ContentSignature { get; init; } + + public ResolveAttestationMetadata? Attestation { get; init; } + + public string? AttestationEnvelope { get; init; } + + public ResolveSignature? AttestationSignature { get; init; } + } + + private sealed class ResolveArtifact + { + public required string Algorithm { get; init; } + + public required string Digest { get; init; } + } + + private sealed class ResolveSignature + { + public required string Value { get; init; } + + public string? KeyId { get; init; } + } + + private sealed class ResolveAttestationMetadata + { + public required string PredicateType { get; init; } + + public ResolveRekorReference? Rekor { get; init; } + + public string? EnvelopeDigest { get; init; } + + public DateTimeOffset? SignedAt { get; init; } + } + + private sealed class ResolveRekorReference + { + public string? Location { get; init; } + } + + private sealed class FakeSigner : IVexSigner + { + public ValueTask SignAsync(ReadOnlyMemory payload, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexSignedPayload("signature", "key")); + } + + private sealed class FakePolicyEvaluator : IVexPolicyEvaluator + { + public string Version => "test"; + + public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; + + public double GetProviderWeight(VexProvider provider) => 1.0; + + public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? 
rejectionReason) + { + rejectionReason = null; + return true; + } + } + + private sealed class FakeExportDataSource : IVexExportDataSource + { + public ValueTask FetchAsync(VexQuery query, CancellationToken cancellationToken) + { + var dataset = new VexExportDataSet(ImmutableArray.Empty, ImmutableArray.Empty, ImmutableArray.Empty); + return ValueTask.FromResult(dataset); + } + } +} diff --git a/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs b/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs index 977204a3..9f70192c 100644 --- a/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs +++ b/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs @@ -9,6 +9,7 @@ using Microsoft.Extensions.DependencyInjection; using Mongo2Go; using MongoDB.Driver; using StellaOps.Excititor.Attestation.Signing; +using StellaOps.Excititor.Connectors.Abstractions; using StellaOps.Excititor.Policy; using StellaOps.Excititor.Core; using StellaOps.Excititor.Export; @@ -46,6 +47,7 @@ public sealed class StatusEndpointTests : IClassFixture(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); }); }); } diff --git a/src/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs b/src/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs new file mode 100644 index 00000000..07450a2c --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs @@ -0,0 +1,284 @@ +using System.Collections.Immutable; +using System.Globalization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Excititor.WebService.Services; + +namespace StellaOps.Excititor.WebService.Endpoints; + +internal static class IngestEndpoints +{ + private const string AdminScope = "vex.admin"; + + public static void MapIngestEndpoints(IEndpointRouteBuilder app) + { + var group = app.MapGroup("/excititor"); + + group.MapPost("/init", HandleInitAsync); + group.MapPost("/ingest/run", HandleRunAsync); + group.MapPost("/ingest/resume", HandleResumeAsync); + group.MapPost("/reconcile", HandleReconcileAsync); + } + + private static async Task HandleInitAsync( + HttpContext httpContext, + ExcititorInitRequest request, + IVexIngestOrchestrator orchestrator, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(httpContext, AdminScope); + if (scopeResult is not null) + { + return scopeResult; + } + + var providerIds = NormalizeProviders(request.Providers); + var options = new IngestInitOptions(providerIds, request.Resume ?? 
false, timeProvider); + + var summary = await orchestrator.InitializeAsync(options, cancellationToken).ConfigureAwait(false); + var message = $"Initialized {summary.ProviderCount} provider(s); {summary.SuccessCount} succeeded, {summary.FailureCount} failed."; + + return Results.Ok(new + { + message, + runId = summary.RunId, + startedAt = summary.StartedAt, + completedAt = summary.CompletedAt, + providers = summary.Providers.Select(static provider => new + { + provider.providerId, + provider.displayName, + provider.status, + provider.durationMs, + provider.error + }) + }); + } + + private static async Task HandleRunAsync( + HttpContext httpContext, + ExcititorIngestRunRequest request, + IVexIngestOrchestrator orchestrator, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(httpContext, AdminScope); + if (scopeResult is not null) + { + return scopeResult; + } + + if (!TryParseDateTimeOffset(request.Since, out var since, out var sinceError)) + { + return Results.BadRequest(new { message = sinceError }); + } + + if (!TryParseTimeSpan(request.Window, out var window, out var windowError)) + { + return Results.BadRequest(new { message = windowError }); + } + + var providerIds = NormalizeProviders(request.Providers); + var options = new IngestRunOptions( + providerIds, + since, + window, + request.Force ?? false, + timeProvider); + + var summary = await orchestrator.RunAsync(options, cancellationToken).ConfigureAwait(false); + var message = $"Ingest run completed for {summary.ProviderCount} provider(s); {summary.SuccessCount} succeeded, {summary.FailureCount} failed."; + + return Results.Ok(new + { + message, + runId = summary.RunId, + startedAt = summary.StartedAt, + completedAt = summary.CompletedAt, + durationMs = summary.Duration.TotalMilliseconds, + providers = summary.Providers.Select(static provider => new + { + provider.providerId, + provider.status, + provider.documents, + provider.claims, + provider.startedAt, + provider.completedAt, + provider.durationMs, + provider.lastDigest, + provider.lastUpdated, + provider.checkpoint, + provider.error + }) + }); + } + + private static async Task HandleResumeAsync( + HttpContext httpContext, + ExcititorIngestResumeRequest request, + IVexIngestOrchestrator orchestrator, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(httpContext, AdminScope); + if (scopeResult is not null) + { + return scopeResult; + } + + var providerIds = NormalizeProviders(request.Providers); + var options = new IngestResumeOptions(providerIds, request.Checkpoint, timeProvider); + + var summary = await orchestrator.ResumeAsync(options, cancellationToken).ConfigureAwait(false); + var message = $"Resume run completed for {summary.ProviderCount} provider(s); {summary.SuccessCount} succeeded, {summary.FailureCount} failed."; + + return Results.Ok(new + { + message, + runId = summary.RunId, + startedAt = summary.StartedAt, + completedAt = summary.CompletedAt, + durationMs = summary.Duration.TotalMilliseconds, + providers = summary.Providers.Select(static provider => new + { + provider.providerId, + provider.status, + provider.documents, + provider.claims, + provider.startedAt, + provider.completedAt, + provider.durationMs, + provider.since, + provider.checkpoint, + provider.error + }) + }); + } + + private static async Task HandleReconcileAsync( + HttpContext httpContext, + ExcititorReconcileRequest request, + IVexIngestOrchestrator 
orchestrator, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(httpContext, AdminScope); + if (scopeResult is not null) + { + return scopeResult; + } + + if (!TryParseTimeSpan(request.MaxAge, out var maxAge, out var error)) + { + return Results.BadRequest(new { message = error }); + } + + var providerIds = NormalizeProviders(request.Providers); + var options = new ReconcileOptions(providerIds, maxAge, timeProvider); + + var summary = await orchestrator.ReconcileAsync(options, cancellationToken).ConfigureAwait(false); + var message = $"Reconcile completed for {summary.ProviderCount} provider(s); {summary.ReconciledCount} reconciled, {summary.SkippedCount} skipped, {summary.FailureCount} failed."; + + return Results.Ok(new + { + message, + runId = summary.RunId, + startedAt = summary.StartedAt, + completedAt = summary.CompletedAt, + durationMs = summary.Duration.TotalMilliseconds, + providers = summary.Providers.Select(static provider => new + { + provider.providerId, + provider.status, + provider.action, + provider.lastUpdated, + provider.threshold, + provider.documents, + provider.claims, + provider.error + }) + }); + } + + private static ImmutableArray NormalizeProviders(IReadOnlyCollection? providers) + { + if (providers is null || providers.Count == 0) + { + return ImmutableArray.Empty; + } + + var set = new SortedSet(StringComparer.OrdinalIgnoreCase); + foreach (var provider in providers) + { + if (string.IsNullOrWhiteSpace(provider)) + { + continue; + } + + set.Add(provider.Trim()); + } + + return set.ToImmutableArray(); + } + + private static bool TryParseDateTimeOffset(string? value, out DateTimeOffset? result, out string? error) + { + result = null; + error = null; + + if (string.IsNullOrWhiteSpace(value)) + { + return true; + } + + if (DateTimeOffset.TryParse( + value.Trim(), + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsed)) + { + result = parsed; + return true; + } + + error = "Invalid 'since' value. Use ISO-8601 format (e.g. 2025-10-19T12:30:00Z)."; + return false; + } + + private static bool TryParseTimeSpan(string? value, out TimeSpan? result, out string? error) + { + result = null; + error = null; + + if (string.IsNullOrWhiteSpace(value)) + { + return true; + } + + if (TimeSpan.TryParse(value.Trim(), CultureInfo.InvariantCulture, out var parsed) && parsed >= TimeSpan.Zero) + { + result = parsed; + return true; + } + + error = "Invalid duration value. Use TimeSpan format (e.g. 1.00:00:00)."; + return false; + } + + private sealed record ExcititorInitRequest(IReadOnlyList? Providers, bool? Resume); + + private sealed record ExcititorIngestRunRequest( + IReadOnlyList? Providers, + string? Since, + string? Window, + bool? Force); + + private sealed record ExcititorIngestResumeRequest( + IReadOnlyList? Providers, + string? Checkpoint); + + private sealed record ExcititorReconcileRequest( + IReadOnlyList? Providers, + string? 
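// --- Reviewer sketch (illustrative only, not part of this patch) ---
// The ingest endpoints accept 'since' as an ISO-8601 instant (normalised to UTC) and
// 'window'/'maxAge' as .NET TimeSpan strings; negative durations are rejected by the helper
// above. What the helpers accept, shown directly against the BCL parsers they wrap:
static void ParsingExamples()
{
    // ISO-8601 instant, adjusted to UTC exactly as TryParseDateTimeOffset does.
    DateTimeOffset.TryParse(
        "2025-10-19T12:30:00Z",
        CultureInfo.InvariantCulture,
        DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
        out var since);                                              // 2025-10-19 12:30:00 +00:00

    // TimeSpan in d.hh:mm:ss form.
    TimeSpan.TryParse("1.00:00:00", CultureInfo.InvariantCulture, out var window);   // 1 day
}
// --- end sketch ---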
MaxAge); +} diff --git a/src/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs b/src/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs new file mode 100644 index 00000000..9b9d0989 --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs @@ -0,0 +1,419 @@ +using System.Collections.Immutable; +using System.Globalization; +using System.IO; +using System.Text; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.WebService.Options; +using StellaOps.Excititor.WebService.Services; + +namespace StellaOps.Excititor.WebService.Endpoints; + +internal static class MirrorEndpoints +{ + public static void MapMirrorEndpoints(WebApplication app) + { + var group = app.MapGroup("/excititor/mirror"); + + group.MapGet("/domains", HandleListDomainsAsync); + group.MapGet("/domains/{domainId}", HandleDomainDetailAsync); + group.MapGet("/domains/{domainId}/index", HandleDomainIndexAsync); + group.MapGet("/domains/{domainId}/exports/{exportKey}", HandleExportMetadataAsync); + group.MapGet("/domains/{domainId}/exports/{exportKey}/download", HandleExportDownloadAsync); + } + + private static async Task HandleListDomainsAsync( + HttpContext httpContext, + IOptions options, + CancellationToken cancellationToken) + { + var domains = options.Value.Domains + .Select(static domain => new MirrorDomainSummary( + domain.Id, + string.IsNullOrWhiteSpace(domain.DisplayName) ? domain.Id : domain.DisplayName, + domain.RequireAuthentication, + Math.Max(domain.MaxIndexRequestsPerHour, 0), + Math.Max(domain.MaxDownloadRequestsPerHour, 0))) + .ToArray(); + + await WriteJsonAsync(httpContext, new MirrorDomainListResponse(domains), StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + private static async Task HandleDomainDetailAsync( + string domainId, + HttpContext httpContext, + IOptions options, + CancellationToken cancellationToken) + { + if (!TryFindDomain(options.Value, domainId, out var domain)) + { + return Results.NotFound(); + } + + var response = new MirrorDomainDetail( + domain.Id, + string.IsNullOrWhiteSpace(domain.DisplayName) ? 
domain.Id : domain.DisplayName, + domain.RequireAuthentication, + Math.Max(domain.MaxIndexRequestsPerHour, 0), + Math.Max(domain.MaxDownloadRequestsPerHour, 0), + domain.Exports.Select(static export => export.Key).OrderBy(static key => key, StringComparer.Ordinal).ToImmutableArray()); + + await WriteJsonAsync(httpContext, response, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + private static async Task HandleDomainIndexAsync( + string domainId, + HttpContext httpContext, + IOptions options, + MirrorRateLimiter rateLimiter, + IVexExportStore exportStore, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + if (!TryFindDomain(options.Value, domainId, out var domain)) + { + return Results.NotFound(); + } + + if (domain.RequireAuthentication && (httpContext.User?.Identity?.IsAuthenticated is not true)) + { + return Results.Unauthorized(); + } + + if (!rateLimiter.TryAcquire(domain.Id, "index", Math.Max(domain.MaxIndexRequestsPerHour, 0), out var retryAfter)) + { + if (retryAfter is { } retry) + { + httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retry.TotalSeconds)).ToString(CultureInfo.InvariantCulture); + } + + await WritePlainTextAsync(httpContext, "mirror index quota exceeded", StatusCodes.Status429TooManyRequests, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + var resolvedExports = new List(); + foreach (var exportOption in domain.Exports) + { + if (!TryBuildExportPlan(exportOption, out var plan, out var error)) + { + resolvedExports.Add(new MirrorExportIndexEntry( + exportOption.Key, + null, + null, + exportOption.Format, + null, + null, + 0, + null, + null, + error ?? "invalid_export_configuration")); + continue; + } + + var manifest = await exportStore.FindAsync(plan.Signature, plan.Format, cancellationToken).ConfigureAwait(false); + + if (manifest is null) + { + resolvedExports.Add(new MirrorExportIndexEntry( + exportOption.Key, + null, + plan.Signature.Value, + plan.Format.ToString().ToLowerInvariant(), + null, + null, + 0, + null, + null, + "manifest_not_found")); + continue; + } + + resolvedExports.Add(new MirrorExportIndexEntry( + exportOption.Key, + manifest.ExportId, + manifest.QuerySignature.Value, + manifest.Format.ToString().ToLowerInvariant(), + manifest.CreatedAt, + manifest.Artifact, + manifest.SizeBytes, + manifest.ConsensusRevision, + manifest.Attestation is null ? null : new MirrorExportAttestation(manifest.Attestation.PredicateType, manifest.Attestation.Rekor?.Location, manifest.Attestation.EnvelopeDigest, manifest.Attestation.SignedAt), + null)); + } + + var indexResponse = new MirrorDomainIndex( + domain.Id, + string.IsNullOrWhiteSpace(domain.DisplayName) ? 
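// --- Reviewer sketch (illustrative only, not part of this patch) ---
// MirrorRateLimiter.TryAcquire(domainId, operation, limitPerHour, out retryAfter) is consumed
// above but not defined in this file. One plausible shape is a fixed one-hour window per
// (domain, operation) key; everything below is an assumption about that implementation:
static bool TryAcquireFixedWindow(
    Dictionary<string, (DateTimeOffset WindowStart, int Count)> windows,
    string key,
    int limitPerHour,
    DateTimeOffset now,
    out TimeSpan? retryAfter)
{
    retryAfter = null;
    if (limitPerHour <= 0)
    {
        return true;                                     // treat 0 as "no limit" (assumption)
    }

    if (!windows.TryGetValue(key, out var window) || now - window.WindowStart >= TimeSpan.FromHours(1))
    {
        window = (now, 0);                               // start a fresh window
    }

    if (window.Count >= limitPerHour)
    {
        retryAfter = window.WindowStart + TimeSpan.FromHours(1) - now;
        return false;                                    // caller emits 429 with Retry-After
    }

    windows[key] = (window.WindowStart, window.Count + 1);
    return true;
}
// --- end sketch ---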
domain.Id : domain.DisplayName, + timeProvider.GetUtcNow(), + resolvedExports.ToImmutableArray()); + + await WriteJsonAsync(httpContext, indexResponse, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + private static async Task HandleExportMetadataAsync( + string domainId, + string exportKey, + HttpContext httpContext, + IOptions options, + MirrorRateLimiter rateLimiter, + IVexExportStore exportStore, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + if (!TryFindDomain(options.Value, domainId, out var domain)) + { + return Results.NotFound(); + } + + if (domain.RequireAuthentication && (httpContext.User?.Identity?.IsAuthenticated is not true)) + { + return Results.Unauthorized(); + } + + if (!TryFindExport(domain, exportKey, out var exportOptions)) + { + return Results.NotFound(); + } + + if (!TryBuildExportPlan(exportOptions, out var plan, out var error)) + { + await WritePlainTextAsync(httpContext, error ?? "invalid_export_configuration", StatusCodes.Status503ServiceUnavailable, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + var manifest = await exportStore.FindAsync(plan.Signature, plan.Format, cancellationToken).ConfigureAwait(false); + if (manifest is null) + { + return Results.NotFound(); + } + + var payload = new MirrorExportMetadata( + domain.Id, + exportOptions.Key, + manifest.ExportId, + manifest.QuerySignature.Value, + manifest.Format.ToString().ToLowerInvariant(), + manifest.CreatedAt, + manifest.Artifact, + manifest.SizeBytes, + manifest.SourceProviders, + manifest.Attestation is null ? null : new MirrorExportAttestation(manifest.Attestation.PredicateType, manifest.Attestation.Rekor?.Location, manifest.Attestation.EnvelopeDigest, manifest.Attestation.SignedAt)); + + await WriteJsonAsync(httpContext, payload, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + private static async Task HandleExportDownloadAsync( + string domainId, + string exportKey, + HttpContext httpContext, + IOptions options, + MirrorRateLimiter rateLimiter, + IVexExportStore exportStore, + IEnumerable artifactStores, + CancellationToken cancellationToken) + { + if (!TryFindDomain(options.Value, domainId, out var domain)) + { + return Results.NotFound(); + } + + if (domain.RequireAuthentication && (httpContext.User?.Identity?.IsAuthenticated is not true)) + { + return Results.Unauthorized(); + } + + if (!rateLimiter.TryAcquire(domain.Id, "download", Math.Max(domain.MaxDownloadRequestsPerHour, 0), out var retryAfter)) + { + if (retryAfter is { } retry) + { + httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retry.TotalSeconds)).ToString(CultureInfo.InvariantCulture); + } + + await WritePlainTextAsync(httpContext, "mirror download quota exceeded", StatusCodes.Status429TooManyRequests, cancellationToken).ConfigureAwait(false); + return Results.Empty; + } + + if (!TryFindExport(domain, exportKey, out var exportOptions) || !TryBuildExportPlan(exportOptions, out var plan, out _)) + { + return Results.NotFound(); + } + + var manifest = await exportStore.FindAsync(plan.Signature, plan.Format, cancellationToken).ConfigureAwait(false); + if (manifest is null) + { + return Results.NotFound(); + } + + Stream? 
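// --- Reviewer sketch (illustrative only, not part of this patch) ---
// The download handler below asks every registered IVexArtifactStore in turn and streams the
// first hit; distilled into a helper for clarity (the helper name is illustrative):
static async Task<Stream?> OpenFirstAvailableAsync(
    IEnumerable<IVexArtifactStore> stores,
    VexContentAddress address,
    CancellationToken ct)
{
    foreach (var store in stores)
    {
        var stream = await store.OpenReadAsync(address, ct);
        if (stream is not null)
        {
            return stream;   // first store that knows the content address wins
        }
    }

    return null;             // caller maps this to 404
}
// --- end sketch ---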
contentStream = null; + foreach (var store in artifactStores) + { + contentStream = await store.OpenReadAsync(manifest.Artifact, cancellationToken).ConfigureAwait(false); + if (contentStream is not null) + { + break; + } + } + + if (contentStream is null) + { + return Results.NotFound(); + } + + await using (contentStream.ConfigureAwait(false)) + { + var contentType = ResolveContentType(manifest.Format); + httpContext.Response.StatusCode = StatusCodes.Status200OK; + httpContext.Response.ContentType = contentType; + httpContext.Response.Headers.ContentDisposition = FormattableString.Invariant($"attachment; filename=\"{BuildDownloadFileName(domain.Id, exportOptions.Key, manifest.Format)}\""); + + await contentStream.CopyToAsync(httpContext.Response.Body, cancellationToken).ConfigureAwait(false); + } + + return Results.Empty; + } + + private static bool TryFindDomain(MirrorDistributionOptions options, string domainId, out MirrorDomainOptions domain) + { + domain = options.Domains.FirstOrDefault(d => string.Equals(d.Id, domainId, StringComparison.OrdinalIgnoreCase))!; + return domain is not null; + } + + private static bool TryFindExport(MirrorDomainOptions domain, string exportKey, out MirrorExportOptions export) + { + export = domain.Exports.FirstOrDefault(e => string.Equals(e.Key, exportKey, StringComparison.OrdinalIgnoreCase))!; + return export is not null; + } + + private static bool TryBuildExportPlan(MirrorExportOptions exportOptions, out MirrorExportPlan plan, out string? error) + { + plan = null!; + error = null; + + if (string.IsNullOrWhiteSpace(exportOptions.Key)) + { + error = "missing_export_key"; + return false; + } + + if (string.IsNullOrWhiteSpace(exportOptions.Format) || !Enum.TryParse(exportOptions.Format, ignoreCase: true, out var format)) + { + error = "unsupported_export_format"; + return false; + } + + var filters = exportOptions.Filters.Select(pair => new KeyValuePair(pair.Key, pair.Value)).ToArray(); + var sorts = exportOptions.Sort.Select(pair => new VexQuerySort(pair.Key, pair.Value)).ToArray(); + var query = VexQuery.Create(filters.Select(kv => new VexQueryFilter(kv.Key, kv.Value)), sorts, exportOptions.Limit, exportOptions.Offset, exportOptions.View); + var signature = VexQuerySignature.FromQuery(query); + + plan = new MirrorExportPlan(format, query, signature); + return true; + } + + private static string ResolveContentType(VexExportFormat format) + => format switch + { + VexExportFormat.Json => "application/json", + VexExportFormat.JsonLines => "application/jsonl", + VexExportFormat.OpenVex => "application/json", + VexExportFormat.Csaf => "application/json", + _ => "application/octet-stream", + }; + + private static string BuildDownloadFileName(string domainId, string exportKey, VexExportFormat format) + { + var builder = new StringBuilder(domainId.Length + exportKey.Length + 8); + builder.Append(domainId).Append('-').Append(exportKey); + builder.Append(format switch + { + VexExportFormat.Json => ".json", + VexExportFormat.JsonLines => ".jsonl", + VexExportFormat.OpenVex => ".openvex.json", + VexExportFormat.Csaf => ".csaf.json", + _ => ".bin", + }); + return builder.ToString(); + } + + private static async Task WritePlainTextAsync(HttpContext context, string message, int statusCode, CancellationToken cancellationToken) + { + context.Response.StatusCode = statusCode; + context.Response.ContentType = "text/plain"; + await context.Response.WriteAsync(message, cancellationToken); + } + + private static async Task WriteJsonAsync(HttpContext context, T payload, int 
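// --- Reviewer sketch (illustrative only, not part of this patch) ---
// End-to-end shape of a mirror export lookup: configured filters become a canonical VexQuery,
// and its signature keys the manifest lookup in IVexExportStore, mirroring TryBuildExportPlan
// above and the test fixture's seeding. Uses only calls that appear elsewhere in this patch.
static async Task<VexExportManifest?> FindConfiguredExportAsync(IVexExportStore exportStore, CancellationToken ct)
{
    var filters = new[]
    {
        new VexQueryFilter("vulnId", "CVE-2025-0001"),
        new VexQueryFilter("productKey", "pkg:test/demo"),
    };

    var query = VexQuery.Create(filters, Enumerable.Empty<VexQuerySort>());
    var signature = VexQuerySignature.FromQuery(query);

    return await exportStore.FindAsync(signature, VexExportFormat.Json, ct);
}
// --- end sketch ---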
statusCode, CancellationToken cancellationToken) + { + context.Response.StatusCode = statusCode; + context.Response.ContentType = "application/json"; + var json = VexCanonicalJsonSerializer.Serialize(payload); + await context.Response.WriteAsync(json, cancellationToken); + } + + private sealed record MirrorExportPlan( + VexExportFormat Format, + VexQuery Query, + VexQuerySignature Signature); +} + +internal sealed record MirrorDomainListResponse(IReadOnlyList Domains); + +internal sealed record MirrorDomainSummary( + string Id, + string DisplayName, + bool RequireAuthentication, + int MaxIndexRequestsPerHour, + int MaxDownloadRequestsPerHour); + +internal sealed record MirrorDomainDetail( + string Id, + string DisplayName, + bool RequireAuthentication, + int MaxIndexRequestsPerHour, + int MaxDownloadRequestsPerHour, + IReadOnlyList Exports); + +internal sealed record MirrorDomainIndex( + string Id, + string DisplayName, + DateTimeOffset GeneratedAt, + IReadOnlyList Exports); + +internal sealed record MirrorExportIndexEntry( + string ExportKey, + string? ExportId, + string? QuerySignature, + string Format, + DateTimeOffset? CreatedAt, + VexContentAddress? Artifact, + long SizeBytes, + string? ConsensusRevision, + MirrorExportAttestation? Attestation, + string? Status); + +internal sealed record MirrorExportAttestation( + string PredicateType, + string? RekorLocation, + string? EnvelopeDigest, + DateTimeOffset? SignedAt); + +internal sealed record MirrorExportMetadata( + string DomainId, + string ExportKey, + string ExportId, + string QuerySignature, + string Format, + DateTimeOffset CreatedAt, + VexContentAddress Artifact, + long SizeBytes, + IReadOnlyList SourceProviders, + MirrorExportAttestation? Attestation); diff --git a/src/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs b/src/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs new file mode 100644 index 00000000..063eacaa --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs @@ -0,0 +1,504 @@ +namespace StellaOps.Excititor.WebService.Endpoints; + +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using StellaOps.Excititor.Attestation; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Signing; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; + +internal static class ResolveEndpoint +{ + private const int MaxSubjectPairs = 256; + + public static void MapResolveEndpoint(WebApplication app) + { + app.MapPost("/excititor/resolve", HandleResolveAsync); + } + + private static async Task HandleResolveAsync( + VexResolveRequest request, + HttpContext httpContext, + IVexClaimStore claimStore, + IVexConsensusStore consensusStore, + IVexProviderStore providerStore, + IVexPolicyProvider policyProvider, + TimeProvider timeProvider, + ILoggerFactory loggerFactory, + IVexAttestationClient? attestationClient, + IVexSigner? 
signer, + CancellationToken cancellationToken) + { + if (request is null) + { + return Results.BadRequest("Request payload is required."); + } + + var logger = loggerFactory.CreateLogger("ResolveEndpoint"); + + var productKeys = NormalizeValues(request.ProductKeys, request.Purls); + var vulnerabilityIds = NormalizeValues(request.VulnerabilityIds); + + if (productKeys.Count == 0) + { + await WritePlainTextAsync(httpContext, "At least one productKey or purl must be provided.", StatusCodes.Status400BadRequest, cancellationToken); + return Results.Empty; + } + + if (vulnerabilityIds.Count == 0) + { + await WritePlainTextAsync(httpContext, "At least one vulnerabilityId must be provided.", StatusCodes.Status400BadRequest, cancellationToken); + return Results.Empty; + } + + var pairCount = (long)productKeys.Count * vulnerabilityIds.Count; + if (pairCount > MaxSubjectPairs) + { + await WritePlainTextAsync(httpContext, FormattableString.Invariant($"A maximum of {MaxSubjectPairs} subject pairs are allowed per request."), StatusCodes.Status400BadRequest, cancellationToken); + return Results.Empty; + } + + var snapshot = policyProvider.GetSnapshot(); + + if (!string.IsNullOrWhiteSpace(request.PolicyRevisionId) && + !string.Equals(request.PolicyRevisionId.Trim(), snapshot.RevisionId, StringComparison.Ordinal)) + { + var conflictPayload = new + { + message = $"Requested policy revision '{request.PolicyRevisionId}' does not match active revision '{snapshot.RevisionId}'.", + activeRevision = snapshot.RevisionId, + requestedRevision = request.PolicyRevisionId, + }; + await WriteJsonAsync(httpContext, conflictPayload, StatusCodes.Status409Conflict, cancellationToken); + return Results.Empty; + } + + var resolver = new VexConsensusResolver(snapshot.ConsensusPolicy); + var resolvedAt = timeProvider.GetUtcNow(); + var providerCache = new Dictionary(StringComparer.Ordinal); + var results = new List((int)pairCount); + + foreach (var productKey in productKeys) + { + foreach (var vulnerabilityId in vulnerabilityIds) + { + var claims = await claimStore.FindAsync(vulnerabilityId, productKey, since: null, cancellationToken) + .ConfigureAwait(false); + + var claimArray = claims.Count == 0 ? 
Array.Empty() : claims.ToArray(); + var signals = AggregateSignals(claimArray); + var providers = await LoadProvidersAsync(claimArray, providerStore, providerCache, cancellationToken) + .ConfigureAwait(false); + var product = ResolveProduct(claimArray, productKey); + var calculatedAt = timeProvider.GetUtcNow(); + + var resolution = resolver.Resolve(new VexConsensusRequest( + vulnerabilityId, + product, + claimArray, + providers, + calculatedAt, + snapshot.ConsensusOptions.WeightCeiling, + signals, + snapshot.RevisionId, + snapshot.Digest)); + + var consensus = resolution.Consensus; + + if (!string.Equals(consensus.PolicyVersion, snapshot.Version, StringComparison.Ordinal) || + !string.Equals(consensus.PolicyRevisionId, snapshot.RevisionId, StringComparison.Ordinal) || + !string.Equals(consensus.PolicyDigest, snapshot.Digest, StringComparison.Ordinal)) + { + consensus = new VexConsensus( + consensus.VulnerabilityId, + consensus.Product, + consensus.Status, + consensus.CalculatedAt, + consensus.Sources, + consensus.Conflicts, + consensus.Signals, + snapshot.Version, + consensus.Summary, + snapshot.RevisionId, + snapshot.Digest); + } + + await consensusStore.SaveAsync(consensus, cancellationToken).ConfigureAwait(false); + + var payload = PreparePayload(consensus); + var contentSignature = await TrySignAsync(signer, payload, logger, cancellationToken).ConfigureAwait(false); + var attestation = await BuildAttestationAsync( + attestationClient, + consensus, + snapshot, + payload, + logger, + cancellationToken).ConfigureAwait(false); + + var decisions = resolution.DecisionLog.IsDefault + ? Array.Empty() + : resolution.DecisionLog.ToArray(); + + results.Add(new VexResolveResult( + consensus.VulnerabilityId, + consensus.Product.Key, + consensus.Status, + consensus.CalculatedAt, + consensus.Sources, + consensus.Conflicts, + consensus.Signals, + consensus.Summary, + consensus.PolicyRevisionId ?? snapshot.RevisionId, + consensus.PolicyVersion ?? snapshot.Version, + consensus.PolicyDigest ?? snapshot.Digest, + decisions, + new VexResolveEnvelope( + payload.Artifact, + contentSignature, + attestation.Metadata, + attestation.Envelope, + attestation.Signature))); + } + } + + var policy = new VexResolvePolicy( + snapshot.RevisionId, + snapshot.Version, + snapshot.Digest, + request.PolicyRevisionId?.Trim()); + + var response = new VexResolveResponse(resolvedAt, policy, results); + await WriteJsonAsync(httpContext, response, StatusCodes.Status200OK, cancellationToken); + return Results.Empty; + } + + private static List NormalizeValues(params IReadOnlyList?[] sources) + { + var result = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + foreach (var source in sources) + { + if (source is null) + { + continue; + } + + foreach (var value in source) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + var normalized = value.Trim(); + if (seen.Add(normalized)) + { + result.Add(normalized); + } + } + } + + return result; + } + + private static VexSignalSnapshot? AggregateSignals(IReadOnlyList claims) + { + if (claims.Count == 0) + { + return null; + } + + VexSeveritySignal? bestSeverity = null; + double? bestScore = null; + bool kevPresent = false; + bool kevTrue = false; + double? 
bestEpss = null; + + foreach (var claim in claims) + { + if (claim.Signals is null) + { + continue; + } + + var severity = claim.Signals.Severity; + if (severity is not null) + { + var score = severity.Score; + if (bestSeverity is null || + (score is not null && (bestScore is null || score.Value > bestScore.Value)) || + (score is null && bestScore is null && !string.IsNullOrWhiteSpace(severity.Label) && string.IsNullOrWhiteSpace(bestSeverity.Label))) + { + bestSeverity = severity; + bestScore = severity.Score; + } + } + + if (claim.Signals.Kev is { } kevValue) + { + kevPresent = true; + if (kevValue) + { + kevTrue = true; + } + } + + if (claim.Signals.Epss is { } epss) + { + if (bestEpss is null || epss > bestEpss.Value) + { + bestEpss = epss; + } + } + } + + if (bestSeverity is null && !kevPresent && bestEpss is null) + { + return null; + } + + bool? kev = kevTrue ? true : (kevPresent ? false : null); + return new VexSignalSnapshot(bestSeverity, kev, bestEpss); + } + + private static async Task> LoadProvidersAsync( + IReadOnlyList claims, + IVexProviderStore providerStore, + IDictionary cache, + CancellationToken cancellationToken) + { + if (claims.Count == 0) + { + return ImmutableDictionary.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + var seen = new HashSet(StringComparer.Ordinal); + + foreach (var providerId in claims.Select(claim => claim.ProviderId)) + { + if (!seen.Add(providerId)) + { + continue; + } + + if (cache.TryGetValue(providerId, out var cached)) + { + builder[providerId] = cached; + continue; + } + + var provider = await providerStore.FindAsync(providerId, cancellationToken).ConfigureAwait(false); + if (provider is not null) + { + cache[providerId] = provider; + builder[providerId] = provider; + } + } + + return builder.ToImmutable(); + } + + private static VexProduct ResolveProduct(IReadOnlyList claims, string productKey) + { + if (claims.Count > 0) + { + return claims[0].Product; + } + + var inferredPurl = productKey.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) ? productKey : null; + return new VexProduct(productKey, name: null, version: null, purl: inferredPurl); + } + + private static ConsensusPayload PreparePayload(VexConsensus consensus) + { + var canonicalJson = VexCanonicalJsonSerializer.Serialize(consensus); + var bytes = Encoding.UTF8.GetBytes(canonicalJson); + var digest = SHA256.HashData(bytes); + var digestHex = Convert.ToHexString(digest).ToLowerInvariant(); + var address = new VexContentAddress("sha256", digestHex); + return new ConsensusPayload(address, bytes, canonicalJson); + } + + private static async ValueTask TrySignAsync( + IVexSigner? signer, + ConsensusPayload payload, + ILogger logger, + CancellationToken cancellationToken) + { + if (signer is null) + { + return null; + } + + try + { + var signature = await signer.SignAsync(payload.Bytes, cancellationToken).ConfigureAwait(false); + return new ResolveSignature(signature.Signature, signature.KeyId); + } + catch (Exception ex) + { + logger.LogWarning(ex, "Failed to sign resolve payload {Digest}", payload.Artifact.ToUri()); + return null; + } + } + + private static async ValueTask BuildAttestationAsync( + IVexAttestationClient? 
attestationClient, + VexConsensus consensus, + VexPolicySnapshot snapshot, + ConsensusPayload payload, + ILogger logger, + CancellationToken cancellationToken) + { + if (attestationClient is null) + { + return new ResolveAttestation(null, null, null); + } + + try + { + var exportId = BuildAttestationExportId(consensus.VulnerabilityId, consensus.Product.Key); + var filters = new[] + { + new KeyValuePair("vulnerabilityId", consensus.VulnerabilityId), + new KeyValuePair("productKey", consensus.Product.Key), + new KeyValuePair("policyRevisionId", snapshot.RevisionId), + }; + + var querySignature = VexQuerySignature.FromFilters(filters); + var providerIds = consensus.Sources + .Select(source => source.ProviderId) + .Distinct(StringComparer.Ordinal) + .ToImmutableArray(); + + var metadataBuilder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + metadataBuilder["consensusDigest"] = payload.Artifact.ToUri(); + metadataBuilder["policyRevisionId"] = snapshot.RevisionId; + metadataBuilder["policyVersion"] = snapshot.Version; + if (!string.IsNullOrWhiteSpace(snapshot.Digest)) + { + metadataBuilder["policyDigest"] = snapshot.Digest; + } + + var response = await attestationClient.SignAsync(new VexAttestationRequest( + exportId, + querySignature, + payload.Artifact, + VexExportFormat.Json, + consensus.CalculatedAt, + providerIds, + metadataBuilder.ToImmutable()), cancellationToken).ConfigureAwait(false); + + var envelopeJson = response.Diagnostics.TryGetValue("envelope", out var envelopeValue) + ? envelopeValue + : null; + + ResolveSignature? signature = null; + if (!string.IsNullOrWhiteSpace(envelopeJson)) + { + try + { + var envelope = JsonSerializer.Deserialize(envelopeJson); + var dsseSignature = envelope?.Signatures?.FirstOrDefault(); + if (dsseSignature is not null) + { + signature = new ResolveSignature(dsseSignature.Signature, dsseSignature.KeyId); + } + } + catch (Exception ex) + { + logger.LogDebug(ex, "Failed to deserialize DSSE envelope for resolve export {ExportId}", exportId); + } + } + + return new ResolveAttestation(response.Attestation, envelopeJson, signature); + } + catch (Exception ex) + { + logger.LogWarning(ex, "Unable to produce attestation for {VulnerabilityId}/{ProductKey}", consensus.VulnerabilityId, consensus.Product.Key); + return new ResolveAttestation(null, null, null); + } + } + + private static string BuildAttestationExportId(string vulnerabilityId, string productKey) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(productKey)); + var digest = Convert.ToHexString(hash).ToLowerInvariant(); + return FormattableString.Invariant($"resolve/{vulnerabilityId}/{digest}"); + } + +private sealed record ConsensusPayload(VexContentAddress Artifact, byte[] Bytes, string CanonicalJson); + +private static async Task WritePlainTextAsync(HttpContext context, string message, int statusCode, CancellationToken cancellationToken) +{ + context.Response.StatusCode = statusCode; + context.Response.ContentType = "text/plain"; + await context.Response.WriteAsync(message, cancellationToken); +} + +private static async Task WriteJsonAsync(HttpContext context, T payload, int statusCode, CancellationToken cancellationToken) +{ + context.Response.StatusCode = statusCode; + context.Response.ContentType = "application/json"; + var json = VexCanonicalJsonSerializer.Serialize(payload); + await context.Response.WriteAsync(json, cancellationToken); +} +} + +public sealed record VexResolveRequest( + IReadOnlyList? ProductKeys, + IReadOnlyList? Purls, + IReadOnlyList? 
VulnerabilityIds, + string? PolicyRevisionId); + +internal sealed record VexResolvePolicy( + string ActiveRevisionId, + string Version, + string Digest, + string? RequestedRevisionId); + +internal sealed record VexResolveResponse( + DateTimeOffset ResolvedAt, + VexResolvePolicy Policy, + IReadOnlyList Results); + +internal sealed record VexResolveResult( + string VulnerabilityId, + string ProductKey, + VexConsensusStatus Status, + DateTimeOffset CalculatedAt, + IReadOnlyList Sources, + IReadOnlyList Conflicts, + VexSignalSnapshot? Signals, + string? Summary, + string PolicyRevisionId, + string PolicyVersion, + string PolicyDigest, + IReadOnlyList Decisions, + VexResolveEnvelope Envelope); + +internal sealed record VexResolveEnvelope( + VexContentAddress Artifact, + ResolveSignature? ContentSignature, + VexAttestationMetadata? Attestation, + string? AttestationEnvelope, + ResolveSignature? AttestationSignature); + +internal sealed record ResolveSignature(string Value, string? KeyId); + +internal sealed record ResolveAttestation( + VexAttestationMetadata? Metadata, + string? Envelope, + ResolveSignature? Signature); diff --git a/src/StellaOps.Excititor.WebService/Options/MirrorDistributionOptions.cs b/src/StellaOps.Excititor.WebService/Options/MirrorDistributionOptions.cs new file mode 100644 index 00000000..11da9d78 --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Options/MirrorDistributionOptions.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; + +namespace StellaOps.Excititor.WebService.Options; + +public sealed class MirrorDistributionOptions +{ + public const string SectionName = "Excititor:Mirror"; + + public List Domains { get; } = new(); +} + +public sealed class MirrorDomainOptions +{ + public string Id { get; set; } = string.Empty; + + public string DisplayName { get; set; } = string.Empty; + + public bool RequireAuthentication { get; set; } + = false; + + /// + /// Maximum index requests allowed per rolling window. + /// + public int MaxIndexRequestsPerHour { get; set; } = 120; + + /// + /// Maximum export downloads allowed per rolling window. + /// + public int MaxDownloadRequestsPerHour { get; set; } = 600; + + public List Exports { get; } = new(); +} + +public sealed class MirrorExportOptions +{ + public string Key { get; set; } = string.Empty; + + public string Format { get; set; } = string.Empty; + + public Dictionary Filters { get; } = new(); + + public Dictionary Sort { get; } = new(); + + public int? Limit { get; set; } + = null; + + public int? Offset { get; set; } + = null; + + public string? 
View { get; set; } + = null; +} diff --git a/src/StellaOps.Excititor.WebService/Program.cs b/src/StellaOps.Excititor.WebService/Program.cs index a4990bd5..f16a57ff 100644 --- a/src/StellaOps.Excititor.WebService/Program.cs +++ b/src/StellaOps.Excititor.WebService/Program.cs @@ -1,13 +1,23 @@ using System.Collections.Generic; using System.Linq; +using System.Collections.Immutable; +using Microsoft.AspNetCore.Authentication; using Microsoft.Extensions.Options; using StellaOps.Excititor.Attestation.Extensions; using StellaOps.Excititor.Attestation; using StellaOps.Excititor.Attestation.Transparency; using StellaOps.Excititor.ArtifactStores.S3.Extensions; -using StellaOps.Excititor.Export; -using StellaOps.Excititor.Storage.Mongo; using StellaOps.Excititor.Connectors.RedHat.CSAF.DependencyInjection; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Formats.CSAF; +using StellaOps.Excititor.Formats.CycloneDX; +using StellaOps.Excititor.Formats.OpenVEX; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.WebService.Endpoints; +using StellaOps.Excititor.WebService.Options; +using StellaOps.Excititor.WebService.Services; var builder = WebApplication.CreateBuilder(args); var configuration = builder.Configuration; @@ -18,11 +28,19 @@ services.AddOptions() .ValidateOnStart(); services.AddExcititorMongoStorage(); +services.AddCsafNormalizer(); +services.AddCycloneDxNormalizer(); +services.AddOpenVexNormalizer(); +services.AddSingleton(); +services.AddSingleton(); services.AddVexExportEngine(); services.AddVexExportCacheServices(); services.AddVexAttestation(); services.Configure(configuration.GetSection("Excititor:Attestation:Client")); +services.AddVexPolicy(); services.AddRedHatCsafConnector(); +services.Configure(configuration.GetSection(MirrorDistributionOptions.SectionName)); +services.AddSingleton(); var rekorSection = configuration.GetSection("Excititor:Attestation:Rekor"); if (rekorSection.Exists()) @@ -64,9 +82,15 @@ if (offlineSection.Exists()) services.AddEndpointsApiExplorer(); services.AddHealthChecks(); services.AddSingleton(TimeProvider.System); +services.AddMemoryCache(); +services.AddAuthentication(); +services.AddAuthorization(); var app = builder.Build(); +app.UseAuthentication(); +app.UseAuthorization(); + app.MapGet("/excititor/status", async (HttpContext context, IEnumerable artifactStores, IOptions mongoOptions, @@ -84,8 +108,139 @@ app.MapGet("/excititor/status", async (HttpContext context, app.MapHealthChecks("/excititor/health"); +app.MapPost("/excititor/statements", async ( + VexStatementIngestRequest request, + IVexClaimStore claimStore, + TimeProvider timeProvider, + CancellationToken cancellationToken) => +{ + if (request?.Statements is null || request.Statements.Count == 0) + { + return Results.BadRequest("At least one statement must be provided."); + } + + var claims = request.Statements.Select(statement => statement.ToDomainClaim()); + await claimStore.AppendAsync(claims, timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); + return Results.Accepted(); +}); + +app.MapGet("/excititor/statements/{vulnerabilityId}/{productKey}", async ( + string vulnerabilityId, + string productKey, + DateTimeOffset? 
since, + IVexClaimStore claimStore, + CancellationToken cancellationToken) => +{ + if (string.IsNullOrWhiteSpace(vulnerabilityId) || string.IsNullOrWhiteSpace(productKey)) + { + return Results.BadRequest("vulnerabilityId and productKey are required."); + } + + var claims = await claimStore.FindAsync(vulnerabilityId.Trim(), productKey.Trim(), since, cancellationToken).ConfigureAwait(false); + return Results.Ok(claims); +}); + +IngestEndpoints.MapIngestEndpoints(app); +ResolveEndpoint.MapResolveEndpoint(app); +MirrorEndpoints.MapMirrorEndpoints(app); + app.Run(); public partial class Program; internal sealed record StatusResponse(DateTimeOffset UtcNow, string MongoBucket, int InlineThreshold, string[] ArtifactStores); + +internal sealed record VexStatementIngestRequest(IReadOnlyList Statements); + +internal sealed record VexStatementEntry( + string VulnerabilityId, + string ProviderId, + string ProductKey, + string? ProductName, + string? ProductVersion, + string? ProductPurl, + string? ProductCpe, + IReadOnlyList? ComponentIdentifiers, + VexClaimStatus Status, + VexJustification? Justification, + string? Detail, + DateTimeOffset FirstSeen, + DateTimeOffset LastSeen, + VexDocumentFormat DocumentFormat, + string DocumentDigest, + string DocumentUri, + string? DocumentRevision, + VexSignatureMetadataRequest? Signature, + VexConfidenceRequest? Confidence, + VexSignalRequest? Signals, + IReadOnlyDictionary? Metadata) +{ + public VexClaim ToDomainClaim() + { + var product = new VexProduct( + ProductKey, + ProductName, + ProductVersion, + ProductPurl, + ProductCpe, + ComponentIdentifiers ?? Array.Empty()); + + if (!Uri.TryCreate(DocumentUri, UriKind.Absolute, out var uri)) + { + throw new InvalidOperationException($"DocumentUri '{DocumentUri}' is not a valid absolute URI."); + } + + var document = new VexClaimDocument( + DocumentFormat, + DocumentDigest, + uri, + DocumentRevision, + Signature?.ToDomain()); + + var additionalMetadata = Metadata is null + ? ImmutableDictionary.Empty + : Metadata.ToImmutableDictionary(StringComparer.Ordinal); + + return new VexClaim( + VulnerabilityId, + ProviderId, + product, + Status, + document, + FirstSeen, + LastSeen, + Justification, + Detail, + Confidence?.ToDomain(), + Signals?.ToDomain(), + additionalMetadata); + } +} + +internal sealed record VexSignatureMetadataRequest( + string Type, + string? Subject, + string? Issuer, + string? KeyId, + DateTimeOffset? VerifiedAt, + string? TransparencyLogReference) +{ + public VexSignatureMetadata ToDomain() + => new(Type, Subject, Issuer, KeyId, VerifiedAt, TransparencyLogReference); +} + +internal sealed record VexConfidenceRequest(string Level, double? Score, string? Method) +{ + public VexConfidence ToDomain() => new(Level, Score, Method); +} + +internal sealed record VexSignalRequest(VexSeveritySignalRequest? Severity, bool? Kev, double? Epss) +{ + public VexSignalSnapshot ToDomain() + => new(Severity?.ToDomain(), Kev, Epss); +} + +internal sealed record VexSeveritySignalRequest(string Scheme, double? Score, string? Label, string? 
Vector) +{ + public VexSeveritySignal ToDomain() => new(Scheme, Score, Label, Vector); +} diff --git a/src/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs b/src/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs new file mode 100644 index 00000000..ab3f5d07 --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs @@ -0,0 +1,57 @@ +using Microsoft.Extensions.Caching.Memory; + +namespace StellaOps.Excititor.WebService.Services; + +internal sealed class MirrorRateLimiter +{ + private readonly IMemoryCache _cache; + private readonly TimeProvider _timeProvider; + private static readonly TimeSpan Window = TimeSpan.FromHours(1); + + public MirrorRateLimiter(IMemoryCache cache, TimeProvider timeProvider) + { + _cache = cache ?? throw new ArgumentNullException(nameof(cache)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public bool TryAcquire(string domainId, string scope, int limit, out TimeSpan? retryAfter) + { + retryAfter = null; + + if (limit <= 0 || limit == int.MaxValue) + { + return true; + } + + var key = CreateKey(domainId, scope); + var now = _timeProvider.GetUtcNow(); + + var counter = _cache.Get(key); + if (counter is null || now - counter.WindowStart >= Window) + { + counter = new Counter(now, 0); + } + + if (counter.Count >= limit) + { + var windowEnd = counter.WindowStart + Window; + retryAfter = windowEnd > now ? windowEnd - now : TimeSpan.Zero; + return false; + } + + counter = counter with { Count = counter.Count + 1 }; + var absoluteExpiration = counter.WindowStart + Window; + _cache.Set(key, counter, absoluteExpiration); + return true; + } + + private static string CreateKey(string domainId, string scope) + => string.Create(domainId.Length + scope.Length + 1, (domainId, scope), static (span, state) => + { + state.domainId.AsSpan().CopyTo(span); + span[state.domainId.Length] = '|'; + state.scope.AsSpan().CopyTo(span[(state.domainId.Length + 1)..]); + }); + + private sealed record Counter(DateTimeOffset WindowStart, int Count); +} diff --git a/src/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs b/src/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs new file mode 100644 index 00000000..51a20718 --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs @@ -0,0 +1,54 @@ +using System.Linq; +using System.Security.Claims; +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Excititor.WebService.Services; + +internal static class ScopeAuthorization +{ + public static IResult? 
RequireScope(HttpContext context, string scope) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (string.IsNullOrWhiteSpace(scope)) + { + throw new ArgumentException("Scope must be provided.", nameof(scope)); + } + + var user = context.User; + if (user?.Identity?.IsAuthenticated is not true) + { + return Results.Unauthorized(); + } + + if (!HasScope(user, scope)) + { + return Results.Forbid(); + } + + return null; + } + + private static bool HasScope(ClaimsPrincipal user, string requiredScope) + { + var comparison = StringComparer.OrdinalIgnoreCase; + foreach (var claim in user.FindAll("scope").Concat(user.FindAll("scp"))) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (scopes.Any(scope => comparison.Equals(scope, requiredScope))) + { + return true; + } + } + + return false; + } +} diff --git a/src/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs b/src/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs new file mode 100644 index 00000000..722806b2 --- /dev/null +++ b/src/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs @@ -0,0 +1,570 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using Microsoft.Extensions.Logging; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.WebService.Services; + +internal interface IVexIngestOrchestrator +{ + Task InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken); + + Task RunAsync(IngestRunOptions options, CancellationToken cancellationToken); + + Task ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken); + + Task ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken); +} + +internal sealed class VexIngestOrchestrator : IVexIngestOrchestrator +{ + private readonly IServiceProvider _serviceProvider; + private readonly IReadOnlyDictionary _connectors; + private readonly IVexRawStore _rawStore; + private readonly IVexClaimStore _claimStore; + private readonly IVexProviderStore _providerStore; + private readonly IVexConnectorStateRepository _stateRepository; + private readonly IVexNormalizerRouter _normalizerRouter; + private readonly IVexSignatureVerifier _signatureVerifier; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public VexIngestOrchestrator( + IServiceProvider serviceProvider, + IEnumerable connectors, + IVexRawStore rawStore, + IVexClaimStore claimStore, + IVexProviderStore providerStore, + IVexConnectorStateRepository stateRepository, + IVexNormalizerRouter normalizerRouter, + IVexSignatureVerifier signatureVerifier, + TimeProvider timeProvider, + ILogger logger) + { + _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + _rawStore = rawStore ?? throw new ArgumentNullException(nameof(rawStore)); + _claimStore = claimStore ?? throw new ArgumentNullException(nameof(claimStore)); + _providerStore = providerStore ?? throw new ArgumentNullException(nameof(providerStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _normalizerRouter = normalizerRouter ?? 
throw new ArgumentNullException(nameof(normalizerRouter)); + _signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + if (connectors is null) + { + throw new ArgumentNullException(nameof(connectors)); + } + + _connectors = connectors + .GroupBy(connector => connector.Id, StringComparer.OrdinalIgnoreCase) + .ToDictionary(group => group.Key, group => group.First(), StringComparer.OrdinalIgnoreCase); + } + + public async Task InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + + var runId = Guid.NewGuid(); + var startedAt = _timeProvider.GetUtcNow(); + var results = ImmutableArray.CreateBuilder(); + + var (handles, missing) = ResolveConnectors(options.Providers); + foreach (var providerId in missing) + { + results.Add(new InitProviderResult(providerId, providerId, "missing", TimeSpan.Zero, "Provider connector is not registered.")); + } + + foreach (var handle in handles) + { + var stopwatch = Stopwatch.StartNew(); + try + { + await ValidateConnectorAsync(handle, cancellationToken).ConfigureAwait(false); + await EnsureProviderRegistrationAsync(handle.Descriptor, cancellationToken).ConfigureAwait(false); + stopwatch.Stop(); + + results.Add(new InitProviderResult( + handle.Descriptor.Id, + handle.Descriptor.DisplayName, + "succeeded", + stopwatch.Elapsed, + error: null)); + + _logger.LogInformation("Excititor init validated provider {ProviderId} in {Duration}ms.", handle.Descriptor.Id, stopwatch.Elapsed.TotalMilliseconds); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + stopwatch.Stop(); + results.Add(new InitProviderResult( + handle.Descriptor.Id, + handle.Descriptor.DisplayName, + "cancelled", + stopwatch.Elapsed, + "Operation cancelled.")); + _logger.LogWarning("Excititor init cancelled for provider {ProviderId}.", handle.Descriptor.Id); + } + catch (Exception ex) + { + stopwatch.Stop(); + results.Add(new InitProviderResult( + handle.Descriptor.Id, + handle.Descriptor.DisplayName, + "failed", + stopwatch.Elapsed, + ex.Message)); + _logger.LogError(ex, "Excititor init failed for provider {ProviderId}: {Message}", handle.Descriptor.Id, ex.Message); + } + } + + var completedAt = _timeProvider.GetUtcNow(); + return new InitSummary(runId, startedAt, completedAt, results.ToImmutable()); + } + + public async Task RunAsync(IngestRunOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + + var runId = Guid.NewGuid(); + var startedAt = _timeProvider.GetUtcNow(); + var since = ResolveSince(options.Since, options.Window, startedAt); + var results = ImmutableArray.CreateBuilder(); + + var (handles, missing) = ResolveConnectors(options.Providers); + foreach (var providerId in missing) + { + results.Add(ProviderRunResult.Missing(providerId, since)); + } + + foreach (var handle in handles) + { + var result = await ExecuteRunAsync(handle, since, options.Force, cancellationToken).ConfigureAwait(false); + results.Add(result); + } + + var completedAt = _timeProvider.GetUtcNow(); + return new IngestRunSummary(runId, startedAt, completedAt, results.ToImmutable()); + } + + public async Task ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + + var runId = 
Guid.NewGuid(); + var startedAt = _timeProvider.GetUtcNow(); + var results = ImmutableArray.CreateBuilder(); + + var (handles, missing) = ResolveConnectors(options.Providers); + foreach (var providerId in missing) + { + results.Add(ProviderRunResult.Missing(providerId, since: null)); + } + + foreach (var handle in handles) + { + var since = await ResolveResumeSinceAsync(handle.Descriptor.Id, options.Checkpoint, cancellationToken).ConfigureAwait(false); + var result = await ExecuteRunAsync(handle, since, force: false, cancellationToken).ConfigureAwait(false); + results.Add(result); + } + + var completedAt = _timeProvider.GetUtcNow(); + return new IngestRunSummary(runId, startedAt, completedAt, results.ToImmutable()); + } + + public async Task ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + + var runId = Guid.NewGuid(); + var startedAt = _timeProvider.GetUtcNow(); + var threshold = options.MaxAge is null ? (DateTimeOffset?)null : startedAt - options.MaxAge.Value; + var results = ImmutableArray.CreateBuilder(); + + var (handles, missing) = ResolveConnectors(options.Providers); + foreach (var providerId in missing) + { + results.Add(new ReconcileProviderResult(providerId, "missing", "missing", null, threshold, 0, 0, "Provider connector is not registered.")); + } + + foreach (var handle in handles) + { + try + { + var state = await _stateRepository.GetAsync(handle.Descriptor.Id, cancellationToken).ConfigureAwait(false); + var lastUpdated = state?.LastUpdated; + var stale = threshold.HasValue && (lastUpdated is null || lastUpdated < threshold.Value); + + if (stale || state is null) + { + var since = stale ? threshold : lastUpdated; + var result = await ExecuteRunAsync(handle, since, force: false, cancellationToken).ConfigureAwait(false); + results.Add(new ReconcileProviderResult( + handle.Descriptor.Id, + result.Status, + "reconciled", + result.LastUpdated ?? 
result.CompletedAt, + threshold, + result.Documents, + result.Claims, + result.Error)); + } + else + { + results.Add(new ReconcileProviderResult( + handle.Descriptor.Id, + "succeeded", + "skipped", + lastUpdated, + threshold, + documents: 0, + claims: 0, + error: null)); + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + results.Add(new ReconcileProviderResult( + handle.Descriptor.Id, + "cancelled", + "cancelled", + null, + threshold, + 0, + 0, + "Operation cancelled.")); + _logger.LogWarning("Excititor reconcile cancelled for provider {ProviderId}.", handle.Descriptor.Id); + } + catch (Exception ex) + { + results.Add(new ReconcileProviderResult( + handle.Descriptor.Id, + "failed", + "failed", + null, + threshold, + 0, + 0, + ex.Message)); + _logger.LogError(ex, "Excititor reconcile failed for provider {ProviderId}: {Message}", handle.Descriptor.Id, ex.Message); + } + } + + var completedAt = _timeProvider.GetUtcNow(); + return new ReconcileSummary(runId, startedAt, completedAt, results.ToImmutable()); + } + + private async Task ValidateConnectorAsync(ConnectorHandle handle, CancellationToken cancellationToken) + { + await handle.Connector.ValidateAsync(VexConnectorSettings.Empty, cancellationToken).ConfigureAwait(false); + } + + private async Task EnsureProviderRegistrationAsync(VexConnectorDescriptor descriptor, CancellationToken cancellationToken) + { + var existing = await _providerStore.FindAsync(descriptor.Id, cancellationToken).ConfigureAwait(false); + if (existing is not null) + { + return; + } + + var provider = new VexProvider(descriptor.Id, descriptor.DisplayName, descriptor.Kind); + await _providerStore.SaveAsync(provider, cancellationToken).ConfigureAwait(false); + } + + private async Task ExecuteRunAsync( + ConnectorHandle handle, + DateTimeOffset? since, + bool force, + CancellationToken cancellationToken) + { + var providerId = handle.Descriptor.Id; + var startedAt = _timeProvider.GetUtcNow(); + var stopwatch = Stopwatch.StartNew(); + + try + { + await ValidateConnectorAsync(handle, cancellationToken).ConfigureAwait(false); + await EnsureProviderRegistrationAsync(handle.Descriptor, cancellationToken).ConfigureAwait(false); + + if (force) + { + var resetState = new VexConnectorState(providerId, null, ImmutableArray.Empty); + await _stateRepository.SaveAsync(resetState, cancellationToken).ConfigureAwait(false); + } + + var context = new VexConnectorContext( + since, + VexConnectorSettings.Empty, + _rawStore, + _signatureVerifier, + _normalizerRouter, + _serviceProvider); + + var documents = 0; + var claims = 0; + string? lastDigest = null; + + await foreach (var document in handle.Connector.FetchAsync(context, cancellationToken).ConfigureAwait(false)) + { + documents++; + lastDigest = document.Digest; + + var batch = await _normalizerRouter.NormalizeAsync(document, cancellationToken).ConfigureAwait(false); + if (!batch.Claims.IsDefaultOrEmpty && batch.Claims.Length > 0) + { + claims += batch.Claims.Length; + await _claimStore.AppendAsync(batch.Claims, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); + } + } + + stopwatch.Stop(); + var completedAt = _timeProvider.GetUtcNow(); + var state = await _stateRepository.GetAsync(providerId, cancellationToken).ConfigureAwait(false); + + var checkpoint = state?.DocumentDigests.IsDefaultOrEmpty == false + ? 
state.DocumentDigests[^1] + : lastDigest; + + var result = new ProviderRunResult( + providerId, + "succeeded", + documents, + claims, + startedAt, + completedAt, + stopwatch.Elapsed, + lastDigest, + state?.LastUpdated, + checkpoint, + null, + since); + + _logger.LogInformation( + "Excititor ingest provider {ProviderId} completed: documents={Documents} claims={Claims} since={Since} duration={Duration}ms", + providerId, + documents, + claims, + since?.ToString("O", CultureInfo.InvariantCulture), + result.Duration.TotalMilliseconds); + + return result; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + stopwatch.Stop(); + var cancelledAt = _timeProvider.GetUtcNow(); + _logger.LogWarning("Excititor ingest provider {ProviderId} cancelled.", providerId); + return new ProviderRunResult( + providerId, + "cancelled", + 0, + 0, + startedAt, + cancelledAt, + stopwatch.Elapsed, + null, + null, + null, + "Operation cancelled.", + since); + } + catch (Exception ex) + { + stopwatch.Stop(); + var failedAt = _timeProvider.GetUtcNow(); + _logger.LogError(ex, "Excititor ingest provider {ProviderId} failed: {Message}", providerId, ex.Message); + return new ProviderRunResult( + providerId, + "failed", + 0, + 0, + startedAt, + failedAt, + stopwatch.Elapsed, + null, + null, + null, + ex.Message, + since); + } + } + + private async Task ResolveResumeSinceAsync(string providerId, string? checkpoint, CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(checkpoint)) + { + if (DateTimeOffset.TryParse( + checkpoint.Trim(), + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsed)) + { + return parsed; + } + + var digest = checkpoint.Trim(); + var document = await _rawStore.FindByDigestAsync(digest, cancellationToken).ConfigureAwait(false); + if (document is not null) + { + return document.RetrievedAt; + } + } + + var state = await _stateRepository.GetAsync(providerId, cancellationToken).ConfigureAwait(false); + return state?.LastUpdated; + } + + private static DateTimeOffset? ResolveSince(DateTimeOffset? since, TimeSpan? window, DateTimeOffset reference) + { + if (since.HasValue) + { + return since.Value; + } + + if (window is { } duration && duration > TimeSpan.Zero) + { + var candidate = reference - duration; + return candidate < DateTimeOffset.MinValue ? 
DateTimeOffset.MinValue : candidate; + } + + return null; + } + + private (IReadOnlyList Handles, ImmutableArray Missing) ResolveConnectors(ImmutableArray requestedProviders) + { + var handles = new List(); + var missing = ImmutableArray.CreateBuilder(); + + if (requestedProviders.IsDefaultOrEmpty || requestedProviders.Length == 0) + { + foreach (var connector in _connectors.Values.OrderBy(static x => x.Id, StringComparer.OrdinalIgnoreCase)) + { + handles.Add(new ConnectorHandle(connector, CreateDescriptor(connector))); + } + + return (handles, missing.ToImmutable()); + } + + foreach (var providerId in requestedProviders) + { + if (_connectors.TryGetValue(providerId, out var connector)) + { + handles.Add(new ConnectorHandle(connector, CreateDescriptor(connector))); + } + else + { + missing.Add(providerId); + } + } + + return (handles, missing.ToImmutable()); + } + + private static VexConnectorDescriptor CreateDescriptor(IVexConnector connector) + => connector switch + { + VexConnectorBase baseConnector => baseConnector.Descriptor, + _ => new VexConnectorDescriptor(connector.Id, connector.Kind, connector.Id) + }; + + private sealed record ConnectorHandle(IVexConnector Connector, VexConnectorDescriptor Descriptor); +} + +internal sealed record IngestInitOptions( + ImmutableArray Providers, + bool Resume); + +internal sealed record IngestRunOptions( + ImmutableArray Providers, + DateTimeOffset? Since, + TimeSpan? Window, + bool Force); + +internal sealed record IngestResumeOptions( + ImmutableArray Providers, + string? Checkpoint); + +internal sealed record ReconcileOptions( + ImmutableArray Providers, + TimeSpan? MaxAge); + +internal sealed record InitSummary( + Guid RunId, + DateTimeOffset StartedAt, + DateTimeOffset CompletedAt, + ImmutableArray Providers) +{ + public int ProviderCount => Providers.Length; + public int SuccessCount => Providers.Count(result => string.Equals(result.Status, "succeeded", StringComparison.OrdinalIgnoreCase)); + public int FailureCount => Providers.Count(result => string.Equals(result.Status, "failed", StringComparison.OrdinalIgnoreCase)); +} + +internal sealed record InitProviderResult( + string ProviderId, + string DisplayName, + string Status, + TimeSpan Duration, + string? Error); + +internal sealed record IngestRunSummary( + Guid RunId, + DateTimeOffset StartedAt, + DateTimeOffset CompletedAt, + ImmutableArray Providers) +{ + public int ProviderCount => Providers.Length; + + public int SuccessCount => Providers.Count(provider => string.Equals(provider.Status, "succeeded", StringComparison.OrdinalIgnoreCase)); + + public int FailureCount => Providers.Count(provider => string.Equals(provider.Status, "failed", StringComparison.OrdinalIgnoreCase)); + + public TimeSpan Duration => CompletedAt - StartedAt; +} + +internal sealed record ProviderRunResult( + string ProviderId, + string Status, + int Documents, + int Claims, + DateTimeOffset StartedAt, + DateTimeOffset CompletedAt, + TimeSpan Duration, + string? LastDigest, + DateTimeOffset? LastUpdated, + string? Checkpoint, + string? Error, + DateTimeOffset? Since) +{ + public static ProviderRunResult Missing(string providerId, DateTimeOffset? 
since) + => new(providerId, "missing", 0, 0, DateTimeOffset.MinValue, DateTimeOffset.MinValue, TimeSpan.Zero, null, null, null, "Provider connector is not registered.", since); +} + +internal sealed record ReconcileSummary( + Guid RunId, + DateTimeOffset StartedAt, + DateTimeOffset CompletedAt, + ImmutableArray Providers) +{ + public int ProviderCount => Providers.Length; + + public int ReconciledCount => Providers.Count(result => string.Equals(result.Action, "reconciled", StringComparison.OrdinalIgnoreCase)); + + public int SkippedCount => Providers.Count(result => string.Equals(result.Action, "skipped", StringComparison.OrdinalIgnoreCase)); + + public int FailureCount => Providers.Count(result => string.Equals(result.Status, "failed", StringComparison.OrdinalIgnoreCase)); + + public TimeSpan Duration => CompletedAt - StartedAt; +} + +internal sealed record ReconcileProviderResult( + string ProviderId, + string Status, + string Action, + DateTimeOffset? LastUpdated, + DateTimeOffset? Threshold, + int Documents, + int Claims, + string? Error); diff --git a/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj b/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj index 3c64af7d..1b29f166 100644 --- a/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj +++ b/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj @@ -10,7 +10,12 @@ + + + + +
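Note: Program.cs above wires `IngestEndpoints.MapIngestEndpoints(app)`, but the IngestEndpoints file itself is not included in this hunk. As a rough, non-authoritative sketch of how the orchestrator contract defined above could be surfaced through a minimal-API handler — the `IngestRunRequest` record, the `excititor.ingest` scope name, and the route wiring are assumptions made for illustration only:

// Sketch only (not part of the diff): shows IVexIngestOrchestrator plugged into a minimal-API route.
// IngestRunRequest and the "excititor.ingest" scope are assumed names for this example.
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using StellaOps.Excititor.WebService.Services;

internal sealed record IngestRunRequest(string[]? Providers, DateTimeOffset? Since, TimeSpan? Window, bool Force);

internal static class IngestRunEndpointSketch
{
    public static void Map(WebApplication app)
    {
        app.MapPost("/excititor/ingest/run", async (
            IngestRunRequest request,
            HttpContext context,
            IVexIngestOrchestrator orchestrator,
            CancellationToken cancellationToken) =>
        {
            // Reject callers without the ingest scope before doing any work.
            var failure = ScopeAuthorization.RequireScope(context, "excititor.ingest");
            if (failure is not null)
            {
                return failure;
            }

            var providers = request.Providers is { Length: > 0 }
                ? request.Providers.ToImmutableArray()
                : ImmutableArray<string>.Empty;

            var summary = await orchestrator.RunAsync(
                new IngestRunOptions(providers, request.Since, request.Window, request.Force),
                cancellationToken);

            return Results.Ok(summary);
        });
    }
}

The routes actually registered by `IngestEndpoints.MapIngestEndpoints` may differ; the snippet is only meant to show how `IngestRunOptions`/`IngestRunSummary` fit a handler shape.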
diff --git a/src/StellaOps.Excititor.WebService/TASKS.md b/src/StellaOps.Excititor.WebService/TASKS.md
index 86ed4955..a5325e40 100644
--- a/src/StellaOps.Excititor.WebService/TASKS.md
+++ b/src/StellaOps.Excititor.WebService/TASKS.md
@@ -3,7 +3,7 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md
 | Task | Owner(s) | Depends on | Notes |
 |---|---|---|---|
 |EXCITITOR-WEB-01-001 – Minimal API bootstrap & DI|Team Excititor WebService|EXCITITOR-CORE-01-003, EXCITITOR-STORAGE-01-003|**DONE (2025-10-17)** – Minimal API host composes storage/export/attestation/artifact stores, binds Mongo/attestation options, and exposes `/excititor/status` + health endpoints with regression coverage in `StatusEndpointTests`.|
-|EXCITITOR-WEB-01-002 – Ingest & reconcile endpoints|Team Excititor WebService|EXCITITOR-WEB-01-001|TODO – Implement `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile` with token scope enforcement and structured run telemetry.|
-|EXCITITOR-WEB-01-003 – Export & verify endpoints|Team Excititor WebService|EXCITITOR-WEB-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-ATTEST-01-001|TODO – Add `/excititor/export`, `/excititor/export/{id}`, `/excititor/export/{id}/download`, `/excititor/verify`, returning artifact + attestation metadata with cache awareness.|
-|EXCITITOR-WEB-01-004 – Resolve API & signed responses|Team Excititor WebService|EXCITITOR-WEB-01-001, EXCITITOR-ATTEST-01-002|TODO – Deliver `/excititor/resolve` (subject/context), return consensus + score envelopes, attach cosign/Rekor metadata, and document auth + rate guardrails.|
-|EXCITITOR-WEB-01-005 – Mirror distribution endpoints|Team Excititor WebService|EXCITITOR-EXPORT-01-007, DEVOPS-MIRROR-08-001|TODO – Provide domain-scoped mirror index/download APIs for consensus exports, enforce quota/auth, and document sync workflow for downstream Excititor deployments.|
+|EXCITITOR-WEB-01-002 – Ingest & reconcile endpoints|Team Excititor WebService|EXCITITOR-WEB-01-001|**DOING (2025-10-19)** – Prereqs EXCITITOR-WEB-01-001, EXCITITOR-EXPORT-01-001, and EXCITITOR-ATTEST-01-001 verified DONE; drafting `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile` with scope enforcement & structured telemetry plan.|
+|EXCITITOR-WEB-01-003 – Export & verify endpoints|Team Excititor WebService|EXCITITOR-WEB-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-ATTEST-01-001|**DOING (2025-10-19)** – Prereqs confirmed (EXCITITOR-WEB-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-ATTEST-01-001); preparing `/excititor/export*` surfaces and `/excititor/verify` with an artifact/attestation metadata caching strategy.|
+|EXCITITOR-WEB-01-004 – Resolve API & signed responses|Team Excititor WebService|EXCITITOR-WEB-01-001, EXCITITOR-ATTEST-01-002|**DOING (2025-10-19)** – Prereqs EXCITITOR-WEB-01-001, EXCITITOR-ATTEST-01-001, and EXCITITOR-ATTEST-01-002 verified DONE; planning `/excititor/resolve` signed response flow with consensus envelope + attestation metadata wiring.|
+|EXCITITOR-WEB-01-005 – Mirror distribution endpoints|Team Excititor WebService|EXCITITOR-EXPORT-01-007, DEVOPS-MIRROR-08-001|**DONE (2025-10-19)** – `/excititor/mirror` surfaces domain listings, indices, metadata, and downloads with quota/auth checks; tests cover happy-path listing/download (`dotnet test src/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj`).|
diff --git a/src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs
b/src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs index 6698ef32..18973f81 100644 --- a/src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs +++ b/src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs @@ -1,4 +1,5 @@ using FluentAssertions; +using StellaOps.Excititor.Core; using StellaOps.Excititor.Worker.Options; using StellaOps.Excititor.Worker.Scheduling; using Xunit; @@ -22,6 +23,7 @@ public sealed class VexWorkerOptionsTests schedules.Should().ContainSingle(); schedules[0].Interval.Should().Be(TimeSpan.FromMinutes(30)); + schedules[0].Settings.Should().Be(VexConnectorSettings.Empty); } [Fact] diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs b/src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs index 1f18d43a..faf05b9b 100644 --- a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs +++ b/src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; +using System.Collections.Immutable; using StellaOps.Excititor.Worker.Scheduling; +using StellaOps.Excititor.Core; namespace StellaOps.Excititor.Worker.Options; @@ -15,6 +17,8 @@ public sealed class VexWorkerOptions public IList Providers { get; } = new List(); + public VexWorkerRetryOptions Retry { get; } = new(); + internal IReadOnlyList ResolveSchedules() { var schedules = new List(); @@ -43,7 +47,11 @@ public sealed class VexWorkerOptions initialDelay = TimeSpan.Zero; } - schedules.Add(new VexWorkerSchedule(providerId, interval, initialDelay)); + var connectorSettings = provider.Settings.Count == 0 + ? VexConnectorSettings.Empty + : new VexConnectorSettings(provider.Settings.ToImmutableDictionary(StringComparer.Ordinal)); + + schedules.Add(new VexWorkerSchedule(providerId, interval, initialDelay, connectorSettings)); } return schedules; @@ -59,4 +67,6 @@ public sealed class VexWorkerProviderOptions public TimeSpan? Interval { get; set; } public TimeSpan? 
InitialDelay { get; set; } + + public IDictionary Settings { get; } = new Dictionary(StringComparer.Ordinal); } diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs b/src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs index 6b4f96b8..e39e7ec2 100644 --- a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs +++ b/src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs @@ -24,6 +24,41 @@ internal sealed class VexWorkerOptionsValidator : IValidateOptions 1) + { + failures.Add("Excititor.Worker.Retry.JitterRatio must be between 0 and 1."); + } + + if (options.Retry.RetryCap < options.Retry.BaseDelay) + { + failures.Add("Excititor.Worker.Retry.RetryCap must be greater than or equal to BaseDelay."); + } + + if (options.Retry.RetryCap < options.Retry.MaxDelay) + { + failures.Add("Excititor.Worker.Retry.RetryCap must be greater than or equal to MaxDelay."); + } + for (var i = 0; i < options.Providers.Count; i++) { var provider = options.Providers[i]; diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs b/src/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs new file mode 100644 index 00000000..3ac8a655 --- /dev/null +++ b/src/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs @@ -0,0 +1,20 @@ +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Excititor.Worker.Options; + +public sealed class VexWorkerRetryOptions +{ + [Range(1, int.MaxValue)] + public int FailureThreshold { get; set; } = 3; + + [Range(typeof(double), "0.0", "1.0")] + public double JitterRatio { get; set; } = 0.2; + + public TimeSpan BaseDelay { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan MaxDelay { get; set; } = TimeSpan.FromHours(6); + + public TimeSpan QuarantineDuration { get; set; } = TimeSpan.FromHours(12); + + public TimeSpan RetryCap { get; set; } = TimeSpan.FromHours(24); +} diff --git a/src/StellaOps.Excititor.Worker/Program.cs b/src/StellaOps.Excititor.Worker/Program.cs index 062de8c7..3f8c113b 100644 --- a/src/StellaOps.Excititor.Worker/Program.cs +++ b/src/StellaOps.Excititor.Worker/Program.cs @@ -6,6 +6,11 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Plugin; using StellaOps.Excititor.Connectors.RedHat.CSAF.DependencyInjection; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Formats.CSAF; +using StellaOps.Excititor.Formats.CycloneDX; +using StellaOps.Excititor.Formats.OpenVEX; +using StellaOps.Excititor.Storage.Mongo; using StellaOps.Excititor.Worker.Options; using StellaOps.Excititor.Worker.Scheduling; @@ -19,6 +24,16 @@ services.AddOptions() services.Configure(configuration.GetSection("Excititor:Worker:Plugins")); services.AddRedHatCsafConnector(); +services.AddOptions() + .Bind(configuration.GetSection("Excititor:Storage:Mongo")) + .ValidateOnStart(); + +services.AddExcititorMongoStorage(); +services.AddCsafNormalizer(); +services.AddCycloneDxNormalizer(); +services.AddOpenVexNormalizer(); +services.AddSingleton(); + services.AddSingleton, VexWorkerOptionsValidator>(); services.AddSingleton(TimeProvider.System); services.PostConfigure(options => diff --git a/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs b/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs index 0d09f945..a867e96f 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs +++ b/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs @@ -1,8 +1,11 @@ using System; 
+using System.Collections.Immutable; using System.Linq; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using StellaOps.Plugin; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; namespace StellaOps.Excititor.Worker.Scheduling; @@ -11,18 +14,21 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner private readonly IServiceProvider _serviceProvider; private readonly PluginCatalog _pluginCatalog; private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; public DefaultVexProviderRunner( IServiceProvider serviceProvider, PluginCatalog pluginCatalog, - ILogger logger) + ILogger logger, + TimeProvider timeProvider) { _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); _pluginCatalog = pluginCatalog ?? throw new ArgumentNullException(nameof(pluginCatalog)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); } - public ValueTask RunAsync(string providerId, CancellationToken cancellationToken) + public async ValueTask RunAsync(string providerId, CancellationToken cancellationToken) { ArgumentException.ThrowIfNullOrWhiteSpace(providerId); @@ -31,17 +37,78 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner var matched = availablePlugins.FirstOrDefault(plugin => string.Equals(plugin.Name, providerId, StringComparison.OrdinalIgnoreCase)); - if (matched is null) + if (matched is not null) { - _logger.LogInformation("No connector plugin registered for provider {ProviderId}; nothing to execute.", providerId); - return ValueTask.CompletedTask; + _logger.LogInformation( + "Connector plugin {PluginName} ({ProviderId}) is available. Execution hooks will be added in subsequent tasks.", + matched.Name, + providerId); + } + else + { + _logger.LogInformation("No legacy connector plugin registered for provider {ProviderId}; falling back to DI-managed connectors.", providerId); + } + + var connectors = scope.ServiceProvider.GetServices(); + var connector = connectors.FirstOrDefault(c => string.Equals(c.Id, providerId, StringComparison.OrdinalIgnoreCase)); + + if (connector is null) + { + _logger.LogWarning("No IVexConnector implementation registered for provider {ProviderId}; skipping run.", providerId); + return; + } + + await ExecuteConnectorAsync(scope.ServiceProvider, connector, cancellationToken).ConfigureAwait(false); + } + + private async Task ExecuteConnectorAsync(IServiceProvider scopeProvider, IVexConnector connector, CancellationToken cancellationToken) + { + var rawStore = scopeProvider.GetRequiredService(); + var claimStore = scopeProvider.GetRequiredService(); + var providerStore = scopeProvider.GetRequiredService(); + var normalizerRouter = scopeProvider.GetRequiredService(); + var signatureVerifier = scopeProvider.GetRequiredService(); + + var descriptor = connector switch + { + VexConnectorBase baseConnector => baseConnector.Descriptor, + _ => new VexConnectorDescriptor(connector.Id, VexProviderKind.Vendor, connector.Id) + }; + + var provider = await providerStore.FindAsync(descriptor.Id, cancellationToken).ConfigureAwait(false) + ?? 
new VexProvider(descriptor.Id, descriptor.DisplayName, descriptor.Kind); + + await providerStore.SaveAsync(provider, cancellationToken).ConfigureAwait(false); + + await connector.ValidateAsync(VexConnectorSettings.Empty, cancellationToken).ConfigureAwait(false); + + var context = new VexConnectorContext( + Since: null, + Settings: VexConnectorSettings.Empty, + RawSink: rawStore, + SignatureVerifier: signatureVerifier, + Normalizers: normalizerRouter, + Services: scopeProvider); + + var documentCount = 0; + var claimCount = 0; + + await foreach (var document in connector.FetchAsync(context, cancellationToken)) + { + documentCount++; + + var batch = await normalizerRouter.NormalizeAsync(document, cancellationToken).ConfigureAwait(false); + if (!batch.Claims.IsDefaultOrEmpty && batch.Claims.Length > 0) + { + claimCount += batch.Claims.Length; + await claimStore.AppendAsync(batch.Claims, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); + } } _logger.LogInformation( - "Connector plugin {PluginName} ({ProviderId}) is available. Execution hooks will be added in subsequent tasks.", - matched.Name, - providerId); - - return ValueTask.CompletedTask; + "Connector {ConnectorId} persisted {DocumentCount} document(s) and {ClaimCount} claim(s) this run.", + connector.Id, + documentCount, + claimCount); } } diff --git a/src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs b/src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs index 78e2021b..edb9de71 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs +++ b/src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs @@ -2,5 +2,5 @@ namespace StellaOps.Excititor.Worker.Scheduling; internal interface IVexProviderRunner { - ValueTask RunAsync(string providerId, CancellationToken cancellationToken); + ValueTask RunAsync(VexWorkerSchedule schedule, CancellationToken cancellationToken); } diff --git a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs b/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs index 8baf67ea..ab4ed168 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs +++ b/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs @@ -75,7 +75,7 @@ internal sealed class VexWorkerHostedService : BackgroundService try { - await _runner.RunAsync(schedule.ProviderId, cancellationToken).ConfigureAwait(false); + await _runner.RunAsync(schedule, cancellationToken).ConfigureAwait(false); var completedAt = _timeProvider.GetUtcNow(); var elapsed = completedAt - startedAt; diff --git a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs b/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs index bd582646..34ca0d91 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs +++ b/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs @@ -1,3 +1,5 @@ +using StellaOps.Excititor.Core; + namespace StellaOps.Excititor.Worker.Scheduling; -internal sealed record VexWorkerSchedule(string ProviderId, TimeSpan Interval, TimeSpan InitialDelay); +internal sealed record VexWorkerSchedule(string ProviderId, TimeSpan Interval, TimeSpan InitialDelay, VexConnectorSettings Settings); diff --git a/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj b/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj index e1f41fef..936335a1 100644 --- a/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj +++ 
b/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj
@@ -15,5 +15,8 @@
+
+
+
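`VexWorkerRetryOptions` above only declares the tuning knobs; the retry loop itself is still in flight under EXCITITOR-WORKER-01-002 (see the TASKS.md table below). A minimal sketch of how those knobs could combine into a jittered exponential backoff follows — the doubling-per-failure policy, the clamping to MaxDelay/RetryCap, and quarantining once FailureThreshold is reached are assumptions for illustration, not the shipped behaviour:

// Sketch only: derives a backoff delay from VexWorkerRetryOptions for the Nth consecutive failure.
// Doubling per failure, MaxDelay/RetryCap clamping, and quarantine-at-threshold are assumptions.
using System;
using StellaOps.Excititor.Worker.Options;

internal static class RetryDelaySketch
{
    public static TimeSpan Compute(VexWorkerRetryOptions options, int consecutiveFailures, Random random)
    {
        if (consecutiveFailures >= options.FailureThreshold)
        {
            // Past the threshold, park the connector for the quarantine window instead of retrying.
            return options.QuarantineDuration;
        }

        // Exponential growth from BaseDelay, capped by MaxDelay and the absolute RetryCap.
        var exponent = Math.Max(0, consecutiveFailures - 1);
        var cap = Math.Min(options.MaxDelay.Ticks, options.RetryCap.Ticks);
        var scaled = Math.Min(options.BaseDelay.Ticks * Math.Pow(2, exponent), cap);

        // Symmetric jitter of +/- JitterRatio keeps simultaneous retries from stampeding.
        var jitter = 1.0 + (random.NextDouble() * 2.0 - 1.0) * options.JitterRatio;
        return TimeSpan.FromTicks(Math.Max(0L, (long)(scaled * jitter)));
    }
}

With the defaults declared above (BaseDelay 5 min, JitterRatio 0.2), a second consecutive failure under this sketch would wait roughly 8–12 minutes before the next attempt.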
diff --git a/src/StellaOps.Excititor.Worker/TASKS.md b/src/StellaOps.Excititor.Worker/TASKS.md
index e7186be3..dc68a06d 100644
--- a/src/StellaOps.Excititor.Worker/TASKS.md
+++ b/src/StellaOps.Excititor.Worker/TASKS.md
@@ -3,6 +3,7 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md
 | Task | Owner(s) | Depends on | Notes |
 |---|---|---|---|
 |EXCITITOR-WORKER-01-001 – Worker host & scheduling|Team Excititor Worker|EXCITITOR-STORAGE-01-003, EXCITITOR-WEB-01-001|**DONE (2025-10-17)** – Worker project bootstraps provider schedules from configuration, integrates plugin catalog discovery, and emits structured logs/metrics-ready events via `VexWorkerHostedService`; scheduling logic covered by `VexWorkerOptionsTests`.|
-|EXCITITOR-WORKER-01-002 – Resume tokens & retry policy|Team Excititor Worker|EXCITITOR-WORKER-01-001|TODO – Implement durable resume markers, exponential backoff with jitter, and quarantine for failing connectors per architecture spec.|
+|EXCITITOR-WORKER-01-002 – Resume tokens & retry policy|Team Excititor Worker|EXCITITOR-WORKER-01-001|DOING (2025-10-19) – Prereq EXCITITOR-WORKER-01-001 closed 2025-10-17; implementing durable resume markers, jittered backoff, and failure quarantine flow.|
 |EXCITITOR-WORKER-01-003 – Verification & cache GC loops|Team Excititor Worker|EXCITITOR-WORKER-01-001, EXCITITOR-ATTEST-01-003, EXCITITOR-EXPORT-01-002|TODO – Add scheduled attestation re-verification and cache pruning routines, surfacing metrics for export reuse ratios.|
-|EXCITITOR-WORKER-01-004 – TTL refresh & stability damper|Team Excititor Worker|EXCITITOR-WORKER-01-001, EXCITITOR-CORE-02-001|TODO – Monitor consensus/VEX TTLs, apply 24–48h dampers before flipping published status/score, and trigger re-resolve when base image or kernel fingerprints change.|
+|EXCITITOR-WORKER-01-004 – TTL refresh & stability damper|Team Excititor Worker|EXCITITOR-WORKER-01-001, EXCITITOR-CORE-02-001|DOING (2025-10-19) – Prereqs EXCITITOR-WORKER-01-001 (closed 2025-10-17) and EXCITITOR-CORE-02-001 (closed 2025-10-19) verified; building TTL monitor with dampers and re-resolve triggers.|
+|EXCITITOR-WORKER-02-001 – Resolve Microsoft.Extensions.Caching.Memory advisory|Team Excititor Worker|EXCITITOR-WORKER-01-001|DOING (2025-10-19) – Prereq EXCITITOR-WORKER-01-001 closed 2025-10-17; upgrading `Microsoft.Extensions.Caching.Memory` stack and refreshing lockfiles/tests to clear NU1903.|
diff --git a/src/StellaOps.Notify.Models.Tests/DocSampleTests.cs b/src/StellaOps.Notify.Models.Tests/DocSampleTests.cs
new file mode 100644
index 00000000..8fc850a6
--- /dev/null
+++ b/src/StellaOps.Notify.Models.Tests/DocSampleTests.cs
@@ -0,0 +1,47 @@
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using Xunit.Sdk;
+
+namespace StellaOps.Notify.Models.Tests;
+
+public sealed class DocSampleTests
+{
+    [Theory]
+    [InlineData("notify-rule@1.sample.json")]
+    [InlineData("notify-channel@1.sample.json")]
+    [InlineData("notify-template@1.sample.json")]
+    [InlineData("notify-event@1.sample.json")]
+    public void CanonicalSamplesStayInSync(string fileName)
+    {
+        var json = LoadSample(fileName);
+        var node = JsonNode.Parse(json) ??
throw new InvalidOperationException("Sample JSON null."); + + string canonical = fileName switch + { + "notify-rule@1.sample.json" => NotifyCanonicalJsonSerializer.Serialize(NotifySchemaMigration.UpgradeRule(node)), + "notify-channel@1.sample.json" => NotifyCanonicalJsonSerializer.Serialize(NotifySchemaMigration.UpgradeChannel(node)), + "notify-template@1.sample.json" => NotifyCanonicalJsonSerializer.Serialize(NotifySchemaMigration.UpgradeTemplate(node)), + "notify-event@1.sample.json" => NotifyCanonicalJsonSerializer.Serialize(NotifyCanonicalJsonSerializer.Deserialize<NotifyEvent>(json)), + _ => throw new ArgumentOutOfRangeException(nameof(fileName), fileName, "Unsupported sample.") + }; + + var canonicalNode = JsonNode.Parse(canonical) ?? throw new InvalidOperationException("Canonical JSON null."); + if (!JsonNode.DeepEquals(node, canonicalNode)) + { + var expected = canonicalNode.ToJsonString(new JsonSerializerOptions { WriteIndented = true }); + var actual = node.ToJsonString(new JsonSerializerOptions { WriteIndented = true }); + throw new XunitException($"Sample '{fileName}' must remain canonical.\nExpected:\n{expected}\nActual:\n{actual}"); + } + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs b/src/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs new file mode 100644 index 00000000..a949d4c2 --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs @@ -0,0 +1,77 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Nodes; + +namespace StellaOps.Notify.Models.Tests; + +public sealed class NotifyCanonicalJsonSerializerTests +{ + [Fact] + public void SerializeRuleIsDeterministic() + { + var ruleA = NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "critical", + match: NotifyRuleMatch.Create(eventKinds: new[] { NotifyEventKinds.ScannerReportReady }), + actions: new[] + { + NotifyRuleAction.Create(actionId: "b", channel: "slack:sec"), + NotifyRuleAction.Create(actionId: "a", channel: "email:soc") + }, + metadata: new Dictionary<string, string> + { + ["beta"] = "2", + ["alpha"] = "1" + }, + createdAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z"), + updatedAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z")); + + var ruleB = NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "critical", + match: NotifyRuleMatch.Create(eventKinds: new[] { NotifyEventKinds.ScannerReportReady }), + actions: new[] + { + NotifyRuleAction.Create(actionId: "a", channel: "email:soc"), + NotifyRuleAction.Create(actionId: "b", channel: "slack:sec") + }, + metadata: new Dictionary<string, string> + { + ["alpha"] = "1", + ["beta"] = "2" + }, + createdAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z"), + updatedAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z")); + + var jsonA = NotifyCanonicalJsonSerializer.Serialize(ruleA); + var jsonB = NotifyCanonicalJsonSerializer.Serialize(ruleB); + + Assert.Equal(jsonA, jsonB); + Assert.Contains("\"schemaVersion\":\"notify.rule@1\"", jsonA, StringComparison.Ordinal); + } + + [Fact] + public void SerializeEventOrdersPayloadKeys() + { + var payload = JsonNode.Parse("{\"b\":2,\"a\":1}"); + var @event = NotifyEvent.Create( + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + tenant:
"tenant-a", + ts: DateTimeOffset.Parse("2025-10-18T05:41:22Z"), + payload: payload, + scope: NotifyEventScope.Create(repo: "ghcr.io/acme/api", digest: "sha256:123")); + + var json = NotifyCanonicalJsonSerializer.Serialize(@event); + + var payloadIndex = json.IndexOf("\"payload\":{", StringComparison.Ordinal); + Assert.NotEqual(-1, payloadIndex); + + var aIndex = json.IndexOf("\"a\":1", payloadIndex, StringComparison.Ordinal); + var bIndex = json.IndexOf("\"b\":2", payloadIndex, StringComparison.Ordinal); + + Assert.True(aIndex is >= 0 && bIndex is >= 0 && aIndex < bIndex, "Payload keys should be ordered alphabetically."); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs b/src/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs new file mode 100644 index 00000000..9c7f15e7 --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs @@ -0,0 +1,46 @@ +using System; +using System.Linq; + +namespace StellaOps.Notify.Models.Tests; + +public sealed class NotifyDeliveryTests +{ + [Fact] + public void AttemptsAreSortedChronologically() + { + var attempts = new[] + { + new NotifyDeliveryAttempt(DateTimeOffset.Parse("2025-10-19T12:25:00Z"), NotifyDeliveryAttemptStatus.Succeeded), + new NotifyDeliveryAttempt(DateTimeOffset.Parse("2025-10-19T12:15:00Z"), NotifyDeliveryAttemptStatus.Sending), + }; + + var delivery = NotifyDelivery.Create( + deliveryId: "delivery-1", + tenantId: "tenant-a", + ruleId: "rule-1", + actionId: "action-1", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Sent, + attempts: attempts); + + Assert.Collection( + delivery.Attempts, + attempt => Assert.Equal(NotifyDeliveryAttemptStatus.Sending, attempt.Status), + attempt => Assert.Equal(NotifyDeliveryAttemptStatus.Succeeded, attempt.Status)); + } + + [Fact] + public void RenderedNormalizesAttachments() + { + var rendered = NotifyDeliveryRendered.Create( + channelType: NotifyChannelType.Slack, + format: NotifyDeliveryFormat.Slack, + target: "#sec", + title: "Alert", + body: "Body", + attachments: new[] { "B", "a", "a" }); + + Assert.Equal(new[] { "B", "a" }.OrderBy(x => x, StringComparer.Ordinal), rendered.Attachments); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs b/src/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs new file mode 100644 index 00000000..5d60fd4b --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Notify.Models.Tests; + +public sealed class NotifyRuleTests +{ + [Fact] + public void ConstructorThrowsWhenActionsMissing() + { + var match = NotifyRuleMatch.Create(eventKinds: new[] { NotifyEventKinds.ScannerReportReady }); + + var exception = Assert.Throws<ArgumentException>(() => + NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "critical", + match: match, + actions: Array.Empty<NotifyRuleAction>())); + + Assert.Contains("At least one action is required", exception.Message, StringComparison.Ordinal); + } + + [Fact] + public void ConstructorNormalizesCollections() + { + var rule = NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "critical", + match: NotifyRuleMatch.Create( + eventKinds: new[] { "Zastava.Admission", NotifyEventKinds.ScannerReportReady }), + actions: new[] + { + NotifyRuleAction.Create(actionId: "b", channel: "slack:sec-alerts", throttle: TimeSpan.FromMinutes(5)), + NotifyRuleAction.Create(actionId: "a", channel: "email:soc", metadata: new
Dictionary<string, string> + { + [" locale "] = " EN-us " + }) + }, + labels: new Dictionary<string, string> + { + [" team "] = " SecOps " + }, + metadata: new Dictionary<string, string> + { + ["source"] = "tests" + }); + + Assert.Equal(NotifySchemaVersions.Rule, rule.SchemaVersion); + Assert.Equal(new[] { "scanner.report.ready", "zastava.admission" }, rule.Match.EventKinds); + Assert.Equal(new[] { "a", "b" }, rule.Actions.Select(action => action.ActionId)); + Assert.Equal(TimeSpan.FromMinutes(5), rule.Actions.Last().Throttle); + Assert.Equal("secops", rule.Labels.Single().Value.ToLowerInvariant()); + Assert.Equal("en-us", rule.Actions.First().Metadata["locale"].ToLowerInvariant()); + + var json = NotifyCanonicalJsonSerializer.Serialize(rule); + Assert.Contains("\"schemaVersion\":\"notify.rule@1\"", json, StringComparison.Ordinal); + Assert.Contains("\"actions\":[{\"actionId\":\"a\"", json, StringComparison.Ordinal); + Assert.Contains("\"throttle\":\"PT5M\"", json, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs b/src/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs new file mode 100644 index 00000000..4ebac44b --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs @@ -0,0 +1,101 @@ +using System; +using System.Text.Json.Nodes; + +namespace StellaOps.Notify.Models.Tests; + +public sealed class NotifySchemaMigrationTests +{ + [Fact] + public void UpgradeRuleAddsSchemaVersionWhenMissing() + { + var json = JsonNode.Parse( + """ + { + "ruleId": "rule-legacy", + "tenantId": "tenant-1", + "name": "legacy", + "enabled": true, + "match": { "eventKinds": ["scanner.report.ready"] }, + "actions": [ { "actionId": "send", "channel": "email:legacy", "enabled": true } ], + "createdAt": "2025-10-18T00:00:00Z", + "updatedAt": "2025-10-18T00:00:00Z" + } + """)!; + + var rule = NotifySchemaMigration.UpgradeRule(json); + + Assert.Equal(NotifySchemaVersions.Rule, rule.SchemaVersion); + Assert.Equal("rule-legacy", rule.RuleId); + } + + [Fact] + public void UpgradeRuleThrowsOnUnknownSchema() + { + var json = JsonNode.Parse( + """ + { + "schemaVersion": "notify.rule@2", + "ruleId": "rule-future", + "tenantId": "tenant-1", + "name": "future", + "enabled": true, + "match": { "eventKinds": ["scanner.report.ready"] }, + "actions": [ { "actionId": "send", "channel": "email:soc", "enabled": true } ], + "createdAt": "2025-10-18T00:00:00Z", + "updatedAt": "2025-10-18T00:00:00Z" + } + """)!; + + var exception = Assert.Throws<NotSupportedException>(() => NotifySchemaMigration.UpgradeRule(json)); + Assert.Contains("notify rule schema version", exception.Message, StringComparison.Ordinal); + } + + [Fact] + public void UpgradeChannelDefaultsMissingVersion() + { + var json = JsonNode.Parse( + """ + { + "channelId": "channel-email", + "tenantId": "tenant-1", + "name": "email:soc", + "type": "email", + "config": { "secretRef": "ref://notify/channels/email/soc" }, + "enabled": true, + "createdAt": "2025-10-18T00:00:00Z", + "updatedAt": "2025-10-18T00:00:00Z" + } + """)!; + + var channel = NotifySchemaMigration.UpgradeChannel(json); + + Assert.Equal(NotifySchemaVersions.Channel, channel.SchemaVersion); + Assert.Equal("channel-email", channel.ChannelId); + } + + [Fact] + public void UpgradeTemplateDefaultsMissingVersion() + { + var json = JsonNode.Parse( + """ + { + "templateId": "tmpl-slack-concise", + "tenantId": "tenant-1", + "channelType": "slack", + "key": "concise", + "locale": "en-us", + "body": "{{summary}}", + "renderMode": "markdown", + "format": "slack", + "createdAt":
"2025-10-18T00:00:00Z", + "updatedAt": "2025-10-18T00:00:00Z" + } + """)!; + + var template = NotifySchemaMigration.UpgradeTemplate(json); + + Assert.Equal(NotifySchemaVersions.Template, template.SchemaVersion); + Assert.Equal("tmpl-slack-concise", template.TemplateId); + } + +} diff --git a/src/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs b/src/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs new file mode 100644 index 00000000..1136bed3 --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs @@ -0,0 +1,52 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Text.Json.Nodes; +using StellaOps.Notify.Models; +using Xunit.Sdk; + +namespace StellaOps.Notify.Models.Tests; + +public sealed class PlatformEventSamplesTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + [Theory] + [InlineData("scanner.report.ready@1.sample.json", NotifyEventKinds.ScannerReportReady)] + [InlineData("scanner.scan.completed@1.sample.json", NotifyEventKinds.ScannerScanCompleted)] + [InlineData("scheduler.rescan.delta@1.sample.json", NotifyEventKinds.SchedulerRescanDelta)] + [InlineData("attestor.logged@1.sample.json", NotifyEventKinds.AttestorLogged)] + public void PlatformEventSamplesRoundtripThroughNotifySerializer(string fileName, string expectedKind) + { + var json = LoadSample(fileName); + var notifyEvent = JsonSerializer.Deserialize(json, SerializerOptions); + + Assert.NotNull(notifyEvent); + Assert.Equal(expectedKind, notifyEvent!.Kind); + Assert.NotEqual(Guid.Empty, notifyEvent.EventId); + Assert.False(string.IsNullOrWhiteSpace(notifyEvent.Tenant)); + Assert.Equal(TimeSpan.Zero, notifyEvent.Ts.Offset); + + var canonicalJson = NotifyCanonicalJsonSerializer.Serialize(notifyEvent); + var canonicalNode = JsonNode.Parse(canonicalJson) ?? throw new InvalidOperationException("Canonical JSON null."); + var sampleNode = JsonNode.Parse(json) ?? 
throw new InvalidOperationException("Sample JSON null."); + + if (!JsonNode.DeepEquals(sampleNode, canonicalNode)) + { + var expected = canonicalNode.ToJsonString(new JsonSerializerOptions { WriteIndented = true }); + var actual = sampleNode.ToJsonString(new JsonSerializerOptions { WriteIndented = true }); + throw new Xunit.Sdk.XunitException($"Sample '{fileName}' must remain canonical.\nExpected:\n{expected}\nActual:\n{actual}"); + } + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to locate sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj b/src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj new file mode 100644 index 00000000..30ba9939 --- /dev/null +++ b/src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj @@ -0,0 +1,20 @@ + + + net10.0 + enable + enable + + + + + + + + + Always + + + Always + + + diff --git a/src/StellaOps.Notify.Models/Iso8601DurationConverter.cs b/src/StellaOps.Notify.Models/Iso8601DurationConverter.cs new file mode 100644 index 00000000..405cd736 --- /dev/null +++ b/src/StellaOps.Notify.Models/Iso8601DurationConverter.cs @@ -0,0 +1,28 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Xml; + +namespace StellaOps.Notify.Models; + +internal sealed class Iso8601DurationConverter : JsonConverter +{ + public override TimeSpan Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.String) + { + var value = reader.GetString(); + if (!string.IsNullOrWhiteSpace(value)) + { + return XmlConvert.ToTimeSpan(value); + } + } + + throw new JsonException("Expected ISO 8601 duration string."); + } + + public override void Write(Utf8JsonWriter writer, TimeSpan value, JsonSerializerOptions options) + { + var normalized = XmlConvert.ToString(value); + writer.WriteStringValue(normalized); + } +} diff --git a/src/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs b/src/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs new file mode 100644 index 00000000..2112ad1e --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs @@ -0,0 +1,637 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; + +namespace StellaOps.Notify.Models; + +/// +/// Deterministic JSON serializer tuned for Notify canonical documents. 
+/// +public static class NotifyCanonicalJsonSerializer +{ + private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false, useDeterministicResolver: true); + private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true, useDeterministicResolver: true); + private static readonly JsonSerializerOptions ReadOptions = CreateOptions(writeIndented: false, useDeterministicResolver: false); + + private static readonly IReadOnlyDictionary PropertyOrderOverrides = new Dictionary + { + { + typeof(NotifyRule), + new[] + { + "schemaVersion", + "ruleId", + "tenantId", + "name", + "description", + "enabled", + "match", + "actions", + "labels", + "metadata", + "createdBy", + "createdAt", + "updatedBy", + "updatedAt", + } + }, + { + typeof(NotifyRuleMatch), + new[] + { + "eventKinds", + "namespaces", + "repositories", + "digests", + "labels", + "componentPurls", + "minSeverity", + "verdicts", + "kevOnly", + "vex", + } + }, + { + typeof(NotifyRuleAction), + new[] + { + "actionId", + "channel", + "template", + "locale", + "digest", + "throttle", + "metadata", + "enabled", + } + }, + { + typeof(NotifyChannel), + new[] + { + "schemaVersion", + "channelId", + "tenantId", + "name", + "type", + "displayName", + "description", + "config", + "enabled", + "labels", + "metadata", + "createdBy", + "createdAt", + "updatedBy", + "updatedAt", + } + }, + { + typeof(NotifyChannelConfig), + new[] + { + "secretRef", + "target", + "endpoint", + "properties", + "limits", + } + }, + { + typeof(NotifyTemplate), + new[] + { + "schemaVersion", + "templateId", + "tenantId", + "channelType", + "key", + "locale", + "description", + "renderMode", + "body", + "format", + "metadata", + "createdBy", + "createdAt", + "updatedBy", + "updatedAt", + } + }, + { + typeof(NotifyEvent), + new[] + { + "eventId", + "kind", + "version", + "tenant", + "ts", + "actor", + "scope", + "payload", + "attributes", + } + }, + { + typeof(NotifyEventScope), + new[] + { + "namespace", + "repo", + "digest", + "component", + "image", + "labels", + "attributes", + } + }, + { + typeof(NotifyDelivery), + new[] + { + "deliveryId", + "tenantId", + "ruleId", + "actionId", + "eventId", + "kind", + "status", + "statusReason", + "createdAt", + "sentAt", + "completedAt", + "rendered", + "attempts", + "metadata", + } + }, + { + typeof(NotifyDeliveryAttempt), + new[] + { + "timestamp", + "status", + "statusCode", + "reason", + } + }, + { + typeof(NotifyDeliveryRendered), + new[] + { + "title", + "summary", + "target", + "locale", + "channelType", + "format", + "body", + "textBody", + "bodyHash", + "attachments", + } + }, + }; + + public static string Serialize(T value) + => JsonSerializer.Serialize(value, CompactOptions); + + public static string SerializeIndented(T value) + => JsonSerializer.Serialize(value, PrettyOptions); + + public static T Deserialize(string json) + { + if (typeof(T) == typeof(NotifyRule)) + { + var dto = JsonSerializer.Deserialize(json, ReadOptions) + ?? throw new InvalidOperationException("Unable to deserialize NotifyRule payload."); + return (T)(object)dto.ToModel(); + } + + if (typeof(T) == typeof(NotifyChannel)) + { + var dto = JsonSerializer.Deserialize(json, ReadOptions) + ?? throw new InvalidOperationException("Unable to deserialize NotifyChannel payload."); + return (T)(object)dto.ToModel(); + } + + if (typeof(T) == typeof(NotifyTemplate)) + { + var dto = JsonSerializer.Deserialize(json, ReadOptions) + ?? 
throw new InvalidOperationException("Unable to deserialize NotifyTemplate payload."); + return (T)(object)dto.ToModel(); + } + + if (typeof(T) == typeof(NotifyEvent)) + { + var dto = JsonSerializer.Deserialize(json, ReadOptions) + ?? throw new InvalidOperationException("Unable to deserialize NotifyEvent payload."); + return (T)(object)dto.ToModel(); + } + + if (typeof(T) == typeof(NotifyDelivery)) + { + var dto = JsonSerializer.Deserialize(json, ReadOptions) + ?? throw new InvalidOperationException("Unable to deserialize NotifyDelivery payload."); + return (T)(object)dto.ToModel(); + } + + return JsonSerializer.Deserialize(json, ReadOptions) + ?? throw new InvalidOperationException($"Unable to deserialize type {typeof(T).Name}."); + } + + private static JsonSerializerOptions CreateOptions(bool writeIndented, bool useDeterministicResolver) + { + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DictionaryKeyPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = writeIndented, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + }; + + if (useDeterministicResolver) + { + var baselineResolver = options.TypeInfoResolver ?? new DefaultJsonTypeInfoResolver(); + options.TypeInfoResolver = new DeterministicTypeInfoResolver(baselineResolver); + } + + options.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase, allowIntegerValues: false)); + options.Converters.Add(new Iso8601DurationConverter()); + return options; + } + + private sealed class DeterministicTypeInfoResolver : IJsonTypeInfoResolver + { + private readonly IJsonTypeInfoResolver _inner; + + public DeterministicTypeInfoResolver(IJsonTypeInfoResolver inner) + { + _inner = inner ?? throw new ArgumentNullException(nameof(inner)); + } + + public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options) + { + var info = _inner.GetTypeInfo(type, options) + ?? throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'."); + + if (info.Kind is JsonTypeInfoKind.Object && info.Properties is { Count: > 1 }) + { + var ordered = info.Properties + .OrderBy(property => GetPropertyOrder(type, property.Name)) + .ThenBy(property => property.Name, StringComparer.Ordinal) + .ToArray(); + + info.Properties.Clear(); + foreach (var property in ordered) + { + info.Properties.Add(property); + } + } + + return info; + } + + private static int GetPropertyOrder(Type type, string propertyName) + { + if (PropertyOrderOverrides.TryGetValue(type, out var order) && Array.IndexOf(order, propertyName) is { } index and >= 0) + { + return index; + } + + return int.MaxValue; + } + } +} + +internal sealed class NotifyRuleDto +{ + public string? SchemaVersion { get; set; } + public string? RuleId { get; set; } + public string? TenantId { get; set; } + public string? Name { get; set; } + public string? Description { get; set; } + public bool? Enabled { get; set; } + public NotifyRuleMatchDto? Match { get; set; } + public List? Actions { get; set; } + public Dictionary? Labels { get; set; } + public Dictionary? Metadata { get; set; } + public string? CreatedBy { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public string? UpdatedBy { get; set; } + public DateTimeOffset? UpdatedAt { get; set; } + + public NotifyRule ToModel() + => NotifyRule.Create( + RuleId ?? throw new InvalidOperationException("ruleId missing"), + TenantId ?? throw new InvalidOperationException("tenantId missing"), + Name ?? 
throw new InvalidOperationException("name missing"), + (Match ?? new NotifyRuleMatchDto()).ToModel(), + Actions?.Select(action => action.ToModel()) ?? Array.Empty(), + Enabled.GetValueOrDefault(true), + Description, + Labels, + Metadata, + CreatedBy, + CreatedAt, + UpdatedBy, + UpdatedAt, + SchemaVersion); +} + +internal sealed class NotifyRuleMatchDto +{ + public List? EventKinds { get; set; } + public List? Namespaces { get; set; } + public List? Repositories { get; set; } + public List? Digests { get; set; } + public List? Labels { get; set; } + public List? ComponentPurls { get; set; } + public string? MinSeverity { get; set; } + public List? Verdicts { get; set; } + public bool? KevOnly { get; set; } + public NotifyRuleMatchVexDto? Vex { get; set; } + + public NotifyRuleMatch ToModel() + => NotifyRuleMatch.Create( + EventKinds, + Namespaces, + Repositories, + Digests, + Labels, + ComponentPurls, + MinSeverity, + Verdicts, + KevOnly, + Vex?.ToModel()); +} + +internal sealed class NotifyRuleMatchVexDto +{ + public bool IncludeAcceptedJustifications { get; set; } = true; + public bool IncludeRejectedJustifications { get; set; } + public bool IncludeUnknownJustifications { get; set; } + public List? JustificationKinds { get; set; } + + public NotifyRuleMatchVex ToModel() + => NotifyRuleMatchVex.Create( + IncludeAcceptedJustifications, + IncludeRejectedJustifications, + IncludeUnknownJustifications, + JustificationKinds); +} + +internal sealed class NotifyRuleActionDto +{ + public string? ActionId { get; set; } + public string? Channel { get; set; } + public string? Template { get; set; } + public string? Digest { get; set; } + public TimeSpan? Throttle { get; set; } + public string? Locale { get; set; } + public bool? Enabled { get; set; } + public Dictionary? Metadata { get; set; } + + public NotifyRuleAction ToModel() + => NotifyRuleAction.Create( + ActionId ?? throw new InvalidOperationException("actionId missing"), + Channel ?? throw new InvalidOperationException("channel missing"), + Template, + Digest, + Throttle, + Locale, + Enabled.GetValueOrDefault(true), + Metadata); +} + +internal sealed class NotifyChannelDto +{ + public string? SchemaVersion { get; set; } + public string? ChannelId { get; set; } + public string? TenantId { get; set; } + public string? Name { get; set; } + public NotifyChannelType Type { get; set; } + public NotifyChannelConfigDto? Config { get; set; } + public string? DisplayName { get; set; } + public string? Description { get; set; } + public bool? Enabled { get; set; } + public Dictionary? Labels { get; set; } + public Dictionary? Metadata { get; set; } + public string? CreatedBy { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public string? UpdatedBy { get; set; } + public DateTimeOffset? UpdatedAt { get; set; } + + public NotifyChannel ToModel() + => NotifyChannel.Create( + ChannelId ?? throw new InvalidOperationException("channelId missing"), + TenantId ?? throw new InvalidOperationException("tenantId missing"), + Name ?? throw new InvalidOperationException("name missing"), + Type, + (Config ?? new NotifyChannelConfigDto()).ToModel(), + DisplayName, + Description, + Enabled.GetValueOrDefault(true), + Labels, + Metadata, + CreatedBy, + CreatedAt, + UpdatedBy, + UpdatedAt, + SchemaVersion); +} + +internal sealed class NotifyChannelConfigDto +{ + public string? SecretRef { get; set; } + public string? Target { get; set; } + public string? Endpoint { get; set; } + public Dictionary? Properties { get; set; } + public NotifyChannelLimitsDto? 
Limits { get; set; } + + public NotifyChannelConfig ToModel() + => NotifyChannelConfig.Create( + SecretRef ?? throw new InvalidOperationException("secretRef missing"), + Target, + Endpoint, + Properties, + Limits?.ToModel()); +} + +internal sealed class NotifyChannelLimitsDto +{ + public int? Concurrency { get; set; } + public int? RequestsPerMinute { get; set; } + public TimeSpan? Timeout { get; set; } + public int? MaxBatchSize { get; set; } + + public NotifyChannelLimits ToModel() + => new( + Concurrency, + RequestsPerMinute, + Timeout, + MaxBatchSize); +} + +internal sealed class NotifyTemplateDto +{ + public string? SchemaVersion { get; set; } + public string? TemplateId { get; set; } + public string? TenantId { get; set; } + public NotifyChannelType ChannelType { get; set; } + public string? Key { get; set; } + public string? Locale { get; set; } + public string? Body { get; set; } + public NotifyTemplateRenderMode RenderMode { get; set; } = NotifyTemplateRenderMode.Markdown; + public NotifyDeliveryFormat Format { get; set; } = NotifyDeliveryFormat.Json; + public string? Description { get; set; } + public Dictionary? Metadata { get; set; } + public string? CreatedBy { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public string? UpdatedBy { get; set; } + public DateTimeOffset? UpdatedAt { get; set; } + + public NotifyTemplate ToModel() + => NotifyTemplate.Create( + TemplateId ?? throw new InvalidOperationException("templateId missing"), + TenantId ?? throw new InvalidOperationException("tenantId missing"), + ChannelType, + Key ?? throw new InvalidOperationException("key missing"), + Locale ?? throw new InvalidOperationException("locale missing"), + Body ?? throw new InvalidOperationException("body missing"), + RenderMode, + Format, + Description, + Metadata, + CreatedBy, + CreatedAt, + UpdatedBy, + UpdatedAt, + SchemaVersion); +} + +internal sealed class NotifyEventDto +{ + public Guid EventId { get; set; } + public string? Kind { get; set; } + public string? Tenant { get; set; } + public DateTimeOffset Ts { get; set; } + public JsonNode? Payload { get; set; } + public NotifyEventScopeDto? Scope { get; set; } + public string? Version { get; set; } + public string? Actor { get; set; } + public Dictionary? Attributes { get; set; } + + public NotifyEvent ToModel() + => NotifyEvent.Create( + EventId, + Kind ?? throw new InvalidOperationException("kind missing"), + Tenant ?? throw new InvalidOperationException("tenant missing"), + Ts, + Payload, + Scope?.ToModel(), + Version, + Actor, + Attributes); +} + +internal sealed class NotifyEventScopeDto +{ + public string? Namespace { get; set; } + public string? Repo { get; set; } + public string? Digest { get; set; } + public string? Component { get; set; } + public string? Image { get; set; } + public Dictionary? Labels { get; set; } + public Dictionary? Attributes { get; set; } + + public NotifyEventScope ToModel() + => NotifyEventScope.Create( + Namespace, + Repo, + Digest, + Component, + Image, + Labels, + Attributes); +} + +internal sealed class NotifyDeliveryDto +{ + public string? DeliveryId { get; set; } + public string? TenantId { get; set; } + public string? RuleId { get; set; } + public string? ActionId { get; set; } + public Guid EventId { get; set; } + public string? Kind { get; set; } + public NotifyDeliveryStatus Status { get; set; } + public string? StatusReason { get; set; } + public NotifyDeliveryRenderedDto? Rendered { get; set; } + public List? Attempts { get; set; } + public Dictionary? 
Metadata { get; set; } + public DateTimeOffset? CreatedAt { get; set; } + public DateTimeOffset? SentAt { get; set; } + public DateTimeOffset? CompletedAt { get; set; } + + public NotifyDelivery ToModel() + => NotifyDelivery.Create( + DeliveryId ?? throw new InvalidOperationException("deliveryId missing"), + TenantId ?? throw new InvalidOperationException("tenantId missing"), + RuleId ?? throw new InvalidOperationException("ruleId missing"), + ActionId ?? throw new InvalidOperationException("actionId missing"), + EventId, + Kind ?? throw new InvalidOperationException("kind missing"), + Status, + StatusReason, + Rendered?.ToModel(), + Attempts?.Select(attempt => attempt.ToModel()), + Metadata, + CreatedAt, + SentAt, + CompletedAt); +} + +internal sealed class NotifyDeliveryAttemptDto +{ + public DateTimeOffset Timestamp { get; set; } + public NotifyDeliveryAttemptStatus Status { get; set; } + public int? StatusCode { get; set; } + public string? Reason { get; set; } + + public NotifyDeliveryAttempt ToModel() + => new(Timestamp, Status, StatusCode, Reason); +} + +internal sealed class NotifyDeliveryRenderedDto +{ + public NotifyChannelType ChannelType { get; set; } + public NotifyDeliveryFormat Format { get; set; } + public string? Target { get; set; } + public string? Title { get; set; } + public string? Body { get; set; } + public string? Summary { get; set; } + public string? TextBody { get; set; } + public string? Locale { get; set; } + public string? BodyHash { get; set; } + public List? Attachments { get; set; } + + public NotifyDeliveryRendered ToModel() + => NotifyDeliveryRendered.Create( + ChannelType, + Format, + Target ?? throw new InvalidOperationException("target missing"), + Title ?? throw new InvalidOperationException("title missing"), + Body ?? throw new InvalidOperationException("body missing"), + Summary, + TextBody, + Locale, + BodyHash, + Attachments); +} diff --git a/src/StellaOps.Notify.Models/NotifyChannel.cs b/src/StellaOps.Notify.Models/NotifyChannel.cs new file mode 100644 index 00000000..744e1d4c --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyChannel.cs @@ -0,0 +1,235 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Configured delivery channel (Slack workspace, Teams webhook, SMTP profile, etc.). +/// +public sealed record NotifyChannel +{ + [JsonConstructor] + public NotifyChannel( + string channelId, + string tenantId, + string name, + NotifyChannelType type, + NotifyChannelConfig config, + string? displayName = null, + string? description = null, + bool enabled = true, + ImmutableDictionary? labels = null, + ImmutableDictionary? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? schemaVersion = null) + { + SchemaVersion = NotifySchemaVersions.EnsureChannel(schemaVersion); + ChannelId = NotifyValidation.EnsureNotNullOrWhiteSpace(channelId, nameof(channelId)); + TenantId = NotifyValidation.EnsureNotNullOrWhiteSpace(tenantId, nameof(tenantId)); + Name = NotifyValidation.EnsureNotNullOrWhiteSpace(name, nameof(name)); + Type = type; + Config = config ?? 
throw new ArgumentNullException(nameof(config)); + DisplayName = NotifyValidation.TrimToNull(displayName); + Description = NotifyValidation.TrimToNull(description); + Enabled = enabled; + + Labels = NotifyValidation.NormalizeStringDictionary(labels); + Metadata = NotifyValidation.NormalizeStringDictionary(metadata); + + CreatedBy = NotifyValidation.TrimToNull(createdBy); + CreatedAt = NotifyValidation.EnsureUtc(createdAt ?? DateTimeOffset.UtcNow); + UpdatedBy = NotifyValidation.TrimToNull(updatedBy); + UpdatedAt = NotifyValidation.EnsureUtc(updatedAt ?? CreatedAt); + } + + public static NotifyChannel Create( + string channelId, + string tenantId, + string name, + NotifyChannelType type, + NotifyChannelConfig config, + string? displayName = null, + string? description = null, + bool enabled = true, + IEnumerable>? labels = null, + IEnumerable>? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? schemaVersion = null) + { + return new NotifyChannel( + channelId, + tenantId, + name, + type, + config, + displayName, + description, + enabled, + ToImmutableDictionary(labels), + ToImmutableDictionary(metadata), + createdBy, + createdAt, + updatedBy, + updatedAt, + schemaVersion); + } + + public string SchemaVersion { get; } + + public string ChannelId { get; } + + public string TenantId { get; } + + public string Name { get; } + + public NotifyChannelType Type { get; } + + public NotifyChannelConfig Config { get; } + + public string? DisplayName { get; } + + public string? Description { get; } + + public bool Enabled { get; } + + public ImmutableDictionary Labels { get; } + + public ImmutableDictionary Metadata { get; } + + public string? CreatedBy { get; } + + public DateTimeOffset CreatedAt { get; } + + public string? UpdatedBy { get; } + + public DateTimeOffset UpdatedAt { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} + +/// +/// Channel configuration payload (secret reference, destination coordinates, connector-specific metadata). +/// +public sealed record NotifyChannelConfig +{ + [JsonConstructor] + public NotifyChannelConfig( + string secretRef, + string? target = null, + string? endpoint = null, + ImmutableDictionary? properties = null, + NotifyChannelLimits? limits = null) + { + SecretRef = NotifyValidation.EnsureNotNullOrWhiteSpace(secretRef, nameof(secretRef)); + Target = NotifyValidation.TrimToNull(target); + Endpoint = NotifyValidation.TrimToNull(endpoint); + Properties = NotifyValidation.NormalizeStringDictionary(properties); + Limits = limits; + } + + public static NotifyChannelConfig Create( + string secretRef, + string? target = null, + string? endpoint = null, + IEnumerable>? properties = null, + NotifyChannelLimits? limits = null) + { + return new NotifyChannelConfig( + secretRef, + target, + endpoint, + ToImmutableDictionary(properties), + limits); + } + + public string SecretRef { get; } + + public string? Target { get; } + + public string? Endpoint { get; } + + public ImmutableDictionary Properties { get; } + + public NotifyChannelLimits? Limits { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? 
pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} + +/// +/// Optional per-channel limits that influence worker behaviour. +/// +public sealed record NotifyChannelLimits +{ + [JsonConstructor] + public NotifyChannelLimits( + int? concurrency = null, + int? requestsPerMinute = null, + TimeSpan? timeout = null, + int? maxBatchSize = null) + { + if (concurrency is < 1) + { + throw new ArgumentOutOfRangeException(nameof(concurrency), "Concurrency must be positive when specified."); + } + + if (requestsPerMinute is < 1) + { + throw new ArgumentOutOfRangeException(nameof(requestsPerMinute), "Requests per minute must be positive when specified."); + } + + if (maxBatchSize is < 1) + { + throw new ArgumentOutOfRangeException(nameof(maxBatchSize), "Max batch size must be positive when specified."); + } + + Concurrency = concurrency; + RequestsPerMinute = requestsPerMinute; + Timeout = timeout is { Ticks: > 0 } ? timeout : null; + MaxBatchSize = maxBatchSize; + } + + public int? Concurrency { get; } + + public int? RequestsPerMinute { get; } + + public TimeSpan? Timeout { get; } + + public int? MaxBatchSize { get; } +} diff --git a/src/StellaOps.Notify.Models/NotifyDelivery.cs b/src/StellaOps.Notify.Models/NotifyDelivery.cs new file mode 100644 index 00000000..1fba7d2c --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyDelivery.cs @@ -0,0 +1,252 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Delivery ledger entry capturing render output, attempts, and status transitions. +/// +public sealed record NotifyDelivery +{ + [JsonConstructor] + public NotifyDelivery( + string deliveryId, + string tenantId, + string ruleId, + string actionId, + Guid eventId, + string kind, + NotifyDeliveryStatus status, + string? statusReason = null, + NotifyDeliveryRendered? rendered = null, + ImmutableArray attempts = default, + ImmutableDictionary? metadata = null, + DateTimeOffset? createdAt = null, + DateTimeOffset? sentAt = null, + DateTimeOffset? completedAt = null) + { + DeliveryId = NotifyValidation.EnsureNotNullOrWhiteSpace(deliveryId, nameof(deliveryId)); + TenantId = NotifyValidation.EnsureNotNullOrWhiteSpace(tenantId, nameof(tenantId)); + RuleId = NotifyValidation.EnsureNotNullOrWhiteSpace(ruleId, nameof(ruleId)); + ActionId = NotifyValidation.EnsureNotNullOrWhiteSpace(actionId, nameof(actionId)); + EventId = eventId; + Kind = NotifyValidation.EnsureNotNullOrWhiteSpace(kind, nameof(kind)).ToLowerInvariant(); + Status = status; + StatusReason = NotifyValidation.TrimToNull(statusReason); + Rendered = rendered; + + Attempts = NormalizeAttempts(attempts); + Metadata = NotifyValidation.NormalizeStringDictionary(metadata); + + CreatedAt = NotifyValidation.EnsureUtc(createdAt ?? DateTimeOffset.UtcNow); + SentAt = NotifyValidation.EnsureUtc(sentAt); + CompletedAt = NotifyValidation.EnsureUtc(completedAt); + } + + public static NotifyDelivery Create( + string deliveryId, + string tenantId, + string ruleId, + string actionId, + Guid eventId, + string kind, + NotifyDeliveryStatus status, + string? statusReason = null, + NotifyDeliveryRendered? rendered = null, + IEnumerable? attempts = null, + IEnumerable>? metadata = null, + DateTimeOffset? createdAt = null, + DateTimeOffset? 
sentAt = null, + DateTimeOffset? completedAt = null) + { + return new NotifyDelivery( + deliveryId, + tenantId, + ruleId, + actionId, + eventId, + kind, + status, + statusReason, + rendered, + ToImmutableArray(attempts), + ToImmutableDictionary(metadata), + createdAt, + sentAt, + completedAt); + } + + public string DeliveryId { get; } + + public string TenantId { get; } + + public string RuleId { get; } + + public string ActionId { get; } + + public Guid EventId { get; } + + public string Kind { get; } + + public NotifyDeliveryStatus Status { get; } + + public string? StatusReason { get; } + + public NotifyDeliveryRendered? Rendered { get; } + + public ImmutableArray Attempts { get; } + + public ImmutableDictionary Metadata { get; } + + public DateTimeOffset CreatedAt { get; } + + public DateTimeOffset? SentAt { get; } + + public DateTimeOffset? CompletedAt { get; } + + private static ImmutableArray NormalizeAttempts(ImmutableArray attempts) + { + var source = attempts.IsDefault ? Array.Empty() : attempts.AsEnumerable(); + return source + .Where(static attempt => attempt is not null) + .OrderBy(static attempt => attempt.Timestamp) + .ToImmutableArray(); + } + + private static ImmutableArray ToImmutableArray(IEnumerable? attempts) + { + if (attempts is null) + { + return ImmutableArray.Empty; + } + + return attempts.ToImmutableArray(); + } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} + +/// +/// Individual delivery attempt outcome. +/// +public sealed record NotifyDeliveryAttempt +{ + [JsonConstructor] + public NotifyDeliveryAttempt( + DateTimeOffset timestamp, + NotifyDeliveryAttemptStatus status, + int? statusCode = null, + string? reason = null) + { + Timestamp = NotifyValidation.EnsureUtc(timestamp); + Status = status; + if (statusCode is < 0) + { + throw new ArgumentOutOfRangeException(nameof(statusCode), "Status code must be positive when specified."); + } + + StatusCode = statusCode; + Reason = NotifyValidation.TrimToNull(reason); + } + + public DateTimeOffset Timestamp { get; } + + public NotifyDeliveryAttemptStatus Status { get; } + + public int? StatusCode { get; } + + public string? Reason { get; } +} + +/// +/// Rendered payload snapshot for audit purposes (redacted as needed). +/// +public sealed record NotifyDeliveryRendered +{ + [JsonConstructor] + public NotifyDeliveryRendered( + NotifyChannelType channelType, + NotifyDeliveryFormat format, + string target, + string title, + string body, + string? summary = null, + string? textBody = null, + string? locale = null, + string? bodyHash = null, + ImmutableArray attachments = default) + { + ChannelType = channelType; + Format = format; + Target = NotifyValidation.EnsureNotNullOrWhiteSpace(target, nameof(target)); + Title = NotifyValidation.EnsureNotNullOrWhiteSpace(title, nameof(title)); + Body = NotifyValidation.EnsureNotNullOrWhiteSpace(body, nameof(body)); + Summary = NotifyValidation.TrimToNull(summary); + TextBody = NotifyValidation.TrimToNull(textBody); + Locale = NotifyValidation.TrimToNull(locale)?.ToLowerInvariant(); + BodyHash = NotifyValidation.TrimToNull(bodyHash); + Attachments = NotifyValidation.NormalizeStringSet(attachments.IsDefault ? 
Array.Empty() : attachments.AsEnumerable()); + } + + public static NotifyDeliveryRendered Create( + NotifyChannelType channelType, + NotifyDeliveryFormat format, + string target, + string title, + string body, + string? summary = null, + string? textBody = null, + string? locale = null, + string? bodyHash = null, + IEnumerable? attachments = null) + { + return new NotifyDeliveryRendered( + channelType, + format, + target, + title, + body, + summary, + textBody, + locale, + bodyHash, + attachments is null ? ImmutableArray.Empty : attachments.ToImmutableArray()); + } + + public NotifyChannelType ChannelType { get; } + + public NotifyDeliveryFormat Format { get; } + + public string Target { get; } + + public string Title { get; } + + public string Body { get; } + + public string? Summary { get; } + + public string? TextBody { get; } + + public string? Locale { get; } + + public string? BodyHash { get; } + + public ImmutableArray Attachments { get; } +} diff --git a/src/StellaOps.Notify.Models/NotifyEnums.cs b/src/StellaOps.Notify.Models/NotifyEnums.cs new file mode 100644 index 00000000..6c87c404 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyEnums.cs @@ -0,0 +1,70 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Supported Notify channel types. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum NotifyChannelType +{ + Slack, + Teams, + Email, + Webhook, + Custom, +} + +/// +/// Delivery lifecycle states tracked for audit and retries. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum NotifyDeliveryStatus +{ + Pending, + Sent, + Failed, + Throttled, + Digested, + Dropped, +} + +/// +/// Individual attempt status recorded during delivery retries. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum NotifyDeliveryAttemptStatus +{ + Enqueued, + Sending, + Succeeded, + Failed, + Throttled, + Skipped, +} + +/// +/// Rendering modes for templates to help connectors decide format handling. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum NotifyTemplateRenderMode +{ + Markdown, + Html, + AdaptiveCard, + PlainText, + Json, +} + +/// +/// Structured representation of rendered payload format. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum NotifyDeliveryFormat +{ + Slack, + Teams, + Email, + Webhook, + Json, +} diff --git a/src/StellaOps.Notify.Models/NotifyEvent.cs b/src/StellaOps.Notify.Models/NotifyEvent.cs new file mode 100644 index 00000000..d50c6e39 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyEvent.cs @@ -0,0 +1,168 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Canonical platform event envelope consumed by Notify. +/// +public sealed record NotifyEvent +{ + [JsonConstructor] + public NotifyEvent( + Guid eventId, + string kind, + string tenant, + DateTimeOffset ts, + JsonNode? payload, + NotifyEventScope? scope = null, + string? version = null, + string? actor = null, + ImmutableDictionary? 
attributes = null) + { + EventId = eventId; + Kind = NotifyValidation.EnsureNotNullOrWhiteSpace(kind, nameof(kind)).ToLowerInvariant(); + Tenant = NotifyValidation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)); + Ts = NotifyValidation.EnsureUtc(ts); + Payload = NotifyValidation.NormalizeJsonNode(payload); + Scope = scope; + Version = NotifyValidation.TrimToNull(version); + Actor = NotifyValidation.TrimToNull(actor); + Attributes = NotifyValidation.NormalizeStringDictionary(attributes); + } + + public static NotifyEvent Create( + Guid eventId, + string kind, + string tenant, + DateTimeOffset ts, + JsonNode? payload, + NotifyEventScope? scope = null, + string? version = null, + string? actor = null, + IEnumerable>? attributes = null) + { + return new NotifyEvent( + eventId, + kind, + tenant, + ts, + payload, + scope, + version, + actor, + ToImmutableDictionary(attributes)); + } + + public Guid EventId { get; } + + public string Kind { get; } + + public string Tenant { get; } + + public DateTimeOffset Ts { get; } + + public JsonNode? Payload { get; } + + public NotifyEventScope? Scope { get; } + + public string? Version { get; } + + public string? Actor { get; } + + public ImmutableDictionary Attributes { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} + +/// +/// Optional scope block describing where the event originated (namespace/repo/digest/etc.). +/// +public sealed record NotifyEventScope +{ + [JsonConstructor] + public NotifyEventScope( + string? @namespace = null, + string? repo = null, + string? digest = null, + string? component = null, + string? image = null, + ImmutableDictionary? labels = null, + ImmutableDictionary? attributes = null) + { + Namespace = NotifyValidation.TrimToNull(@namespace); + Repo = NotifyValidation.TrimToNull(repo); + Digest = NotifyValidation.TrimToNull(digest); + Component = NotifyValidation.TrimToNull(component); + Image = NotifyValidation.TrimToNull(image); + Labels = NotifyValidation.NormalizeStringDictionary(labels); + Attributes = NotifyValidation.NormalizeStringDictionary(attributes); + } + + public static NotifyEventScope Create( + string? @namespace = null, + string? repo = null, + string? digest = null, + string? component = null, + string? image = null, + IEnumerable>? labels = null, + IEnumerable>? attributes = null) + { + return new NotifyEventScope( + @namespace, + repo, + digest, + component, + image, + ToImmutableDictionary(labels), + ToImmutableDictionary(attributes)); + } + + public string? Namespace { get; } + + public string? Repo { get; } + + public string? Digest { get; } + + public string? Component { get; } + + public string? Image { get; } + + public ImmutableDictionary Labels { get; } + + public ImmutableDictionary Attributes { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? 
pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Notify.Models/NotifyEventKinds.cs b/src/StellaOps.Notify.Models/NotifyEventKinds.cs new file mode 100644 index 00000000..8bfc93d4 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyEventKinds.cs @@ -0,0 +1,15 @@ +namespace StellaOps.Notify.Models; + +/// +/// Known platform event kind identifiers consumed by Notify. +/// +public static class NotifyEventKinds +{ + public const string ScannerReportReady = "scanner.report.ready"; + public const string ScannerScanCompleted = "scanner.scan.completed"; + public const string SchedulerRescanDelta = "scheduler.rescan.delta"; + public const string AttestorLogged = "attestor.logged"; + public const string ZastavaAdmission = "zastava.admission"; + public const string FeedserExportCompleted = "feedser.export.completed"; + public const string VexerExportCompleted = "vexer.export.completed"; +} diff --git a/src/StellaOps.Notify.Models/NotifyRule.cs b/src/StellaOps.Notify.Models/NotifyRule.cs new file mode 100644 index 00000000..8c3e5145 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyRule.cs @@ -0,0 +1,388 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Rule definition describing how platform events are matched and routed to delivery actions. +/// +public sealed record NotifyRule +{ + [JsonConstructor] + public NotifyRule( + string ruleId, + string tenantId, + string name, + NotifyRuleMatch match, + ImmutableArray actions, + bool enabled = true, + string? description = null, + ImmutableDictionary? labels = null, + ImmutableDictionary? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? schemaVersion = null) + { + SchemaVersion = NotifySchemaVersions.EnsureRule(schemaVersion); + RuleId = NotifyValidation.EnsureNotNullOrWhiteSpace(ruleId, nameof(ruleId)); + TenantId = NotifyValidation.EnsureNotNullOrWhiteSpace(tenantId, nameof(tenantId)); + Name = NotifyValidation.EnsureNotNullOrWhiteSpace(name, nameof(name)); + Description = NotifyValidation.TrimToNull(description); + Match = match ?? throw new ArgumentNullException(nameof(match)); + Enabled = enabled; + + Actions = NormalizeActions(actions); + if (Actions.IsDefaultOrEmpty) + { + throw new ArgumentException("At least one action is required.", nameof(actions)); + } + + Labels = NotifyValidation.NormalizeStringDictionary(labels); + Metadata = NotifyValidation.NormalizeStringDictionary(metadata); + + CreatedBy = NotifyValidation.TrimToNull(createdBy); + CreatedAt = NotifyValidation.EnsureUtc(createdAt ?? DateTimeOffset.UtcNow); + UpdatedBy = NotifyValidation.TrimToNull(updatedBy); + UpdatedAt = NotifyValidation.EnsureUtc(updatedAt ?? CreatedAt); + } + + public static NotifyRule Create( + string ruleId, + string tenantId, + string name, + NotifyRuleMatch match, + IEnumerable? actions, + bool enabled = true, + string? description = null, + IEnumerable>? labels = null, + IEnumerable>? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? 
schemaVersion = null) + { + return new NotifyRule( + ruleId, + tenantId, + name, + match, + ToImmutableArray(actions), + enabled, + description, + ToImmutableDictionary(labels), + ToImmutableDictionary(metadata), + createdBy, + createdAt, + updatedBy, + updatedAt, + schemaVersion); + } + + public string SchemaVersion { get; } + + public string RuleId { get; } + + public string TenantId { get; } + + public string Name { get; } + + public string? Description { get; } + + public bool Enabled { get; } + + public NotifyRuleMatch Match { get; } + + public ImmutableArray Actions { get; } + + public ImmutableDictionary Labels { get; } + + public ImmutableDictionary Metadata { get; } + + public string? CreatedBy { get; } + + public DateTimeOffset CreatedAt { get; } + + public string? UpdatedBy { get; } + + public DateTimeOffset UpdatedAt { get; } + + private static ImmutableArray NormalizeActions(ImmutableArray actions) + { + var source = actions.IsDefault ? Array.Empty() : actions.AsEnumerable(); + return source + .Where(static action => action is not null) + .Distinct() + .OrderBy(static action => action.ActionId, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableArray ToImmutableArray(IEnumerable? actions) + { + if (actions is null) + { + return ImmutableArray.Empty; + } + + return actions.ToImmutableArray(); + } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} + +/// +/// Matching criteria used to evaluate whether an event should trigger the rule. +/// +public sealed record NotifyRuleMatch +{ + [JsonConstructor] + public NotifyRuleMatch( + ImmutableArray eventKinds, + ImmutableArray namespaces, + ImmutableArray repositories, + ImmutableArray digests, + ImmutableArray labels, + ImmutableArray componentPurls, + string? minSeverity, + ImmutableArray verdicts, + bool? kevOnly, + NotifyRuleMatchVex? vex) + { + EventKinds = NormalizeStringSet(eventKinds, lowerCase: true); + Namespaces = NormalizeStringSet(namespaces); + Repositories = NormalizeStringSet(repositories); + Digests = NormalizeStringSet(digests, lowerCase: true); + Labels = NormalizeStringSet(labels); + ComponentPurls = NormalizeStringSet(componentPurls); + Verdicts = NormalizeStringSet(verdicts, lowerCase: true); + MinSeverity = NotifyValidation.TrimToNull(minSeverity)?.ToLowerInvariant(); + KevOnly = kevOnly; + Vex = vex; + } + + public static NotifyRuleMatch Create( + IEnumerable? eventKinds = null, + IEnumerable? namespaces = null, + IEnumerable? repositories = null, + IEnumerable? digests = null, + IEnumerable? labels = null, + IEnumerable? componentPurls = null, + string? minSeverity = null, + IEnumerable? verdicts = null, + bool? kevOnly = null, + NotifyRuleMatchVex? 
vex = null) + { + return new NotifyRuleMatch( + ToImmutableArray(eventKinds), + ToImmutableArray(namespaces), + ToImmutableArray(repositories), + ToImmutableArray(digests), + ToImmutableArray(labels), + ToImmutableArray(componentPurls), + minSeverity, + ToImmutableArray(verdicts), + kevOnly, + vex); + } + + public ImmutableArray EventKinds { get; } + + public ImmutableArray Namespaces { get; } + + public ImmutableArray Repositories { get; } + + public ImmutableArray Digests { get; } + + public ImmutableArray Labels { get; } + + public ImmutableArray ComponentPurls { get; } + + public string? MinSeverity { get; } + + public ImmutableArray Verdicts { get; } + + public bool? KevOnly { get; } + + public NotifyRuleMatchVex? Vex { get; } + + private static ImmutableArray NormalizeStringSet(ImmutableArray values, bool lowerCase = false) + { + var enumerable = values.IsDefault ? Array.Empty() : values.AsEnumerable(); + var normalized = NotifyValidation.NormalizeStringSet(enumerable); + + if (!lowerCase) + { + return normalized; + } + + return normalized + .Select(static value => value.ToLowerInvariant()) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableArray ToImmutableArray(IEnumerable? values) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + return values.ToImmutableArray(); + } +} + +/// +/// Additional VEX (Vulnerability Exploitability eXchange) gating options. +/// +public sealed record NotifyRuleMatchVex +{ + [JsonConstructor] + public NotifyRuleMatchVex( + bool includeAcceptedJustifications = true, + bool includeRejectedJustifications = false, + bool includeUnknownJustifications = false, + ImmutableArray justificationKinds = default) + { + IncludeAcceptedJustifications = includeAcceptedJustifications; + IncludeRejectedJustifications = includeRejectedJustifications; + IncludeUnknownJustifications = includeUnknownJustifications; + JustificationKinds = NormalizeStringSet(justificationKinds); + } + + public static NotifyRuleMatchVex Create( + bool includeAcceptedJustifications = true, + bool includeRejectedJustifications = false, + bool includeUnknownJustifications = false, + IEnumerable? justificationKinds = null) + { + return new NotifyRuleMatchVex( + includeAcceptedJustifications, + includeRejectedJustifications, + includeUnknownJustifications, + ToImmutableArray(justificationKinds)); + } + + public bool IncludeAcceptedJustifications { get; } + + public bool IncludeRejectedJustifications { get; } + + public bool IncludeUnknownJustifications { get; } + + public ImmutableArray JustificationKinds { get; } + + private static ImmutableArray NormalizeStringSet(ImmutableArray values) + { + var enumerable = values.IsDefault ? Array.Empty() : values.AsEnumerable(); + return NotifyValidation.NormalizeStringSet(enumerable); + } + + private static ImmutableArray ToImmutableArray(IEnumerable? values) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + return values.ToImmutableArray(); + } +} + +/// +/// Action executed when a rule matches an event. +/// +public sealed record NotifyRuleAction +{ + [JsonConstructor] + public NotifyRuleAction( + string actionId, + string channel, + string? template = null, + string? digest = null, + TimeSpan? throttle = null, + string? locale = null, + bool enabled = true, + ImmutableDictionary? 
metadata = null) + { + ActionId = NotifyValidation.EnsureNotNullOrWhiteSpace(actionId, nameof(actionId)); + Channel = NotifyValidation.EnsureNotNullOrWhiteSpace(channel, nameof(channel)); + Template = NotifyValidation.TrimToNull(template); + Digest = NotifyValidation.TrimToNull(digest); + Locale = NotifyValidation.TrimToNull(locale)?.ToLowerInvariant(); + Enabled = enabled; + Throttle = throttle is { Ticks: > 0 } ? throttle : null; + Metadata = NotifyValidation.NormalizeStringDictionary(metadata); + } + + public static NotifyRuleAction Create( + string actionId, + string channel, + string? template = null, + string? digest = null, + TimeSpan? throttle = null, + string? locale = null, + bool enabled = true, + IEnumerable>? metadata = null) + { + return new NotifyRuleAction( + actionId, + channel, + template, + digest, + throttle, + locale, + enabled, + ToImmutableDictionary(metadata)); + } + + public string ActionId { get; } + + public string Channel { get; } + + public string? Template { get; } + + public string? Digest { get; } + + public TimeSpan? Throttle { get; } + + public string? Locale { get; } + + public bool Enabled { get; } + + public ImmutableDictionary Metadata { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Notify.Models/NotifySchemaMigration.cs b/src/StellaOps.Notify.Models/NotifySchemaMigration.cs new file mode 100644 index 00000000..81030dbf --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifySchemaMigration.cs @@ -0,0 +1,74 @@ +using System.Text.Json.Nodes; + +namespace StellaOps.Notify.Models; + +/// +/// Upgrades Notify documents emitted by older schema revisions to the current DTOs. 
+/// </summary>
+public static class NotifySchemaMigration
+{
+    public static NotifyRule UpgradeRule(JsonNode document)
+    {
+        ArgumentNullException.ThrowIfNull(document);
+        var (clone, schemaVersion) = Normalize(document, NotifySchemaVersions.Rule);
+
+        return schemaVersion switch
+        {
+            NotifySchemaVersions.Rule => Deserialize<NotifyRule>(clone),
+            _ => throw new NotSupportedException($"Unsupported notify rule schema version '{schemaVersion}'.")
+        };
+    }
+
+    public static NotifyChannel UpgradeChannel(JsonNode document)
+    {
+        ArgumentNullException.ThrowIfNull(document);
+        var (clone, schemaVersion) = Normalize(document, NotifySchemaVersions.Channel);
+
+        return schemaVersion switch
+        {
+            NotifySchemaVersions.Channel => Deserialize<NotifyChannel>(clone),
+            _ => throw new NotSupportedException($"Unsupported notify channel schema version '{schemaVersion}'.")
+        };
+    }
+
+    public static NotifyTemplate UpgradeTemplate(JsonNode document)
+    {
+        ArgumentNullException.ThrowIfNull(document);
+        var (clone, schemaVersion) = Normalize(document, NotifySchemaVersions.Template);
+
+        return schemaVersion switch
+        {
+            NotifySchemaVersions.Template => Deserialize<NotifyTemplate>(clone),
+            _ => throw new NotSupportedException($"Unsupported notify template schema version '{schemaVersion}'.")
+        };
+    }
+
+    private static (JsonObject Clone, string SchemaVersion) Normalize(JsonNode node, string fallback)
+    {
+        if (node is not JsonObject obj)
+        {
+            throw new ArgumentException("Document must be a JSON object.", nameof(node));
+        }
+
+        if (obj.DeepClone() is not JsonObject clone)
+        {
+            throw new InvalidOperationException("Unable to clone document as JsonObject.");
+        }
+
+        string schemaVersion;
+        if (clone.TryGetPropertyValue("schemaVersion", out var value) && value is JsonValue jsonValue && jsonValue.TryGetValue(out string? version) && !string.IsNullOrWhiteSpace(version))
+        {
+            schemaVersion = version.Trim();
+        }
+        else
+        {
+            schemaVersion = fallback;
+            clone["schemaVersion"] = schemaVersion;
+        }
+
+        return (clone, schemaVersion);
+    }
+
+    private static T Deserialize<T>(JsonObject json)
+        => NotifyCanonicalJsonSerializer.Deserialize<T>(json.ToJsonString());
+}
diff --git a/src/StellaOps.Notify.Models/NotifySchemaVersions.cs b/src/StellaOps.Notify.Models/NotifySchemaVersions.cs
new file mode 100644
index 00000000..a8817109
--- /dev/null
+++ b/src/StellaOps.Notify.Models/NotifySchemaVersions.cs
@@ -0,0 +1,23 @@
+namespace StellaOps.Notify.Models;
+
+/// <summary>
+/// Canonical schema version identifiers for Notify documents.
+/// </summary>
+public static class NotifySchemaVersions
+{
+    public const string Rule = "notify.rule@1";
+    public const string Channel = "notify.channel@1";
+    public const string Template = "notify.template@1";
+
+    public static string EnsureRule(string? value)
+        => Normalize(value, Rule);
+
+    public static string EnsureChannel(string? value)
+        => Normalize(value, Channel);
+
+    public static string EnsureTemplate(string? value)
+        => Normalize(value, Template);
+
+    private static string Normalize(string? value, string fallback)
+        => string.IsNullOrWhiteSpace(value) ?
fallback : value.Trim(); +} diff --git a/src/StellaOps.Notify.Models/NotifyTemplate.cs b/src/StellaOps.Notify.Models/NotifyTemplate.cs new file mode 100644 index 00000000..d2b1b744 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyTemplate.cs @@ -0,0 +1,130 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Serialization; + +namespace StellaOps.Notify.Models; + +/// +/// Stored template metadata and content for channel-specific rendering. +/// +public sealed record NotifyTemplate +{ + [JsonConstructor] + public NotifyTemplate( + string templateId, + string tenantId, + NotifyChannelType channelType, + string key, + string locale, + string body, + NotifyTemplateRenderMode renderMode = NotifyTemplateRenderMode.Markdown, + NotifyDeliveryFormat format = NotifyDeliveryFormat.Json, + string? description = null, + ImmutableDictionary? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? schemaVersion = null) + { + SchemaVersion = NotifySchemaVersions.EnsureTemplate(schemaVersion); + TemplateId = NotifyValidation.EnsureNotNullOrWhiteSpace(templateId, nameof(templateId)); + TenantId = NotifyValidation.EnsureNotNullOrWhiteSpace(tenantId, nameof(tenantId)); + ChannelType = channelType; + Key = NotifyValidation.EnsureNotNullOrWhiteSpace(key, nameof(key)); + Locale = NotifyValidation.EnsureNotNullOrWhiteSpace(locale, nameof(locale)).ToLowerInvariant(); + Body = NotifyValidation.EnsureNotNullOrWhiteSpace(body, nameof(body)); + Description = NotifyValidation.TrimToNull(description); + RenderMode = renderMode; + Format = format; + Metadata = NotifyValidation.NormalizeStringDictionary(metadata); + + CreatedBy = NotifyValidation.TrimToNull(createdBy); + CreatedAt = NotifyValidation.EnsureUtc(createdAt ?? DateTimeOffset.UtcNow); + UpdatedBy = NotifyValidation.TrimToNull(updatedBy); + UpdatedAt = NotifyValidation.EnsureUtc(updatedAt ?? CreatedAt); + } + + public static NotifyTemplate Create( + string templateId, + string tenantId, + NotifyChannelType channelType, + string key, + string locale, + string body, + NotifyTemplateRenderMode renderMode = NotifyTemplateRenderMode.Markdown, + NotifyDeliveryFormat format = NotifyDeliveryFormat.Json, + string? description = null, + IEnumerable>? metadata = null, + string? createdBy = null, + DateTimeOffset? createdAt = null, + string? updatedBy = null, + DateTimeOffset? updatedAt = null, + string? schemaVersion = null) + { + return new NotifyTemplate( + templateId, + tenantId, + channelType, + key, + locale, + body, + renderMode, + format, + description, + ToImmutableDictionary(metadata), + createdBy, + createdAt, + updatedBy, + updatedAt, + schemaVersion); + } + + public string SchemaVersion { get; } + + public string TemplateId { get; } + + public string TenantId { get; } + + public NotifyChannelType ChannelType { get; } + + public string Key { get; } + + public string Locale { get; } + + public string Body { get; } + + public string? Description { get; } + + public NotifyTemplateRenderMode RenderMode { get; } + + public NotifyDeliveryFormat Format { get; } + + public ImmutableDictionary Metadata { get; } + + public string? CreatedBy { get; } + + public DateTimeOffset CreatedAt { get; } + + public string? UpdatedBy { get; } + + public DateTimeOffset UpdatedAt { get; } + + private static ImmutableDictionary? ToImmutableDictionary(IEnumerable>? 
pairs) + { + if (pairs is null) + { + return null; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + builder[key] = value; + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Notify.Models/NotifyValidation.cs b/src/StellaOps.Notify.Models/NotifyValidation.cs new file mode 100644 index 00000000..1a9f1fb6 --- /dev/null +++ b/src/StellaOps.Notify.Models/NotifyValidation.cs @@ -0,0 +1,98 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Nodes; + +namespace StellaOps.Notify.Models; + +/// +/// Lightweight validation helpers shared across Notify model constructors. +/// +public static class NotifyValidation +{ + public static string EnsureNotNullOrWhiteSpace(string value, string paramName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value cannot be null or whitespace.", paramName); + } + + return value.Trim(); + } + + public static string? TrimToNull(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + public static ImmutableArray NormalizeStringSet(IEnumerable? values) + => (values ?? Array.Empty()) + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + + public static ImmutableDictionary NormalizeStringDictionary(IEnumerable>? pairs) + { + if (pairs is null) + { + return ImmutableDictionary.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var (key, value) in pairs) + { + if (string.IsNullOrWhiteSpace(key)) + { + continue; + } + + var normalizedKey = key.Trim(); + var normalizedValue = value?.Trim() ?? string.Empty; + builder[normalizedKey] = normalizedValue; + } + + return ImmutableDictionary.CreateRange(StringComparer.Ordinal, builder); + } + + public static DateTimeOffset EnsureUtc(DateTimeOffset value) + => value.ToUniversalTime(); + + public static DateTimeOffset? EnsureUtc(DateTimeOffset? value) + => value?.ToUniversalTime(); + + public static JsonNode? NormalizeJsonNode(JsonNode? node) + { + if (node is null) + { + return null; + } + + switch (node) + { + case JsonObject jsonObject: + { + var normalized = new JsonObject(); + foreach (var property in jsonObject + .Where(static pair => pair.Key is not null) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal)) + { + normalized[property.Key!] = NormalizeJsonNode(property.Value?.DeepClone()); + } + + return normalized; + } + case JsonArray jsonArray: + { + var normalized = new JsonArray(); + foreach (var element in jsonArray) + { + normalized.Add(NormalizeJsonNode(element?.DeepClone())); + } + + return normalized; + } + default: + return node.DeepClone(); + } + } +} diff --git a/src/StellaOps.Notify.Models/TASKS.md b/src/StellaOps.Notify.Models/TASKS.md index 885d48a6..1e1dc242 100644 --- a/src/StellaOps.Notify.Models/TASKS.md +++ b/src/StellaOps.Notify.Models/TASKS.md @@ -2,6 +2,6 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| NOTIFY-MODELS-15-101 | TODO | Notify Models Guild | — | Define core DTOs (Rule, Channel, Template, Event envelope, Delivery) with validation helpers and canonical JSON serialization. | DTOs merged with tests; documented; serialization deterministic. 
|
-| NOTIFY-MODELS-15-102 | TODO | Notify Models Guild | NOTIFY-MODELS-15-101 | Publish schema docs + sample payloads for channels, rules, events (used by UI + connectors). | Markdown/JSON schema generated; linked in docs; integration tests reference samples. |
-| NOTIFY-MODELS-15-103 | TODO | Notify Models Guild | NOTIFY-MODELS-15-101 | Provide versioning and migration helpers (e.g., rule evolution, template revisions). | Migration helpers implemented; tests cover upgrade/downgrade; guidance captured in docs. |
+| NOTIFY-MODELS-15-101 | DONE (2025-10-19) | Notify Models Guild | — | Define core DTOs (Rule, Channel, Template, Event envelope, Delivery) with validation helpers and canonical JSON serialization. | DTOs merged with tests; documented; serialization deterministic. |
+| NOTIFY-MODELS-15-102 | DONE (2025-10-19) | Notify Models Guild | NOTIFY-MODELS-15-101 | Publish schema docs + sample payloads for channels, rules, events (used by UI + connectors). | Markdown/JSON schema generated; linked in docs; integration tests reference samples. |
+| NOTIFY-MODELS-15-103 | DONE (2025-10-19) | Notify Models Guild | NOTIFY-MODELS-15-101 | Provide versioning and migration helpers (e.g., rule evolution, template revisions). | Migration helpers implemented; tests cover upgrade/downgrade; guidance captured in docs. |
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs
new file mode 100644
index 00000000..21712008
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs
@@ -0,0 +1,3 @@
+using Xunit;
+
+[assembly: CollectionBehavior(DisableTestParallelization = true)]
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs
new file mode 100644
index 00000000..c802f448
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs
@@ -0,0 +1 @@
+global using Xunit;
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs
new file mode 100644
index 00000000..e5bd2398
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs
@@ -0,0 +1,92 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging.Abstractions;
+using Microsoft.Extensions.Options;
+using Mongo2Go;
+using MongoDB.Bson;
+using MongoDB.Driver;
+using StellaOps.Notify.Storage.Mongo.Internal;
+using StellaOps.Notify.Storage.Mongo.Migrations;
+using StellaOps.Notify.Storage.Mongo.Options;
+
+namespace StellaOps.Notify.Storage.Mongo.Tests.Internal;
+
+public sealed class NotifyMongoMigrationTests : IAsyncLifetime
+{
+    private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true);
+    private readonly NotifyMongoContext _context;
+    private readonly NotifyMongoInitializer _initializer;
+
+    public NotifyMongoMigrationTests()
+    {
+        var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions
+        {
+            ConnectionString = _runner.ConnectionString,
+            Database = "notify-migration-tests",
+            DeliveryHistoryRetention = TimeSpan.FromDays(45),
+            MigrationsCollection = "notify_migrations_tests"
+        });
+
+        _context = new NotifyMongoContext(options, NullLogger.Instance);
+        _initializer = CreateInitializer(_context);
+    }
+
+    public async Task InitializeAsync()
+    {
+        await _initializer.EnsureIndexesAsync();
+    }
+
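A minimal wiring sketch, not part of this change, showing how NotifyMongoOptions would typically be bound in a host instead of the direct Options.Create used by these fixtures; the configuration section name "Notify:Mongo" is an assumption for illustration:

    // Illustrative only: binds NotifyMongoOptions from configuration with the standard options APIs.
    // The section name "Notify:Mongo" is assumed, not defined by this diff.
    var builder = Host.CreateApplicationBuilder(args);
    builder.Services.Configure<NotifyMongoOptions>(
        builder.Configuration.GetSection("Notify:Mongo"));
    // Storage services (context, initializer, repositories) would be registered alongside.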
+ public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task EnsureIndexesCreatesExpectedDefinitions() + { + // run twice to ensure idempotency + await _initializer.EnsureIndexesAsync(); + + var deliveriesIndexes = await GetIndexesAsync(_context.Options.DeliveriesCollection); + Assert.Contains("tenant_sortKey", deliveriesIndexes.Select(doc => doc["name"].AsString)); + Assert.Contains("tenant_status", deliveriesIndexes.Select(doc => doc["name"].AsString)); + var ttlIndex = deliveriesIndexes.Single(doc => doc["name"].AsString == "completedAt_ttl"); + Assert.Equal(_context.Options.DeliveryHistoryRetention.TotalSeconds, ttlIndex["expireAfterSeconds"].ToDouble()); + + var locksIndexes = await GetIndexesAsync(_context.Options.LocksCollection); + Assert.Contains("tenant_resource", locksIndexes.Select(doc => doc["name"].AsString)); + Assert.True(locksIndexes.Single(doc => doc["name"].AsString == "tenant_resource")["unique"].ToBoolean()); + Assert.Contains("expiresAt_ttl", locksIndexes.Select(doc => doc["name"].AsString)); + + var digestsIndexes = await GetIndexesAsync(_context.Options.DigestsCollection); + Assert.Contains("tenant_actionKey", digestsIndexes.Select(doc => doc["name"].AsString)); + + var rulesIndexes = await GetIndexesAsync(_context.Options.RulesCollection); + Assert.Contains("tenant_enabled", rulesIndexes.Select(doc => doc["name"].AsString)); + + var migrationsIndexes = await GetIndexesAsync(_context.Options.MigrationsCollection); + Assert.Contains("migrationId_unique", migrationsIndexes.Select(doc => doc["name"].AsString)); + } + + private async Task> GetIndexesAsync(string collectionName) + { + var collection = _context.Database.GetCollection(collectionName); + var cursor = await collection.Indexes.ListAsync().ConfigureAwait(false); + return await cursor.ToListAsync().ConfigureAwait(false); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs new file mode 100644 index 00000000..03df3099 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs @@ -0,0 +1,75 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using MongoDB.Bson; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyAuditRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyAuditRepository _repository; + + public NotifyAuditRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + 
ConnectionString = _runner.ConnectionString, + Database = "notify-audit-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyAuditRepository(_context); + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task AppendAndQuery() + { + var entry = new NotifyAuditEntryDocument + { + TenantId = "tenant-a", + Actor = "user@example.com", + Action = "create-rule", + EntityId = "rule-1", + EntityType = "rule", + Timestamp = DateTimeOffset.UtcNow, + Payload = new BsonDocument("ruleId", "rule-1") + }; + + await _repository.AppendAsync(entry); + var list = await _repository.QueryAsync("tenant-a", DateTimeOffset.UtcNow.AddMinutes(-5), 10); + Assert.Single(list); + Assert.Equal("create-rule", list[0].Action); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs new file mode 100644 index 00000000..4e52a8e4 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs @@ -0,0 +1,77 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyChannelRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyChannelRepository _repository; + + public NotifyChannelRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-channel-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyChannelRepository(_context); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + [Fact] + public async Task UpsertChannelPersistsData() + { + var channel = NotifyChannel.Create( + channelId: "channel-1", + tenantId: "tenant-a", + name: "slack:sec", + type: NotifyChannelType.Slack, + config: NotifyChannelConfig.Create(secretRef: "ref://secret")); + + await _repository.UpsertAsync(channel); + + var fetched = await _repository.GetAsync("tenant-a", "channel-1"); + Assert.NotNull(fetched); + Assert.Equal(channel.ChannelId, fetched!.ChannelId); + + var listed = await 
_repository.ListAsync("tenant-a"); + Assert.Single(listed); + + await _repository.DeleteAsync("tenant-a", "channel-1"); + Assert.Null(await _repository.GetAsync("tenant-a", "channel-1")); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs new file mode 100644 index 00000000..692671ce --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs @@ -0,0 +1,119 @@ +using System; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyDeliveryRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyDeliveryRepository _repository; + + public NotifyDeliveryRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-delivery-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyDeliveryRepository(_context); + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task AppendAndQueryWithPaging() + { + var now = DateTimeOffset.UtcNow; + var deliveries = new[] + { + NotifyDelivery.Create( + deliveryId: "delivery-1", + tenantId: "tenant-a", + ruleId: "rule-1", + actionId: "action-1", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Sent, + createdAt: now.AddMinutes(-2), + sentAt: now.AddMinutes(-2)), + NotifyDelivery.Create( + deliveryId: "delivery-2", + tenantId: "tenant-a", + ruleId: "rule-2", + actionId: "action-2", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Failed, + createdAt: now.AddMinutes(-1), + completedAt: now.AddMinutes(-1)), + NotifyDelivery.Create( + deliveryId: "delivery-3", + tenantId: "tenant-a", + ruleId: "rule-3", + actionId: "action-3", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Sent, + createdAt: now, + sentAt: now) + }; + + foreach (var delivery in deliveries) + { + await _repository.AppendAsync(delivery); + } + + var fetched = await _repository.GetAsync("tenant-a", "delivery-3"); + Assert.NotNull(fetched); + Assert.Equal("delivery-3", fetched!.DeliveryId); + + var page1 = await 
_repository.QueryAsync("tenant-a", now.AddHours(-1), "sent", 1); + Assert.Single(page1.Items); + Assert.Equal("delivery-3", page1.Items[0].DeliveryId); + Assert.False(string.IsNullOrWhiteSpace(page1.ContinuationToken)); + + var page2 = await _repository.QueryAsync("tenant-a", now.AddHours(-1), "sent", 1, page1.ContinuationToken); + Assert.Single(page2.Items); + Assert.Equal("delivery-1", page2.Items[0].DeliveryId); + Assert.Null(page2.ContinuationToken); + } + + [Fact] + public async Task QueryAsyncWithInvalidContinuationThrows() + { + await Assert.ThrowsAsync(() => _repository.QueryAsync("tenant-a", null, null, 10, "not-a-token")); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs new file mode 100644 index 00000000..24098d45 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs @@ -0,0 +1,79 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyDigestRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyDigestRepository _repository; + + public NotifyDigestRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-digest-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyDigestRepository(_context); + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task UpsertAndRemove() + { + var digest = new NotifyDigestDocument + { + TenantId = "tenant-a", + ActionKey = "action-1", + Window = "hourly", + OpenedAt = DateTimeOffset.UtcNow, + Status = "open", + Items = new List + { + new() { EventId = Guid.NewGuid().ToString() } + } + }; + + await _repository.UpsertAsync(digest); + var fetched = await _repository.GetAsync("tenant-a", "action-1"); + Assert.NotNull(fetched); + Assert.Equal("action-1", fetched!.ActionKey); + + await _repository.RemoveAsync("tenant-a", "action-1"); + Assert.Null(await _repository.GetAsync("tenant-a", "action-1")); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new 
EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs new file mode 100644 index 00000000..63223f87 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs @@ -0,0 +1,67 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyLockRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyLockRepository _repository; + + public NotifyLockRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-lock-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyLockRepository(_context); + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task AcquireAndRelease() + { + var acquired = await _repository.TryAcquireAsync("tenant-a", "resource-1", "owner-1", TimeSpan.FromMinutes(1)); + Assert.True(acquired); + + var second = await _repository.TryAcquireAsync("tenant-a", "resource-1", "owner-2", TimeSpan.FromMinutes(1)); + Assert.False(second); + + await _repository.ReleaseAsync("tenant-a", "resource-1", "owner-1"); + var third = await _repository.TryAcquireAsync("tenant-a", "resource-1", "owner-2", TimeSpan.FromMinutes(1)); + Assert.True(third); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs new file mode 100644 index 00000000..0f2e84fe --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs @@ -0,0 +1,79 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class 
NotifyRuleRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyRuleRepository _repository; + + public NotifyRuleRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-rule-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyRuleRepository(_context); + } + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + [Fact] + public async Task UpsertRoundtripsData() + { + var rule = NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "Critical Alerts", + match: NotifyRuleMatch.Create(eventKinds: new[] { NotifyEventKinds.ScannerReportReady }), + actions: new[] { new NotifyRuleAction("action-1", "slack:sec") }); + + await _repository.UpsertAsync(rule); + + var fetched = await _repository.GetAsync("tenant-a", "rule-1"); + Assert.NotNull(fetched); + Assert.Equal(rule.RuleId, fetched!.RuleId); + Assert.Equal(rule.SchemaVersion, fetched.SchemaVersion); + + var listed = await _repository.ListAsync("tenant-a"); + Assert.Single(listed); + + await _repository.DeleteAsync("tenant-a", "rule-1"); + var deleted = await _repository.GetAsync("tenant-a", "rule-1"); + Assert.Null(deleted); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs new file mode 100644 index 00000000..02ac059b --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs @@ -0,0 +1,80 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Repositories; + +public sealed class NotifyTemplateRepositoryTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner = MongoDbRunner.Start(singleNodeReplSet: true); + private readonly NotifyMongoContext _context; + private readonly NotifyMongoInitializer _initializer; + private readonly NotifyTemplateRepository _repository; + + public NotifyTemplateRepositoryTests() + { + var options = Microsoft.Extensions.Options.Options.Create(new NotifyMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = "notify-template-tests" + }); + + _context = new NotifyMongoContext(options, NullLogger.Instance); + _initializer = CreateInitializer(_context); + _repository = new NotifyTemplateRepository(_context); + } + 
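For reference, the template handed to this repository has already been normalized by the NotifyTemplate constructor shown earlier (locale lowercased, schema version stamped); a small illustration of that behaviour:

    // Illustration of the normalization applied by NotifyTemplate.Create in the model layer above.
    var template = NotifyTemplate.Create(
        templateId: "template-1",
        tenantId: "tenant-a",
        channelType: NotifyChannelType.Slack,
        key: "concise",
        locale: "EN-US",        // stored as "en-us"
        body: "{{summary}}");
    // template.SchemaVersion is "notify.template@1" (NotifySchemaVersions.Template).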
+ public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + public async Task InitializeAsync() + { + await _initializer.EnsureIndexesAsync(); + } + + [Fact] + public async Task UpsertTemplatePersistsData() + { + var template = NotifyTemplate.Create( + templateId: "template-1", + tenantId: "tenant-a", + channelType: NotifyChannelType.Slack, + key: "concise", + locale: "en-us", + body: "{{summary}}", + renderMode: NotifyTemplateRenderMode.Markdown, + format: NotifyDeliveryFormat.Slack); + + await _repository.UpsertAsync(template); + + var fetched = await _repository.GetAsync("tenant-a", "template-1"); + Assert.NotNull(fetched); + Assert.Equal(template.TemplateId, fetched!.TemplateId); + + var listed = await _repository.ListAsync("tenant-a"); + Assert.Single(listed); + + await _repository.DeleteAsync("tenant-a", "template-1"); + Assert.Null(await _repository.GetAsync("tenant-a", "template-1")); + } + + private static NotifyMongoInitializer CreateInitializer(NotifyMongoContext context) + { + var migrations = new INotifyMongoMigration[] + { + new EnsureNotifyCollectionsMigration(NullLogger.Instance), + new EnsureNotifyIndexesMigration() + }; + + var runner = new NotifyMongoMigrationRunner(context, migrations, NullLogger.Instance); + return new NotifyMongoInitializer(context, runner, NullLogger.Instance); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs new file mode 100644 index 00000000..9f4f497a --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs @@ -0,0 +1,35 @@ +using System.Text.Json.Nodes; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Serialization; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Serialization; + +public sealed class NotifyChannelDocumentMapperTests +{ + [Fact] + public void RoundTripSampleChannelMaintainsCanonicalShape() + { + var sample = LoadSample("notify-channel@1.sample.json"); + var node = JsonNode.Parse(sample) ?? throw new InvalidOperationException("Sample JSON null."); + + var channel = NotifySchemaMigration.UpgradeChannel(node); + var bson = NotifyChannelDocumentMapper.ToBsonDocument(channel); + var restored = NotifyChannelDocumentMapper.FromBsonDocument(bson); + + var canonical = NotifyCanonicalJsonSerializer.Serialize(restored); + var canonicalNode = JsonNode.Parse(canonical) ?? 
throw new InvalidOperationException("Canonical JSON null."); + + Assert.True(JsonNode.DeepEquals(node, canonicalNode), "Canonical JSON should match sample document."); + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs new file mode 100644 index 00000000..47da05ca --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs @@ -0,0 +1,36 @@ +using System.Text.Json.Nodes; +using MongoDB.Bson; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Serialization; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Serialization; + +public sealed class NotifyRuleDocumentMapperTests +{ + [Fact] + public void RoundTripSampleRuleMaintainsCanonicalShape() + { + var sample = LoadSample("notify-rule@1.sample.json"); + var node = JsonNode.Parse(sample) ?? throw new InvalidOperationException("Sample JSON null."); + + var rule = NotifySchemaMigration.UpgradeRule(node); + var bson = NotifyRuleDocumentMapper.ToBsonDocument(rule); + var restored = NotifyRuleDocumentMapper.FromBsonDocument(bson); + + var canonical = NotifyCanonicalJsonSerializer.Serialize(restored); + var canonicalNode = JsonNode.Parse(canonical) ?? throw new InvalidOperationException("Canonical JSON null."); + + Assert.True(JsonNode.DeepEquals(node, canonicalNode), "Canonical JSON should match sample document."); + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs new file mode 100644 index 00000000..d2964900 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs @@ -0,0 +1,35 @@ +using System.Text.Json.Nodes; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Serialization; + +namespace StellaOps.Notify.Storage.Mongo.Tests.Serialization; + +public sealed class NotifyTemplateDocumentMapperTests +{ + [Fact] + public void RoundTripSampleTemplateMaintainsCanonicalShape() + { + var sample = LoadSample("notify-template@1.sample.json"); + var node = JsonNode.Parse(sample) ?? throw new InvalidOperationException("Sample JSON null."); + + var template = NotifySchemaMigration.UpgradeTemplate(node); + var bson = NotifyTemplateDocumentMapper.ToBsonDocument(template); + var restored = NotifyTemplateDocumentMapper.FromBsonDocument(bson); + + var canonical = NotifyCanonicalJsonSerializer.Serialize(restored); + var canonicalNode = JsonNode.Parse(canonical) ?? 
throw new InvalidOperationException("Canonical JSON null."); + + Assert.True(JsonNode.DeepEquals(node, canonicalNode), "Canonical JSON should match sample document."); + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj b/src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj new file mode 100644 index 00000000..07da5734 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj @@ -0,0 +1,28 @@ + + + net10.0 + enable + enable + false + + + + + + + + + + + + + + + + + + + Always + + + diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs new file mode 100644 index 00000000..2384cdea --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs @@ -0,0 +1,31 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Notify.Storage.Mongo.Documents; + +public sealed class NotifyAuditEntryDocument +{ + [BsonId] + public ObjectId Id { get; init; } + + [BsonElement("tenantId")] + public required string TenantId { get; init; } + + [BsonElement("actor")] + public required string Actor { get; init; } + + [BsonElement("action")] + public required string Action { get; init; } + + [BsonElement("entityId")] + public string EntityId { get; init; } = string.Empty; + + [BsonElement("entityType")] + public string EntityType { get; init; } = string.Empty; + + [BsonElement("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [BsonElement("payload")] + public BsonDocument Payload { get; init; } = new(); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs new file mode 100644 index 00000000..3d186c73 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs @@ -0,0 +1,39 @@ +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Notify.Storage.Mongo.Documents; + +public sealed class NotifyDigestDocument +{ + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("tenantId")] + public required string TenantId { get; init; } + + [BsonElement("actionKey")] + public required string ActionKey { get; init; } + + [BsonElement("window")] + public required string Window { get; init; } + + [BsonElement("openedAt")] + public required DateTimeOffset OpenedAt { get; init; } + + [BsonElement("status")] + public required string Status { get; init; } + + [BsonElement("items")] + public List Items { get; init; } = new(); +} + +public sealed class NotifyDigestItemDocument +{ + [BsonElement("eventId")] + public string EventId { get; init; } = string.Empty; + + [BsonElement("scope")] + public Dictionary Scope { get; init; } = new(); + + [BsonElement("delta")] + public Dictionary Delta { get; init; } = new(); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs new file mode 100644 index 00000000..9849b207 --- /dev/null +++ 
b/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs @@ -0,0 +1,24 @@ +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Notify.Storage.Mongo.Documents; + +public sealed class NotifyLockDocument +{ + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("tenantId")] + public required string TenantId { get; init; } + + [BsonElement("resource")] + public required string Resource { get; init; } + + [BsonElement("acquiredAt")] + public required DateTimeOffset AcquiredAt { get; init; } + + [BsonElement("expiresAt")] + public required DateTimeOffset ExpiresAt { get; init; } + + [BsonElement("owner")] + public string Owner { get; init; } = string.Empty; +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs b/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs new file mode 100644 index 00000000..7fd33220 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs @@ -0,0 +1,45 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Options; + +namespace StellaOps.Notify.Storage.Mongo.Internal; + +internal sealed class NotifyMongoContext +{ + public NotifyMongoContext(IOptions options, ILogger logger) + { + ArgumentNullException.ThrowIfNull(logger); + var value = options?.Value ?? throw new ArgumentNullException(nameof(options)); + + if (string.IsNullOrWhiteSpace(value.ConnectionString)) + { + throw new InvalidOperationException("Notify Mongo connection string is not configured."); + } + + if (string.IsNullOrWhiteSpace(value.Database)) + { + throw new InvalidOperationException("Notify Mongo database name is not configured."); + } + + Client = new MongoClient(value.ConnectionString); + var settings = new MongoDatabaseSettings(); + if (value.UseMajorityReadConcern) + { + settings.ReadConcern = ReadConcern.Majority; + } + if (value.UseMajorityWriteConcern) + { + settings.WriteConcern = WriteConcern.WMajority; + } + + Database = Client.GetDatabase(value.Database, settings); + Options = value; + } + + public MongoClient Client { get; } + + public IMongoDatabase Database { get; } + + public NotifyMongoOptions Options { get; } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs b/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs new file mode 100644 index 00000000..b818bbfb --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs @@ -0,0 +1,32 @@ +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Storage.Mongo.Migrations; + +namespace StellaOps.Notify.Storage.Mongo.Internal; + +internal interface INotifyMongoInitializer +{ + Task EnsureIndexesAsync(CancellationToken cancellationToken = default); +} + +internal sealed class NotifyMongoInitializer : INotifyMongoInitializer +{ + private readonly NotifyMongoContext _context; + private readonly NotifyMongoMigrationRunner _migrationRunner; + private readonly ILogger _logger; + + public NotifyMongoInitializer( + NotifyMongoContext context, + NotifyMongoMigrationRunner migrationRunner, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + _migrationRunner = migrationRunner ?? throw new ArgumentNullException(nameof(migrationRunner)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task EnsureIndexesAsync(CancellationToken cancellationToken = default) + { + _logger.LogInformation("Ensuring Notify Mongo migrations are applied for database {Database}.", _context.Options.Database); + await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs b/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs new file mode 100644 index 00000000..76861444 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs @@ -0,0 +1,49 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Migrations; + +internal sealed class EnsureNotifyCollectionsMigration : INotifyMongoMigration +{ + private readonly ILogger _logger; + + public EnsureNotifyCollectionsMigration(ILogger logger) + => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + public string Id => "20251019_notify_collections_v1"; + + public async ValueTask ExecuteAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var requiredCollections = new[] + { + context.Options.RulesCollection, + context.Options.ChannelsCollection, + context.Options.TemplatesCollection, + context.Options.DeliveriesCollection, + context.Options.DigestsCollection, + context.Options.LocksCollection, + context.Options.AuditCollection, + context.Options.MigrationsCollection + }; + + var cursor = await context.Database + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var existingNames = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + + foreach (var collection in requiredCollections) + { + if (existingNames.Contains(collection, StringComparer.Ordinal)) + { + continue; + } + + _logger.LogInformation("Creating Notify Mongo collection '{CollectionName}'.", collection); + await context.Database.CreateCollectionAsync(collection, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs b/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs new file mode 100644 index 00000000..3d9ed7fb --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs @@ -0,0 +1,165 @@ +using System; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Migrations; + +internal sealed class EnsureNotifyIndexesMigration : INotifyMongoMigration +{ + public string Id => "20251019_notify_indexes_v1"; + + public async ValueTask ExecuteAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + await EnsureRulesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureChannelsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureTemplatesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureDeliveriesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureDigestsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureLocksIndexesAsync(context, 
cancellationToken).ConfigureAwait(false); + await EnsureAuditIndexesAsync(context, cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureRulesIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.RulesCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Ascending("enabled"); + + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_enabled" + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureChannelsIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.ChannelsCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Ascending("type") + .Ascending("enabled"); + + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_type_enabled" + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureTemplatesIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.TemplatesCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Ascending("channelType") + .Ascending("key") + .Ascending("locale"); + + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_channel_key_locale", + Unique = true + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureDeliveriesIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.DeliveriesCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Descending("sortKey"); + + var sortModel = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_sortKey" + }); + + await collection.Indexes.CreateOneAsync(sortModel, cancellationToken: cancellationToken).ConfigureAwait(false); + + var statusModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("tenantId").Ascending("status"), + new CreateIndexOptions + { + Name = "tenant_status" + }); + + await collection.Indexes.CreateOneAsync(statusModel, cancellationToken: cancellationToken).ConfigureAwait(false); + + if (context.Options.DeliveryHistoryRetention > TimeSpan.Zero) + { + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("completedAt"), + new CreateIndexOptions + { + Name = "completedAt_ttl", + ExpireAfter = context.Options.DeliveryHistoryRetention + }); + + await collection.Indexes.CreateOneAsync(ttlModel, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private static async Task EnsureDigestsIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.DigestsCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Ascending("actionKey"); + + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_actionKey" + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task 
EnsureLocksIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.LocksCollection); + var uniqueModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("tenantId").Ascending("resource"), + new CreateIndexOptions + { + Name = "tenant_resource", + Unique = true + }); + + await collection.Indexes.CreateOneAsync(uniqueModel, cancellationToken: cancellationToken).ConfigureAwait(false); + + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "expiresAt_ttl", + ExpireAfter = TimeSpan.Zero + }); + + await collection.Indexes.CreateOneAsync(ttlModel, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureAuditIndexesAsync(NotifyMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.AuditCollection); + var keys = Builders.IndexKeys + .Ascending("tenantId") + .Descending("timestamp"); + + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "tenant_timestamp" + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs b/src/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs new file mode 100644 index 00000000..9257574f --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs @@ -0,0 +1,12 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Migrations; + +internal interface INotifyMongoMigration +{ + string Id { get; } + + ValueTask ExecuteAsync(NotifyMongoContext context, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs b/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs new file mode 100644 index 00000000..8a981831 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs @@ -0,0 +1,16 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Notify.Storage.Mongo.Migrations; + +internal sealed class NotifyMongoMigrationRecord +{ + [BsonId] + public ObjectId Id { get; init; } + + [BsonElement("migrationId")] + public required string MigrationId { get; init; } + + [BsonElement("appliedAt")] + public required DateTimeOffset AppliedAt { get; init; } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs b/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs new file mode 100644 index 00000000..a78870f1 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs @@ -0,0 +1,78 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Migrations; + +internal sealed class NotifyMongoMigrationRunner +{ + private readonly NotifyMongoContext _context; + private readonly IReadOnlyList _migrations; + private readonly ILogger _logger; + + public NotifyMongoMigrationRunner( + NotifyMongoContext context, + IEnumerable migrations, + ILogger logger) + { + _context = context ?? 
throw new ArgumentNullException(nameof(context)); + ArgumentNullException.ThrowIfNull(migrations); + _migrations = migrations.OrderBy(migration => migration.Id, StringComparer.Ordinal).ToArray(); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask RunAsync(CancellationToken cancellationToken) + { + if (_migrations.Count == 0) + { + return; + } + + var collection = _context.Database.GetCollection(_context.Options.MigrationsCollection); + await EnsureMigrationIndexAsync(collection, cancellationToken).ConfigureAwait(false); + + var applied = await collection + .Find(FilterDefinition.Empty) + .Project(record => record.MigrationId) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var appliedSet = applied.ToHashSet(StringComparer.Ordinal); + + foreach (var migration in _migrations) + { + if (appliedSet.Contains(migration.Id)) + { + continue; + } + + _logger.LogInformation("Applying Notify Mongo migration {MigrationId}.", migration.Id); + await migration.ExecuteAsync(_context, cancellationToken).ConfigureAwait(false); + + var record = new NotifyMongoMigrationRecord + { + Id = ObjectId.GenerateNewId(), + MigrationId = migration.Id, + AppliedAt = DateTimeOffset.UtcNow + }; + + await collection.InsertOneAsync(record, cancellationToken: cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Completed Notify Mongo migration {MigrationId}.", migration.Id); + } + } + + private static async Task EnsureMigrationIndexAsync( + IMongoCollection collection, + CancellationToken cancellationToken) + { + var keys = Builders.IndexKeys.Ascending(record => record.MigrationId); + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "migrationId_unique", + Unique = true + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs b/src/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs new file mode 100644 index 00000000..1d2b9b7c --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs @@ -0,0 +1,32 @@ +using System; + +namespace StellaOps.Notify.Storage.Mongo.Options; + +public sealed class NotifyMongoOptions +{ + public string ConnectionString { get; set; } = "mongodb://localhost:27017"; + + public string Database { get; set; } = "stellaops_notify"; + + public string RulesCollection { get; set; } = "rules"; + + public string ChannelsCollection { get; set; } = "channels"; + + public string TemplatesCollection { get; set; } = "templates"; + + public string DeliveriesCollection { get; set; } = "deliveries"; + + public string DigestsCollection { get; set; } = "digests"; + + public string LocksCollection { get; set; } = "locks"; + + public string AuditCollection { get; set; } = "audit"; + + public string MigrationsCollection { get; set; } = "_notify_migrations"; + + public TimeSpan DeliveryHistoryRetention { get; set; } = TimeSpan.FromDays(90); + + public bool UseMajorityReadConcern { get; set; } = true; + + public bool UseMajorityWriteConcern { get; set; } = true; +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs b/src/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..a9c5d4fc --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: 
InternalsVisibleTo("StellaOps.Notify.Storage.Mongo.Tests")] diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs new file mode 100644 index 00000000..3841c422 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs @@ -0,0 +1,10 @@ +using StellaOps.Notify.Storage.Mongo.Documents; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyAuditRepository +{ + Task AppendAsync(NotifyAuditEntryDocument entry, CancellationToken cancellationToken = default); + + Task> QueryAsync(string tenantId, DateTimeOffset? since, int? limit, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs new file mode 100644 index 00000000..c3aff15e --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs @@ -0,0 +1,14 @@ +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyChannelRepository +{ + Task UpsertAsync(NotifyChannel channel, CancellationToken cancellationToken = default); + + Task GetAsync(string tenantId, string channelId, CancellationToken cancellationToken = default); + + Task> ListAsync(string tenantId, CancellationToken cancellationToken = default); + + Task DeleteAsync(string tenantId, string channelId, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs new file mode 100644 index 00000000..2cc779d7 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs @@ -0,0 +1,20 @@ +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyDeliveryRepository +{ + Task AppendAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default); + + Task UpdateAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default); + + Task GetAsync(string tenantId, string deliveryId, CancellationToken cancellationToken = default); + + Task QueryAsync( + string tenantId, + DateTimeOffset? since, + string? status, + int? limit, + string? 
continuationToken = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs new file mode 100644 index 00000000..d4ceea94 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs @@ -0,0 +1,12 @@ +using StellaOps.Notify.Storage.Mongo.Documents; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyDigestRepository +{ + Task GetAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default); + + Task UpsertAsync(NotifyDigestDocument document, CancellationToken cancellationToken = default); + + Task RemoveAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs new file mode 100644 index 00000000..d58d33e5 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs @@ -0,0 +1,8 @@ +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyLockRepository +{ + Task TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default); + + Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs new file mode 100644 index 00000000..bb9a9040 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs @@ -0,0 +1,14 @@ +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyRuleRepository +{ + Task UpsertAsync(NotifyRule rule, CancellationToken cancellationToken = default); + + Task GetAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default); + + Task> ListAsync(string tenantId, CancellationToken cancellationToken = default); + + Task DeleteAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs new file mode 100644 index 00000000..ca9b044d --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs @@ -0,0 +1,14 @@ +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +public interface INotifyTemplateRepository +{ + Task UpsertAsync(NotifyTemplate template, CancellationToken cancellationToken = default); + + Task GetAsync(string tenantId, string templateId, CancellationToken cancellationToken = default); + + Task> ListAsync(string tenantId, CancellationToken cancellationToken = default); + + Task DeleteAsync(string tenantId, string templateId, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs new file mode 100644 index 00000000..62b11bf2 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs @@ -0,0 +1,40 @@ +using MongoDB.Driver; 
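The repository contracts above are all tenant-scoped. As a usage illustration before the concrete implementations, the sketch below shows a worker treating INotifyLockRepository as a short-lived lease; the tenant, resource, owner identifiers and TTL are assumptions made up for the example, not values taken from this change.

// Hypothetical consumer of INotifyLockRepository; identifiers are illustrative only.
async Task ProcessDigestsOnceAsync(INotifyLockRepository locks, CancellationToken cancellationToken)
{
    const string tenantId = "tenant-web";
    const string resource = "digests";
    var owner = $"worker-{Environment.MachineName}";

    // Acquisition fails when another live owner holds the lease; an expired lease
    // (expiresAt < now) or a re-entrant call by the same owner succeeds.
    if (!await locks.TryAcquireAsync(tenantId, resource, owner, TimeSpan.FromSeconds(30), cancellationToken))
    {
        return;
    }

    try
    {
        // ... process open digests for the tenant ...
    }
    finally
    {
        // Release is owner-checked, so a stale worker cannot drop someone else's lease.
        await locks.ReleaseAsync(tenantId, resource, owner, cancellationToken);
    }
}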
+using StellaOps.Notify.Storage.Mongo.Documents;
+using StellaOps.Notify.Storage.Mongo.Internal;
+
+namespace StellaOps.Notify.Storage.Mongo.Repositories;
+
+internal sealed class NotifyAuditRepository : INotifyAuditRepository
+{
+    private readonly IMongoCollection<NotifyAuditEntryDocument> _collection;
+
+    public NotifyAuditRepository(NotifyMongoContext context)
+    {
+        ArgumentNullException.ThrowIfNull(context);
+        _collection = context.Database.GetCollection<NotifyAuditEntryDocument>(context.Options.AuditCollection);
+    }
+
+    public async Task AppendAsync(NotifyAuditEntryDocument entry, CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(entry);
+        await _collection.InsertOneAsync(entry, cancellationToken: cancellationToken).ConfigureAwait(false);
+    }
+
+    public async Task<IReadOnlyList<NotifyAuditEntryDocument>> QueryAsync(string tenantId, DateTimeOffset? since, int? limit, CancellationToken cancellationToken = default)
+    {
+        var filter = Builders<NotifyAuditEntryDocument>.Filter.Eq(x => x.TenantId, tenantId);
+        if (since is not null)
+        {
+            filter &= Builders<NotifyAuditEntryDocument>.Filter.Gte(x => x.Timestamp, since.Value);
+        }
+
+        var ordered = _collection.Find(filter).SortByDescending(x => x.Timestamp);
+        IFindFluent<NotifyAuditEntryDocument, NotifyAuditEntryDocument> query = ordered;
+        if (limit is > 0)
+        {
+            query = query.Limit(limit);
+        }
+
+        return await query.ToListAsync(cancellationToken).ConfigureAwait(false);
+    }
+}
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs
new file mode 100644
index 00000000..3a804afb
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs
@@ -0,0 +1,70 @@
+using System.Linq;
+using MongoDB.Bson;
+using MongoDB.Driver;
+using StellaOps.Notify.Models;
+using StellaOps.Notify.Storage.Mongo.Internal;
+using StellaOps.Notify.Storage.Mongo.Serialization;
+
+namespace StellaOps.Notify.Storage.Mongo.Repositories;
+
+internal sealed class NotifyChannelRepository : INotifyChannelRepository
+{
+    private readonly IMongoCollection<BsonDocument> _collection;
+
+    public NotifyChannelRepository(NotifyMongoContext context)
+    {
+        if (context is null)
+        {
+            throw new ArgumentNullException(nameof(context));
+        }
+
+        _collection = context.Database.GetCollection<BsonDocument>(context.Options.ChannelsCollection);
+    }
+
+    public async Task UpsertAsync(NotifyChannel channel, CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(channel);
+        var document = NotifyChannelDocumentMapper.ToBsonDocument(channel);
+        var filter = Builders<BsonDocument>.Filter.Eq("_id", CreateDocumentId(channel.TenantId, channel.ChannelId));
+
+        await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
+    }
+
+    public async Task<NotifyChannel?> GetAsync(string tenantId, string channelId, CancellationToken cancellationToken = default)
+    {
+        var filter = Builders<BsonDocument>.Filter.Eq("_id", CreateDocumentId(tenantId, channelId))
+            & Builders<BsonDocument>.Filter.Or(
+                Builders<BsonDocument>.Filter.Exists("deletedAt", false),
+                Builders<BsonDocument>.Filter.Eq("deletedAt", BsonNull.Value));
+
+        var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
+        return document is null ? null : NotifyChannelDocumentMapper.FromBsonDocument(document);
+    }
+
+    public async Task<IReadOnlyList<NotifyChannel>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
+    {
+        var filter = Builders<BsonDocument>.Filter.Eq("tenantId", tenantId)
+            & Builders<BsonDocument>.Filter.Or(
+                Builders<BsonDocument>.Filter.Exists("deletedAt", false),
+                Builders<BsonDocument>.Filter.Eq("deletedAt", BsonNull.Value));
+        var cursor = await _collection.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false);
+        return cursor.Select(NotifyChannelDocumentMapper.FromBsonDocument).ToArray();
+    }
+
+    public async Task DeleteAsync(string tenantId, string channelId, CancellationToken cancellationToken = default)
+    {
+        var filter = Builders<BsonDocument>.Filter.Eq("_id", CreateDocumentId(tenantId, channelId));
+        await _collection.UpdateOneAsync(filter,
+            Builders<BsonDocument>.Update.Set("deletedAt", DateTime.UtcNow).Set("enabled", false),
+            new UpdateOptions { IsUpsert = false },
+            cancellationToken).ConfigureAwait(false);
+    }
+
+    private static string CreateDocumentId(string tenantId, string resourceId)
+        => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) =>
+        {
+            value.tenantId.AsSpan().CopyTo(span);
+            span[value.tenantId.Length] = ':';
+            value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]);
+        });
+}
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs
new file mode 100644
index 00000000..d1cff2eb
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs
@@ -0,0 +1,6 @@
+using System.Collections.Generic;
+using StellaOps.Notify.Models;
+
+namespace StellaOps.Notify.Storage.Mongo.Repositories;
+
+public sealed record NotifyDeliveryQueryResult(IReadOnlyList<NotifyDelivery> Items, string? ContinuationToken);
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs
new file mode 100644
index 00000000..c204bfaa
--- /dev/null
+++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs
@@ -0,0 +1,179 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using MongoDB.Bson;
+using MongoDB.Driver;
+using StellaOps.Notify.Models;
+using StellaOps.Notify.Storage.Mongo.Internal;
+using StellaOps.Notify.Storage.Mongo.Serialization;
+
+namespace StellaOps.Notify.Storage.Mongo.Repositories;
+
+internal sealed class NotifyDeliveryRepository : INotifyDeliveryRepository
+{
+    private readonly IMongoCollection<BsonDocument> _collection;
+
+    public NotifyDeliveryRepository(NotifyMongoContext context)
+    {
+        ArgumentNullException.ThrowIfNull(context);
+        _collection = context.Database.GetCollection<BsonDocument>(context.Options.DeliveriesCollection);
+    }
+
+    public Task AppendAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default)
+        => UpdateAsync(delivery, cancellationToken);
+
+    public async Task UpdateAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(delivery);
+        var document = NotifyDeliveryDocumentMapper.ToBsonDocument(delivery);
+        var filter = Builders<BsonDocument>.Filter.Eq("_id", CreateDocumentId(delivery.TenantId, delivery.DeliveryId));
+
+        await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
+    }
+
+    public async Task<NotifyDelivery?> GetAsync(string tenantId, string deliveryId, CancellationToken cancellationToken = default)
+    {
+        var filter = Builders<BsonDocument>.Filter.Eq("_id", CreateDocumentId(tenantId, deliveryId));
+        var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
+        return document is null ? null : NotifyDeliveryDocumentMapper.FromBsonDocument(document);
+    }
+
+    public async Task<NotifyDeliveryQueryResult> QueryAsync(
+        string tenantId,
+        DateTimeOffset? since,
+        string? status,
+        int? limit,
+        string? 
continuationToken = null, + CancellationToken cancellationToken = default) + { + var builder = Builders.Filter; + var filter = builder.Eq("tenantId", tenantId); + if (since is not null) + { + filter &= builder.Gte("sortKey", since.Value.UtcDateTime); + } + + if (!string.IsNullOrWhiteSpace(status)) + { + var statuses = status + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Select(static value => value.ToLowerInvariant()) + .ToArray(); + + if (statuses.Length == 1) + { + filter &= builder.Eq("status", statuses[0]); + } + else if (statuses.Length > 1) + { + filter &= builder.In("status", statuses); + } + } + + if (!string.IsNullOrWhiteSpace(continuationToken)) + { + if (!TryParseContinuationToken(continuationToken, out var continuationSortKey, out var continuationId)) + { + throw new ArgumentException("The continuation token is invalid.", nameof(continuationToken)); + } + + var lessThanSort = builder.Lt("sortKey", continuationSortKey); + var equalSortLowerId = builder.And(builder.Eq("sortKey", continuationSortKey), builder.Lte("_id", continuationId)); + filter &= builder.Or(lessThanSort, equalSortLowerId); + } + + var find = _collection.Find(filter) + .Sort(Builders.Sort.Descending("sortKey").Descending("_id")); + + List documents; + if (limit is > 0) + { + documents = await find.Limit(limit.Value + 1).ToListAsync(cancellationToken).ConfigureAwait(false); + } + else + { + documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false); + } + + string? nextToken = null; + if (limit is > 0 && documents.Count > limit.Value) + { + var overflow = documents[^1]; + documents.RemoveAt(documents.Count - 1); + nextToken = BuildContinuationToken(overflow); + } + + var deliveries = documents.Select(NotifyDeliveryDocumentMapper.FromBsonDocument).ToArray(); + return new NotifyDeliveryQueryResult(deliveries, nextToken); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); + + private static string BuildContinuationToken(BsonDocument document) + { + var sortKey = ResolveSortKey(document); + if (!document.TryGetValue("_id", out var idValue) || !idValue.IsString) + { + throw new InvalidOperationException("Delivery document missing string _id required for continuation token."); + } + + return BuildContinuationToken(sortKey, idValue.AsString); + } + + private static DateTime ResolveSortKey(BsonDocument document) + { + if (document.TryGetValue("sortKey", out var sortValue) && sortValue.IsValidDateTime) + { + return sortValue.ToUniversalTime(); + } + + if (document.TryGetValue("completedAt", out var completed) && completed.IsValidDateTime) + { + return completed.ToUniversalTime(); + } + + if (document.TryGetValue("sentAt", out var sent) && sent.IsValidDateTime) + { + return sent.ToUniversalTime(); + } + + var created = document["createdAt"]; + return created.ToUniversalTime(); + } + + private static string BuildContinuationToken(DateTime sortKey, string id) + => FormattableString.Invariant($"{sortKey:O}|{id}"); + + private static bool TryParseContinuationToken(string token, out DateTime sortKey, out string id) + { + sortKey = default; + id = string.Empty; + + var parts = token.Split('|', 2, StringSplitOptions.TrimEntries); + if (parts.Length != 2) + { + return false; + 
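To show how the continuation-token plumbing above is consumed end to end, here is a minimal paging sketch against INotifyDeliveryRepository. The tenant id, "failed" status filter, time window, and page size are illustrative assumptions, not values taken from this change.

// Hypothetical caller paging through delivery history; filters are illustrative only.
async Task<IReadOnlyList<NotifyDelivery>> LoadRecentFailuresAsync(
    INotifyDeliveryRepository deliveries,
    CancellationToken cancellationToken)
{
    var results = new List<NotifyDelivery>();
    string? continuationToken = null;

    do
    {
        // QueryAsync returns up to `limit` items plus an opaque token
        // ("{sortKey:O}|{_id}") when more pages remain.
        var page = await deliveries.QueryAsync(
            tenantId: "tenant-web",
            since: DateTimeOffset.UtcNow.AddDays(-7),
            status: "failed",
            limit: 100,
            continuationToken: continuationToken,
            cancellationToken: cancellationToken);

        results.AddRange(page.Items);
        continuationToken = page.ContinuationToken;
    }
    while (continuationToken is not null);

    return results;
}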
} + + if (!DateTime.TryParseExact(parts[0], "O", CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsedSort)) + { + return false; + } + + if (string.IsNullOrWhiteSpace(parts[1])) + { + return false; + } + + sortKey = parsedSort.ToUniversalTime(); + id = parts[1]; + return true; + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs new file mode 100644 index 00000000..131ca2b3 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs @@ -0,0 +1,44 @@ +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +internal sealed class NotifyDigestRepository : INotifyDigestRepository +{ + private readonly IMongoCollection _collection; + + public NotifyDigestRepository(NotifyMongoContext context) + { + ArgumentNullException.ThrowIfNull(context); + _collection = context.Database.GetCollection(context.Options.DigestsCollection); + } + + public async Task GetAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Id, CreateDocumentId(tenantId, actionKey)); + return await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + } + + public async Task UpsertAsync(NotifyDigestDocument document, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(document); + document.Id = CreateDocumentId(document.TenantId, document.ActionKey); + var filter = Builders.Filter.Eq(x => x.Id, document.Id); + await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + + public async Task RemoveAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Id, CreateDocumentId(tenantId, actionKey)); + await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + } + + private static string CreateDocumentId(string tenantId, string actionKey) + => string.Create(tenantId.Length + actionKey.Length + 1, (tenantId, actionKey), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.actionKey.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs new file mode 100644 index 00000000..2d47e4b9 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs @@ -0,0 +1,71 @@ +using MongoDB.Driver; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Internal; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +internal sealed class NotifyLockRepository : INotifyLockRepository +{ + private readonly IMongoCollection _collection; + + public NotifyLockRepository(NotifyMongoContext context) + { + ArgumentNullException.ThrowIfNull(context); + _collection = context.Database.GetCollection(context.Options.LocksCollection); + } + + public async Task TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default) + { + var now = DateTimeOffset.UtcNow; + var document = new NotifyLockDocument + { + 
Id = CreateDocumentId(tenantId, resource), + TenantId = tenantId, + Resource = resource, + Owner = owner, + AcquiredAt = now, + ExpiresAt = now.Add(ttl) + }; + + var candidateFilter = Builders.Filter.Eq(x => x.Id, document.Id); + var takeoverFilter = candidateFilter & Builders.Filter.Lt(x => x.ExpiresAt, now.UtcDateTime); + var sameOwnerFilter = candidateFilter & Builders.Filter.Eq(x => x.Owner, owner); + + var update = Builders.Update + .Set(x => x.TenantId, document.TenantId) + .Set(x => x.Resource, document.Resource) + .Set(x => x.Owner, document.Owner) + .Set(x => x.AcquiredAt, document.AcquiredAt) + .Set(x => x.ExpiresAt, document.ExpiresAt); + + try + { + var result = await _collection.UpdateOneAsync( + takeoverFilter | sameOwnerFilter, + update.SetOnInsert(x => x.Id, document.Id), + new UpdateOptions { IsUpsert = true }, + cancellationToken).ConfigureAwait(false); + + return result.MatchedCount > 0 || result.UpsertedId != null; + } + catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey) + { + return false; + } + } + + public async Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq(x => x.Id, CreateDocumentId(tenantId, resource)) + & Builders.Filter.Eq(x => x.Owner, owner); + await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs new file mode 100644 index 00000000..e50f806c --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs @@ -0,0 +1,73 @@ +using System; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Serialization; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +internal sealed class NotifyRuleRepository : INotifyRuleRepository +{ + private readonly IMongoCollection _collection; + + public NotifyRuleRepository(NotifyMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection(context.Options.RulesCollection); + } + + public async Task UpsertAsync(NotifyRule rule, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(rule); + var document = NotifyRuleDocumentMapper.ToBsonDocument(rule); + var filter = Builders.Filter.Eq("_id", CreateDocumentId(rule.TenantId, rule.RuleId)); + + await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + + public async Task GetAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("_id", CreateDocumentId(tenantId, ruleId)) + & Builders.Filter.Or( + Builders.Filter.Exists("deletedAt", false), + Builders.Filter.Eq("deletedAt", BsonNull.Value)); + + var document = await 
_collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : NotifyRuleDocumentMapper.FromBsonDocument(document); + } + + public async Task> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("tenantId", tenantId) + & Builders.Filter.Or( + Builders.Filter.Exists("deletedAt", false), + Builders.Filter.Eq("deletedAt", BsonNull.Value)); + var cursor = await _collection.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false); + return cursor.Select(NotifyRuleDocumentMapper.FromBsonDocument).ToArray(); + } + + public async Task DeleteAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("_id", CreateDocumentId(tenantId, ruleId)); + await _collection.UpdateOneAsync(filter, + Builders.Update + .Set("deletedAt", DateTime.UtcNow) + .Set("enabled", false), + new UpdateOptions { IsUpsert = false }, + cancellationToken).ConfigureAwait(false); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs new file mode 100644 index 00000000..f062d253 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs @@ -0,0 +1,70 @@ +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Serialization; + +namespace StellaOps.Notify.Storage.Mongo.Repositories; + +internal sealed class NotifyTemplateRepository : INotifyTemplateRepository +{ + private readonly IMongoCollection _collection; + + public NotifyTemplateRepository(NotifyMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection(context.Options.TemplatesCollection); + } + + public async Task UpsertAsync(NotifyTemplate template, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(template); + var document = NotifyTemplateDocumentMapper.ToBsonDocument(template); + var filter = Builders.Filter.Eq("_id", CreateDocumentId(template.TenantId, template.TemplateId)); + + await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + + public async Task GetAsync(string tenantId, string templateId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("_id", CreateDocumentId(tenantId, templateId)) + & Builders.Filter.Or( + Builders.Filter.Exists("deletedAt", false), + Builders.Filter.Eq("deletedAt", BsonNull.Value)); + + var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? 
null : NotifyTemplateDocumentMapper.FromBsonDocument(document); + } + + public async Task> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("tenantId", tenantId) + & Builders.Filter.Or( + Builders.Filter.Exists("deletedAt", false), + Builders.Filter.Eq("deletedAt", BsonNull.Value)); + var cursor = await _collection.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false); + return cursor.Select(NotifyTemplateDocumentMapper.FromBsonDocument).ToArray(); + } + + public async Task DeleteAsync(string tenantId, string templateId, CancellationToken cancellationToken = default) + { + var filter = Builders.Filter.Eq("_id", CreateDocumentId(tenantId, templateId)); + await _collection.UpdateOneAsync(filter, + Builders.Update.Set("deletedAt", DateTime.UtcNow), + new UpdateOptions { IsUpsert = false }, + cancellationToken).ConfigureAwait(false); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs b/src/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs new file mode 100644 index 00000000..790ce7bc --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs @@ -0,0 +1,129 @@ +using System.Globalization; +using System.Text.Json.Nodes; +using MongoDB.Bson; +using MongoDB.Bson.IO; + +namespace StellaOps.Notify.Storage.Mongo.Serialization; + +internal static class BsonDocumentJsonExtensions +{ + public static JsonNode ToCanonicalJsonNode(this BsonDocument document, params string[] fieldsToRemove) + { + ArgumentNullException.ThrowIfNull(document); + + var clone = document.DeepClone().AsBsonDocument; + clone.Remove("_id"); + if (fieldsToRemove is { Length: > 0 }) + { + foreach (var field in fieldsToRemove) + { + clone.Remove(field); + } + } + + var json = clone.ToJson(new JsonWriterSettings + { + OutputMode = JsonOutputMode.RelaxedExtendedJson, + Indent = false + }); + + var node = JsonNode.Parse(json) ?? throw new InvalidOperationException("Unable to parse BsonDocument JSON."); + return NormalizeExtendedJson(node); + } + + private static JsonNode NormalizeExtendedJson(JsonNode node) + { + if (node is JsonObject obj) + { + if (TryConvertExtendedDate(obj, out var replacement)) + { + return replacement; + } + + foreach (var property in obj.ToList()) + { + if (property.Value is null) + { + continue; + } + + var normalized = NormalizeExtendedJson(property.Value); + if (!ReferenceEquals(normalized, property.Value)) + { + obj[property.Key] = normalized; + } + } + + return obj; + } + + if (node is JsonArray array) + { + for (var i = 0; i < array.Count; i++) + { + if (array[i] is null) + { + continue; + } + + var normalized = NormalizeExtendedJson(array[i]!); + if (!ReferenceEquals(normalized, array[i])) + { + array[i] = normalized; + } + } + + return array; + } + + return node; + } + + private static bool TryConvertExtendedDate(JsonObject obj, out JsonNode replacement) + { + replacement = obj; + if (obj.Count != 1 || !obj.TryGetPropertyValue("$date", out var value) || value is null) + { + return false; + } + + if (value is JsonValue directValue) + { + if (directValue.TryGetValue(out string? 
dateString) && TryParseIso(dateString, out var iso)) + { + replacement = JsonValue.Create(iso); + return true; + } + + if (directValue.TryGetValue(out long epochMilliseconds)) + { + replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(epochMilliseconds).ToString("O")); + return true; + } + } + else if (value is JsonObject nested && nested.TryGetPropertyValue("$numberLong", out var numberNode) && numberNode is JsonValue numberValue && numberValue.TryGetValue(out string? numberString) && long.TryParse(numberString, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ms)) + { + replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(ms).ToString("O")); + return true; + } + + return false; + } + + private static bool TryParseIso(string? value, out string iso) + { + iso = string.Empty; + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsed)) + { + iso = parsed.ToUniversalTime().ToString("O"); + return true; + } + + return false; + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs new file mode 100644 index 00000000..a1fb7850 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Nodes; +using MongoDB.Bson; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Serialization; + +internal static class NotifyChannelDocumentMapper +{ + public static BsonDocument ToBsonDocument(NotifyChannel channel) + { + ArgumentNullException.ThrowIfNull(channel); + var json = NotifyCanonicalJsonSerializer.Serialize(channel); + var document = BsonDocument.Parse(json); + document["_id"] = BsonValue.Create(CreateDocumentId(channel.TenantId, channel.ChannelId)); + return document; + } + + public static NotifyChannel FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var node = document.ToCanonicalJsonNode(); + return NotifySchemaMigration.UpgradeChannel(node); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs new file mode 100644 index 00000000..f65090d2 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs @@ -0,0 +1,46 @@ +using System.Text.Json.Nodes; +using MongoDB.Bson; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Serialization; + +internal static class NotifyDeliveryDocumentMapper +{ + public static BsonDocument ToBsonDocument(NotifyDelivery delivery) + { + ArgumentNullException.ThrowIfNull(delivery); + var json = NotifyCanonicalJsonSerializer.Serialize(delivery); + var document = BsonDocument.Parse(json); + document["_id"] = BsonValue.Create(CreateDocumentId(delivery.TenantId, delivery.DeliveryId)); + document["tenantId"] = delivery.TenantId; + document["createdAt"] = delivery.CreatedAt.UtcDateTime; + if (delivery.SentAt is 
not null) + { + document["sentAt"] = delivery.SentAt.Value.UtcDateTime; + } + + if (delivery.CompletedAt is not null) + { + document["completedAt"] = delivery.CompletedAt.Value.UtcDateTime; + } + + var sortTimestamp = delivery.CompletedAt ?? delivery.SentAt ?? delivery.CreatedAt; + document["sortKey"] = sortTimestamp.UtcDateTime; + return document; + } + + public static NotifyDelivery FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + var node = document.ToCanonicalJsonNode("sortKey"); + return NotifyCanonicalJsonSerializer.Deserialize(node.ToJsonString()); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + => string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs new file mode 100644 index 00000000..1a62b568 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Nodes; +using MongoDB.Bson; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Serialization; + +internal static class NotifyRuleDocumentMapper +{ + public static BsonDocument ToBsonDocument(NotifyRule rule) + { + ArgumentNullException.ThrowIfNull(rule); + var json = NotifyCanonicalJsonSerializer.Serialize(rule); + var document = BsonDocument.Parse(json); + document["_id"] = BsonValue.Create(CreateDocumentId(rule.TenantId, rule.RuleId)); + return document; + } + + public static NotifyRule FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var node = document.ToCanonicalJsonNode(); + return NotifySchemaMigration.UpgradeRule(node); + } + + private static string CreateDocumentId(string tenantId, string ruleId) + => string.Create(tenantId.Length + ruleId.Length + 1, (tenantId, ruleId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.ruleId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs new file mode 100644 index 00000000..4b58f22e --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs @@ -0,0 +1,33 @@ +using System.Text.Json.Nodes; +using MongoDB.Bson; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Storage.Mongo.Serialization; + +internal static class NotifyTemplateDocumentMapper +{ + public static BsonDocument ToBsonDocument(NotifyTemplate template) + { + ArgumentNullException.ThrowIfNull(template); + var json = NotifyCanonicalJsonSerializer.Serialize(template); + var document = BsonDocument.Parse(json); + document["_id"] = BsonValue.Create(CreateDocumentId(template.TenantId, template.TemplateId)); + return document; + } + + public static NotifyTemplate FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var node = document.ToCanonicalJsonNode(); + return NotifySchemaMigration.UpgradeTemplate(node); + } + + private static string CreateDocumentId(string tenantId, string resourceId) + 
=> string.Create(tenantId.Length + resourceId.Length + 1, (tenantId, resourceId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.resourceId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs b/src/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..70ef8b88 --- /dev/null +++ b/src/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Notify.Storage.Mongo.Internal; +using StellaOps.Notify.Storage.Mongo.Migrations; +using StellaOps.Notify.Storage.Mongo.Options; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.Storage.Mongo; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddNotifyMongoStorage(this IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.Configure(configuration); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + return services; + } +} diff --git a/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj b/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj index 6c3a8871..f53b4064 100644 --- a/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj +++ b/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj @@ -4,4 +4,15 @@ enable enable + + + + + + + + + + + diff --git a/src/StellaOps.Notify.Storage.Mongo/TASKS.md b/src/StellaOps.Notify.Storage.Mongo/TASKS.md index a65bbdc6..6b3d929c 100644 --- a/src/StellaOps.Notify.Storage.Mongo/TASKS.md +++ b/src/StellaOps.Notify.Storage.Mongo/TASKS.md @@ -2,6 +2,6 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| NOTIFY-STORAGE-15-201 | TODO | Notify Storage Guild | NOTIFY-MODELS-15-101 | Create Mongo schemas/collections (rules, channels, deliveries, digests, locks, audit) with indexes per architecture §7. | Migration scripts authored; indexes tested; integration tests cover CRUD/read paths. | -| NOTIFY-STORAGE-15-202 | TODO | Notify Storage Guild | NOTIFY-STORAGE-15-201 | Implement repositories/services with tenant scoping, soft deletes, TTL, causal consistency (majority) options. | Repositories unit-tested; soft delete + TTL validated; majority read/write configuration documented. | -| NOTIFY-STORAGE-15-203 | TODO | Notify Storage Guild | NOTIFY-STORAGE-15-201 | Delivery history retention + query APIs (paging, filters). | History queries return expected data; paging verified; docs updated. | +| NOTIFY-STORAGE-15-201 | DONE (2025-10-19) | Notify Storage Guild | NOTIFY-MODELS-15-101 | Create Mongo schemas/collections (rules, channels, deliveries, digests, locks, audit) with indexes per architecture §7. | Migration scripts authored; indexes tested; integration tests cover CRUD/read paths. 
| +| NOTIFY-STORAGE-15-202 | DONE (2025-10-19) | Notify Storage Guild | NOTIFY-STORAGE-15-201 | Implement repositories/services with tenant scoping, soft deletes, TTL, causal consistency (majority) options. | Repositories unit-tested; soft delete + TTL validated; majority read/write configuration documented. | +| NOTIFY-STORAGE-15-203 | DONE (2025-10-19) | Notify Storage Guild | NOTIFY-STORAGE-15-201 | Delivery history retention + query APIs (paging, filters). | History queries return expected data; paging verified; docs updated. | diff --git a/src/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs b/src/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs new file mode 100644 index 00000000..79019435 --- /dev/null +++ b/src/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs @@ -0,0 +1,246 @@ +using System.IdentityModel.Tokens.Jwt; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Text.Json.Nodes; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.WebService.Tests; + +public sealed class CrudEndpointsTests : IClassFixture>, IAsyncLifetime +{ + private const string SigningKey = "super-secret-test-key-1234567890"; + private const string Issuer = "test-issuer"; + private const string Audience = "notify"; + + private readonly WebApplicationFactory _factory; + private readonly string _adminToken; + private readonly string _readToken; + + public CrudEndpointsTests(WebApplicationFactory factory) + { + _factory = factory.WithWebHostBuilder(builder => + { + builder.UseSetting("notify:storage:driver", "memory"); + builder.UseSetting("notify:authority:enabled", "false"); + builder.UseSetting("notify:authority:developmentSigningKey", SigningKey); + builder.UseSetting("notify:authority:issuer", Issuer); + builder.UseSetting("notify:authority:audiences:0", Audience); + builder.UseSetting("notify:authority:adminScope", "notify.admin"); + builder.UseSetting("notify:authority:readScope", "notify.read"); + builder.UseSetting("notify:telemetry:enableRequestLogging", "false"); + }); + + _adminToken = CreateToken("notify.admin"); + _readToken = CreateToken("notify.read"); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; + + [Fact] + public async Task RuleCrudLifecycle() + { + var client = _factory.CreateClient(); + var payload = LoadSample("notify-rule@1.sample.json"); + payload["ruleId"] = "rule-web"; + payload["tenantId"] = "tenant-web"; + payload["actions"]!.AsArray()[0]! ["actionId"] = "action-web"; + + await PostAsync(client, "/api/v1/notify/rules", payload); + + var list = await GetJsonArrayAsync(client, "/api/v1/notify/rules", useAdminToken: false); + Assert.Equal("rule-web", list?[0]? ["ruleId"]?.GetValue()); + + var single = await GetJsonObjectAsync(client, "/api/v1/notify/rules/rule-web", useAdminToken: false); + Assert.Equal("tenant-web", single? 
["tenantId"]?.GetValue()); + + await DeleteAsync(client, "/api/v1/notify/rules/rule-web"); + var afterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/rules/rule-web", useAdminToken: false); + Assert.Equal(HttpStatusCode.NotFound, afterDelete.StatusCode); + } + + [Fact] + public async Task ChannelTemplateDeliveryAndAuditFlows() + { + var client = _factory.CreateClient(); + + var channelPayload = LoadSample("notify-channel@1.sample.json"); + channelPayload["channelId"] = "channel-web"; + channelPayload["tenantId"] = "tenant-web"; + await PostAsync(client, "/api/v1/notify/channels", channelPayload); + + var templatePayload = LoadSample("notify-template@1.sample.json"); + templatePayload["templateId"] = "template-web"; + templatePayload["tenantId"] = "tenant-web"; + await PostAsync(client, "/api/v1/notify/templates", templatePayload); + + var delivery = NotifyDelivery.Create( + deliveryId: "delivery-web", + tenantId: "tenant-web", + ruleId: "rule-web", + actionId: "channel-web", + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + status: NotifyDeliveryStatus.Sent, + createdAt: DateTimeOffset.UtcNow, + sentAt: DateTimeOffset.UtcNow); + + var deliveryNode = JsonNode.Parse(NotifyCanonicalJsonSerializer.Serialize(delivery))!; + await PostAsync(client, "/api/v1/notify/deliveries", deliveryNode); + + var deliveries = await GetJsonArrayAsync(client, "/api/v1/notify/deliveries?limit=10", useAdminToken: false); + Assert.NotNull(deliveries); + Assert.NotEmpty(deliveries!.OfType()); + + var digestNode = new JsonObject + { + ["tenantId"] = "tenant-web", + ["actionKey"] = "channel-web", + ["window"] = "hourly", + ["openedAt"] = DateTimeOffset.UtcNow.ToString("O"), + ["status"] = "open", + ["items"] = new JsonArray() + }; + await PostAsync(client, "/api/v1/notify/digests", digestNode); + + var digest = await GetJsonObjectAsync(client, "/api/v1/notify/digests/channel-web", useAdminToken: false); + Assert.Equal("channel-web", digest? ["actionKey"]?.GetValue()); + + var auditPayload = JsonNode.Parse(""" + { + "action": "create-rule", + "entityType": "rule", + "entityId": "rule-web", + "payload": {"ruleId": "rule-web"} + } + """)!; + await PostAsync(client, "/api/v1/notify/audit", auditPayload); + + var audits = await GetJsonArrayAsync(client, "/api/v1/notify/audit", useAdminToken: false); + Assert.NotNull(audits); + Assert.Contains(audits!.OfType(), entry => entry?["action"]?.GetValue() == "create-rule"); + + await DeleteAsync(client, "/api/v1/notify/digests/channel-web"); + var digestAfterDelete = await SendAsync(client, HttpMethod.Get, "/api/v1/notify/digests/channel-web", useAdminToken: false); + Assert.Equal(HttpStatusCode.NotFound, digestAfterDelete.StatusCode); + } + + [Fact] + public async Task LockEndpointsAllowAcquireAndRelease() + { + var client = _factory.CreateClient(); + var acquirePayload = JsonNode.Parse(""" + { + "resource": "workers", + "owner": "worker-1", + "ttlSeconds": 30 + } + """)!; + + var acquireResponse = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); + var acquireContent = JsonNode.Parse(await acquireResponse.Content.ReadAsStringAsync()); + Assert.True(acquireContent? 
["acquired"]?.GetValue()); + + await PostAsync(client, "/api/v1/notify/locks/release", JsonNode.Parse(""" + { + "resource": "workers", + "owner": "worker-1" + } + """)!); + + var secondAcquire = await PostAsync(client, "/api/v1/notify/locks/acquire", acquirePayload); + var secondContent = JsonNode.Parse(await secondAcquire.Content.ReadAsStringAsync()); + Assert.True(secondContent? ["acquired"]?.GetValue()); + } + + private static JsonNode LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return JsonNode.Parse(File.ReadAllText(path)) ?? throw new InvalidOperationException("Sample JSON null."); + } + + private async Task GetJsonArrayAsync(HttpClient client, string path, bool useAdminToken) + { + var response = await SendAsync(client, HttpMethod.Get, path, useAdminToken); + response.EnsureSuccessStatusCode(); + var content = await response.Content.ReadAsStringAsync(); + return JsonNode.Parse(content) as JsonArray; + } + + private async Task GetJsonObjectAsync(HttpClient client, string path, bool useAdminToken) + { + var response = await SendAsync(client, HttpMethod.Get, path, useAdminToken); + response.EnsureSuccessStatusCode(); + var content = await response.Content.ReadAsStringAsync(); + return JsonNode.Parse(content) as JsonObject; + } + + private async Task PostAsync(HttpClient client, string path, JsonNode payload, bool useAdminToken = true) + { + var request = new HttpRequestMessage(HttpMethod.Post, path) + { + Content = new StringContent(payload.ToJsonString(), Encoding.UTF8, "application/json") + }; + + var response = await SendAsync(client, request, useAdminToken); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(); + throw new InvalidOperationException($"Request to {path} failed with {(int)response.StatusCode} {response.StatusCode}: {body}"); + } + + return response; + } + + private Task PostAsync(HttpClient client, string path, JsonNode payload) + => PostAsync(client, path, payload, useAdminToken: true); + + private async Task DeleteAsync(HttpClient client, string path) + { + var response = await SendAsync(client, HttpMethod.Delete, path); + response.EnsureSuccessStatusCode(); + } + + private Task SendAsync(HttpClient client, HttpMethod method, string path, bool useAdminToken = true) + => SendAsync(client, new HttpRequestMessage(method, path), useAdminToken); + + private Task SendAsync(HttpClient client, HttpRequestMessage request, bool useAdminToken = true) + { + request.Headers.Add("X-StellaOps-Tenant", "tenant-web"); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", useAdminToken ? 
_adminToken : _readToken); + return client.SendAsync(request); + } + + private static string CreateToken(string scope) + { + var handler = new JwtSecurityTokenHandler(); + var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(SigningKey)); + var descriptor = new SecurityTokenDescriptor + { + Issuer = Issuer, + Audience = Audience, + Expires = DateTime.UtcNow.AddMinutes(10), + SigningCredentials = new SigningCredentials(key, SecurityAlgorithms.HmacSha256), + Subject = new System.Security.Claims.ClaimsIdentity(new[] + { + new System.Security.Claims.Claim("scope", scope), + new System.Security.Claims.Claim(System.Security.Claims.ClaimTypes.Name, "integration-test") + }) + }; + + var token = handler.CreateToken(descriptor); + return handler.WriteToken(token); + } +} diff --git a/src/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs b/src/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs new file mode 100644 index 00000000..556ae81f --- /dev/null +++ b/src/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs @@ -0,0 +1,86 @@ +using System.Net.Http.Json; +using System.Text.Json.Nodes; +using Microsoft.AspNetCore.Mvc.Testing; + +namespace StellaOps.Notify.WebService.Tests; + +public sealed class NormalizeEndpointsTests : IClassFixture>, IAsyncLifetime +{ + private readonly WebApplicationFactory _factory; + + public NormalizeEndpointsTests(WebApplicationFactory factory) + { + _factory = factory.WithWebHostBuilder(builder => + { + builder.UseSetting("notify:storage:driver", "memory"); + builder.UseSetting("notify:authority:enabled", "false"); + builder.UseSetting("notify:authority:developmentSigningKey", "normalize-tests-signing-key-1234567890"); + builder.UseSetting("notify:authority:issuer", "test-issuer"); + builder.UseSetting("notify:authority:audiences:0", "notify"); + builder.UseSetting("notify:telemetry:enableRequestLogging", "false"); + }); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; + + [Fact] + public async Task RuleNormalizeAddsSchemaVersion() + { + var client = _factory.CreateClient(); + var payload = LoadSampleNode("notify-rule@1.sample.json"); + payload!.AsObject().Remove("schemaVersion"); + + var response = await client.PostAsJsonAsync("/internal/notify/rules/normalize", payload); + response.EnsureSuccessStatusCode(); + + var content = await response.Content.ReadAsStringAsync(); + var normalized = JsonNode.Parse(content); + + Assert.Equal("notify.rule@1", normalized?["schemaVersion"]?.GetValue()); + } + + [Fact] + public async Task ChannelNormalizeAddsSchemaVersion() + { + var client = _factory.CreateClient(); + var payload = LoadSampleNode("notify-channel@1.sample.json"); + payload!.AsObject().Remove("schemaVersion"); + + var response = await client.PostAsJsonAsync("/internal/notify/channels/normalize", payload); + response.EnsureSuccessStatusCode(); + + var content = await response.Content.ReadAsStringAsync(); + var normalized = JsonNode.Parse(content); + + Assert.Equal("notify.channel@1", normalized?["schemaVersion"]?.GetValue()); + } + + [Fact] + public async Task TemplateNormalizeAddsSchemaVersion() + { + var client = _factory.CreateClient(); + var payload = LoadSampleNode("notify-template@1.sample.json"); + payload!.AsObject().Remove("schemaVersion"); + + var response = await client.PostAsJsonAsync("/internal/notify/templates/normalize", payload); + response.EnsureSuccessStatusCode(); + + var content = await response.Content.ReadAsStringAsync(); + var normalized = 
JsonNode.Parse(content); + + Assert.Equal("notify.template@1", normalized?["schemaVersion"]?.GetValue()); + } + + private static JsonNode? LoadSampleNode(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to load sample '{fileName}'.", path); + } + + return JsonNode.Parse(File.ReadAllText(path)); + } +} diff --git a/src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj b/src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj new file mode 100644 index 00000000..15795482 --- /dev/null +++ b/src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj @@ -0,0 +1,18 @@ + + + net10.0 + enable + enable + + + + + + + + + + Always + + + diff --git a/src/StellaOps.Notify.WebService/Contracts/LockRequests.cs b/src/StellaOps.Notify.WebService/Contracts/LockRequests.cs new file mode 100644 index 00000000..09234f76 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Contracts/LockRequests.cs @@ -0,0 +1,5 @@ +namespace StellaOps.Notify.WebService.Contracts; + +internal sealed record AcquireLockRequest(string Resource, string Owner, int TtlSeconds); + +internal sealed record ReleaseLockRequest(string Resource, string Owner); diff --git a/src/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs b/src/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs new file mode 100644 index 00000000..e4034177 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs @@ -0,0 +1,47 @@ +using System; + +namespace StellaOps.Notify.WebService.Diagnostics; + +/// +/// Tracks Notify WebService readiness information for `/readyz`. +/// +internal sealed class ServiceStatus +{ + private readonly TimeProvider _timeProvider; + private readonly DateTimeOffset _startedAt; + private ReadySnapshot _readySnapshot; + + public ServiceStatus(TimeProvider timeProvider) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _startedAt = _timeProvider.GetUtcNow(); + _readySnapshot = ReadySnapshot.CreateInitial(_startedAt); + } + + public ServiceSnapshot CreateSnapshot() + { + var now = _timeProvider.GetUtcNow(); + return new ServiceSnapshot(_startedAt, now, _readySnapshot); + } + + public void RecordReadyCheck(bool success, TimeSpan latency, string? errorMessage = null) + { + var timestamp = _timeProvider.GetUtcNow(); + _readySnapshot = new ReadySnapshot(timestamp, latency, success, success ? null : errorMessage); + } + + public readonly record struct ServiceSnapshot( + DateTimeOffset StartedAt, + DateTimeOffset CapturedAt, + ReadySnapshot Ready); + + public readonly record struct ReadySnapshot( + DateTimeOffset CheckedAt, + TimeSpan? Latency, + bool IsReady, + string? 
Error) + { + public static ReadySnapshot CreateInitial(DateTimeOffset timestamp) + => new(timestamp, null, false, "initialising"); + } +} diff --git a/src/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs b/src/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs new file mode 100644 index 00000000..6addeb14 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs @@ -0,0 +1,37 @@ +using System.IO; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Configuration; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace StellaOps.Notify.WebService.Extensions; + +internal static class ConfigurationExtensions +{ + public static IConfigurationBuilder AddNotifyYaml(this IConfigurationBuilder builder, string path) + { + ArgumentNullException.ThrowIfNull(builder); + + if (string.IsNullOrWhiteSpace(path) || !File.Exists(path)) + { + return builder; + } + + var deserializer = new DeserializerBuilder() + .WithNamingConvention(CamelCaseNamingConvention.Instance) + .Build(); + + using var reader = File.OpenText(path); + var yamlObject = deserializer.Deserialize(reader); + + if (yamlObject is null) + { + return builder; + } + + var payload = JsonSerializer.Serialize(yamlObject); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(payload)); + return builder.AddJsonStream(stream); + } +} diff --git a/src/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs b/src/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs new file mode 100644 index 00000000..976ad24e --- /dev/null +++ b/src/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs @@ -0,0 +1,44 @@ +using System; +using System.IO; +using StellaOps.Notify.WebService.Options; +using StellaOps.Plugin.Hosting; + +namespace StellaOps.Notify.WebService.Hosting; + +internal static class NotifyPluginHostFactory +{ + public static PluginHostOptions Build(NotifyWebServiceOptions options, string contentRootPath) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(contentRootPath); + + var hostOptions = new PluginHostOptions + { + BaseDirectory = options.Plugins.BaseDirectory ?? Path.Combine(contentRootPath, ".."), + PluginsDirectory = options.Plugins.Directory ?? Path.Combine("plugins", "notify"), + PrimaryPrefix = "StellaOps.Notify" + }; + + if (!Path.IsPathRooted(hostOptions.BaseDirectory)) + { + hostOptions.BaseDirectory = Path.GetFullPath(Path.Combine(contentRootPath, hostOptions.BaseDirectory)); + } + + if (!Path.IsPathRooted(hostOptions.PluginsDirectory)) + { + hostOptions.PluginsDirectory = Path.Combine(hostOptions.BaseDirectory, hostOptions.PluginsDirectory); + } + + foreach (var pattern in options.Plugins.SearchPatterns) + { + hostOptions.SearchPatterns.Add(pattern); + } + + foreach (var prefix in options.Plugins.OrderedPlugins) + { + hostOptions.PluginOrder.Add(prefix); + } + + return hostOptions; + } +} diff --git a/src/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs b/src/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs new file mode 100644 index 00000000..311c58a7 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs @@ -0,0 +1,29 @@ +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Notify.WebService.Internal; + +internal sealed class JsonHttpResult : IResult +{ + private readonly string _payload; + private readonly int _statusCode; + private readonly string? 
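// NOTE (illustrative sketch, not from the original change): AddNotifyYaml above round-trips a YAML
// file through JSON so the standard configuration binder can read it, using camelCase key naming.
// Assuming the file carries a top-level "notify" key, a hypothetical ../etc/notify.yaml such as
//     notify:
//       storage:
//         driver: mongo
//         connectionString: "mongodb://localhost:27017"
// surfaces as configuration keys like "notify:storage:driver". A missing or empty file is simply
// skipped rather than treated as an error.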
_location; + + public JsonHttpResult(string payload, int statusCode, string? location) + { + _payload = payload; + _statusCode = statusCode; + _location = location; + } + + public async Task ExecuteAsync(HttpContext httpContext) + { + httpContext.Response.StatusCode = _statusCode; + httpContext.Response.ContentType = "application/json"; + if (!string.IsNullOrWhiteSpace(_location)) + { + httpContext.Response.Headers.Location = _location; + } + + await httpContext.Response.WriteAsync(_payload); + } +} diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs new file mode 100644 index 00000000..c6b69d0d --- /dev/null +++ b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs @@ -0,0 +1,107 @@ +using System.Collections.Generic; + +namespace StellaOps.Notify.WebService.Options; + +/// +/// Strongly typed configuration for the Notify WebService host. +/// +public sealed class NotifyWebServiceOptions +{ + public const string SectionName = "notify"; + + /// + /// Schema version that downstream consumers can use to detect breaking changes. + /// + public int SchemaVersion { get; set; } = 1; + + /// + /// Authority / authentication configuration. + /// + public AuthorityOptions Authority { get; set; } = new(); + + /// + /// Mongo storage configuration for configuration state and audit logs. + /// + public StorageOptions Storage { get; set; } = new(); + + /// + /// Plug-in loader configuration. + /// + public PluginOptions Plugins { get; set; } = new(); + + /// + /// HTTP API behaviour. + /// + public ApiOptions Api { get; set; } = new(); + + /// + /// Telemetry configuration toggles. + /// + public TelemetryOptions Telemetry { get; set; } = new(); + + public sealed class AuthorityOptions + { + public bool Enabled { get; set; } = true; + + public bool AllowAnonymousFallback { get; set; } + + public string Issuer { get; set; } = "https://authority.local"; + + public string? MetadataAddress { get; set; } + + public bool RequireHttpsMetadata { get; set; } = true; + + public int BackchannelTimeoutSeconds { get; set; } = 30; + + public int TokenClockSkewSeconds { get; set; } = 60; + + public IList Audiences { get; set; } = new List { "notify" }; + + public string ReadScope { get; set; } = "notify.read"; + + public string AdminScope { get; set; } = "notify.admin"; + + /// + /// Optional development signing key for symmetric JWT validation when Authority is disabled. + /// + public string? DevelopmentSigningKey { get; set; } + } + + public sealed class StorageOptions + { + public string Driver { get; set; } = "mongo"; + + public string ConnectionString { get; set; } = string.Empty; + + public string Database { get; set; } = "notify"; + + public int CommandTimeoutSeconds { get; set; } = 30; + } + + public sealed class PluginOptions + { + public string? BaseDirectory { get; set; } + + public string? 
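// NOTE (illustrative sketch, not from the original change): taken together these option classes
// bind the "notify" section; a minimal shape, using the defaults declared here (values assumed):
//     notify:
//       authority: { enabled: true, issuer: "https://authority.local", audiences: [notify] }
//       storage:   { driver: mongo, connectionString: "mongodb://...", database: notify }
//       api:       { basePath: /api/v1/notify, internalBasePath: /internal/notify, tenantHeader: X-StellaOps-Tenant }
//       telemetry: { enableRequestLogging: true, minimumLogLevel: Information }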
Directory { get; set; } + + public IList SearchPatterns { get; set; } = new List(); + + public IList OrderedPlugins { get; set; } = new List(); + } + + public sealed class ApiOptions + { + public string BasePath { get; set; } = "/api/v1/notify"; + + public string InternalBasePath { get; set; } = "/internal/notify"; + + public string TenantHeader { get; set; } = "X-StellaOps-Tenant"; + } + + public sealed class TelemetryOptions + { + public bool EnableRequestLogging { get; set; } = true; + + public string MinimumLogLevel { get; set; } = "Information"; + } +} diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs new file mode 100644 index 00000000..8b2a1080 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs @@ -0,0 +1,47 @@ +using System; +using System.IO; + +namespace StellaOps.Notify.WebService.Options; + +internal static class NotifyWebServiceOptionsPostConfigure +{ + public static void Apply(NotifyWebServiceOptions options, string contentRootPath) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(contentRootPath); + + NormalizePluginOptions(options.Plugins, contentRootPath); + } + + private static void NormalizePluginOptions(NotifyWebServiceOptions.PluginOptions plugins, string contentRootPath) + { + ArgumentNullException.ThrowIfNull(plugins); + + var baseDirectory = plugins.BaseDirectory; + if (string.IsNullOrWhiteSpace(baseDirectory)) + { + baseDirectory = Path.Combine(contentRootPath, ".."); + } + else if (!Path.IsPathRooted(baseDirectory)) + { + baseDirectory = Path.GetFullPath(Path.Combine(contentRootPath, baseDirectory)); + } + + plugins.BaseDirectory = baseDirectory; + + if (string.IsNullOrWhiteSpace(plugins.Directory)) + { + plugins.Directory = Path.Combine("plugins", "notify"); + } + + if (!Path.IsPathRooted(plugins.Directory)) + { + plugins.Directory = Path.Combine(baseDirectory, plugins.Directory); + } + + if (plugins.SearchPatterns.Count == 0) + { + plugins.SearchPatterns.Add("StellaOps.Notify.Connectors.*.dll"); + } + } +} diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs new file mode 100644 index 00000000..a4c4c9d0 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs @@ -0,0 +1,96 @@ +using System; +using System.Linq; + +namespace StellaOps.Notify.WebService.Options; + +internal static class NotifyWebServiceOptionsValidator +{ + public static void Validate(NotifyWebServiceOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + ValidateStorage(options.Storage); + ValidateAuthority(options.Authority); + ValidateApi(options.Api); + } + + private static void ValidateStorage(NotifyWebServiceOptions.StorageOptions storage) + { + ArgumentNullException.ThrowIfNull(storage); + + var driver = storage.Driver ?? 
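// NOTE (illustrative sketch, not from the original change): the post-configure step above makes
// plug-in paths deterministic: a relative baseDirectory is rooted against the content root
// (defaulting to its parent directory), the plug-in directory defaults to "plugins/notify" under
// that base, and "StellaOps.Notify.Connectors.*.dll" becomes the search pattern when none is set.
// For example (paths assumed), a content root of /opt/stellaops/notify with no overrides ends up
// probing /opt/stellaops/notify/../plugins/notify, i.e. effectively /opt/stellaops/plugins/notify.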
string.Empty; + if (!string.Equals(driver, "mongo", StringComparison.OrdinalIgnoreCase) && + !string.Equals(driver, "memory", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Unsupported storage driver '{storage.Driver}'."); + } + + if (string.Equals(driver, "mongo", StringComparison.OrdinalIgnoreCase)) + { + if (string.IsNullOrWhiteSpace(storage.ConnectionString)) + { + throw new InvalidOperationException("notify:storage:connectionString must be provided."); + } + + if (string.IsNullOrWhiteSpace(storage.Database)) + { + throw new InvalidOperationException("notify:storage:database must be provided."); + } + + if (storage.CommandTimeoutSeconds <= 0) + { + throw new InvalidOperationException("notify:storage:commandTimeoutSeconds must be positive."); + } + } + } + + private static void ValidateAuthority(NotifyWebServiceOptions.AuthorityOptions authority) + { + ArgumentNullException.ThrowIfNull(authority); + + if (authority.Enabled) + { + if (string.IsNullOrWhiteSpace(authority.Issuer)) + { + throw new InvalidOperationException("notify:authority:issuer must be provided when authority is enabled."); + } + + if (authority.Audiences is null || authority.Audiences.Count == 0) + { + throw new InvalidOperationException("notify:authority:audiences must include at least one value."); + } + + if (string.IsNullOrWhiteSpace(authority.AdminScope) || string.IsNullOrWhiteSpace(authority.ReadScope)) + { + throw new InvalidOperationException("notify:authority admin and read scopes must be configured."); + } + } + else + { + if (string.IsNullOrWhiteSpace(authority.DevelopmentSigningKey) || authority.DevelopmentSigningKey.Length < 32) + { + throw new InvalidOperationException("notify:authority:developmentSigningKey must be at least 32 characters when authority is disabled."); + } + } + } + + private static void ValidateApi(NotifyWebServiceOptions.ApiOptions api) + { + ArgumentNullException.ThrowIfNull(api); + + if (!api.BasePath.StartsWith("/", StringComparison.Ordinal)) + { + throw new InvalidOperationException("notify:api:basePath must start with '/'."); + } + + if (!api.InternalBasePath.StartsWith("/", StringComparison.Ordinal)) + { + throw new InvalidOperationException("notify:api:internalBasePath must start with '/'."); + } + + if (string.IsNullOrWhiteSpace(api.TenantHeader)) + { + throw new InvalidOperationException("notify:api:tenantHeader must be provided."); + } + } +} diff --git a/src/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs b/src/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs new file mode 100644 index 00000000..599b6a3c --- /dev/null +++ b/src/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs @@ -0,0 +1,55 @@ +using System; +using System.Threading; +using Microsoft.Extensions.Logging; +using StellaOps.Plugin.Hosting; + +namespace StellaOps.Notify.WebService.Plugins; + +internal interface INotifyPluginRegistry +{ + Task WarmupAsync(CancellationToken cancellationToken = default); +} + +internal sealed class NotifyPluginRegistry : INotifyPluginRegistry +{ + private readonly PluginHostOptions _hostOptions; + private readonly ILogger _logger; + + public NotifyPluginRegistry( + PluginHostOptions hostOptions, + ILogger logger) + { + _hostOptions = hostOptions ?? throw new ArgumentNullException(nameof(hostOptions)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public Task WarmupAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var result = PluginHost.LoadPlugins(_hostOptions, _logger); + + if (result.Plugins.Count == 0) + { + _logger.LogWarning( + "No Notify plug-ins discovered under '{PluginDirectory}'.", + result.PluginDirectory); + } + else + { + _logger.LogInformation( + "Loaded {PluginCount} Notify plug-in(s) from '{PluginDirectory}'.", + result.Plugins.Count, + result.PluginDirectory); + } + + if (result.MissingOrderedPlugins.Count > 0) + { + _logger.LogWarning( + "Configured plug-ins missing from disk: {Missing}.", + string.Join(", ", result.MissingOrderedPlugins)); + } + + return Task.FromResult(result.Plugins.Count); + } +} diff --git a/src/StellaOps.Notify.WebService/Program.Partial.cs b/src/StellaOps.Notify.WebService/Program.Partial.cs new file mode 100644 index 00000000..0b330a68 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Program.Partial.cs @@ -0,0 +1,3 @@ +namespace StellaOps.Notify.WebService; + +public partial class Program; diff --git a/src/StellaOps.Notify.WebService/Program.cs b/src/StellaOps.Notify.WebService/Program.cs new file mode 100644 index 00000000..6721c257 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Program.cs @@ -0,0 +1,712 @@ +using System; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Security.Claims; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using Microsoft.Extensions.Logging; +using Serilog; +using Serilog.Events; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Repositories; +using StellaOps.Notify.WebService.Diagnostics; +using StellaOps.Notify.WebService.Extensions; +using StellaOps.Notify.WebService.Hosting; +using StellaOps.Notify.WebService.Options; +using StellaOps.Notify.WebService.Plugins; +using StellaOps.Notify.WebService.Security; +using StellaOps.Notify.WebService.Services; +using StellaOps.Notify.WebService.Internal; +using StellaOps.Notify.WebService.Storage.InMemory; +using StellaOps.Plugin.DependencyInjection; +using MongoDB.Bson; +using StellaOps.Notify.WebService.Contracts; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "NOTIFY_"; + options.ConfigureBuilder = configurationBuilder => + { + configurationBuilder.AddNotifyYaml(Path.Combine(builder.Environment.ContentRootPath, "../etc/notify.yaml")); + }; +}); + +var contentRootPath = builder.Environment.ContentRootPath; + +var bootstrapOptions = builder.Configuration.BindOptions( + NotifyWebServiceOptions.SectionName, + (opts, _) => + { + NotifyWebServiceOptionsPostConfigure.Apply(opts, contentRootPath); + NotifyWebServiceOptionsValidator.Validate(opts); + }); + +builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(NotifyWebServiceOptions.SectionName)) + .PostConfigure(options => + { + NotifyWebServiceOptionsPostConfigure.Apply(options, 
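// NOTE (illustrative, not from the original change): because the options pipeline below runs this
// validator and is registered with ValidateOnStart(), misconfiguration fails the host at startup
// rather than on first request. For example, "notify:storage:driver" = "mongo" combined with an
// empty "notify:storage:connectionString" throws InvalidOperationException before the app serves traffic.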
contentRootPath); + NotifyWebServiceOptionsValidator.Validate(options); + }) + .ValidateOnStart(); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + var minimumLevel = MapLogLevel(bootstrapOptions.Telemetry.MinimumLogLevel); + + loggerConfiguration + .MinimumLevel.Is(minimumLevel) + .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +if (string.Equals(bootstrapOptions.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) +{ + builder.Services.AddNotifyMongoStorage(builder.Configuration.GetSection("notify:storage")); +} +else +{ + builder.Services.AddInMemoryNotifyStorage(); +} + +var pluginHostOptions = NotifyPluginHostFactory.Build(bootstrapOptions, contentRootPath); +builder.Services.AddSingleton(pluginHostOptions); +builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); +builder.Services.AddSingleton(); + +ConfigureAuthentication(builder, bootstrapOptions); + +builder.Services.AddEndpointsApiExplorer(); + +var app = builder.Build(); + +var readyStatus = app.Services.GetRequiredService(); + +var resolvedOptions = app.Services.GetRequiredService>().Value; +await InitialiseAsync(app.Services, readyStatus, app.Logger, resolvedOptions); + +ConfigureRequestPipeline(app, bootstrapOptions); +ConfigureEndpoints(app); + +await app.RunAsync(); + +static void ConfigureAuthentication(WebApplicationBuilder builder, NotifyWebServiceOptions options) +{ + if (options.Authority.Enabled) + { + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = options.Authority.Issuer; + resourceOptions.RequireHttpsMetadata = options.Authority.RequireHttpsMetadata; + resourceOptions.MetadataAddress = options.Authority.MetadataAddress; + resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(options.Authority.BackchannelTimeoutSeconds); + resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds); + + resourceOptions.Audiences.Clear(); + foreach (var audience in options.Authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + }); + + builder.Services.AddAuthorization(auth => + { + auth.AddStellaOpsScopePolicy(NotifyPolicies.Read, options.Authority.ReadScope); + auth.AddStellaOpsScopePolicy(NotifyPolicies.Admin, options.Authority.AdminScope); + }); + } + else + { + builder.Services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) + .AddJwtBearer(jwt => + { + jwt.RequireHttpsMetadata = false; + jwt.TokenValidationParameters = new TokenValidationParameters + { + ValidateIssuer = true, + ValidIssuer = options.Authority.Issuer, + ValidateAudience = options.Authority.Audiences.Count > 0, + ValidAudiences = options.Authority.Audiences, + ValidateIssuerSigningKey = true, + IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(options.Authority.DevelopmentSigningKey!)), + ValidateLifetime = true, + ClockSkew = TimeSpan.FromSeconds(options.Authority.TokenClockSkewSeconds), + NameClaimType = ClaimTypes.Name + }; + }); + + builder.Services.AddAuthorization(auth => + { + auth.AddPolicy( + NotifyPolicies.Read, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => + HasScope(ctx.User, options.Authority.ReadScope) || + HasScope(ctx.User, 
options.Authority.AdminScope))); + + auth.AddPolicy( + NotifyPolicies.Admin, + policy => policy + .RequireAuthenticatedUser() + .RequireAssertion(ctx => HasScope(ctx.User, options.Authority.AdminScope))); + }); + } +} + +static async Task InitialiseAsync(IServiceProvider services, ServiceStatus status, Microsoft.Extensions.Logging.ILogger logger, NotifyWebServiceOptions options) +{ + var stopwatch = Stopwatch.StartNew(); + + try + { + await using var scope = services.CreateAsyncScope(); + if (string.Equals(options.Storage.Driver, "mongo", StringComparison.OrdinalIgnoreCase)) + { + await RunMongoMigrationsAsync(scope.ServiceProvider); + } + + var registry = scope.ServiceProvider.GetRequiredService(); + var count = await registry.WarmupAsync(); + + stopwatch.Stop(); + status.RecordReadyCheck(success: true, stopwatch.Elapsed); + logger.LogInformation("Notify WebService initialised in {ElapsedMs} ms; loaded {PluginCount} plug-in(s).", stopwatch.Elapsed.TotalMilliseconds, count); + } + catch (Exception ex) + { + stopwatch.Stop(); + status.RecordReadyCheck(success: false, stopwatch.Elapsed, ex.Message); + logger.LogError(ex, "Failed to initialise Notify WebService."); + throw; + } +} + +static async Task RunMongoMigrationsAsync(IServiceProvider services) +{ + var initializerType = Type.GetType("StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"); + if (initializerType is null) + { + return; + } + + var initializer = services.GetService(initializerType); + if (initializer is null) + { + return; + } + + var method = initializerType.GetMethod("EnsureIndexesAsync", new[] { typeof(CancellationToken) }); + if (method is null) + { + return; + } + + if (method.Invoke(initializer, new object[] { CancellationToken.None }) is Task task) + { + await task.ConfigureAwait(false); + } +} + +static void ConfigureRequestPipeline(WebApplication app, NotifyWebServiceOptions options) +{ + if (options.Telemetry.EnableRequestLogging) + { + app.UseSerilogRequestLogging(c => + { + c.IncludeQueryInRequestPath = true; + c.GetLevel = (_, _, exception) => exception is null ? LogEventLevel.Information : LogEventLevel.Error; + }); + } + + app.UseAuthentication(); + app.UseAuthorization(); +} + +static void ConfigureEndpoints(WebApplication app) +{ + app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); + + app.MapGet("/readyz", (ServiceStatus status) => + { + var snapshot = status.CreateSnapshot(); + if (snapshot.Ready.IsReady) + { + return Results.Ok(new + { + status = "ready", + checkedAt = snapshot.Ready.CheckedAt, + latencyMs = snapshot.Ready.Latency?.TotalMilliseconds, + snapshot.StartedAt + }); + } + + return JsonResponse( + new + { + status = "unready", + snapshot.Ready.Error, + checkedAt = snapshot.Ready.CheckedAt, + latencyMs = snapshot.Ready.Latency?.TotalMilliseconds + }, + StatusCodes.Status503ServiceUnavailable); + }); + + var options = app.Services.GetRequiredService>().Value; + var tenantHeader = options.Api.TenantHeader; + var apiBasePath = options.Api.BasePath.TrimEnd('/'); + var apiGroup = app.MapGroup(options.Api.BasePath); + var internalGroup = app.MapGroup(options.Api.InternalBasePath); + + internalGroup.MapPost("/rules/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeRule)) + .WithName("notify.rules.normalize") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest); + + internalGroup.MapPost("/channels/normalize", (JsonNode? 
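// NOTE (illustrative, not from the original change): in the Authority-disabled fallback above, the
// two authorization policies map onto the scope claims of the bearer token: a token whose "scope"
// claim is notify.admin satisfies both NotifyPolicies.Admin and NotifyPolicies.Read, while
// notify.read satisfies only the Read policy. The scope names come from
// notify:authority:adminScope / readScope and default to exactly those values.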
body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeChannel)) + .WithName("notify.channels.normalize"); + + internalGroup.MapPost("/templates/normalize", (JsonNode? body, NotifySchemaMigrationService service) => Normalize(body, service.UpgradeTemplate)) + .WithName("notify.templates.normalize"); + + apiGroup.MapGet("/rules", async ([FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var rules = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(rules); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapGet("/rules/{ruleId}", async (string ruleId, [FromServices] INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var rule = await repository.GetAsync(tenant, ruleId, cancellationToken); + return rule is null ? Results.NotFound() : JsonResponse(rule); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapPost("/rules", async (JsonNode? body, NotifySchemaMigrationService service, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var rule = service.UpgradeRule(body); + if (!string.Equals(rule.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(rule, cancellationToken); + + return CreatedJson(BuildResourceLocation(apiBasePath, "rules", rule.RuleId), rule); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapDelete("/rules/{ruleId}", async (string ruleId, INotifyRuleRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, ruleId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapGet("/channels", async (INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var channels = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(channels); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapPost("/channels", async (JsonNode? body, NotifySchemaMigrationService service, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var channel = service.UpgradeChannel(body); + if (!string.Equals(channel.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." 
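// NOTE (illustrative sketch, not from the original change): successful POSTs in this group reply
// via CreatedJson, i.e. HTTP 201 with a Location header assembled from the API base path, so
// creating a rule with a hypothetical id "rule-1" would yield "Location: /api/v1/notify/rules/rule-1";
// the same pattern is reused for channels, templates and deliveries below.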
}); + } + + await repository.UpsertAsync(channel, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "channels", channel.ChannelId), channel); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapDelete("/channels/{channelId}", async (string channelId, INotifyChannelRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, channelId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapGet("/templates", async (INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var templates = await repository.ListAsync(tenant, cancellationToken); + return JsonResponse(templates); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapPost("/templates", async (JsonNode? body, NotifySchemaMigrationService service, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var template = service.UpgradeTemplate(body); + if (!string.Equals(template.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(template, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "templates", template.TemplateId), template); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapDelete("/templates/{templateId}", async (string templateId, INotifyTemplateRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.DeleteAsync(tenant, templateId, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapPost("/deliveries", async (JsonNode? body, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var delivery = NotifyCanonicalJsonSerializer.Deserialize(body.ToJsonString()); + if (!string.Equals(delivery.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpdateAsync(delivery, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "deliveries", delivery.DeliveryId), delivery); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapGet("/deliveries", async ([FromServices] INotifyDeliveryRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] string? status, [FromQuery] int? 
limit, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var deliveries = await repository.QueryAsync(tenant, since, status, limit, continuationToken: null, cancellationToken); + return JsonResponse(deliveries.Items); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapGet("/deliveries/{deliveryId}", async (string deliveryId, INotifyDeliveryRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var delivery = await repository.GetAsync(tenant, deliveryId, cancellationToken); + return delivery is null ? Results.NotFound() : JsonResponse(delivery); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapPost("/digests", async ([FromBody] NotifyDigestDocument payload, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (!string.Equals(payload.TenantId, tenant, StringComparison.Ordinal)) + { + return Results.BadRequest(new { error = "Tenant mismatch between header and payload." }); + } + + await repository.UpsertAsync(payload, cancellationToken); + return Results.Ok(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapGet("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var digest = await repository.GetAsync(tenant, actionKey, cancellationToken); + return digest is null ? Results.NotFound() : JsonResponse(digest); + }) + .RequireAuthorization(NotifyPolicies.Read); + + apiGroup.MapDelete("/digests/{actionKey}", async (string actionKey, INotifyDigestRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.RemoveAsync(tenant, actionKey, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapPost("/locks/acquire", async ([FromBody] AcquireLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var acquired = await repository.TryAcquireAsync(tenant, request.Resource, request.Owner, TimeSpan.FromSeconds(request.TtlSeconds), cancellationToken); + return JsonResponse(new { acquired }); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapPost("/locks/release", async ([FromBody] ReleaseLockRequest request, INotifyLockRepository repository, HttpContext context, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + await repository.ReleaseAsync(tenant, request.Resource, request.Owner, cancellationToken); + return Results.NoContent(); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapPost("/audit", async ([FromBody] JsonNode? 
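// NOTE (illustrative sketch, not from the original change): the lock endpoints above consume the
// AcquireLockRequest / ReleaseLockRequest records added in this diff. Assuming a 60-second TTL:
//     POST /api/v1/notify/locks/acquire  {"resource":"workers","owner":"worker-1","ttlSeconds":60}
//         -> 200 {"acquired":true}   (or "acquired":false while another owner holds an unexpired lock)
//     POST /api/v1/notify/locks/release  {"resource":"workers","owner":"worker-1"}
//         -> 204 No Content
// This is the flow exercised by the lock lifecycle integration test earlier in the diff.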
body, INotifyAuditRepository repository, HttpContext context, ClaimsPrincipal user, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." }); + } + + var action = body["action"]?.GetValue(); + if (string.IsNullOrWhiteSpace(action)) + { + return Results.BadRequest(new { error = "Action is required." }); + } + + var entry = new NotifyAuditEntryDocument + { + Id = ObjectId.GenerateNewId(), + TenantId = tenant, + Action = action, + Actor = user.Identity?.Name ?? "unknown", + EntityId = body["entityId"]?.GetValue() ?? string.Empty, + EntityType = body["entityType"]?.GetValue() ?? string.Empty, + Timestamp = DateTimeOffset.UtcNow, + Payload = body["payload"] is JsonObject payloadObj + ? BsonDocument.Parse(payloadObj.ToJsonString()) + : new BsonDocument() + }; + + await repository.AppendAsync(entry, cancellationToken); + return CreatedJson(BuildResourceLocation(apiBasePath, "audit", entry.Id.ToString()), new { entry.Id }); + }) + .RequireAuthorization(NotifyPolicies.Admin); + + apiGroup.MapGet("/audit", async (INotifyAuditRepository repository, HttpContext context, [FromQuery] DateTimeOffset? since, [FromQuery] int? limit, CancellationToken cancellationToken) => + { + if (!TryResolveTenant(context, tenantHeader, out var tenant, out var error)) + { + return error!; + } + + var entries = await repository.QueryAsync(tenant, since, limit, cancellationToken); + var response = entries.Select(e => new + { + e.Id, + e.TenantId, + e.Actor, + e.Action, + e.EntityId, + e.EntityType, + e.Timestamp, + Payload = JsonNode.Parse(e.Payload.ToJson()) + }); + + return JsonResponse(response); + }) + .RequireAuthorization(NotifyPolicies.Read); +} + +static bool TryResolveTenant(HttpContext context, string tenantHeader, out string tenant, out IResult? error) +{ + if (!context.Request.Headers.TryGetValue(tenantHeader, out var header) || string.IsNullOrWhiteSpace(header)) + { + tenant = string.Empty; + error = Results.BadRequest(new { error = $"{tenantHeader} header is required." }); + return false; + } + + tenant = header.ToString().Trim(); + error = null; + return true; +} + +static string BuildResourceLocation(string basePath, params string[] segments) +{ + if (segments.Length == 0) + { + return basePath; + } + + var builder = new StringBuilder(basePath); + foreach (var segment in segments) + { + builder.Append('/'); + builder.Append(Uri.EscapeDataString(segment)); + } + + return builder.ToString(); +} + +static IResult JsonResponse(T value, int statusCode = StatusCodes.Status200OK, string? location = null) +{ + var payload = JsonSerializer.Serialize(value, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new JsonHttpResult(payload, statusCode, location); +} + +static IResult CreatedJson(string location, T value) + => JsonResponse(value, StatusCodes.Status201Created, location); + +static IResult Normalize(JsonNode? body, Func upgrade) +{ + if (body is null) + { + return Results.BadRequest(new { error = "Request body is required." 
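// NOTE (illustrative, not from the original change): TryResolveTenant below is the gate every
// tenant-scoped endpoint passes through. A request missing the configured header (default
// X-StellaOps-Tenant) is rejected with 400 and the message "X-StellaOps-Tenant header is required.";
// when present, the header value is trimmed and used verbatim as the tenant id (the tests send
// "tenant-web").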
}); + } + + try + { + var model = upgrade(body); + var json = NotifyCanonicalJsonSerializer.Serialize(model); + return Results.Content(json, "application/json"); + } + catch (Exception ex) + { + return Results.BadRequest(new { error = ex.Message }); + } +} + +static bool HasScope(ClaimsPrincipal principal, string scope) +{ + if (principal is null || string.IsNullOrWhiteSpace(scope)) + { + return false; + } + + foreach (var claim in principal.FindAll("scope")) + { + if (string.Equals(claim.Value, scope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; +} + +static LogEventLevel MapLogLevel(string configuredLevel) +{ + return configuredLevel?.ToLowerInvariant() switch + { + "verbose" => LogEventLevel.Verbose, + "debug" => LogEventLevel.Debug, + "warning" => LogEventLevel.Warning, + "error" => LogEventLevel.Error, + "fatal" => LogEventLevel.Fatal, + _ => LogEventLevel.Information + }; +} diff --git a/src/StellaOps.Notify.WebService/Security/NotifyPolicies.cs b/src/StellaOps.Notify.WebService/Security/NotifyPolicies.cs new file mode 100644 index 00000000..e11b22d0 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Security/NotifyPolicies.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Notify.WebService.Security; + +internal static class NotifyPolicies +{ + public const string Read = "notify.read"; + public const string Admin = "notify.admin"; +} diff --git a/src/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs b/src/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs new file mode 100644 index 00000000..f6978b32 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs @@ -0,0 +1,17 @@ +using System; +using System.Text.Json.Nodes; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.WebService.Services; + +internal sealed class NotifySchemaMigrationService +{ + public NotifyRule UpgradeRule(JsonNode json) + => NotifySchemaMigration.UpgradeRule(json ?? throw new ArgumentNullException(nameof(json))); + + public NotifyChannel UpgradeChannel(JsonNode json) + => NotifySchemaMigration.UpgradeChannel(json ?? throw new ArgumentNullException(nameof(json))); + + public NotifyTemplate UpgradeTemplate(JsonNode json) + => NotifySchemaMigration.UpgradeTemplate(json ?? 
throw new ArgumentNullException(nameof(json))); +} diff --git a/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj b/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj index 4c69387c..351ed673 100644 --- a/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj +++ b/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj @@ -1,7 +1,26 @@ net10.0 + preview enable enable + true + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs b/src/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs new file mode 100644 index 00000000..09937fb9 --- /dev/null +++ b/src/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs @@ -0,0 +1,360 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Documents; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notify.WebService.Storage.InMemory; + +internal static class InMemoryStorageModule +{ + public static IServiceCollection AddInMemoryNotifyStorage(this IServiceCollection services) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + return services; + } + + private sealed class InMemoryStore + { + public ConcurrentDictionary> Rules { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> Channels { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> Templates { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> Deliveries { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> Digests { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> Locks { get; } = new(StringComparer.Ordinal); + + public ConcurrentDictionary> AuditEntries { get; } = new(StringComparer.Ordinal); + } + + private sealed class InMemoryRuleRepository : INotifyRuleRepository + { + private readonly InMemoryStore _store; + + public InMemoryRuleRepository(InMemoryStore store) => _store = store ?? 
throw new ArgumentNullException(nameof(store)); + + public Task UpsertAsync(NotifyRule rule, CancellationToken cancellationToken = default) + { + var map = _store.Rules.GetOrAdd(rule.TenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + map[rule.RuleId] = rule; + return Task.CompletedTask; + } + + public Task GetAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + if (_store.Rules.TryGetValue(tenantId, out var map) && map.TryGetValue(ruleId, out var rule)) + { + return Task.FromResult(rule); + } + + return Task.FromResult(null); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + if (_store.Rules.TryGetValue(tenantId, out var map)) + { + return Task.FromResult>(map.Values.OrderBy(static r => r.RuleId, StringComparer.Ordinal).ToList()); + } + + return Task.FromResult>(Array.Empty()); + } + + public Task DeleteAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + if (_store.Rules.TryGetValue(tenantId, out var map)) + { + map.TryRemove(ruleId, out _); + } + + return Task.CompletedTask; + } + } + + private sealed class InMemoryChannelRepository : INotifyChannelRepository + { + private readonly InMemoryStore _store; + + public InMemoryChannelRepository(InMemoryStore store) => _store = store ?? throw new ArgumentNullException(nameof(store)); + + public Task UpsertAsync(NotifyChannel channel, CancellationToken cancellationToken = default) + { + var map = _store.Channels.GetOrAdd(channel.TenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + map[channel.ChannelId] = channel; + return Task.CompletedTask; + } + + public Task GetAsync(string tenantId, string channelId, CancellationToken cancellationToken = default) + { + if (_store.Channels.TryGetValue(tenantId, out var map) && map.TryGetValue(channelId, out var channel)) + { + return Task.FromResult(channel); + } + + return Task.FromResult(null); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + if (_store.Channels.TryGetValue(tenantId, out var map)) + { + return Task.FromResult>(map.Values.OrderBy(static c => c.ChannelId, StringComparer.Ordinal).ToList()); + } + + return Task.FromResult>(Array.Empty()); + } + + public Task DeleteAsync(string tenantId, string channelId, CancellationToken cancellationToken = default) + { + if (_store.Channels.TryGetValue(tenantId, out var map)) + { + map.TryRemove(channelId, out _); + } + + return Task.CompletedTask; + } + } + + private sealed class InMemoryTemplateRepository : INotifyTemplateRepository + { + private readonly InMemoryStore _store; + + public InMemoryTemplateRepository(InMemoryStore store) => _store = store ?? 
throw new ArgumentNullException(nameof(store)); + + public Task UpsertAsync(NotifyTemplate template, CancellationToken cancellationToken = default) + { + var map = _store.Templates.GetOrAdd(template.TenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + map[template.TemplateId] = template; + return Task.CompletedTask; + } + + public Task GetAsync(string tenantId, string templateId, CancellationToken cancellationToken = default) + { + if (_store.Templates.TryGetValue(tenantId, out var map) && map.TryGetValue(templateId, out var template)) + { + return Task.FromResult(template); + } + + return Task.FromResult(null); + } + + public Task> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + if (_store.Templates.TryGetValue(tenantId, out var map)) + { + return Task.FromResult>(map.Values.OrderBy(static t => t.TemplateId, StringComparer.Ordinal).ToList()); + } + + return Task.FromResult>(Array.Empty()); + } + + public Task DeleteAsync(string tenantId, string templateId, CancellationToken cancellationToken = default) + { + if (_store.Templates.TryGetValue(tenantId, out var map)) + { + map.TryRemove(templateId, out _); + } + + return Task.CompletedTask; + } + } + + private sealed class InMemoryDeliveryRepository : INotifyDeliveryRepository + { + private readonly InMemoryStore _store; + + public InMemoryDeliveryRepository(InMemoryStore store) => _store = store ?? throw new ArgumentNullException(nameof(store)); + + public Task AppendAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) + => UpdateAsync(delivery, cancellationToken); + + public Task UpdateAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) + { + var map = _store.Deliveries.GetOrAdd(delivery.TenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + map[delivery.DeliveryId] = delivery; + return Task.CompletedTask; + } + + public Task GetAsync(string tenantId, string deliveryId, CancellationToken cancellationToken = default) + { + if (_store.Deliveries.TryGetValue(tenantId, out var map) && map.TryGetValue(deliveryId, out var delivery)) + { + return Task.FromResult(delivery); + } + + return Task.FromResult(null); + } + + public Task QueryAsync(string tenantId, DateTimeOffset? since, string? status, int? limit, string? continuationToken = null, CancellationToken cancellationToken = default) + { + if (!_store.Deliveries.TryGetValue(tenantId, out var map)) + { + return Task.FromResult(new NotifyDeliveryQueryResult(Array.Empty(), null)); + } + + var query = map.Values.AsEnumerable(); + if (since.HasValue) + { + query = query.Where(d => d.CreatedAt >= since.Value); + } + + if (!string.IsNullOrWhiteSpace(status) && Enum.TryParse(status, true, out var parsed)) + { + query = query.Where(d => d.Status == parsed); + } + + query = query.OrderByDescending(d => d.CreatedAt).ThenBy(d => d.DeliveryId, StringComparer.Ordinal); + + if (limit.HasValue && limit.Value > 0) + { + query = query.Take(limit.Value); + } + + var items = query.ToList(); + return Task.FromResult(new NotifyDeliveryQueryResult(items, ContinuationToken: null)); + } + } + + private sealed class InMemoryDigestRepository : INotifyDigestRepository + { + private readonly InMemoryStore _store; + + public InMemoryDigestRepository(InMemoryStore store) => _store = store ?? 
throw new ArgumentNullException(nameof(store)); + + public Task UpsertAsync(NotifyDigestDocument document, CancellationToken cancellationToken = default) + { + var map = _store.Digests.GetOrAdd(document.TenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + map[document.ActionKey] = document; + return Task.CompletedTask; + } + + public Task GetAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default) + { + if (_store.Digests.TryGetValue(tenantId, out var map) && map.TryGetValue(actionKey, out var document)) + { + return Task.FromResult(document); + } + + return Task.FromResult(null); + } + + public Task RemoveAsync(string tenantId, string actionKey, CancellationToken cancellationToken = default) + { + if (_store.Digests.TryGetValue(tenantId, out var map)) + { + map.TryRemove(actionKey, out _); + } + + return Task.CompletedTask; + } + } + + private sealed class InMemoryLockRepository : INotifyLockRepository + { + private readonly InMemoryStore _store; + + public InMemoryLockRepository(InMemoryStore store) => _store = store ?? throw new ArgumentNullException(nameof(store)); + + public Task TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default) + { + var map = _store.Locks.GetOrAdd(tenantId, _ => new ConcurrentDictionary(StringComparer.Ordinal)); + + var now = DateTimeOffset.UtcNow; + var entry = map.GetOrAdd(resource, _ => new LockEntry(owner, now, now.Add(ttl))); + + lock (entry) + { + if (entry.Owner == owner || entry.ExpiresAt <= now) + { + entry.Owner = owner; + entry.AcquiredAt = now; + entry.ExpiresAt = now.Add(ttl); + return Task.FromResult(true); + } + + return Task.FromResult(false); + } + } + + public Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default) + { + if (_store.Locks.TryGetValue(tenantId, out var map) && map.TryGetValue(resource, out var entry)) + { + lock (entry) + { + if (entry.Owner == owner) + { + map.TryRemove(resource, out _); + } + } + } + + return Task.CompletedTask; + } + } + + private sealed class InMemoryAuditRepository : INotifyAuditRepository + { + private readonly InMemoryStore _store; + + public InMemoryAuditRepository(InMemoryStore store) => _store = store ?? throw new ArgumentNullException(nameof(store)); + + public Task AppendAsync(NotifyAuditEntryDocument entry, CancellationToken cancellationToken = default) + { + var queue = _store.AuditEntries.GetOrAdd(entry.TenantId, _ => new ConcurrentQueue()); + queue.Enqueue(entry); + return Task.CompletedTask; + } + + public Task> QueryAsync(string tenantId, DateTimeOffset? since, int? 
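// NOTE (illustrative sketch, not from the original change): the in-memory lock repository above
// keeps one LockEntry per (tenant, resource). The current owner can always re-acquire and thereby
// refresh the TTL, another owner only succeeds once ExpiresAt has passed, and release is a no-op
// unless the caller still owns the entry. Expected behaviour, under those assumptions:
//     await locks.TryAcquireAsync("t", "workers", "a", TimeSpan.FromSeconds(30));  // true
//     await locks.TryAcquireAsync("t", "workers", "b", TimeSpan.FromSeconds(30));  // false ("a" holds it)
//     await locks.ReleaseAsync("t", "workers", "a");
//     await locks.TryAcquireAsync("t", "workers", "b", TimeSpan.FromSeconds(30));  // true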
limit, CancellationToken cancellationToken = default) + { + if (!_store.AuditEntries.TryGetValue(tenantId, out var queue)) + { + return Task.FromResult>(Array.Empty()); + } + + var items = queue + .Where(entry => !since.HasValue || entry.Timestamp >= since.Value) + .OrderByDescending(entry => entry.Timestamp) + .ThenBy(entry => entry.Id.ToString(), StringComparer.Ordinal) + .ToList(); + + if (limit.HasValue && limit.Value > 0 && items.Count > limit.Value) + { + items = items.Take(limit.Value).ToList(); + } + + return Task.FromResult>(items); + } + } + + private sealed class LockEntry + { + public LockEntry(string owner, DateTimeOffset acquiredAt, DateTimeOffset expiresAt) + { + Owner = owner; + AcquiredAt = acquiredAt; + ExpiresAt = expiresAt; + } + + public string Owner { get; set; } + + public DateTimeOffset AcquiredAt { get; set; } + + public DateTimeOffset ExpiresAt { get; set; } + } +} diff --git a/src/StellaOps.Notify.WebService/TASKS.md b/src/StellaOps.Notify.WebService/TASKS.md index 9c9a189f..e4c3103c 100644 --- a/src/StellaOps.Notify.WebService/TASKS.md +++ b/src/StellaOps.Notify.WebService/TASKS.md @@ -2,7 +2,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| NOTIFY-WEB-15-101 | TODO | Notify WebService Guild | NOTIFY-MODELS-15-101 | Bootstrap minimal API host with Authority auth, health endpoints, and plug-in discovery per architecture. | Service starts with config validation, `/healthz`/`/readyz` pass, plug-ins loaded at restart. | -| NOTIFY-WEB-15-102 | TODO | Notify WebService Guild | NOTIFY-WEB-15-101 | Rules/channel/template CRUD endpoints with tenant scoping, validation, audit logging. | CRUD endpoints tested; invalid inputs rejected; audit entries persisted. | +| NOTIFY-WEB-15-101 | DONE (2025-10-19) | Notify WebService Guild | NOTIFY-MODELS-15-101 | Bootstrap minimal API host with Authority auth, health endpoints, and plug-in discovery per architecture. | Service starts with config validation, `/healthz`/`/readyz` pass, plug-ins loaded at restart. | +| NOTIFY-WEB-15-102 | DONE (2025-10-19) | Notify WebService Guild | NOTIFY-WEB-15-101 | Rules/channel/template CRUD endpoints with tenant scoping, validation, audit logging. | CRUD endpoints tested; invalid inputs rejected; audit entries persisted. | | NOTIFY-WEB-15-103 | TODO | Notify WebService Guild | NOTIFY-WEB-15-102 | Delivery history + test-send endpoints with rate limits. | `/deliveries` and `/channels/{id}/test` tested; rate limits enforced. | | NOTIFY-WEB-15-104 | TODO | Notify WebService Guild | NOTIFY-STORAGE-15-201, NOTIFY-QUEUE-15-401 | Configuration binding for Mongo/queue/secrets; startup diagnostics. | Misconfiguration fails fast; diagnostics logged; integration tests cover env overrides. 
| diff --git a/src/StellaOps.Plugin/TASKS.md b/src/StellaOps.Plugin/TASKS.md new file mode 100644 index 00000000..f07fe8d2 --- /dev/null +++ b/src/StellaOps.Plugin/TASKS.md @@ -0,0 +1,5 @@ +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|PLUGIN-DI-08-001 Scoped service support in plugin bootstrap|Plugin Platform Guild (DOING 2025-10-19)|StellaOps.DependencyInjection|Introduce plugin metadata capable of registering scoped services; update plugin loader to instantiate DI routines once per plugin while honouring lifetime hints; document registration contract.| +|PLUGIN-DI-08-002 Authority plugin integration updates|Plugin Platform Guild, Authority Core|PLUGIN-DI-08-001|Update Authority identity-provider plugin registrar/registry to resolve scoped services correctly; adjust bootstrap flows and background services to create scopes when needed; add regression tests.| diff --git a/src/StellaOps.Policy.Tests/PolicyEvaluationTests.cs b/src/StellaOps.Policy.Tests/PolicyEvaluationTests.cs new file mode 100644 index 00000000..89d27fdf --- /dev/null +++ b/src/StellaOps.Policy.Tests/PolicyEvaluationTests.cs @@ -0,0 +1,133 @@ +using System.Collections.Immutable; +using Xunit; + +namespace StellaOps.Policy.Tests; + +public sealed class PolicyEvaluationTests +{ + [Fact] + public void EvaluateFinding_AppliesTrustAndReachabilityWeights() + { + var action = new PolicyAction(PolicyActionType.Block, null, null, null, false); + var rule = PolicyRule.Create( + "BlockMedium", + action, + ImmutableArray.Create(PolicySeverity.Medium), + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + PolicyRuleMatchCriteria.Empty, + expires: null, + justification: null); + var document = new PolicyDocument( + PolicySchema.CurrentVersion, + ImmutableArray.Create(rule), + ImmutableDictionary.Empty); + + var config = PolicyScoringConfig.Default; + var finding = PolicyFinding.Create( + "finding-medium", + PolicySeverity.Medium, + source: "community", + tags: ImmutableArray.Create("reachability:indirect")); + + var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding); + + Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status); + Assert.Equal(19.5, verdict.Score, 3); + + var inputs = verdict.GetInputs(); + Assert.Equal(50, inputs["severityWeight"]); + Assert.Equal(0.65, inputs["trustWeight"], 3); + Assert.Equal(0.6, inputs["reachabilityWeight"], 3); + Assert.Equal(19.5, inputs["baseScore"], 3); + } + + [Fact] + public void EvaluateFinding_QuietWithRequireVexAppliesQuietPenalty() + { + var ignoreOptions = new PolicyIgnoreOptions(null, null); + var requireVexOptions = new PolicyRequireVexOptions( + ImmutableArray.Empty, + ImmutableArray.Empty); + var action = new PolicyAction(PolicyActionType.Ignore, ignoreOptions, null, requireVexOptions, true); + var rule = PolicyRule.Create( + "QuietIgnore", + action, + ImmutableArray.Create(PolicySeverity.Critical), + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + PolicyRuleMatchCriteria.Empty, + expires: null, + justification: null); + + var document = new PolicyDocument( + PolicySchema.CurrentVersion, + ImmutableArray.Create(rule), + ImmutableDictionary.Empty); + + var config = PolicyScoringConfig.Default; + var finding = PolicyFinding.Create( + "finding-critical", + PolicySeverity.Critical, + tags: ImmutableArray.Create("reachability:entrypoint")); + + var verdict = PolicyEvaluation.EvaluateFinding(document, config, 
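// NOTE (worked example, derived from the first test above): the 19.5 expectation is the product of
// the recorded inputs: severityWeight 50 (Medium) * trustWeight 0.65 (source "community")
// * reachabilityWeight 0.6 (tag "reachability:indirect") = 19.5, which is also the asserted baseScore.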
finding); + + Assert.Equal(PolicyVerdictStatus.Ignored, verdict.Status); + Assert.True(verdict.Quiet); + Assert.Equal("QuietIgnore", verdict.QuietedBy); + Assert.Equal(10, verdict.Score, 3); + + var inputs = verdict.GetInputs(); + Assert.Equal(90, inputs["baseScore"], 3); + Assert.Equal(config.IgnorePenalty, inputs["ignorePenalty"]); + Assert.Equal(config.QuietPenalty, inputs["quietPenalty"]); + } + + [Fact] + public void EvaluateFinding_UnknownSeverityComputesConfidence() + { + var action = new PolicyAction(PolicyActionType.Block, null, null, null, false); + var rule = PolicyRule.Create( + "BlockUnknown", + action, + ImmutableArray.Create(PolicySeverity.Unknown), + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + ImmutableArray.Empty, + PolicyRuleMatchCriteria.Empty, + expires: null, + justification: null); + + var document = new PolicyDocument( + PolicySchema.CurrentVersion, + ImmutableArray.Create(rule), + ImmutableDictionary.Empty); + + var config = PolicyScoringConfig.Default; + var finding = PolicyFinding.Create( + "finding-unknown", + PolicySeverity.Unknown, + tags: ImmutableArray.Create("reachability:unknown", "unknown-age-days:5")); + + var verdict = PolicyEvaluation.EvaluateFinding(document, config, finding); + + Assert.Equal(PolicyVerdictStatus.Blocked, verdict.Status); + Assert.Equal(30, verdict.Score, 3); // 60 * 1 * 0.5 + Assert.Equal(0.55, verdict.UnknownConfidence ?? 0, 3); + Assert.Equal("medium", verdict.ConfidenceBand); + Assert.Equal(5, verdict.UnknownAgeDays ?? 0, 3); + + var inputs = verdict.GetInputs(); + Assert.Equal(0.55, inputs["unknownConfidence"], 3); + Assert.Equal(5, inputs["unknownAgeDays"], 3); + } +} diff --git a/src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs b/src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs index 7d4cdb44..7912a0ea 100644 --- a/src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs +++ b/src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs @@ -5,11 +5,19 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; using Xunit; +using Xunit.Abstractions; namespace StellaOps.Policy.Tests; public sealed class PolicyPreviewServiceTests { + private readonly ITestOutputHelper _output; + + public PolicyPreviewServiceTests(ITestOutputHelper output) + { + _output = output ?? throw new ArgumentNullException(nameof(output)); + } + [Fact] public async Task PreviewAsync_ComputesDiffs_ForBlockingRule() { @@ -131,6 +139,17 @@ rules: """; var binding = PolicyBinder.Bind(yaml, PolicyDocumentFormat.Yaml); + if (!binding.Success) + { + foreach (var issue in binding.Issues) + { + _output.WriteLine($"{issue.Severity} {issue.Code} {issue.Path} :: {issue.Message}"); + } + + var parseMethod = typeof(PolicyBinder).GetMethod("ParseToNode", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Static); + var node = (System.Text.Json.Nodes.JsonNode?)parseMethod?.Invoke(null, new object[] { yaml, PolicyDocumentFormat.Yaml }); + _output.WriteLine(node?.ToJsonString() ?? 
""); + } Assert.True(binding.Success); Assert.Empty(binding.Issues); Assert.False(binding.Document.Rules[0].Metadata.ContainsKey("quiet")); diff --git a/src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs b/src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs index fae02586..b4a223a5 100644 --- a/src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs +++ b/src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs @@ -1,4 +1,5 @@ -using System.Threading.Tasks; +using System; +using System.IO; using Xunit; namespace StellaOps.Policy.Tests; @@ -14,6 +15,10 @@ public sealed class PolicyScoringConfigTests Assert.NotEmpty(config.SeverityWeights); Assert.True(config.SeverityWeights.ContainsKey(PolicySeverity.Critical)); Assert.True(config.QuietPenalty > 0); + Assert.NotEmpty(config.ReachabilityBuckets); + Assert.Contains("entrypoint", config.ReachabilityBuckets.Keys); + Assert.False(config.UnknownConfidence.Bands.IsDefaultOrEmpty); + Assert.Equal("high", config.UnknownConfidence.Bands[0].Name); } [Fact] @@ -23,4 +28,39 @@ public sealed class PolicyScoringConfigTests Assert.False(result.Success); Assert.NotEmpty(result.Issues); } + + [Fact] + public void BindRejectsInvalidSchema() + { + const string json = """ +{ + "version": "1.0", + "severityWeights": { + "Critical": 90.0 + } +} +"""; + + var result = PolicyScoringConfigBinder.Bind(json, PolicyDocumentFormat.Json); + Assert.False(result.Success); + Assert.Contains(result.Issues, issue => issue.Code.StartsWith("scoring.schema", StringComparison.OrdinalIgnoreCase)); + Assert.Null(result.Config); + } + + [Fact] + public void DefaultResourceDigestMatchesGolden() + { + var assembly = typeof(PolicyScoringConfig).Assembly; + using var stream = assembly.GetManifestResourceStream("StellaOps.Policy.Schemas.policy-scoring-default.json") + ?? throw new InvalidOperationException("Unable to locate embedded scoring default resource."); + using var reader = new StreamReader(stream); + var json = reader.ReadToEnd(); + + var binding = PolicyScoringConfigBinder.Bind(json, PolicyDocumentFormat.Json); + Assert.True(binding.Success); + Assert.NotNull(binding.Config); + + var digest = PolicyScoringConfigDigest.Compute(binding.Config!); + Assert.Equal("5ef2e43a112cb00753beb7811dd2e1720f2385e2289d0fb6abcf7bbbb8cda2d2", digest); + } } diff --git a/src/StellaOps.Policy/PolicyBinder.cs b/src/StellaOps.Policy/PolicyBinder.cs index 2cc30b47..544ecde3 100644 --- a/src/StellaOps.Policy/PolicyBinder.cs +++ b/src/StellaOps.Policy/PolicyBinder.cs @@ -122,6 +122,8 @@ public static class PolicyBinder { case null: return null; + case string s when bool.TryParse(s, out var boolValue): + return JsonValue.Create(boolValue); case string s: return JsonValue.Create(s); case bool b: diff --git a/src/StellaOps.Policy/PolicyEvaluation.cs b/src/StellaOps.Policy/PolicyEvaluation.cs index 42ff055e..60050eaa 100644 --- a/src/StellaOps.Policy/PolicyEvaluation.cs +++ b/src/StellaOps.Policy/PolicyEvaluation.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Immutable; +using System.Globalization; namespace StellaOps.Policy; @@ -25,6 +26,19 @@ public static class PolicyEvaluation var severityWeight = scoringConfig.SeverityWeights.TryGetValue(finding.Severity, out var weight) ? 
weight : scoringConfig.SeverityWeights.GetValueOrDefault(PolicySeverity.Unknown, 0); + var trustKey = ResolveTrustKey(finding); + var trustWeight = ResolveTrustWeight(scoringConfig, trustKey); + var reachabilityKey = ResolveReachabilityKey(finding); + var reachabilityWeight = ResolveReachabilityWeight(scoringConfig, reachabilityKey, out var resolvedReachabilityKey); + var baseScore = severityWeight * trustWeight * reachabilityWeight; + var components = new ScoringComponents( + severityWeight, + trustWeight, + reachabilityWeight, + baseScore, + trustKey, + resolvedReachabilityKey); + var unknownConfidence = ComputeUnknownConfidence(scoringConfig.UnknownConfidence, finding); foreach (var rule in document.Rules) { @@ -33,73 +47,108 @@ public static class PolicyEvaluation continue; } - return BuildVerdict(rule, finding, scoringConfig, severityWeight); + return BuildVerdict(rule, finding, scoringConfig, components, unknownConfidence); } - return PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig); + var baseline = PolicyVerdict.CreateBaseline(finding.FindingId, scoringConfig); + return ApplyUnknownConfidence(baseline, unknownConfidence); } private static PolicyVerdict BuildVerdict( PolicyRule rule, PolicyFinding finding, PolicyScoringConfig config, - double severityWeight) + ScoringComponents components, + UnknownConfidenceResult? unknownConfidence) { var action = rule.Action; var status = MapAction(action); var notes = BuildNotes(action); var inputs = ImmutableDictionary.CreateBuilder(StringComparer.OrdinalIgnoreCase); - inputs["severityWeight"] = severityWeight; + inputs["severityWeight"] = components.SeverityWeight; + inputs["trustWeight"] = components.TrustWeight; + inputs["reachabilityWeight"] = components.ReachabilityWeight; + inputs["baseScore"] = components.BaseScore; + if (!string.IsNullOrWhiteSpace(components.TrustKey)) + { + inputs[$"trustWeight.{components.TrustKey}"] = components.TrustWeight; + } + if (!string.IsNullOrWhiteSpace(components.ReachabilityKey)) + { + inputs[$"reachability.{components.ReachabilityKey}"] = components.ReachabilityWeight; + } + if (unknownConfidence is { Band.Description: { Length: > 0 } description }) + { + notes = AppendNote(notes, description); + } + if (unknownConfidence is { } unknownDetails) + { + inputs["unknownConfidence"] = unknownDetails.Confidence; + inputs["unknownAgeDays"] = unknownDetails.AgeDays; + } - double score = severityWeight; + double score = components.BaseScore; string? 
quietedBy = null; var quiet = false; + var quietRequested = action.Quiet; + var quietAllowed = quietRequested && (action.RequireVex is not null || action.Type == PolicyActionType.RequireVex); + + if (quietRequested && !quietAllowed) + { + var warnInputs = ImmutableDictionary.CreateBuilder(StringComparer.OrdinalIgnoreCase); + foreach (var pair in inputs) + { + warnInputs[pair.Key] = pair.Value; + } + if (unknownConfidence is { } unknownInfo) + { + warnInputs["unknownConfidence"] = unknownInfo.Confidence; + warnInputs["unknownAgeDays"] = unknownInfo.AgeDays; + } + + var warnPenalty = config.WarnPenalty; + warnInputs["warnPenalty"] = warnPenalty; + var warnScore = Math.Max(0, components.BaseScore - warnPenalty); + var warnNotes = AppendNote(notes, "Quiet flag ignored: rule must specify requireVex justifications."); + + return new PolicyVerdict( + finding.FindingId, + PolicyVerdictStatus.Warned, + rule.Name, + action.Type.ToString(), + warnNotes, + warnScore, + config.Version, + warnInputs.ToImmutable(), + QuietedBy: null, + Quiet: false, + UnknownConfidence: unknownConfidence?.Confidence, + ConfidenceBand: unknownConfidence?.Band.Name, + UnknownAgeDays: unknownConfidence?.AgeDays, + SourceTrust: components.TrustKey, + Reachability: components.ReachabilityKey); + } + switch (status) { case PolicyVerdictStatus.Ignored: - score = Math.Max(0, severityWeight - config.IgnorePenalty); - inputs["ignorePenalty"] = config.IgnorePenalty; + score = ApplyPenalty(score, config.IgnorePenalty, inputs, "ignorePenalty"); break; case PolicyVerdictStatus.Warned: - score = Math.Max(0, severityWeight - config.WarnPenalty); - inputs["warnPenalty"] = config.WarnPenalty; + score = ApplyPenalty(score, config.WarnPenalty, inputs, "warnPenalty"); break; case PolicyVerdictStatus.Deferred: - score = Math.Max(0, severityWeight - (config.WarnPenalty / 2)); - inputs["deferPenalty"] = config.WarnPenalty / 2; + var deferPenalty = config.WarnPenalty / 2; + score = ApplyPenalty(score, deferPenalty, inputs, "deferPenalty"); break; } - if (action.Quiet) + if (quietAllowed) { - var quietAllowed = action.RequireVex is not null || action.Type == PolicyActionType.RequireVex; - if (quietAllowed) - { - score = Math.Max(0, score - config.QuietPenalty); - inputs["quietPenalty"] = config.QuietPenalty; - quietedBy = rule.Name; - quiet = true; - } - else - { - inputs.Remove("ignorePenalty"); - var warnScore = Math.Max(0, severityWeight - config.WarnPenalty); - inputs["warnPenalty"] = config.WarnPenalty; - var warnNotes = AppendNote(notes, "Quiet flag ignored: rule must specify requireVex justifications."); - - return new PolicyVerdict( - finding.FindingId, - PolicyVerdictStatus.Warned, - rule.Name, - action.Type.ToString(), - warnNotes, - warnScore, - config.Version, - inputs.ToImmutable(), - QuietedBy: null, - Quiet: false); - } + score = ApplyPenalty(score, config.QuietPenalty, inputs, "quietPenalty"); + quietedBy = rule.Name; + quiet = true; } return new PolicyVerdict( @@ -112,7 +161,240 @@ public static class PolicyEvaluation config.Version, inputs.ToImmutable(), quietedBy, - quiet); + quiet, + unknownConfidence?.Confidence, + unknownConfidence?.Band.Name, + unknownConfidence?.AgeDays, + components.TrustKey, + components.ReachabilityKey); + } + + private static double ApplyPenalty(double score, double penalty, ImmutableDictionary.Builder inputs, string key) + { + if (penalty <= 0) + { + return score; + } + + inputs[key] = penalty; + return Math.Max(0, score - penalty); + } + + private static PolicyVerdict 
ApplyUnknownConfidence(PolicyVerdict verdict, UnknownConfidenceResult? unknownConfidence) + { + if (unknownConfidence is null) + { + return verdict; + } + + var inputsBuilder = ImmutableDictionary.CreateBuilder(StringComparer.OrdinalIgnoreCase); + foreach (var pair in verdict.GetInputs()) + { + inputsBuilder[pair.Key] = pair.Value; + } + + inputsBuilder["unknownConfidence"] = unknownConfidence.Value.Confidence; + inputsBuilder["unknownAgeDays"] = unknownConfidence.Value.AgeDays; + + return verdict with + { + Inputs = inputsBuilder.ToImmutable(), + UnknownConfidence = unknownConfidence.Value.Confidence, + ConfidenceBand = unknownConfidence.Value.Band.Name, + UnknownAgeDays = unknownConfidence.Value.AgeDays, + }; + } + + private static UnknownConfidenceResult? ComputeUnknownConfidence(PolicyUnknownConfidenceConfig config, PolicyFinding finding) + { + if (!IsUnknownFinding(finding)) + { + return null; + } + + var ageDays = ResolveUnknownAgeDays(finding); + var rawConfidence = config.Initial - (ageDays * config.DecayPerDay); + var confidence = config.Clamp(rawConfidence); + var band = config.ResolveBand(confidence); + return new UnknownConfidenceResult(ageDays, confidence, band); + } + + private static bool IsUnknownFinding(PolicyFinding finding) + { + if (finding.Severity == PolicySeverity.Unknown) + { + return true; + } + + if (!finding.Tags.IsDefaultOrEmpty) + { + foreach (var tag in finding.Tags) + { + if (string.Equals(tag, "state:unknown", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + } + + return false; + } + + private static double ResolveUnknownAgeDays(PolicyFinding finding) + { + var ageTag = TryGetTagValue(finding.Tags, "unknown-age-days:"); + if (!string.IsNullOrWhiteSpace(ageTag) && + double.TryParse(ageTag, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsedAge) && + parsedAge >= 0) + { + return parsedAge; + } + + var sinceTag = TryGetTagValue(finding.Tags, "unknown-since:"); + if (string.IsNullOrWhiteSpace(sinceTag)) + { + return 0; + } + + if (!DateTimeOffset.TryParse(sinceTag, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var since)) + { + return 0; + } + + var observedTag = TryGetTagValue(finding.Tags, "observed-at:"); + if (!string.IsNullOrWhiteSpace(observedTag) && + DateTimeOffset.TryParse(observedTag, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var observed) && + observed > since) + { + return Math.Max(0, (observed - since).TotalDays); + } + + return 0; + } + + private static string? ResolveTrustKey(PolicyFinding finding) + { + if (!finding.Tags.IsDefaultOrEmpty) + { + var tagged = TryGetTagValue(finding.Tags, "trust:"); + if (!string.IsNullOrWhiteSpace(tagged)) + { + return tagged; + } + } + + if (!string.IsNullOrWhiteSpace(finding.Source)) + { + return finding.Source; + } + + if (!string.IsNullOrWhiteSpace(finding.Vendor)) + { + return finding.Vendor; + } + + return null; + } + + private static double ResolveTrustWeight(PolicyScoringConfig config, string? key) + { + if (string.IsNullOrWhiteSpace(key) || config.TrustOverrides.IsEmpty) + { + return 1.0; + } + + return config.TrustOverrides.TryGetValue(key, out var weight) ? weight : 1.0; + } + + private static string? 
ResolveReachabilityKey(PolicyFinding finding) + { + if (finding.Tags.IsDefaultOrEmpty) + { + return null; + } + + var reachability = TryGetTagValue(finding.Tags, "reachability:"); + if (!string.IsNullOrWhiteSpace(reachability)) + { + return reachability; + } + + var usage = TryGetTagValue(finding.Tags, "usage:"); + if (!string.IsNullOrWhiteSpace(usage)) + { + return usage; + } + + return null; + } + + private static double ResolveReachabilityWeight(PolicyScoringConfig config, string? key, out string? resolvedKey) + { + if (!string.IsNullOrWhiteSpace(key) && config.ReachabilityBuckets.TryGetValue(key, out var weight)) + { + resolvedKey = key; + return weight; + } + + if (config.ReachabilityBuckets.TryGetValue("unknown", out var unknownWeight)) + { + resolvedKey = "unknown"; + return unknownWeight; + } + + resolvedKey = key; + return 1.0; + } + + private static string? TryGetTagValue(ImmutableArray tags, string prefix) + { + if (tags.IsDefaultOrEmpty) + { + return null; + } + + foreach (var tag in tags) + { + if (string.IsNullOrWhiteSpace(tag)) + { + continue; + } + + if (tag.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)) + { + var value = tag[prefix.Length..].Trim(); + if (!string.IsNullOrEmpty(value)) + { + return value; + } + } + } + + return null; + } + + private readonly record struct ScoringComponents( + double SeverityWeight, + double TrustWeight, + double ReachabilityWeight, + double BaseScore, + string? TrustKey, + string? ReachabilityKey); + + private readonly struct UnknownConfidenceResult + { + public UnknownConfidenceResult(double ageDays, double confidence, PolicyUnknownConfidenceBand band) + { + AgeDays = ageDays; + Confidence = confidence; + Band = band; + } + + public double AgeDays { get; } + + public double Confidence { get; } + + public PolicyUnknownConfidenceBand Band { get; } } private static bool RuleMatches(PolicyRule rule, PolicyFinding finding) diff --git a/src/StellaOps.Policy/PolicyScoringConfig.cs b/src/StellaOps.Policy/PolicyScoringConfig.cs index c3ca3795..f0351774 100644 --- a/src/StellaOps.Policy/PolicyScoringConfig.cs +++ b/src/StellaOps.Policy/PolicyScoringConfig.cs @@ -8,7 +8,9 @@ public sealed record PolicyScoringConfig( double QuietPenalty, double WarnPenalty, double IgnorePenalty, - ImmutableDictionary TrustOverrides) + ImmutableDictionary TrustOverrides, + ImmutableDictionary ReachabilityBuckets, + PolicyUnknownConfidenceConfig UnknownConfidence) { public static string BaselineVersion => "1.0"; diff --git a/src/StellaOps.Policy/PolicyScoringConfigBinder.cs b/src/StellaOps.Policy/PolicyScoringConfigBinder.cs index 3ce37b69..a704fd04 100644 --- a/src/StellaOps.Policy/PolicyScoringConfigBinder.cs +++ b/src/StellaOps.Policy/PolicyScoringConfigBinder.cs @@ -9,6 +9,7 @@ using System.Reflection; using System.Text; using System.Text.Json; using System.Text.Json.Nodes; +using Json.Schema; using YamlDotNet.Serialization; using YamlDotNet.Serialization.NamingConventions; @@ -23,12 +24,11 @@ public static class PolicyScoringConfigBinder { private const string DefaultResourceName = "StellaOps.Policy.Schemas.policy-scoring-default.json"; - private static readonly JsonSerializerOptions SerializerOptions = new() - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true, - }; + private static readonly JsonSchema ScoringSchema = PolicyScoringSchema.Schema; + + private static readonly ImmutableDictionary DefaultReachabilityBuckets = CreateDefaultReachabilityBuckets(); + + private static readonly 
PolicyUnknownConfidenceConfig DefaultUnknownConfidence = CreateDefaultUnknownConfidence(); private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder() .WithNamingConvention(CamelCaseNamingConvention.Instance) @@ -69,6 +69,13 @@ public static class PolicyScoringConfigBinder } var issues = ImmutableArray.CreateBuilder(); + var schemaIssues = ValidateAgainstSchema(root); + issues.AddRange(schemaIssues); + if (schemaIssues.Any(static issue => issue.Severity == PolicyIssueSeverity.Error)) + { + return new PolicyScoringBindingResult(false, null, issues.ToImmutable()); + } + var config = BuildConfig(obj, issues); var hasErrors = issues.Any(issue => issue.Severity == PolicyIssueSeverity.Error); return new PolicyScoringBindingResult(!hasErrors, config, issues.ToImmutable()); @@ -101,6 +108,127 @@ public static class PolicyScoringConfigBinder return PolicyBinderUtilities.ConvertYamlObject(yamlObject); } + private static ImmutableArray ValidateAgainstSchema(JsonNode root) + { + try + { + using var document = JsonDocument.Parse(root.ToJsonString(new JsonSerializerOptions + { + WriteIndented = false, + })); + + var result = ScoringSchema.Evaluate(document.RootElement, new EvaluationOptions + { + OutputFormat = OutputFormat.List, + RequireFormatValidation = true, + }); + + if (result.IsValid) + { + return ImmutableArray.Empty; + } + + var issues = ImmutableArray.CreateBuilder(); + var seen = new HashSet(StringComparer.Ordinal); + CollectSchemaIssues(result, issues, seen); + return issues.ToImmutable(); + } + catch (JsonException ex) + { + return ImmutableArray.Create(PolicyIssue.Error("scoring.schema.normalize", $"Failed to normalize scoring configuration for schema validation: {ex.Message}", "$")); + } + } + + private static void CollectSchemaIssues(EvaluationResults result, ImmutableArray.Builder issues, HashSet seen) + { + if (result.Errors is { Count: > 0 }) + { + foreach (var pair in result.Errors) + { + var keyword = SanitizeKeyword(pair.Key); + var path = ConvertPointerToPath(result.InstanceLocation?.ToString() ?? "#"); + var message = pair.Value ?? "Schema violation."; + var key = $"{path}|{keyword}|{message}"; + if (seen.Add(key)) + { + issues.Add(PolicyIssue.Error($"scoring.schema.{keyword}", message, path)); + } + } + } + + if (result.Details is null) + { + return; + } + + foreach (var detail in result.Details) + { + CollectSchemaIssues(detail, issues, seen); + } + } + + private static string ConvertPointerToPath(string pointer) + { + if (string.IsNullOrEmpty(pointer) || pointer == "#") + { + return "$"; + } + + if (pointer[0] == '#') + { + pointer = pointer.Length > 1 ? pointer[1..] : string.Empty; + } + + if (string.IsNullOrEmpty(pointer)) + { + return "$"; + } + + var segments = pointer.Split('/', StringSplitOptions.RemoveEmptyEntries); + var builder = new StringBuilder("$"); + foreach (var segment in segments) + { + var unescaped = segment.Replace("~1", "/").Replace("~0", "~"); + if (int.TryParse(unescaped, out var index)) + { + builder.Append('[').Append(index).Append(']'); + } + else + { + builder.Append('.').Append(unescaped); + } + } + + return builder.ToString(); + } + + private static string SanitizeKeyword(string keyword) + { + if (string.IsNullOrWhiteSpace(keyword)) + { + return "unknown"; + } + + var builder = new StringBuilder(keyword.Length); + foreach (var ch in keyword) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + else if (ch is '.' 
or '_' or '-') + { + builder.Append(ch); + } + else + { + builder.Append('_'); + } + } + + return builder.Length == 0 ? "unknown" : builder.ToString(); + } + private static PolicyScoringConfig BuildConfig(JsonObject obj, ImmutableArray.Builder issues) { var version = ReadString(obj, "version", issues, required: true) ?? PolicyScoringConfig.BaselineVersion; @@ -110,6 +238,8 @@ public static class PolicyScoringConfigBinder var warnPenalty = ReadDouble(obj, "warnPenalty", issues, defaultValue: 15); var ignorePenalty = ReadDouble(obj, "ignorePenalty", issues, defaultValue: 35); var trustOverrides = ReadTrustOverrides(obj, issues); + var reachabilityBuckets = ReadReachabilityBuckets(obj, issues); + var unknownConfidence = ReadUnknownConfidence(obj, issues); return new PolicyScoringConfig( version, @@ -117,7 +247,212 @@ public static class PolicyScoringConfigBinder quietPenalty, warnPenalty, ignorePenalty, - trustOverrides); + trustOverrides, + reachabilityBuckets, + unknownConfidence); + } + + private static ImmutableDictionary CreateDefaultReachabilityBuckets() + { + var builder = ImmutableDictionary.CreateBuilder(StringComparer.OrdinalIgnoreCase); + builder["entrypoint"] = 1.0; + builder["direct"] = 0.85; + builder["indirect"] = 0.6; + builder["runtime"] = 0.45; + builder["unreachable"] = 0.25; + builder["unknown"] = 0.5; + return builder.ToImmutable(); + } + + private static PolicyUnknownConfidenceConfig CreateDefaultUnknownConfidence() + { + var bands = ImmutableArray.Create( + new PolicyUnknownConfidenceBand("high", 0.65, "Fresh unknowns with recent telemetry."), + new PolicyUnknownConfidenceBand("medium", 0.35, "Unknowns aging toward action required."), + new PolicyUnknownConfidenceBand("low", 0.0, "Stale unknowns that must be triaged.")); + return new PolicyUnknownConfidenceConfig(0.8, 0.05, 0.2, bands); + } + + private static ImmutableDictionary ReadReachabilityBuckets(JsonObject obj, ImmutableArray.Builder issues) + { + if (!obj.TryGetPropertyValue("reachabilityBuckets", out var node)) + { + issues.Add(PolicyIssue.Warning("scoring.reachability.default", "reachabilityBuckets not specified; defaulting to baseline weights.", "$.reachabilityBuckets")); + return DefaultReachabilityBuckets; + } + + if (node is not JsonObject bucketsObj) + { + issues.Add(PolicyIssue.Error("scoring.reachability.type", "reachabilityBuckets must be an object.", "$.reachabilityBuckets")); + return DefaultReachabilityBuckets; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.OrdinalIgnoreCase); + foreach (var pair in bucketsObj) + { + if (pair.Value is null) + { + issues.Add(PolicyIssue.Warning("scoring.reachability.null", $"Bucket '{pair.Key}' is null; defaulting to 0.", $"$.reachabilityBuckets.{pair.Key}")); + builder[pair.Key] = 0; + continue; + } + + var value = ExtractDouble(pair.Value, issues, $"$.reachabilityBuckets.{pair.Key}"); + builder[pair.Key] = value; + } + + if (builder.Count == 0) + { + issues.Add(PolicyIssue.Warning("scoring.reachability.empty", "No reachability buckets defined; using defaults.", "$.reachabilityBuckets")); + return DefaultReachabilityBuckets; + } + + return builder.ToImmutable(); + } + + private static PolicyUnknownConfidenceConfig ReadUnknownConfidence(JsonObject obj, ImmutableArray.Builder issues) + { + if (!obj.TryGetPropertyValue("unknownConfidence", out var node)) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.default", "unknownConfidence not specified; defaulting to baseline decay settings.", "$.unknownConfidence")); + return 
DefaultUnknownConfidence; + } + + if (node is not JsonObject configObj) + { + issues.Add(PolicyIssue.Error("scoring.unknown.type", "unknownConfidence must be an object.", "$.unknownConfidence")); + return DefaultUnknownConfidence; + } + + var initial = DefaultUnknownConfidence.Initial; + if (configObj.TryGetPropertyValue("initial", out var initialNode)) + { + initial = ExtractDouble(initialNode, issues, "$.unknownConfidence.initial"); + } + else + { + issues.Add(PolicyIssue.Warning("scoring.unknown.initial.default", "initial not specified; using baseline value.", "$.unknownConfidence.initial")); + } + + var decay = DefaultUnknownConfidence.DecayPerDay; + if (configObj.TryGetPropertyValue("decayPerDay", out var decayNode)) + { + decay = ExtractDouble(decayNode, issues, "$.unknownConfidence.decayPerDay"); + } + else + { + issues.Add(PolicyIssue.Warning("scoring.unknown.decay.default", "decayPerDay not specified; using baseline value.", "$.unknownConfidence.decayPerDay")); + } + + var floor = DefaultUnknownConfidence.Floor; + if (configObj.TryGetPropertyValue("floor", out var floorNode)) + { + floor = ExtractDouble(floorNode, issues, "$.unknownConfidence.floor"); + } + else + { + issues.Add(PolicyIssue.Warning("scoring.unknown.floor.default", "floor not specified; using baseline value.", "$.unknownConfidence.floor")); + } + + var bands = ReadConfidenceBands(configObj, issues); + if (bands.IsDefaultOrEmpty) + { + bands = DefaultUnknownConfidence.Bands; + } + + if (initial < 0 || initial > 1) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.initial.range", "initial confidence should be between 0 and 1. Clamping to valid range.", "$.unknownConfidence.initial")); + initial = Math.Clamp(initial, 0, 1); + } + + if (decay < 0 || decay > 1) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.decay.range", "decayPerDay should be between 0 and 1. Clamping to valid range.", "$.unknownConfidence.decayPerDay")); + decay = Math.Clamp(decay, 0, 1); + } + + if (floor < 0 || floor > 1) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.floor.range", "floor should be between 0 and 1. Clamping to valid range.", "$.unknownConfidence.floor")); + floor = Math.Clamp(floor, 0, 1); + } + + return new PolicyUnknownConfidenceConfig(initial, decay, floor, bands); + } + + private static ImmutableArray ReadConfidenceBands(JsonObject configObj, ImmutableArray.Builder issues) + { + if (!configObj.TryGetPropertyValue("bands", out var node)) + { + return ImmutableArray.Empty; + } + + if (node is not JsonArray array) + { + issues.Add(PolicyIssue.Error("scoring.unknown.bands.type", "unknownConfidence.bands must be an array.", "$.unknownConfidence.bands")); + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + for (var index = 0; index < array.Count; index++) + { + var element = array[index]; + if (element is not JsonObject bandObj) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.band.type", "Band entry must be an object.", $"$.unknownConfidence.bands[{index}]")); + continue; + } + + string? name = null; + if (bandObj.TryGetPropertyValue("name", out var nameNode) && nameNode is JsonValue nameValue && nameValue.TryGetValue(out string? 
text)) + { + name = text?.Trim(); + } + + if (string.IsNullOrWhiteSpace(name)) + { + issues.Add(PolicyIssue.Error("scoring.unknown.band.name", "Band entry requires a non-empty 'name'.", $"$.unknownConfidence.bands[{index}].name")); + continue; + } + + if (!seen.Add(name)) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.band.duplicate", $"Duplicate band '{name}' encountered.", $"$.unknownConfidence.bands[{index}].name")); + continue; + } + + if (!bandObj.TryGetPropertyValue("min", out var minNode)) + { + issues.Add(PolicyIssue.Error("scoring.unknown.band.min", $"Band '{name}' is missing 'min'.", $"$.unknownConfidence.bands[{index}].min")); + continue; + } + + var min = ExtractDouble(minNode, issues, $"$.unknownConfidence.bands[{index}].min"); + if (min < 0 || min > 1) + { + issues.Add(PolicyIssue.Warning("scoring.unknown.band.range", $"Band '{name}' min should be between 0 and 1. Clamping to valid range.", $"$.unknownConfidence.bands[{index}].min")); + min = Math.Clamp(min, 0, 1); + } + + string? description = null; + if (bandObj.TryGetPropertyValue("description", out var descriptionNode) && descriptionNode is JsonValue descriptionValue && descriptionValue.TryGetValue(out string? descriptionText)) + { + description = descriptionText?.Trim(); + } + + builder.Add(new PolicyUnknownConfidenceBand(name, min, description)); + } + + if (builder.Count == 0) + { + return ImmutableArray.Empty; + } + + return builder.ToImmutable() + .OrderByDescending(static band => band.Min) + .ToImmutableArray(); } private static ImmutableDictionary ReadSeverityWeights(JsonObject obj, ImmutableArray.Builder issues) @@ -228,6 +563,8 @@ internal static class PolicyBinderUtilities { case null: return null; + case string s when bool.TryParse(s, out var boolValue): + return JsonValue.Create(boolValue); case string s: return JsonValue.Create(s); case bool b: diff --git a/src/StellaOps.Policy/PolicyScoringConfigDigest.cs b/src/StellaOps.Policy/PolicyScoringConfigDigest.cs new file mode 100644 index 00000000..dfd7877a --- /dev/null +++ b/src/StellaOps.Policy/PolicyScoringConfigDigest.cs @@ -0,0 +1,100 @@ +using System; +using System.Buffers; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using System.Text.Json; + +namespace StellaOps.Policy; + +public static class PolicyScoringConfigDigest +{ + public static string Compute(PolicyScoringConfig config) + { + ArgumentNullException.ThrowIfNull(config); + + var buffer = new ArrayBufferWriter(); + using (var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions + { + SkipValidation = true, + })) + { + WriteConfig(writer, config); + } + + var hash = SHA256.HashData(buffer.WrittenSpan); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static void WriteConfig(Utf8JsonWriter writer, PolicyScoringConfig config) + { + writer.WriteStartObject(); + writer.WriteString("version", config.Version); + + writer.WritePropertyName("severityWeights"); + writer.WriteStartObject(); + foreach (var severity in Enum.GetValues()) + { + var key = severity.ToString(); + var value = config.SeverityWeights.TryGetValue(severity, out var weight) ? 
weight : 0; + writer.WriteNumber(key, value); + } + writer.WriteEndObject(); + + writer.WriteNumber("quietPenalty", config.QuietPenalty); + writer.WriteNumber("warnPenalty", config.WarnPenalty); + writer.WriteNumber("ignorePenalty", config.IgnorePenalty); + + if (!config.TrustOverrides.IsEmpty) + { + writer.WritePropertyName("trustOverrides"); + writer.WriteStartObject(); + foreach (var pair in config.TrustOverrides.OrderBy(static kvp => kvp.Key, StringComparer.OrdinalIgnoreCase)) + { + writer.WriteNumber(pair.Key, pair.Value); + } + writer.WriteEndObject(); + } + + if (!config.ReachabilityBuckets.IsEmpty) + { + writer.WritePropertyName("reachabilityBuckets"); + writer.WriteStartObject(); + foreach (var pair in config.ReachabilityBuckets.OrderBy(static kvp => kvp.Key, StringComparer.OrdinalIgnoreCase)) + { + writer.WriteNumber(pair.Key, pair.Value); + } + writer.WriteEndObject(); + } + + writer.WritePropertyName("unknownConfidence"); + writer.WriteStartObject(); + writer.WriteNumber("initial", config.UnknownConfidence.Initial); + writer.WriteNumber("decayPerDay", config.UnknownConfidence.DecayPerDay); + writer.WriteNumber("floor", config.UnknownConfidence.Floor); + + if (!config.UnknownConfidence.Bands.IsDefaultOrEmpty) + { + writer.WritePropertyName("bands"); + writer.WriteStartArray(); + foreach (var band in config.UnknownConfidence.Bands + .OrderByDescending(static b => b.Min) + .ThenBy(static b => b.Name, StringComparer.OrdinalIgnoreCase)) + { + writer.WriteStartObject(); + writer.WriteString("name", band.Name); + writer.WriteNumber("min", band.Min); + if (!string.IsNullOrWhiteSpace(band.Description)) + { + writer.WriteString("description", band.Description); + } + writer.WriteEndObject(); + } + writer.WriteEndArray(); + } + + writer.WriteEndObject(); + writer.WriteEndObject(); + writer.Flush(); + } +} diff --git a/src/StellaOps.Policy/PolicyScoringSchema.cs b/src/StellaOps.Policy/PolicyScoringSchema.cs new file mode 100644 index 00000000..2d6f6d43 --- /dev/null +++ b/src/StellaOps.Policy/PolicyScoringSchema.cs @@ -0,0 +1,27 @@ +using System; +using System.IO; +using System.Reflection; +using System.Text; +using System.Threading; +using Json.Schema; + +namespace StellaOps.Policy; + +public static class PolicyScoringSchema +{ + private const string SchemaResourceName = "StellaOps.Policy.Schemas.policy-scoring-schema@1.json"; + + private static readonly Lazy CachedSchema = new(LoadSchema, LazyThreadSafetyMode.ExecutionAndPublication); + + public static JsonSchema Schema => CachedSchema.Value; + + private static JsonSchema LoadSchema() + { + var assembly = Assembly.GetExecutingAssembly(); + using var stream = assembly.GetManifestResourceStream(SchemaResourceName) + ?? 
throw new InvalidOperationException($"Embedded resource '{SchemaResourceName}' was not found."); + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true); + var schemaJson = reader.ReadToEnd(); + return JsonSchema.FromText(schemaJson); + } +} diff --git a/src/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs b/src/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs new file mode 100644 index 00000000..4ae81c2d --- /dev/null +++ b/src/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Immutable; + +namespace StellaOps.Policy; + +public sealed record PolicyUnknownConfidenceConfig( + double Initial, + double DecayPerDay, + double Floor, + ImmutableArray Bands) +{ + public double Clamp(double value) + => Math.Clamp(value, Floor, 1.0); + + public PolicyUnknownConfidenceBand ResolveBand(double value) + { + if (Bands.IsDefaultOrEmpty) + { + return PolicyUnknownConfidenceBand.Default; + } + + foreach (var band in Bands) + { + if (value >= band.Min) + { + return band; + } + } + + return Bands[Bands.Length - 1]; + } +} + +public sealed record PolicyUnknownConfidenceBand(string Name, double Min, string? Description = null) +{ + public static PolicyUnknownConfidenceBand Default { get; } = new("unspecified", 0, null); +} diff --git a/src/StellaOps.Policy/PolicyVerdict.cs b/src/StellaOps.Policy/PolicyVerdict.cs index 9eb758ef..5b37e0db 100644 --- a/src/StellaOps.Policy/PolicyVerdict.cs +++ b/src/StellaOps.Policy/PolicyVerdict.cs @@ -24,7 +24,12 @@ public sealed record PolicyVerdict( string ConfigVersion = "1.0", ImmutableDictionary? Inputs = null, string? QuietedBy = null, - bool Quiet = false) + bool Quiet = false, + double? UnknownConfidence = null, + string? ConfidenceBand = null, + double? UnknownAgeDays = null, + string? SourceTrust = null, + string? Reachability = null) { public static PolicyVerdict CreateBaseline(string findingId, PolicyScoringConfig scoringConfig) { @@ -39,7 +44,12 @@ public sealed record PolicyVerdict( ConfigVersion: scoringConfig.Version, Inputs: inputs, QuietedBy: null, - Quiet: false); + Quiet: false, + UnknownConfidence: null, + ConfidenceBand: null, + UnknownAgeDays: null, + SourceTrust: null, + Reachability: null); } public ImmutableDictionary GetInputs() @@ -74,6 +84,28 @@ public sealed record PolicyVerdictDiff( return true; } + var baselineConfidence = Baseline.UnknownConfidence ?? 0; + var projectedConfidence = Projected.UnknownConfidence ?? 
0; + if (Math.Abs(baselineConfidence - projectedConfidence) > 0.0001) + { + return true; + } + + if (!string.Equals(Baseline.ConfidenceBand, Projected.ConfidenceBand, StringComparison.Ordinal)) + { + return true; + } + + if (!string.Equals(Baseline.SourceTrust, Projected.SourceTrust, StringComparison.Ordinal)) + { + return true; + } + + if (!string.Equals(Baseline.Reachability, Projected.Reachability, StringComparison.Ordinal)) + { + return true; + } + return false; } } diff --git a/src/StellaOps.Policy/Schemas/policy-scoring-default.json b/src/StellaOps.Policy/Schemas/policy-scoring-default.json index eaa41270..74417b17 100644 --- a/src/StellaOps.Policy/Schemas/policy-scoring-default.json +++ b/src/StellaOps.Policy/Schemas/policy-scoring-default.json @@ -17,5 +17,35 @@ "distro": 0.85, "platform": 0.75, "community": 0.65 + }, + "reachabilityBuckets": { + "entrypoint": 1.0, + "direct": 0.85, + "indirect": 0.6, + "runtime": 0.45, + "unreachable": 0.25, + "unknown": 0.5 + }, + "unknownConfidence": { + "initial": 0.8, + "decayPerDay": 0.05, + "floor": 0.2, + "bands": [ + { + "name": "high", + "min": 0.65, + "description": "Fresh unknowns with recent telemetry." + }, + { + "name": "medium", + "min": 0.35, + "description": "Unknowns aging toward action required." + }, + { + "name": "low", + "min": 0.0, + "description": "Stale unknowns that must be triaged." + } + ] } } diff --git a/src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json b/src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json new file mode 100644 index 00000000..603b1f89 --- /dev/null +++ b/src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json @@ -0,0 +1,156 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://schemas.stella-ops.org/policy/policy-scoring-schema@1.json", + "title": "StellaOps Policy Scoring Configuration v1", + "type": "object", + "additionalProperties": false, + "required": [ + "version", + "severityWeights" + ], + "properties": { + "version": { + "type": "string", + "pattern": "^[0-9]+\\.[0-9]+$" + }, + "severityWeights": { + "type": "object", + "additionalProperties": false, + "required": [ + "Critical", + "High", + "Medium", + "Low", + "Informational", + "None", + "Unknown" + ], + "properties": { + "Critical": { + "$ref": "#/$defs/weight" + }, + "High": { + "$ref": "#/$defs/weight" + }, + "Medium": { + "$ref": "#/$defs/weight" + }, + "Low": { + "$ref": "#/$defs/weight" + }, + "Informational": { + "$ref": "#/$defs/weight" + }, + "None": { + "$ref": "#/$defs/weight" + }, + "Unknown": { + "$ref": "#/$defs/weight" + } + } + }, + "quietPenalty": { + "$ref": "#/$defs/penalty" + }, + "warnPenalty": { + "$ref": "#/$defs/penalty" + }, + "ignorePenalty": { + "$ref": "#/$defs/penalty" + }, + "trustOverrides": { + "type": "object", + "propertyNames": { + "pattern": "^[a-z][a-z0-9_.-]*$" + }, + "additionalProperties": { + "$ref": "#/$defs/trustWeight" + } + }, + "reachabilityBuckets": { + "type": "object", + "minProperties": 1, + "propertyNames": { + "pattern": "^[a-z][a-z0-9_.-]*$" + }, + "additionalProperties": { + "$ref": "#/$defs/reachabilityWeight" + } + }, + "unknownConfidence": { + "type": "object", + "additionalProperties": false, + "required": [ + "initial", + "decayPerDay", + "floor", + "bands" + ], + "properties": { + "initial": { + "$ref": "#/$defs/confidence" + }, + "decayPerDay": { + "type": "number", + "minimum": 0, + "maximum": 1 + }, + "floor": { + "$ref": "#/$defs/confidence" + }, + "bands": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "name", + "min" + ], + "properties": { + "name": { + "type": "string", + "pattern": "^[a-z][a-z0-9_.-]*$" + }, + "min": { + "$ref": "#/$defs/confidence" + }, + "description": { + "type": "string", + "maxLength": 256 + } + } + } + } + } + } + }, + "$defs": { + "weight": { + "type": "number", + "minimum": 0, + "maximum": 100 + }, + "penalty": { + "type": "number", + "minimum": 0, + "maximum": 200 + }, + "trustWeight": { + "type": "number", + "minimum": 0, + "maximum": 5 + }, + "reachabilityWeight": { + "type": "number", + "minimum": 0, + "maximum": 1.5 + }, + "confidence": { + "type": "number", + "minimum": 0, + "maximum": 1 + } + } +} diff --git a/src/StellaOps.Policy/StellaOps.Policy.csproj b/src/StellaOps.Policy/StellaOps.Policy.csproj index c04fee44..59c7b9c1 100644 --- a/src/StellaOps.Policy/StellaOps.Policy.csproj +++ b/src/StellaOps.Policy/StellaOps.Policy.csproj @@ -11,10 +11,12 @@ +
+ diff --git a/src/StellaOps.Policy/TASKS.md b/src/StellaOps.Policy/TASKS.md index 968388f9..2c692eff 100644 --- a/src/StellaOps.Policy/TASKS.md +++ b/src/StellaOps.Policy/TASKS.md @@ -5,12 +5,13 @@ | POLICY-CORE-09-001 | DONE | Policy Guild | SCANNER-WEB-09-101 | Define YAML schema/binder, diagnostics, CLI validation for policy files. | Schema doc published; binder loads sample policy; validation errors actionable. | | POLICY-CORE-09-002 | DONE | Policy Guild | POLICY-CORE-09-001 | Implement policy snapshot store + revision digests + audit logging. | Snapshots persisted with digest; tests compare revisions; audit entries created. | | POLICY-CORE-09-003 | DONE | Policy Guild | POLICY-CORE-09-002 | `/policy/preview` API (image digest → projected verdict delta). | Preview returns diff JSON; integration tests with mocked report; docs updated. | -| POLICY-CORE-09-004 | TODO | Policy Guild | POLICY-CORE-09-001 | Versioned scoring config with schema validation, trust table, and golden fixtures. | Scoring config documented; fixtures stored; validation CLI passes. | -| POLICY-CORE-09-005 | TODO | Policy Guild | POLICY-CORE-09-004 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | Engine unit tests cover severity weighting; outputs include provenance data. | -| POLICY-CORE-09-006 | TODO | Policy Guild | POLICY-CORE-09-005 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | Confidence decay tests pass; docs updated; preview endpoint displays banding. | -| POLICY-CORE-09-004 | TODO | Policy Guild | POLICY-CORE-09-001 | Versioned scoring config (weights, trust table, reachability buckets) with schema validation, binder, and golden fixtures. | Config serialized with semantic version, binder loads defaults, fixtures assert deterministic hash. | -| POLICY-CORE-09-005 | TODO | Policy Guild | POLICY-CORE-09-004, POLICY-CORE-09-002 | Implement scoring/quiet engine: compute score from config, enforce VEX-only quiet rules, emit inputs + `quietedBy` metadata in policy verdicts. | `/reports` policy result includes score, inputs, configVersion, quiet provenance; unit/integration tests prove reproducibility. | -| POLICY-CORE-09-006 | TODO | Policy Guild | POLICY-CORE-09-005, FEEDCORE-ENGINE-07-003 | Track unknown states with deterministic confidence bands that decay over time; expose state in policy outputs and docs. | Unknown flags + confidence band persisted, decay job deterministic, preview/report APIs show state with tests covering decay math. | +| POLICY-CORE-09-004 | DOING (2025-10-19) | Policy Guild | — | Versioned scoring config with schema validation, trust table, and golden fixtures. | Scoring config documented; fixtures stored; validation CLI passes. | +| POLICY-CORE-09-005 | DOING (2025-10-19) | Policy Guild | — | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | Engine unit tests cover severity weighting; outputs include provenance data. | +| POLICY-CORE-09-006 | DOING (2025-10-19) | Policy Guild | — | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | Confidence decay tests pass; docs updated; preview endpoint displays banding. | +| POLICY-CORE-09-004 | DONE | Policy Guild | POLICY-CORE-09-001 | Versioned scoring config (weights, trust table, reachability buckets) with schema validation, binder, and golden fixtures. | Config serialized with semantic version, binder loads defaults, fixtures assert deterministic hash. 
| +| POLICY-CORE-09-005 | DONE | Policy Guild | POLICY-CORE-09-004, POLICY-CORE-09-002 | Implement scoring/quiet engine: compute score from config, enforce VEX-only quiet rules, emit inputs + `quietedBy` metadata in policy verdicts. | `/reports` policy result includes score, inputs, configVersion, quiet provenance; unit/integration tests prove reproducibility. | +| POLICY-CORE-09-006 | DONE | Policy Guild | POLICY-CORE-09-005, FEEDCORE-ENGINE-07-003 | Track unknown states with deterministic confidence bands that decay over time; expose state in policy outputs and docs. | Unknown flags + confidence band persisted, decay job deterministic, preview/report APIs show state with tests covering decay math. | +| POLICY-RUNTIME-17-201 | TODO | Policy Guild, Scanner WebService Guild | ZASTAVA-OBS-17-005 | Define runtime reachability feed contract and alignment plan for `SCANNER-RUNTIME-17-401` once Zastava endpoints land; document policy expectations for reachability tags. | Contract note published, sample payload agreed with Scanner team, dependencies captured in scanner/runtime task boards. | ## Notes - 2025-10-18: POLICY-CORE-09-001 completed. Binder + diagnostics + CLI scaffolding landed with tests; schema embedded at `src/StellaOps.Policy/Schemas/policy-schema@1.json` and referenced by docs/11_DATA_SCHEMAS.md. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md new file mode 100644 index 00000000..e13d9f18 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md @@ -0,0 +1,29 @@ +# StellaOps.Scanner.Analyzers.Lang.DotNet — Agent Charter + +## Role +Create the .NET analyzer plug-in that inspects `*.deps.json`, `runtimeconfig.json`, assemblies, and RID-specific assets to deliver accurate NuGet components with signing metadata. + +## Scope +- Parse dependency graphs from `*.deps.json` and merge with `runtimeconfig.json` and bundle manifests. +- Capture assembly metadata (strong name, file version, Authenticode) and correlate with packages. +- Handle RID-specific asset selection, self-contained apps, and crossgen/native dependency hints. +- Package plug-in manifest, determinism fixtures, benchmarks, and Offline Kit documentation. + +## Out of Scope +- Policy evaluation or Signer integration (handled elsewhere). +- Native dependency resolution outside RID mapping. +- Windows-specific MSI/SxS analyzers (covered by native analyzer roadmap). + +## Expectations +- Performance target: multi-target app fixture <1.2 s, memory <250 MB. +- Deterministic RID collapsing to reduce component duplication by ≥40 % vs naive approach. +- Offline-first; support air-gapped strong-name/Authenticode validation using cached root store. +- Rich telemetry (components per RID, strong-name validations) conforming to Scanner metrics. + +## Dependencies +- Shared language analyzer infrastructure; Worker dispatcher; optional security key store for signature verification. + +## Testing & Artifacts +- Fixtures for framework-dependent and self-contained apps (linux-musl, win-x64). +- Golden outputs capturing signature metadata and RID grouping. +- Benchmark comparing analyzer fidelity vs market competitors. 
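+
+## Sketch: deps.json to pkg:nuget mapping (non-normative)
+
+The Scope bullets above describe mapping `*.deps.json` dependency graphs to `pkg:nuget` components. The snippet below is an editor's hedged illustration of that core step only; the class and method names are assumptions, not part of the analyzer contract, and RID asset selection, `runtimeconfig.json` merging, and signing metadata are deliberately omitted.
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text.Json;
+
+internal static class DepsJsonSketch
+{
+    // Enumerate NuGet package coordinates from the "libraries" section of a *.deps.json file.
+    // Keys take the form "PackageId/Version"; only entries with "type": "package" correspond
+    // to NuGet packages ("project" entries are the application/projects themselves).
+    public static IEnumerable<string> EnumerateNuGetPurls(string depsJsonPath)
+    {
+        using var document = JsonDocument.Parse(File.ReadAllText(depsJsonPath));
+        if (!document.RootElement.TryGetProperty("libraries", out var libraries) ||
+            libraries.ValueKind != JsonValueKind.Object)
+        {
+            yield break;
+        }
+
+        foreach (var library in libraries.EnumerateObject())
+        {
+            if (!library.Value.TryGetProperty("type", out var type) ||
+                type.ValueKind != JsonValueKind.String ||
+                !string.Equals(type.GetString(), "package", StringComparison.OrdinalIgnoreCase))
+            {
+                continue;
+            }
+
+            var separator = library.Name.IndexOf('/');
+            if (separator <= 0 || separator >= library.Name.Length - 1)
+            {
+                continue;
+            }
+
+            var name = library.Name[..separator];
+            var version = library.Name[(separator + 1)..];
+
+            // Purl normalisation (casing, qualifiers) is left to the real implementation.
+            yield return $"pkg:nuget/{name}@{version}";
+        }
+    }
+}
+```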
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs new file mode 100644 index 00000000..be78d3d3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs @@ -0,0 +1,7 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Placeholder.cs b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Placeholder.cs new file mode 100644 index 00000000..c8c17007 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Placeholder.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Scanner.Analyzers.Lang.DotNet; + +internal static class Placeholder +{ + // Analyzer implementation will be added during Sprint LA4. +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md new file mode 100644 index 00000000..afb82eaa --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md @@ -0,0 +1,10 @@ +# .NET Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-305A | TODO | SCANNER-ANALYZERS-LANG-10-307 | Parse `*.deps.json` + `runtimeconfig.json`, build RID graph, and normalize to `pkg:nuget` components. | RID graph deterministic; fixtures confirm consistent component ordering; fallback to `bin:{sha256}` documented. | +| 2 | SCANNER-ANALYZERS-LANG-10-305B | TODO | SCANNER-ANALYZERS-LANG-10-305A | Extract assembly metadata (strong name, file/product info) and optional Authenticode details when offline cert bundle provided. | Signing metadata captured for signed assemblies; offline trust store documented; hash validations deterministic. | +| 3 | SCANNER-ANALYZERS-LANG-10-305C | TODO | SCANNER-ANALYZERS-LANG-10-305B | Handle self-contained apps and native assets; merge with EntryTrace usage hints. | Self-contained fixtures map to components with RID flags; usage hints propagate; tests cover linux/win variants. | +| 4 | SCANNER-ANALYZERS-LANG-10-307D | TODO | SCANNER-ANALYZERS-LANG-10-305C | Integrate shared helpers (license mapping, quiet provenance) and concurrency-safe caches. | Shared helpers reused; concurrency tests for parallel layer scans pass; no redundant allocations. | +| 5 | SCANNER-ANALYZERS-LANG-10-308D | TODO | SCANNER-ANALYZERS-LANG-10-307D | Determinism fixtures + benchmark harness; compare to competitor scanners for accuracy/perf. | Fixtures in `Fixtures/lang/dotnet/`; determinism CI guard; benchmark demonstrates lower duplication + faster runtime. | +| 6 | SCANNER-ANALYZERS-LANG-10-309D | TODO | SCANNER-ANALYZERS-LANG-10-308D | Package plug-in (manifest, DI registration) and update Offline Kit instructions. | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md new file mode 100644 index 00000000..09372f56 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md @@ -0,0 +1,29 @@ +# StellaOps.Scanner.Analyzers.Lang.Go — Agent Charter + +## Role +Build the Go analyzer plug-in that reads Go build info, module metadata, and DWARF notes to attribute binaries with rich provenance inside Scanner. + +## Scope +- Inspect binaries for build info (`.note.go.buildid`, Go build info blob) and extract module, version, VCS metadata. +- Parse DWARF-lite sections for commit hash / dirty flag and map to components. +- Manage shared hash cache to dedupe identical binaries across layers. +- Provide benchmarks and determinism fixtures; package plug-in manifest. + +## Out of Scope +- Native library link analysis (belongs to native analyzer). +- VCS remote fetching or symbol download. +- Policy decisions or vulnerability joins. + +## Expectations +- Latency targets: ≤400 µs (hot) / ≤2 ms (cold) per binary; minimal allocations via buffer pooling. +- Deterministic fallback to `bin:{sha256}` when metadata absent; heuristics clearly identified. +- Offline-first: rely solely on embedded metadata. +- Telemetry for binaries processed, metadata coverage, heuristics usage. + +## Dependencies +- Shared language analyzer core; Worker dispatcher; caching infrastructure (layer cache + file CAS). + +## Testing & Artifacts +- Golden fixtures for modules with/without VCS info, stripped binaries, cross-compiled variants. +- Benchmark comparison with competitor scanners to demonstrate speed/fidelity advantages. +- ADR documenting heuristics and risk mitigation. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs new file mode 100644 index 00000000..be78d3d3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs @@ -0,0 +1,7 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Placeholder.cs b/src/StellaOps.Scanner.Analyzers.Lang.Go/Placeholder.cs new file mode 100644 index 00000000..3e8c3a49 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Go/Placeholder.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Go; + +internal static class Placeholder +{ + // Analyzer implementation will be added during Sprint LA3. 
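+
+    // Editor's sketch (hedged, not the shipped analyzer): the Go charter calls for a
+    // deterministic `bin:{sha256}` fallback identity when a binary carries no Go build-info
+    // metadata. The helper below shows one way such a key could be derived; the method name
+    // is an editor assumption and the real analyzer may shape this differently.
+    internal static string ComputeFallbackComponentKey(string binaryPath)
+    {
+        using var stream = File.OpenRead(binaryPath);
+        var digest = System.Security.Cryptography.SHA256.HashData(stream);
+        return $"bin:{Convert.ToHexString(digest).ToLowerInvariant()}";
+    }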
+} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md new file mode 100644 index 00000000..68d681d1 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md @@ -0,0 +1,10 @@ +# Go Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-304A | TODO | SCANNER-ANALYZERS-LANG-10-307 | Parse Go build info blob (`runtime/debug` format) and `.note.go.buildid`; map to module/version and evidence. | Build info extracted across Go 1.18–1.23 fixtures; evidence includes VCS, module path, and build settings. | +| 2 | SCANNER-ANALYZERS-LANG-10-304B | TODO | SCANNER-ANALYZERS-LANG-10-304A | Implement DWARF-lite reader for VCS metadata + dirty flag; add cache to avoid re-reading identical binaries. | DWARF reader supplies commit hash for ≥95 % fixtures; cache reduces duplicated IO by ≥70 %. | +| 3 | SCANNER-ANALYZERS-LANG-10-304C | TODO | SCANNER-ANALYZERS-LANG-10-304B | Fallback heuristics for stripped binaries with deterministic `bin:{sha256}` labeling and quiet provenance. | Heuristic labels clearly separated; tests ensure no false “observed” provenance; documentation updated. | +| 4 | SCANNER-ANALYZERS-LANG-10-307G | TODO | SCANNER-ANALYZERS-LANG-10-304C | Wire shared helpers (license mapping, usage flags) and ensure concurrency-safe buffer reuse. | Analyzer reuses shared infrastructure; concurrency tests with parallel scans pass; no data races. | +| 5 | SCANNER-ANALYZERS-LANG-10-308G | TODO | SCANNER-ANALYZERS-LANG-10-307G | Determinism fixtures + benchmark harness (Vs competitor). | Fixtures under `Fixtures/lang/go/`; CI determinism check; benchmark runs showing ≥20 % speed advantage. | +| 6 | SCANNER-ANALYZERS-LANG-10-309G | TODO | SCANNER-ANALYZERS-LANG-10-308G | Package plug-in manifest + Offline Kit notes; ensure Worker DI registration. | Manifest copied; Worker loads analyzer; Offline Kit docs updated with Go analyzer presence. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs new file mode 100644 index 00000000..68605428 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs @@ -0,0 +1,10 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.IO.Compression; +global using System.Security.Cryptography; +global using System.Text; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs new file mode 100644 index 00000000..c3ef2fad --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs @@ -0,0 +1,360 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Java; + +public sealed class JavaLanguageAnalyzer : ILanguageAnalyzer +{ + private static readonly HashSet SupportedExtensions = new(StringComparer.OrdinalIgnoreCase) + { + ".jar", + ".war", + ".ear", + ".jmod", + }; + + private static readonly EnumerationOptions EnumerationOptions = new() + { + RecurseSubdirectories = true, + IgnoreInaccessible = true, + AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint, + }; + + public string Id => "java"; + + public string DisplayName => "Java/Maven Analyzer"; + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(writer); + + foreach (var jarPath in EnumerateCandidateArchives(context.RootPath)) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + await ProcessArchiveAsync(jarPath, Id, context, writer, cancellationToken).ConfigureAwait(false); + } + catch (IOException) + { + // Ignore corrupt archives to keep scans resilient. + } + catch (InvalidDataException) + { + // Non-zip payloads should not break the scan. + } + } + } + + private static IEnumerable EnumerateCandidateArchives(string root) + { + foreach (var file in Directory.EnumerateFiles(root, "*", EnumerationOptions)) + { + if (!SupportedExtensions.Contains(Path.GetExtension(file))) + { + continue; + } + + yield return file; + } + } + + private static async ValueTask ProcessArchiveAsync(string archivePath, string analyzerId, LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + using var fileStream = new FileStream(archivePath, FileMode.Open, FileAccess.Read, FileShare.Read); + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Read, leaveOpen: false); + + ManifestMetadata? 
manifestMetadata = null; + var manifestEntry = archive.GetEntry("META-INF/MANIFEST.MF"); + if (manifestEntry is not null) + { + manifestMetadata = await ParseManifestAsync(manifestEntry, cancellationToken).ConfigureAwait(false); + } + + foreach (var entry in archive.Entries) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (IsManifestEntry(entry.FullName)) + { + continue; + } + + if (!IsPomPropertiesEntry(entry.FullName)) + { + continue; + } + + var artifact = await ParsePomPropertiesAsync(entry, cancellationToken).ConfigureAwait(false); + if (artifact is null) + { + continue; + } + + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["groupId"] = artifact.GroupId, + ["artifactId"] = artifact.ArtifactId, + ["jarPath"] = context.GetRelativePath(archivePath), + }; + + if (!string.IsNullOrEmpty(artifact.Packaging)) + { + metadata["packaging"] = artifact.Packaging; + } + + if (!string.IsNullOrEmpty(artifact.Name)) + { + metadata["displayName"] = artifact.Name; + } + + if (manifestMetadata is not null) + { + manifestMetadata.ApplyMetadata(metadata); + } + + var evidence = new List + { + new(LanguageEvidenceKind.File, "pom.properties", BuildLocator(context, archivePath, entry.FullName), null, artifact.PomSha256), + }; + + if (manifestMetadata is not null) + { + evidence.Add(manifestMetadata.CreateEvidence(context, archivePath)); + } + + var usedByEntrypoint = context.UsageHints.IsPathUsed(archivePath); + + writer.AddFromPurl( + analyzerId: analyzerId, + purl: artifact.Purl, + name: artifact.ArtifactId, + version: artifact.Version, + type: "maven", + metadata: metadata, + evidence: evidence, + usedByEntrypoint: usedByEntrypoint); + } + } + + private static string BuildLocator(LanguageAnalyzerContext context, string archivePath, string entryPath) + { + var relativeArchive = context.GetRelativePath(archivePath); + if (string.IsNullOrEmpty(relativeArchive) || relativeArchive == ".") + { + return NormalizeEntry(entryPath); + } + + return string.Concat(relativeArchive, "!", NormalizeEntry(entryPath)); + } + + private static string NormalizeEntry(string entryPath) + => entryPath.Replace('\\', '/'); + + private static bool IsPomPropertiesEntry(string entryName) + => entryName.StartsWith("META-INF/maven/", StringComparison.OrdinalIgnoreCase) + && entryName.EndsWith("/pom.properties", StringComparison.OrdinalIgnoreCase); + + private static bool IsManifestEntry(string entryName) + => string.Equals(entryName, "META-INF/MANIFEST.MF", StringComparison.OrdinalIgnoreCase); + + private static async ValueTask ParsePomPropertiesAsync(ZipArchiveEntry entry, CancellationToken cancellationToken) + { + await using var entryStream = entry.Open(); + using var buffer = new MemoryStream(); + await entryStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false); + buffer.Position = 0; + + using var reader = new StreamReader(buffer, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: true); + var properties = new Dictionary(StringComparer.OrdinalIgnoreCase); + + while (await reader.ReadLineAsync().ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + line = line.Trim(); + if (line.Length == 0 || line.StartsWith('#')) + { + continue; + } + + var separatorIndex = line.IndexOf('='); + if (separatorIndex <= 0) + { + continue; + } + + var key = line[..separatorIndex].Trim(); + var value = line[(separatorIndex + 1)..].Trim(); + if (key.Length == 0) + { + continue; + } + + properties[key] = value; + } + + if (!properties.TryGetValue("groupId", 
out var groupId) || string.IsNullOrWhiteSpace(groupId)) + { + return null; + } + + if (!properties.TryGetValue("artifactId", out var artifactId) || string.IsNullOrWhiteSpace(artifactId)) + { + return null; + } + + if (!properties.TryGetValue("version", out var version) || string.IsNullOrWhiteSpace(version)) + { + return null; + } + + var packaging = properties.TryGetValue("packaging", out var packagingValue) ? packagingValue : "jar"; + var name = properties.TryGetValue("name", out var nameValue) ? nameValue : null; + + var purl = BuildPurl(groupId, artifactId, version, packaging); + buffer.Position = 0; + var pomSha = Convert.ToHexString(SHA256.HashData(buffer)).ToLowerInvariant(); + + return new MavenArtifact( + GroupId: groupId.Trim(), + ArtifactId: artifactId.Trim(), + Version: version.Trim(), + Packaging: packaging?.Trim(), + Name: name?.Trim(), + Purl: purl, + PomSha256: pomSha); + } + + private static async ValueTask ParseManifestAsync(ZipArchiveEntry entry, CancellationToken cancellationToken) + { + await using var entryStream = entry.Open(); + using var reader = new StreamReader(entryStream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: false); + + string? title = null; + string? version = null; + string? vendor = null; + + while (await reader.ReadLineAsync().ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var separatorIndex = line.IndexOf(':'); + if (separatorIndex <= 0) + { + continue; + } + + var key = line[..separatorIndex].Trim(); + var value = line[(separatorIndex + 1)..].Trim(); + + if (key.Equals("Implementation-Title", StringComparison.OrdinalIgnoreCase)) + { + title ??= value; + } + else if (key.Equals("Implementation-Version", StringComparison.OrdinalIgnoreCase)) + { + version ??= value; + } + else if (key.Equals("Implementation-Vendor", StringComparison.OrdinalIgnoreCase)) + { + vendor ??= value; + } + } + + if (title is null && version is null && vendor is null) + { + return null; + } + + return new ManifestMetadata(title, version, vendor); + } + + private static string BuildPurl(string groupId, string artifactId, string version, string? packaging) + { + var normalizedGroup = groupId.Replace('.', '/'); + var builder = new StringBuilder(); + builder.Append("pkg:maven/"); + builder.Append(normalizedGroup); + builder.Append('/'); + builder.Append(artifactId); + builder.Append('@'); + builder.Append(version); + + if (!string.IsNullOrWhiteSpace(packaging) && !packaging.Equals("jar", StringComparison.OrdinalIgnoreCase)) + { + builder.Append("?type="); + builder.Append(packaging); + } + + return builder.ToString(); + } + + private sealed record MavenArtifact( + string GroupId, + string ArtifactId, + string Version, + string? Packaging, + string? Name, + string Purl, + string PomSha256); + + private sealed record ManifestMetadata(string? ImplementationTitle, string? ImplementationVersion, string? 
ImplementationVendor) + { + public void ApplyMetadata(IDictionary target) + { + if (!string.IsNullOrWhiteSpace(ImplementationTitle)) + { + target["manifestTitle"] = ImplementationTitle; + } + + if (!string.IsNullOrWhiteSpace(ImplementationVersion)) + { + target["manifestVersion"] = ImplementationVersion; + } + + if (!string.IsNullOrWhiteSpace(ImplementationVendor)) + { + target["manifestVendor"] = ImplementationVendor; + } + } + + public LanguageComponentEvidence CreateEvidence(LanguageAnalyzerContext context, string archivePath) + { + var locator = BuildLocator(context, archivePath, "META-INF/MANIFEST.MF"); + var valueBuilder = new StringBuilder(); + + if (!string.IsNullOrWhiteSpace(ImplementationTitle)) + { + valueBuilder.Append("title=").Append(ImplementationTitle); + } + + if (!string.IsNullOrWhiteSpace(ImplementationVersion)) + { + if (valueBuilder.Length > 0) + { + valueBuilder.Append(';'); + } + + valueBuilder.Append("version=").Append(ImplementationVersion); + } + + if (!string.IsNullOrWhiteSpace(ImplementationVendor)) + { + if (valueBuilder.Length > 0) + { + valueBuilder.Append(';'); + } + + valueBuilder.Append("vendor=").Append(ImplementationVendor); + } + + var value = valueBuilder.Length > 0 ? valueBuilder.ToString() : null; + return new LanguageComponentEvidence(LanguageEvidenceKind.File, "MANIFEST.MF", locator, value, null); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json b/src/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json new file mode 100644 index 00000000..ac1f17c0 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json @@ -0,0 +1,22 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.java", + "displayName": "StellaOps Java / Maven Analyzer", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Java.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Java.JavaLanguageAnalyzer" + }, + "capabilities": [ + "language-analyzer", + "java", + "maven" + ], + "metadata": { + "org.stellaops.analyzer.language": "java", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md new file mode 100644 index 00000000..b3343476 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md @@ -0,0 +1,39 @@ +# StellaOps.Scanner.Analyzers.Lang.Node — Agent Charter + +## Role +Deliver the Node.js / npm / Yarn / PNPM analyzer plug-in that resolves workspace graphs, symlinks, and script metadata for Scanner Workers. + +## Scope +- Deterministic filesystem walker for `node_modules`, PNPM store, Yarn Plug'n'Play, and workspace roots. +- Component identity normalization to `pkg:npm` with provenance evidence (manifest path, integrity hashes, lockfile references). +- Workspace + symlink attribution, script metadata (postinstall, lifecycle), and policy hints for risky scripts. +- Plug-in manifest authoring, DI bootstrap, and benchmark harness integration. 
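+
+A minimal sketch of the `pkg:npm` identity normalization listed above (the `NpmPurlExample` class and method name are illustrative only, not part of the plug-in's API); it mirrors the rule used later in this change where the leading `@` of a scoped package is percent-encoded before the purl is emitted:
+
+```csharp
+// Illustrative sketch only: scoped names keep the "scope/name" shape and the
+// leading '@' is percent-encoded so emitted purls stay canonical and deterministic.
+internal static class NpmPurlExample
+{
+    public static string BuildNpmPurl(string name, string version)
+        => name.StartsWith('@')
+            ? $"pkg:npm/%40{name[1..]}@{version}"
+            : $"pkg:npm/{name}@{version}";
+}
+
+// BuildNpmPurl("@types/node", "20.11.0") -> "pkg:npm/%40types/node@20.11.0"
+// BuildNpmPurl("left-pad", "1.3.0")      -> "pkg:npm/left-pad@1.3.0"
+```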
+ +## Out of Scope +- OS package detection, native library linkage, or vulnerability joins. +- Language analyzers for other ecosystems (Python, Go, .NET, Rust). +- CLI/UI surfacing of analyzer diagnostics (handed to UI guild post-gate). + +## Expectations +- Deterministic output across Yarn/NPM/PNPM variations; normalized casing and path separators. +- Performance targets: 10 k-module fixture <1.8 s, <220 MB RSS on 4 vCPU runner. +- Offline-first; no network dependency to resolve registries. +- Emit structured metrics + logs (`analyzer=node`) compatible with Scanner telemetry model. +- Update `TASKS.md`, `SPRINTS_LANG_IMPLEMENTATION_PLAN.md`, and corresponding fixtures as progress occurs. + +## Dependencies +- Shared language analyzer core (`StellaOps.Scanner.Analyzers.Lang`). +- Worker dispatcher for plug-in discovery. +- EntryTrace usage hints (for script usage classification). + +## Testing & Artifacts +- Determinism golden fixtures under `Fixtures/lang/node/`. +- Benchmark CSV + flamegraph stored in `bench/Scanner.Analyzers/`. +- Plug-in manifest + cosign workflow added to Offline Kit instructions once analyzer is production-ready. + +## Telemetry & Policy Hints +- Metrics: `scanner_analyzer_node_scripts_total{script}` increments for each install lifecycle script discovered. +- Metadata keys: + - `policyHint.installLifecycle` lists lifecycle scripts (`preinstall;install;postinstall`) observed for a package. + - `script.` stores the canonical command string for each lifecycle script. +- Evidence: lifecycle script entries emit `LanguageEvidenceKind.Metadata` pointing to `package.json#scripts.` with SHA-256 hashes for determinism. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs new file mode 100644 index 00000000..5be5fd97 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs @@ -0,0 +1,9 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Linq; +global using System.Text.Json; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs new file mode 100644 index 00000000..631af900 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal static class NodeAnalyzerMetrics +{ + private static readonly Meter Meter = new("StellaOps.Scanner.Analyzers.Lang.Node", "1.0.0"); + private static readonly Counter LifecycleScriptsCounter = Meter.CreateCounter( + "scanner_analyzer_node_scripts_total", + unit: "scripts", + description: "Counts Node.js install lifecycle scripts discovered by the language analyzer."); + + public static void RecordLifecycleScript(string scriptName) + { + var normalized = Normalize(scriptName); + LifecycleScriptsCounter.Add( + 1, + new KeyValuePair("script", normalized)); + } + + private static string Normalize(string? 
scriptName) + { + if (string.IsNullOrWhiteSpace(scriptName)) + { + return "unknown"; + } + + return scriptName.Trim().ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs new file mode 100644 index 00000000..6212ebe9 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs @@ -0,0 +1,37 @@ +using System.Diagnostics.CodeAnalysis; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal sealed record NodeLifecycleScript +{ + public NodeLifecycleScript(string name, string command) + { + ArgumentException.ThrowIfNullOrWhiteSpace(name); + ArgumentException.ThrowIfNullOrWhiteSpace(command); + + Name = name.Trim(); + Command = command.Trim(); + Sha256 = ComputeSha256(Command); + } + + public string Name { get; } + + public string Command { get; } + + public string Sha256 { get; } + + [SuppressMessage("Security", "CA5350:Do Not Use Weak Cryptographic Algorithms", Justification = "SHA256 is required for deterministic evidence hashing.")] + private static string ComputeSha256(string value) + { + if (string.IsNullOrEmpty(value)) + { + return string.Empty; + } + + var bytes = Encoding.UTF8.GetBytes(value); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs new file mode 100644 index 00000000..002ae17a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs @@ -0,0 +1,446 @@ +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal sealed class NodeLockData +{ + private static readonly NodeLockData Empty = new(new Dictionary(StringComparer.Ordinal), new Dictionary(StringComparer.OrdinalIgnoreCase)); + + private readonly Dictionary _byPath; + private readonly Dictionary _byName; + + private NodeLockData(Dictionary byPath, Dictionary byName) + { + _byPath = byPath; + _byName = byName; + } + + public static ValueTask LoadAsync(string rootPath, CancellationToken cancellationToken) + { + var byPath = new Dictionary(StringComparer.Ordinal); + var byName = new Dictionary(StringComparer.OrdinalIgnoreCase); + + LoadPackageLockJson(rootPath, byPath, byName, cancellationToken); + LoadYarnLock(rootPath, byName); + LoadPnpmLock(rootPath, byName); + + if (byPath.Count == 0 && byName.Count == 0) + { + return ValueTask.FromResult(Empty); + } + + return ValueTask.FromResult(new NodeLockData(byPath, byName)); + } + + public bool TryGet(string relativePath, string packageName, out NodeLockEntry? entry) + { + var normalizedPath = NormalizeLockPath(relativePath); + if (_byPath.TryGetValue(normalizedPath, out var byPathEntry)) + { + entry = byPathEntry; + return true; + } + + if (!string.IsNullOrEmpty(packageName)) + { + var normalizedName = packageName.StartsWith('@') ? packageName : packageName; + if (_byName.TryGetValue(normalizedName, out var byNameEntry)) + { + entry = byNameEntry; + return true; + } + } + + entry = null; + return false; + } + + private static NodeLockEntry? CreateEntry(JsonElement element) + { + string? version = null; + string? resolved = null; + string? 
integrity = null; + + if (element.TryGetProperty("version", out var versionElement) && versionElement.ValueKind == JsonValueKind.String) + { + version = versionElement.GetString(); + } + + if (element.TryGetProperty("resolved", out var resolvedElement) && resolvedElement.ValueKind == JsonValueKind.String) + { + resolved = resolvedElement.GetString(); + } + + if (element.TryGetProperty("integrity", out var integrityElement) && integrityElement.ValueKind == JsonValueKind.String) + { + integrity = integrityElement.GetString(); + } + + if (version is null && resolved is null && integrity is null) + { + return null; + } + + return new NodeLockEntry(version, resolved, integrity); + } + + private static void TraverseLegacyDependencies( + string currentPath, + JsonElement dependenciesElement, + IDictionary byPath, + IDictionary byName) + { + foreach (var dependency in dependenciesElement.EnumerateObject()) + { + var depValue = dependency.Value; + var path = $"{currentPath}/{dependency.Name}"; + var entry = CreateEntry(depValue); + if (entry is not null) + { + var normalizedPath = NormalizeLockPath(path); + byPath[normalizedPath] = entry; + byName[dependency.Name] = entry; + } + + if (depValue.TryGetProperty("dependencies", out var childDependencies) && childDependencies.ValueKind == JsonValueKind.Object) + { + TraverseLegacyDependencies(path + "/node_modules", childDependencies, byPath, byName); + } + } + } + + private static void LoadPackageLockJson(string rootPath, IDictionary byPath, IDictionary byName, CancellationToken cancellationToken) + { + var packageLockPath = Path.Combine(rootPath, "package-lock.json"); + if (!File.Exists(packageLockPath)) + { + return; + } + + try + { + using var stream = File.OpenRead(packageLockPath); + using var document = JsonDocument.Parse(stream); + cancellationToken.ThrowIfCancellationRequested(); + + var root = document.RootElement; + + if (root.TryGetProperty("packages", out var packagesElement) && packagesElement.ValueKind == JsonValueKind.Object) + { + foreach (var packageProperty in packagesElement.EnumerateObject()) + { + var entry = CreateEntry(packageProperty.Value); + if (entry is null) + { + continue; + } + + var key = NormalizeLockPath(packageProperty.Name); + byPath[key] = entry; + + var name = ExtractNameFromPath(key); + if (!string.IsNullOrEmpty(name)) + { + byName[name] = entry; + } + + if (packageProperty.Value.TryGetProperty("name", out var explicitNameElement) && explicitNameElement.ValueKind == JsonValueKind.String) + { + var explicitName = explicitNameElement.GetString(); + if (!string.IsNullOrWhiteSpace(explicitName)) + { + byName[explicitName] = entry; + } + } + } + } + else if (root.TryGetProperty("dependencies", out var dependenciesElement) && dependenciesElement.ValueKind == JsonValueKind.Object) + { + TraverseLegacyDependencies("node_modules", dependenciesElement, byPath, byName); + } + } + catch (IOException) + { + // Ignore unreadable package-lock. + } + catch (JsonException) + { + // Ignore malformed package-lock. + } + } + + private static void LoadYarnLock(string rootPath, IDictionary byName) + { + var yarnLockPath = Path.Combine(rootPath, "yarn.lock"); + if (!File.Exists(yarnLockPath)) + { + return; + } + + try + { + var lines = File.ReadAllLines(yarnLockPath); + string? currentName = null; + string? version = null; + string? resolved = null; + string? 
integrity = null; + + void Flush() + { + if (string.IsNullOrWhiteSpace(currentName)) + { + version = null; + resolved = null; + integrity = null; + return; + } + + var simpleName = ExtractPackageNameFromYarnKey(currentName!); + if (string.IsNullOrEmpty(simpleName)) + { + version = null; + resolved = null; + integrity = null; + return; + } + + var entry = new NodeLockEntry(version, resolved, integrity); + byName[simpleName] = entry; + version = null; + resolved = null; + integrity = null; + } + + foreach (var line in lines) + { + var trimmed = line.Trim(); + if (string.IsNullOrEmpty(trimmed)) + { + Flush(); + currentName = null; + continue; + } + + if (!char.IsWhiteSpace(line, 0) && trimmed.EndsWith(':')) + { + Flush(); + currentName = trimmed.TrimEnd(':').Trim('"'); + continue; + } + + if (trimmed.StartsWith("version", StringComparison.OrdinalIgnoreCase)) + { + version = ExtractQuotedValue(trimmed); + } + else if (trimmed.StartsWith("resolved", StringComparison.OrdinalIgnoreCase)) + { + resolved = ExtractQuotedValue(trimmed); + } + else if (trimmed.StartsWith("integrity", StringComparison.OrdinalIgnoreCase)) + { + integrity = ExtractQuotedValue(trimmed); + } + } + + Flush(); + } + catch (IOException) + { + // Ignore unreadable yarn.lock + } + } + + private static void LoadPnpmLock(string rootPath, IDictionary byName) + { + var pnpmLockPath = Path.Combine(rootPath, "pnpm-lock.yaml"); + if (!File.Exists(pnpmLockPath)) + { + return; + } + + try + { + using var reader = new StreamReader(pnpmLockPath); + string? currentPackage = null; + string? version = null; + string? resolved = null; + string? integrity = null; + var inPackages = false; + + while (reader.ReadLine() is { } line) + { + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + if (!inPackages) + { + if (line.StartsWith("packages:", StringComparison.Ordinal)) + { + inPackages = true; + } + continue; + } + + if (line.StartsWith(" /", StringComparison.Ordinal)) + { + if (!string.IsNullOrEmpty(currentPackage) && !string.IsNullOrEmpty(integrity)) + { + var name = ExtractNameFromPnpmKey(currentPackage); + if (!string.IsNullOrEmpty(name)) + { + byName[name] = new NodeLockEntry(version, resolved, integrity); + } + } + + currentPackage = line.Trim().TrimEnd(':').TrimStart('/'); + version = null; + resolved = null; + integrity = null; + continue; + } + + if (string.IsNullOrEmpty(currentPackage)) + { + continue; + } + + var trimmed = line.Trim(); + if (trimmed.StartsWith("resolution:", StringComparison.Ordinal)) + { + var integrityIndex = trimmed.IndexOf("integrity", StringComparison.OrdinalIgnoreCase); + if (integrityIndex >= 0) + { + var integrityValue = trimmed[(integrityIndex + 9)..].Trim(' ', ':', '{', '}', '"'); + integrity = integrityValue; + } + + var tarballIndex = trimmed.IndexOf("tarball", StringComparison.OrdinalIgnoreCase); + if (tarballIndex >= 0) + { + var tarballValue = trimmed[(tarballIndex + 7)..].Trim(' ', ':', '{', '}', '"'); + resolved = tarballValue; + } + } + else if (trimmed.StartsWith("integrity:", StringComparison.Ordinal)) + { + integrity = trimmed[("integrity:".Length)..].Trim(); + } + else if (trimmed.StartsWith("tarball:", StringComparison.Ordinal)) + { + resolved = trimmed[("tarball:".Length)..].Trim(); + } + else if (trimmed.StartsWith("version:", StringComparison.Ordinal)) + { + version = trimmed[("version:".Length)..].Trim(); + } + } + + if (!string.IsNullOrEmpty(currentPackage) && !string.IsNullOrEmpty(integrity)) + { + var name = ExtractNameFromPnpmKey(currentPackage); + if 
(!string.IsNullOrEmpty(name)) + { + byName[name] = new NodeLockEntry(version, resolved, integrity); + } + } + } + catch (IOException) + { + // Ignore unreadable pnpm lock file. + } + } + + private static string? ExtractQuotedValue(string line) + { + var quoteStart = line.IndexOf('"'); + if (quoteStart < 0) + { + return null; + } + + var quoteEnd = line.LastIndexOf('"'); + if (quoteEnd <= quoteStart) + { + return null; + } + + return line.Substring(quoteStart + 1, quoteEnd - quoteStart - 1); + } + + private static string ExtractPackageNameFromYarnKey(string key) + { + var commaIndex = key.IndexOf(','); + var trimmed = commaIndex > 0 ? key[..commaIndex] : key; + trimmed = trimmed.Trim('"'); + + var atIndex = trimmed.IndexOf('@', 1); + if (atIndex > 0) + { + return trimmed[..atIndex]; + } + + return trimmed; + } + + private static string ExtractNameFromPnpmKey(string key) + { + var parts = key.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (parts.Length == 0) + { + return string.Empty; + } + + if (parts[0].StartsWith('@')) + { + return parts.Length >= 2 ? $"{parts[0]}/{parts[1]}" : parts[0]; + } + + return parts[0]; + } + + private static string NormalizeLockPath(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return string.Empty; + } + + var normalized = path.Replace('\\', '/'); + normalized = normalized.TrimStart('.', '/'); + return normalized; + } + + private static string ExtractNameFromPath(string normalizedPath) + { + if (string.IsNullOrEmpty(normalizedPath)) + { + return string.Empty; + } + + var segments = normalizedPath.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + return string.Empty; + } + + if (segments[0] == "node_modules") + { + if (segments.Length >= 3 && segments[1].StartsWith('@')) + { + return $"{segments[1]}/{segments[2]}"; + } + + return segments.Length >= 2 ? segments[1] : string.Empty; + } + + var last = segments[^1]; + if (last.StartsWith('@') && segments.Length >= 2) + { + return $"{segments[^2]}/{last}"; + } + + return last; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs new file mode 100644 index 00000000..c08541c0 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs @@ -0,0 +1,3 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal sealed record NodeLockEntry(string? Version, string? Resolved, string? Integrity); diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs new file mode 100644 index 00000000..7079e9b0 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs @@ -0,0 +1,179 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal sealed class NodePackage +{ + public NodePackage( + string name, + string version, + string relativePath, + string packageJsonLocator, + bool? isPrivate, + NodeLockEntry? lockEntry, + bool isWorkspaceMember, + string? workspaceRoot, + IReadOnlyList workspaceTargets, + string? 
workspaceLink, + IReadOnlyList lifecycleScripts, + bool usedByEntrypoint) + { + Name = name; + Version = version; + RelativePath = relativePath; + PackageJsonLocator = packageJsonLocator; + IsPrivate = isPrivate; + LockEntry = lockEntry; + IsWorkspaceMember = isWorkspaceMember; + WorkspaceRoot = workspaceRoot; + WorkspaceTargets = workspaceTargets; + WorkspaceLink = workspaceLink; + LifecycleScripts = lifecycleScripts ?? Array.Empty(); + IsUsedByEntrypoint = usedByEntrypoint; + } + + public string Name { get; } + + public string Version { get; } + + public string RelativePath { get; } + + public string PackageJsonLocator { get; } + + public bool? IsPrivate { get; } + + public NodeLockEntry? LockEntry { get; } + + public bool IsWorkspaceMember { get; } + + public string? WorkspaceRoot { get; } + + public IReadOnlyList WorkspaceTargets { get; } + + public string? WorkspaceLink { get; } + + public IReadOnlyList LifecycleScripts { get; } + + public bool HasInstallScripts => LifecycleScripts.Count > 0; + + public bool IsUsedByEntrypoint { get; } + + public string RelativePathNormalized => string.IsNullOrEmpty(RelativePath) ? string.Empty : RelativePath.Replace(Path.DirectorySeparatorChar, '/'); + + public string ComponentKey => $"purl::{Purl}"; + + public string Purl => BuildPurl(Name, Version); + + public IReadOnlyCollection CreateEvidence() + { + var evidence = new List + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "package.json", PackageJsonLocator, Value: null, Sha256: null) + }; + + foreach (var script in LifecycleScripts) + { + var locator = string.IsNullOrEmpty(PackageJsonLocator) + ? $"package.json#scripts.{script.Name}" + : $"{PackageJsonLocator}#scripts.{script.Name}"; + + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "package.json:scripts", + locator, + script.Command, + script.Sha256)); + } + + return evidence; + } + + public IReadOnlyCollection> CreateMetadata() + { + var entries = new List>(8) + { + new("path", string.IsNullOrEmpty(RelativePathNormalized) ? "." : RelativePathNormalized) + }; + + if (IsPrivate is bool isPrivate) + { + entries.Add(new KeyValuePair("private", isPrivate ? 
"true" : "false")); + } + + if (LockEntry is not null) + { + if (!string.IsNullOrWhiteSpace(LockEntry.Resolved)) + { + entries.Add(new KeyValuePair("resolved", LockEntry.Resolved)); + } + + if (!string.IsNullOrWhiteSpace(LockEntry.Integrity)) + { + entries.Add(new KeyValuePair("integrity", LockEntry.Integrity)); + } + } + + if (IsWorkspaceMember) + { + entries.Add(new KeyValuePair("workspaceMember", "true")); + if (!string.IsNullOrWhiteSpace(WorkspaceRoot)) + { + entries.Add(new KeyValuePair("workspaceRoot", WorkspaceRoot)); + } + } + + if (!string.IsNullOrWhiteSpace(WorkspaceLink)) + { + entries.Add(new KeyValuePair("workspaceLink", WorkspaceLink)); + } + + if (WorkspaceTargets.Count > 0) + { + entries.Add(new KeyValuePair("workspaceTargets", string.Join(';', WorkspaceTargets))); + } + + if (HasInstallScripts) + { + entries.Add(new KeyValuePair("installScripts", "true")); + var lifecycleNames = LifecycleScripts + .Select(static script => script.Name) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static name => name, StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (lifecycleNames.Length > 0) + { + entries.Add(new KeyValuePair("policyHint.installLifecycle", string.Join(';', lifecycleNames))); + } + + foreach (var script in LifecycleScripts.OrderBy(static script => script.Name, StringComparer.OrdinalIgnoreCase)) + { + entries.Add(new KeyValuePair($"script.{script.Name}", script.Command)); + } + } + + return entries + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToArray(); + } + + private static string BuildPurl(string name, string version) + { + var normalizedName = NormalizeName(name); + return $"pkg:npm/{normalizedName}@{version}"; + } + + private static string NormalizeName(string name) + { + if (string.IsNullOrWhiteSpace(name)) + { + return name; + } + + if (name[0] == '@') + { + var scopeAndName = name[1..]; + return $"%40{scopeAndName}"; + } + + return name; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs new file mode 100644 index 00000000..84160c1d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs @@ -0,0 +1,378 @@ +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal static class NodePackageCollector +{ + private static readonly string[] IgnoredDirectories = + { + ".bin", + ".cache", + ".store", + "__pycache__" + }; + + public static IReadOnlyList CollectPackages(LanguageAnalyzerContext context, NodeLockData lockData, CancellationToken cancellationToken) + { + var packages = new List(); + var visited = new HashSet(StringComparer.OrdinalIgnoreCase); + var pendingNodeModuleRoots = new List(); + + var rootPackageJson = Path.Combine(context.RootPath, "package.json"); + var workspaceIndex = NodeWorkspaceIndex.Create(context.RootPath); + + if (File.Exists(rootPackageJson)) + { + var rootPackage = TryCreatePackage(context, rootPackageJson, string.Empty, lockData, workspaceIndex, cancellationToken); + if (rootPackage is not null) + { + packages.Add(rootPackage); + visited.Add(rootPackage.RelativePathNormalized); + } + } + + foreach (var workspaceRelative in workspaceIndex.GetMembers()) + { + var workspaceAbsolute = Path.Combine(context.RootPath, workspaceRelative.Replace('/', Path.DirectorySeparatorChar)); + if (!Directory.Exists(workspaceAbsolute)) + { + continue; + } + + ProcessPackageDirectory(context, workspaceAbsolute, lockData, 
workspaceIndex, includeNestedNodeModules: false, packages, visited, cancellationToken); + + var workspaceNodeModules = Path.Combine(workspaceAbsolute, "node_modules"); + if (Directory.Exists(workspaceNodeModules)) + { + pendingNodeModuleRoots.Add(workspaceNodeModules); + } + } + + var nodeModules = Path.Combine(context.RootPath, "node_modules"); + TraverseDirectory(context, nodeModules, lockData, workspaceIndex, packages, visited, cancellationToken); + + foreach (var pendingRoot in pendingNodeModuleRoots.OrderBy(static path => path, StringComparer.Ordinal)) + { + TraverseDirectory(context, pendingRoot, lockData, workspaceIndex, packages, visited, cancellationToken); + } + + return packages; + } + + private static void TraverseDirectory( + LanguageAnalyzerContext context, + string directory, + NodeLockData lockData, + NodeWorkspaceIndex workspaceIndex, + List packages, + HashSet visited, + CancellationToken cancellationToken) + { + if (!Directory.Exists(directory)) + { + return; + } + + foreach (var child in Directory.EnumerateDirectories(directory)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var name = Path.GetFileName(child); + if (string.IsNullOrEmpty(name)) + { + continue; + } + + if (ShouldSkipDirectory(name)) + { + continue; + } + + if (string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase)) + { + TraversePnpmStore(context, child, lockData, workspaceIndex, packages, visited, cancellationToken); + continue; + } + + if (name.StartsWith('@')) + { + foreach (var scoped in Directory.EnumerateDirectories(child)) + { + ProcessPackageDirectory(context, scoped, lockData, workspaceIndex, includeNestedNodeModules: true, packages, visited, cancellationToken); + } + continue; + } + + ProcessPackageDirectory(context, child, lockData, workspaceIndex, includeNestedNodeModules: true, packages, visited, cancellationToken); + } + } + + private static void TraversePnpmStore( + LanguageAnalyzerContext context, + string pnpmDirectory, + NodeLockData lockData, + NodeWorkspaceIndex workspaceIndex, + List packages, + HashSet visited, + CancellationToken cancellationToken) + { + foreach (var storeEntry in Directory.EnumerateDirectories(pnpmDirectory)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var nestedNodeModules = Path.Combine(storeEntry, "node_modules"); + if (Directory.Exists(nestedNodeModules)) + { + TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, packages, visited, cancellationToken); + } + } + } + + private static void ProcessPackageDirectory( + LanguageAnalyzerContext context, + string directory, + NodeLockData lockData, + NodeWorkspaceIndex workspaceIndex, + bool includeNestedNodeModules, + List packages, + HashSet visited, + CancellationToken cancellationToken) + { + var packageJsonPath = Path.Combine(directory, "package.json"); + var relativeDirectory = NormalizeRelativeDirectory(context, directory); + + if (!visited.Add(relativeDirectory)) + { + // Already processed this path. 
+ if (includeNestedNodeModules) + { + TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, packages, visited, cancellationToken); + } + return; + } + + if (File.Exists(packageJsonPath)) + { + var package = TryCreatePackage(context, packageJsonPath, relativeDirectory, lockData, workspaceIndex, cancellationToken); + if (package is not null) + { + packages.Add(package); + } + } + + if (includeNestedNodeModules) + { + TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, packages, visited, cancellationToken); + } + } + + private static void TraverseNestedNodeModules( + LanguageAnalyzerContext context, + string directory, + NodeLockData lockData, + NodeWorkspaceIndex workspaceIndex, + List packages, + HashSet visited, + CancellationToken cancellationToken) + { + var nestedNodeModules = Path.Combine(directory, "node_modules"); + TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, packages, visited, cancellationToken); + } + + private static NodePackage? TryCreatePackage( + LanguageAnalyzerContext context, + string packageJsonPath, + string relativeDirectory, + NodeLockData lockData, + NodeWorkspaceIndex workspaceIndex, + CancellationToken cancellationToken) + { + try + { + using var stream = File.OpenRead(packageJsonPath); + using var document = JsonDocument.Parse(stream); + + var root = document.RootElement; + if (!root.TryGetProperty("name", out var nameElement)) + { + return null; + } + + var name = nameElement.GetString(); + if (string.IsNullOrWhiteSpace(name)) + { + return null; + } + + if (!root.TryGetProperty("version", out var versionElement)) + { + return null; + } + + var version = versionElement.GetString(); + if (string.IsNullOrWhiteSpace(version)) + { + return null; + } + + bool? isPrivate = null; + if (root.TryGetProperty("private", out var privateElement) && privateElement.ValueKind is JsonValueKind.True or JsonValueKind.False) + { + isPrivate = privateElement.GetBoolean(); + } + + var lockEntry = lockData.TryGet(relativeDirectory, name, out var entry) ? entry : null; + var locator = BuildLocator(relativeDirectory); + var usedByEntrypoint = context.UsageHints.IsPathUsed(packageJsonPath); + + var isWorkspaceMember = workspaceIndex.TryGetMember(relativeDirectory, out var workspaceRoot); + var workspaceTargets = ExtractWorkspaceTargets(relativeDirectory, root, workspaceIndex); + var workspaceLink = !isWorkspaceMember && workspaceIndex.TryGetWorkspacePathByName(name, out var workspacePathByName) + ? 
NormalizeRelativeDirectory(context, Path.Combine(context.RootPath, relativeDirectory)) + : null; + var lifecycleScripts = ExtractLifecycleScripts(root); + + return new NodePackage( + name: name.Trim(), + version: version.Trim(), + relativePath: relativeDirectory, + packageJsonLocator: locator, + isPrivate: isPrivate, + lockEntry: lockEntry, + isWorkspaceMember: isWorkspaceMember, + workspaceRoot: workspaceRoot, + workspaceTargets: workspaceTargets, + workspaceLink: workspaceLink, + lifecycleScripts: lifecycleScripts, + usedByEntrypoint: usedByEntrypoint); + } + catch (IOException) + { + return null; + } + catch (JsonException) + { + return null; + } + } + + private static string NormalizeRelativeDirectory(LanguageAnalyzerContext context, string directory) + { + var relative = context.GetRelativePath(directory); + if (string.IsNullOrEmpty(relative) || relative == ".") + { + return string.Empty; + } + + return relative.Replace(Path.DirectorySeparatorChar, '/'); + } + + private static string BuildLocator(string relativeDirectory) + { + if (string.IsNullOrEmpty(relativeDirectory)) + { + return "package.json"; + } + + return relativeDirectory + "/package.json"; + } + + private static bool ShouldSkipDirectory(string name) + { + if (name.Length == 0) + { + return true; + } + + if (name[0] == '.') + { + return !string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase); + } + + return IgnoredDirectories.Any(ignored => string.Equals(name, ignored, StringComparison.OrdinalIgnoreCase)); + } + + private static IReadOnlyList ExtractWorkspaceTargets(string relativeDirectory, JsonElement root, NodeWorkspaceIndex workspaceIndex) + { + var dependencies = workspaceIndex.ResolveWorkspaceTargets(relativeDirectory, TryGetProperty(root, "dependencies")); + var devDependencies = workspaceIndex.ResolveWorkspaceTargets(relativeDirectory, TryGetProperty(root, "devDependencies")); + var peerDependencies = workspaceIndex.ResolveWorkspaceTargets(relativeDirectory, TryGetProperty(root, "peerDependencies")); + + if (dependencies.Count == 0 && devDependencies.Count == 0 && peerDependencies.Count == 0) + { + return Array.Empty(); + } + + var combined = new HashSet(StringComparer.Ordinal); + foreach (var item in dependencies) + { + combined.Add(item); + } + foreach (var item in devDependencies) + { + combined.Add(item); + } + foreach (var item in peerDependencies) + { + combined.Add(item); + } + + return combined.OrderBy(static x => x, StringComparer.Ordinal).ToArray(); + } + + private static JsonElement? TryGetProperty(JsonElement element, string propertyName) + => element.TryGetProperty(propertyName, out var property) ? 
property : null; + + private static IReadOnlyList ExtractLifecycleScripts(JsonElement root) + { + if (!root.TryGetProperty("scripts", out var scriptsElement) || scriptsElement.ValueKind != JsonValueKind.Object) + { + return Array.Empty(); + } + + var lifecycleScripts = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var script in scriptsElement.EnumerateObject()) + { + if (!IsLifecycleScriptName(script.Name)) + { + continue; + } + + if (script.Value.ValueKind != JsonValueKind.String) + { + continue; + } + + var command = script.Value.GetString(); + if (string.IsNullOrWhiteSpace(command)) + { + continue; + } + + var canonicalName = script.Name.Trim().ToLowerInvariant(); + var lifecycleScript = new NodeLifecycleScript(canonicalName, command); + + if (!lifecycleScripts.ContainsKey(canonicalName)) + { + NodeAnalyzerMetrics.RecordLifecycleScript(canonicalName); + } + + lifecycleScripts[canonicalName] = lifecycleScript; + } + + if (lifecycleScripts.Count == 0) + { + return Array.Empty(); + } + + return lifecycleScripts.Values + .OrderBy(static script => script.Name, StringComparer.Ordinal) + .ToArray(); + } + + private static bool IsLifecycleScriptName(string name) + => name.Equals("preinstall", StringComparison.OrdinalIgnoreCase) + || name.Equals("install", StringComparison.OrdinalIgnoreCase) + || name.Equals("postinstall", StringComparison.OrdinalIgnoreCase); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs new file mode 100644 index 00000000..262df68e --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs @@ -0,0 +1,278 @@ +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +internal sealed class NodeWorkspaceIndex +{ + private readonly string _rootPath; + private readonly HashSet _workspacePaths; + private readonly Dictionary _workspaceByName; + + private NodeWorkspaceIndex(string rootPath, HashSet workspacePaths, Dictionary workspaceByName) + { + _rootPath = rootPath; + _workspacePaths = workspacePaths; + _workspaceByName = workspaceByName; + } + + public static NodeWorkspaceIndex Create(string rootPath) + { + var normalizedRoot = Path.GetFullPath(rootPath); + var workspacePaths = new HashSet(StringComparer.Ordinal); + var workspaceByName = new Dictionary(StringComparer.OrdinalIgnoreCase); + + var packageJsonPath = Path.Combine(normalizedRoot, "package.json"); + if (!File.Exists(packageJsonPath)) + { + return new NodeWorkspaceIndex(normalizedRoot, workspacePaths, workspaceByName); + } + + try + { + using var stream = File.OpenRead(packageJsonPath); + using var document = JsonDocument.Parse(stream); + var root = document.RootElement; + if (!root.TryGetProperty("workspaces", out var workspacesElement)) + { + return new NodeWorkspaceIndex(normalizedRoot, workspacePaths, workspaceByName); + } + + var patterns = ExtractPatterns(workspacesElement); + foreach (var pattern in patterns) + { + foreach (var workspacePath in ExpandPattern(normalizedRoot, pattern)) + { + if (string.IsNullOrWhiteSpace(workspacePath)) + { + continue; + } + + workspacePaths.Add(workspacePath); + var packagePath = Path.Combine(normalizedRoot, workspacePath.Replace('/', Path.DirectorySeparatorChar), "package.json"); + if (!File.Exists(packagePath)) + { + continue; + } + + try + { + using var workspaceStream = File.OpenRead(packagePath); + using var workspaceDoc = JsonDocument.Parse(workspaceStream); + if 
(workspaceDoc.RootElement.TryGetProperty("name", out var nameElement)) + { + var name = nameElement.GetString(); + if (!string.IsNullOrWhiteSpace(name)) + { + workspaceByName[name] = workspacePath!; + } + } + } + catch (IOException) + { + // Ignore unreadable workspace package definitions. + } + catch (JsonException) + { + // Ignore malformed workspace package definitions. + } + } + } + } + catch (IOException) + { + // If the root package.json is unreadable we treat as no workspaces. + } + catch (JsonException) + { + // Malformed root package.json: treat as no workspaces. + } + + return new NodeWorkspaceIndex(normalizedRoot, workspacePaths, workspaceByName); + } + + public IEnumerable GetMembers() + => _workspacePaths.OrderBy(static path => path, StringComparer.Ordinal); + + public bool TryGetMember(string relativePath, out string normalizedPath) + { + if (string.IsNullOrEmpty(relativePath)) + { + normalizedPath = string.Empty; + return false; + } + + var normalized = NormalizeRelative(relativePath); + if (_workspacePaths.Contains(normalized)) + { + normalizedPath = normalized; + return true; + } + + normalizedPath = string.Empty; + return false; + } + + public bool TryGetWorkspacePathByName(string packageName, out string? relativePath) + => _workspaceByName.TryGetValue(packageName, out relativePath); + + public IReadOnlyList ResolveWorkspaceTargets(string relativeDirectory, JsonElement? dependencies) + { + if (dependencies is null || dependencies.Value.ValueKind != JsonValueKind.Object) + { + return Array.Empty(); + } + + var result = new HashSet(StringComparer.Ordinal); + foreach (var property in dependencies.Value.EnumerateObject()) + { + var value = property.Value; + if (value.ValueKind != JsonValueKind.String) + { + continue; + } + + var targetSpec = value.GetString(); + if (string.IsNullOrWhiteSpace(targetSpec)) + { + continue; + } + + const string workspacePrefix = "workspace:"; + if (!targetSpec.StartsWith(workspacePrefix, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var descriptor = targetSpec[workspacePrefix.Length..].Trim(); + if (string.IsNullOrEmpty(descriptor) || descriptor is "*" or "^") + { + if (_workspaceByName.TryGetValue(property.Name, out var workspaceByName)) + { + result.Add(workspaceByName); + } + + continue; + } + + if (TryResolveWorkspaceTarget(relativeDirectory, descriptor, out var resolved)) + { + result.Add(resolved); + } + } + + if (result.Count == 0) + { + return Array.Empty(); + } + + return result.OrderBy(static x => x, StringComparer.Ordinal).ToArray(); + } + + public bool TryResolveWorkspaceTarget(string relativeDirectory, string descriptor, out string normalized) + { + normalized = string.Empty; + var baseDirectory = string.IsNullOrEmpty(relativeDirectory) ? 
string.Empty : relativeDirectory; + var baseAbsolute = Path.GetFullPath(Path.Combine(_rootPath, baseDirectory)); + var candidate = Path.GetFullPath(Path.Combine(baseAbsolute, descriptor.Replace('/', Path.DirectorySeparatorChar))); + if (!IsUnderRoot(_rootPath, candidate)) + { + return false; + } + + var relative = NormalizeRelative(Path.GetRelativePath(_rootPath, candidate)); + if (_workspacePaths.Contains(relative)) + { + normalized = relative; + return true; + } + + return false; + } + + private static IEnumerable ExtractPatterns(JsonElement workspacesElement) + { + if (workspacesElement.ValueKind == JsonValueKind.Array) + { + foreach (var item in workspacesElement.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.String) + { + var value = item.GetString(); + if (!string.IsNullOrWhiteSpace(value)) + { + yield return value.Trim(); + } + } + } + } + else if (workspacesElement.ValueKind == JsonValueKind.Object) + { + if (workspacesElement.TryGetProperty("packages", out var packagesElement) && packagesElement.ValueKind == JsonValueKind.Array) + { + foreach (var pattern in ExtractPatterns(packagesElement)) + { + yield return pattern; + } + } + } + } + + private static IEnumerable ExpandPattern(string rootPath, string pattern) + { + var cleanedPattern = pattern.Replace('\\', '/').Trim(); + if (cleanedPattern.EndsWith("/*", StringComparison.Ordinal)) + { + var baseSegment = cleanedPattern[..^2]; + var baseAbsolute = CombineAndNormalize(rootPath, baseSegment); + if (baseAbsolute is null || !Directory.Exists(baseAbsolute)) + { + yield break; + } + + foreach (var directory in Directory.EnumerateDirectories(baseAbsolute)) + { + var normalized = NormalizeRelative(Path.GetRelativePath(rootPath, directory)); + yield return normalized; + } + } + else + { + var absolute = CombineAndNormalize(rootPath, cleanedPattern); + if (absolute is null || !Directory.Exists(absolute)) + { + yield break; + } + + var normalized = NormalizeRelative(Path.GetRelativePath(rootPath, absolute)); + yield return normalized; + } + } + + private static string? CombineAndNormalize(string rootPath, string relative) + { + var candidate = Path.GetFullPath(Path.Combine(rootPath, relative.Replace('/', Path.DirectorySeparatorChar))); + return IsUnderRoot(rootPath, candidate) ? 
candidate : null; + } + + private static string NormalizeRelative(string relativePath) + { + if (string.IsNullOrEmpty(relativePath) || relativePath == ".") + { + return string.Empty; + } + + var normalized = relativePath.Replace('\\', '/'); + normalized = normalized.TrimStart('.', '/'); + return normalized; + } + + private static bool IsUnderRoot(string rootPath, string absolutePath) + { + if (OperatingSystem.IsWindows()) + { + return absolutePath.StartsWith(rootPath, StringComparison.OrdinalIgnoreCase); + } + + return absolutePath.StartsWith(rootPath, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs new file mode 100644 index 00000000..079a11f3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs @@ -0,0 +1,37 @@ +using StellaOps.Scanner.Analyzers.Lang.Node.Internal; + +namespace StellaOps.Scanner.Analyzers.Lang.Node; + +public sealed class NodeLanguageAnalyzer : ILanguageAnalyzer +{ + public string Id => "node"; + + public string DisplayName => "Node.js Analyzer"; + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(writer); + + var lockData = await NodeLockData.LoadAsync(context.RootPath, cancellationToken).ConfigureAwait(false); + var packages = NodePackageCollector.CollectPackages(context, lockData, cancellationToken); + + foreach (var package in packages.OrderBy(static p => p.ComponentKey, StringComparer.Ordinal)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var metadata = package.CreateMetadata(); + var evidence = package.CreateEvidence(); + + writer.AddFromPurl( + analyzerId: Id, + purl: package.Purl, + name: package.Name, + version: package.Version, + type: "npm", + metadata: metadata, + evidence: evidence, + usedByEntrypoint: package.IsUsedByEntrypoint); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Placeholder.cs b/src/StellaOps.Scanner.Analyzers.Lang.Node/Placeholder.cs new file mode 100644 index 00000000..756f07ea --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/Placeholder.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Node; + +internal static class Placeholder +{ + // Analyzer implementation will be added during Sprint LA1. +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md new file mode 100644 index 00000000..ad21db98 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md @@ -0,0 +1,10 @@ +# Node Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-302A | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-307 | Build deterministic module graph walker covering npm, Yarn, and PNPM; capture package.json provenance and integrity metadata. 
| Walker indexes >100 k modules in <1.5 s (hot cache); golden fixtures verify deterministic ordering and path normalization. | +| 2 | SCANNER-ANALYZERS-LANG-10-302B | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302A | Resolve workspaces/symlinks and attribute components to originating package with usage hints; guard against directory traversal. | Workspace attribution accurate on multi-workspace fixture; symlink resolver proves canonical path; security tests ensure no traversal. | +| 3 | SCANNER-ANALYZERS-LANG-10-302C | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302B | Surface script metadata (postinstall/preinstall) and policy hints; emit telemetry counters and evidence records. | Analyzer output includes script metadata + evidence; metrics `scanner_analyzer_node_scripts_total` recorded; policy hints documented. | +| 4 | SCANNER-ANALYZERS-LANG-10-307N | TODO | SCANNER-ANALYZERS-LANG-10-302C | Integrate shared helpers for license/licence evidence, canonical JSON serialization, and usage flag propagation. | Reuse shared helpers without duplication; unit tests confirm stable metadata merge; no analyzer-specific serializer drift. | +| 5 | SCANNER-ANALYZERS-LANG-10-308N | TODO | SCANNER-ANALYZERS-LANG-10-307N | Author determinism harness + fixtures for Node analyzer; add benchmark suite. | Fixtures committed under `Fixtures/lang/node/`; determinism CI job compares JSON snapshots; benchmark CSV published. | +| 6 | SCANNER-ANALYZERS-LANG-10-309N | TODO | SCANNER-ANALYZERS-LANG-10-308N | Package Node analyzer as restart-time plug-in (manifest, DI registration, Offline Kit notes). | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer after restart; Offline Kit docs updated. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md new file mode 100644 index 00000000..f04e9b2a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md @@ -0,0 +1,32 @@ +# StellaOps.Scanner.Analyzers.Lang.Python — Agent Charter + +## Role +Implement the Python analyzer plug-in that inspects installed distributions, RECORD hashes, entry points, and editable installs to feed Scanner SBOM views. + +## Scope +- Parse `*.dist-info` and `*.data` directories, validating `METADATA`, `RECORD`, and `entry_points.txt`. +- Detect editable installs and pip caches, reconciling metadata with actual files. +- Integrate EntryTrace usage hints for runtime entry points and flag missing RECORD hashes. +- Package plug-in manifest and ensure deterministic fixtures + benchmarks. + +## Out of Scope +- Language analyzers for other ecosystems. +- Policy evaluation, vulnerability correlation, or packaging into UI flows. +- Building Python interpreters or executing scripts (analysis is static only). + +## Expectations +- Deterministic RECORD hashing with streaming IO; fallback heuristics clearly flagged. +- Performance target: ≥75 MB/s RECORD verification, end-to-end fixture <2.0 s. +- Offline-first: no PyPI calls; relies on local metadata only. +- Rich telemetry (components counted, hash mismatches) following Scanner metrics schema. +- Keep `TASKS.md` and `SPRINTS_LANG_IMPLEMENTATION_PLAN.md` in sync. + +## Dependencies +- Shared language analyzer infrastructure. +- EntryTrace usage hints (for script activation). +- Worker dispatcher for plug-in loading. + +## Testing & Artifacts +- Golden fixtures for venv, virtualenv, pipx, and editable installs. +- Benchmark results comparing hash-check throughput against competitor tools. 
+- Offline Kit guidance for bundling standard library metadata if required. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs new file mode 100644 index 00000000..be78d3d3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs @@ -0,0 +1,7 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/Placeholder.cs b/src/StellaOps.Scanner.Analyzers.Lang.Python/Placeholder.cs new file mode 100644 index 00000000..fab3ad2a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Python/Placeholder.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Python; + +internal static class Placeholder +{ + // Analyzer implementation will be added during Sprint LA2. +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md new file mode 100644 index 00000000..1e9034e9 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md @@ -0,0 +1,10 @@ +# Python Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-303A | TODO | SCANNER-ANALYZERS-LANG-10-307 | STREAM-based parser for `*.dist-info` (`METADATA`, `WHEEL`, `entry_points.txt`) with normalization + evidence capture. | Parser handles CPython 3.8–3.12 metadata variations; fixtures confirm canonical ordering and UTF-8 handling. | +| 2 | SCANNER-ANALYZERS-LANG-10-303B | TODO | SCANNER-ANALYZERS-LANG-10-303A | RECORD hash verifier with chunked hashing, Zip64 support, and mismatch diagnostics. | Verifier processes 5 GB RECORD fixture without allocations >2 MB; mismatches produce deterministic evidence records. | +| 3 | SCANNER-ANALYZERS-LANG-10-303C | TODO | SCANNER-ANALYZERS-LANG-10-303B | Editable install + pip cache detection; integrate EntryTrace hints for runtime usage flags. | Editable installs resolved to source path; usage flags propagated; regression tests cover mixed editable + wheel installs. | +| 4 | SCANNER-ANALYZERS-LANG-10-307P | TODO | SCANNER-ANALYZERS-LANG-10-303C | Shared helper integration (license metadata, quiet provenance, component merging). | Shared helpers reused; analyzer-specific metadata minimal; deterministic merge tests pass. | +| 5 | SCANNER-ANALYZERS-LANG-10-308P | TODO | SCANNER-ANALYZERS-LANG-10-307P | Golden fixtures + determinism harness for Python analyzer; add benchmark and hash throughput reporting. | Fixtures under `Fixtures/lang/python/`; determinism CI guard; benchmark CSV added with threshold alerts. | +| 6 | SCANNER-ANALYZERS-LANG-10-309P | TODO | SCANNER-ANALYZERS-LANG-10-308P | Package plug-in (manifest, DI registration) and document Offline Kit bundling of Python stdlib metadata if needed. 
| Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md new file mode 100644 index 00000000..6d79cc0d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md @@ -0,0 +1,29 @@ +# StellaOps.Scanner.Analyzers.Lang.Rust — Agent Charter + +## Role +Develop the Rust analyzer plug-in that resolves crates from metadata (`.fingerprint`, Cargo.lock, embedded markers) and provides deterministic fallbacks for stripped binaries. + +## Scope +- Locate Cargo metadata in container layers (registry cache, target fingerprints, embedded Git info). +- Parse symbol tables / section data to heuristically identify crates when metadata missing, tagging provenance appropriately. +- Integrate binary hash fallback with quiet provenance classification. +- Package plug-in manifest, determinism fixtures, and performance/coverage benchmarks. + +## Out of Scope +- Native linker analysis beyond crate attribution. +- Fetching Cargo registry metadata from the network. +- Policy decisions or UI surfacing. + +## Expectations +- Accurate crate attribution (≥85 % on curated fixtures) with explicit heuristic labeling. +- Analyzer runtime <1 s over 500 binary corpus; minimal allocations through pooling. +- Offline-first; rely on local Cargo data. +- Telemetry capturing heuristic vs verified evidence ratios. + +## Dependencies +- Shared language analyzer infrastructure; Worker dispatcher; optionally EntryTrace hints for runtime coverage. + +## Testing & Artifacts +- Fixtures for cargo workspaces, release builds, stripped binaries, vendor caches. +- Determinism + benchmark artifacts comparing to competitor scanners. +- ADR documenting heuristic boundaries + risk mitigations. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs new file mode 100644 index 00000000..be78d3d3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs @@ -0,0 +1,7 @@ +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Threading; +global using System.Threading.Tasks; + +global using StellaOps.Scanner.Analyzers.Lang; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/Placeholder.cs b/src/StellaOps.Scanner.Analyzers.Lang.Rust/Placeholder.cs new file mode 100644 index 00000000..62634ab8 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Rust/Placeholder.cs @@ -0,0 +1,6 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Rust; + +internal static class Placeholder +{ + // Analyzer implementation will be added during Sprint LA5. 
+} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj new file mode 100644 index 00000000..3e6ba793 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md new file mode 100644 index 00000000..e8ede2cb --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md @@ -0,0 +1,10 @@ +# Rust Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-306A | TODO | SCANNER-ANALYZERS-LANG-10-307 | Parse Cargo metadata (`Cargo.lock`, `.fingerprint`, `.metadata`) and map crates to components with evidence. | Fixtures confirm crate attribution ≥85 % coverage; metadata normalized; evidence includes path + hash. | +| 2 | SCANNER-ANALYZERS-LANG-10-306B | TODO | SCANNER-ANALYZERS-LANG-10-306A | Implement heuristic classifier using ELF section names, symbol mangling, and `.comment` data for stripped binaries. | Heuristic output flagged as `heuristic`; regression tests ensure no false “observed” classifications. | +| 3 | SCANNER-ANALYZERS-LANG-10-306C | TODO | SCANNER-ANALYZERS-LANG-10-306B | Integrate binary hash fallback (`bin:{sha256}`) and tie into shared quiet provenance helpers. | Fallback path deterministic; shared helpers reused; tests verify consistent hashing. | +| 4 | SCANNER-ANALYZERS-LANG-10-307R | TODO | SCANNER-ANALYZERS-LANG-10-306C | Finalize shared helper usage (license, usage flags) and concurrency-safe caches. | Analyzer uses shared utilities; concurrency tests pass; no race conditions. | +| 5 | SCANNER-ANALYZERS-LANG-10-308R | TODO | SCANNER-ANALYZERS-LANG-10-307R | Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. | Fixtures `Fixtures/lang/rust/` committed; determinism guard; benchmark shows ≥15 % better coverage vs competitor. | +| 6 | SCANNER-ANALYZERS-LANG-10-309R | TODO | SCANNER-ANALYZERS-LANG-10-308R | Package plug-in manifest + Offline Kit documentation; ensure Worker integration. | Manifest copied; Worker loads analyzer; Offline Kit doc updated. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs new file mode 100644 index 00000000..d42c9f2b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs @@ -0,0 +1,88 @@ +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Core; + +public sealed class LanguageAnalyzerResultTests +{ + [Fact] + public async Task MergesDuplicateComponentsDeterministicallyAsync() + { + var analyzer = new DuplicateComponentAnalyzer(); + var engine = new LanguageAnalyzerEngine(new[] { analyzer }); + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var result = await engine.AnalyzeAsync(context, CancellationToken.None); + + var component = Assert.Single(result.Components); + Assert.Equal("purl::pkg:example/acme@2.0.0", component.ComponentKey); + Assert.Equal("pkg:example/acme@2.0.0", component.Purl); + Assert.True(component.UsedByEntrypoint); + Assert.Equal(2, component.Evidence.Count); + Assert.Equal(3, component.Metadata.Count); + + // Metadata retains stable ordering (sorted by key) + var keys = component.Metadata.Keys.ToArray(); + Assert.Equal(new[] { "artifactId", "groupId", "path" }, keys); + + // Evidence de-duplicates via comparison key + Assert.Equal(2, component.Evidence.Count); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + private sealed class DuplicateComponentAnalyzer : ILanguageAnalyzer + { + public string Id => "duplicate"; + + public string DisplayName => "Duplicate Analyzer"; + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + await Task.Yield(); + + var metadataA = new[] + { + new KeyValuePair("groupId", "example"), + new KeyValuePair("artifactId", "acme") + }; + + var metadataB = new[] + { + new KeyValuePair("artifactId", "acme"), + new KeyValuePair("path", ".") + }; + + var evidence = new[] + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "manifest", "META-INF/MANIFEST.MF", null, null), + new LanguageComponentEvidence(LanguageEvidenceKind.Metadata, "pom", "pom.xml", "groupId=example", null) + }; + + writer.AddFromPurl( + analyzerId: Id, + purl: "pkg:example/acme@2.0.0", + name: "acme", + version: "2.0.0", + type: "example", + metadata: metadataA, + evidence: evidence, + usedByEntrypoint: true); + + // duplicate insert with different metadata ordering + writer.AddFromPurl( + analyzerId: Id, + purl: "pkg:example/acme@2.0.0", + name: "acme", + version: "2.0.0", + type: "example", + metadata: metadataB, + evidence: evidence, + usedByEntrypoint: false); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs new file mode 100644 index 00000000..b399e800 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs @@ -0,0 +1,70 @@ +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Core; + +public sealed class LanguageComponentMapperTests +{ + [Fact] + public void ToComponentRecordsProjectsDeterministicComponents() + { + // Arrange + var analyzerId = "node"; + var records = new[] + { + LanguageComponentRecord.FromPurl( + analyzerId: analyzerId, + purl: 
"pkg:npm/example@1.0.0", + name: "example", + version: "1.0.0", + type: "npm", + metadata: new Dictionary() + { + ["path"] = "packages/app", + ["license"] = "MIT" + }, + evidence: new[] + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "package.json", "packages/app/package.json", null, "abc123") + }, + usedByEntrypoint: true), + LanguageComponentRecord.FromExplicitKey( + analyzerId: analyzerId, + componentKey: "bin::sha256:deadbeef", + purl: null, + name: "app-binary", + version: null, + type: "binary", + metadata: new Dictionary() + { + ["description"] = "Utility binary" + }, + evidence: new[] + { + new LanguageComponentEvidence(LanguageEvidenceKind.Derived, "entrypoint", "/usr/local/bin/app", "ENTRYPOINT", null) + }) + }; + + // Act + var layerDigest = LanguageComponentMapper.ComputeLayerDigest(analyzerId); + var results = LanguageComponentMapper.ToComponentRecords(analyzerId, records, layerDigest); + + // Assert + Assert.Equal(2, results.Length); + Assert.All(results, component => Assert.Equal(layerDigest, component.LayerDigest)); + + var first = results[0]; + Assert.Equal("bin::sha256:deadbeef", first.Identity.Key); + Assert.Equal("Utility binary", first.Metadata!.Properties!["stellaops.lang.meta.description"]); + Assert.Equal("derived", first.Evidence.Single().Kind); + + var second = results[1]; + Assert.Equal("pkg:npm/example@1.0.0", second.Identity.Key); // prefix removed + Assert.True(second.Usage.UsedByEntrypoint); + Assert.Contains("MIT", second.Metadata!.Licenses!); + Assert.Equal("packages/app", second.Metadata.Properties!["stellaops.lang.meta.path"]); + Assert.Equal("abc123", second.Metadata.Properties!["stellaops.lang.evidence.0.sha256"]); + Assert.Equal("file", second.Evidence.Single().Kind); + Assert.Equal("packages/app/package.json", second.Evidence.Single().Value); + Assert.Equal("package.json", second.Evidence.Single().Source); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs new file mode 100644 index 00000000..2b3b5445 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs @@ -0,0 +1,102 @@ +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Determinism; + +public sealed class LanguageAnalyzerHarnessTests +{ + [Fact] + public async Task HarnessProducesDeterministicOutputAsync() + { + var fixturePath = TestPaths.ResolveFixture("determinism", "basic", "input"); + var goldenPath = TestPaths.ResolveFixture("determinism", "basic", "expected.json"); + var cancellationToken = TestContext.Current.CancellationToken; + + var analyzers = new ILanguageAnalyzer[] + { + new FakeLanguageAnalyzer( + "fake-java", + LanguageComponentRecord.FromPurl( + analyzerId: "fake-java", + purl: "pkg:maven/org.example/example-lib@1.2.3", + name: "example-lib", + version: "1.2.3", + type: "maven", + metadata: new Dictionary + { + ["groupId"] = "org.example", + ["artifactId"] = "example-lib", + }, + evidence: new [] + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "pom.properties", "META-INF/maven/org.example/example-lib/pom.properties", null, "abc123"), + }), + LanguageComponentRecord.FromExplicitKey( + analyzerId: "fake-java", + componentKey: "bin::sha256:deadbeef", + purl: null, + name: "example-cli", + version: null, + 
type: "bin", + metadata: new Dictionary + { + ["sha256"] = "deadbeef", + }, + evidence: new [] + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "binary", "usr/local/bin/example", null, "deadbeef"), + })), + new FakeLanguageAnalyzer( + "fake-node", + LanguageComponentRecord.FromPurl( + analyzerId: "fake-node", + purl: "pkg:npm/example-package@4.5.6", + name: "example-package", + version: "4.5.6", + type: "npm", + metadata: new Dictionary + { + ["workspace"] = "packages/example", + }, + evidence: new [] + { + new LanguageComponentEvidence(LanguageEvidenceKind.File, "package.json", "packages/example/package.json", null, null), + }, + usedByEntrypoint: true)), + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync(fixturePath, goldenPath, analyzers, cancellationToken); + + var first = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken); + var second = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken); + Assert.Equal(first, second); + } + + private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer + { + private readonly IReadOnlyList _components; + + public FakeLanguageAnalyzer(string id, params LanguageComponentRecord[] components) + { + Id = id; + DisplayName = id; + _components = components ?? Array.Empty(); + } + + public string Id { get; } + + public string DisplayName { get; } + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + await Task.Delay(5, cancellationToken).ConfigureAwait(false); // ensure asynchrony is handled + + // Intentionally add in reverse order to prove determinism. + foreach (var component in _components.Reverse()) + { + writer.Add(component); + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json new file mode 100644 index 00000000..aa94929a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json @@ -0,0 +1,60 @@ +[ + { + "analyzerId": "fake-java", + "componentKey": "bin::sha256:deadbeef", + "name": "example-cli", + "type": "bin", + "usedByEntrypoint": false, + "metadata": { + "sha256": "deadbeef" + }, + "evidence": [ + { + "kind": "file", + "source": "binary", + "locator": "usr/local/bin/example", + "sha256": "deadbeef" + } + ] + }, + { + "analyzerId": "fake-java", + "componentKey": "purl::pkg:maven/org.example/example-lib@1.2.3", + "purl": "pkg:maven/org.example/example-lib@1.2.3", + "name": "example-lib", + "version": "1.2.3", + "type": "maven", + "usedByEntrypoint": false, + "metadata": { + "artifactId": "example-lib", + "groupId": "org.example" + }, + "evidence": [ + { + "kind": "file", + "source": "pom.properties", + "locator": "META-INF/maven/org.example/example-lib/pom.properties", + "sha256": "abc123" + } + ] + }, + { + "analyzerId": "fake-node", + "componentKey": "purl::pkg:npm/example-package@4.5.6", + "purl": "pkg:npm/example-package@4.5.6", + "name": "example-package", + "version": "4.5.6", + "type": "npm", + "usedByEntrypoint": true, + "metadata": { + "workspace": "packages/example" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/example/package.json" + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt 
b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt new file mode 100644 index 00000000..d64a3d96 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt @@ -0,0 +1 @@ +sample diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/java/basic/expected.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/java/basic/expected.json new file mode 100644 index 00000000..df18806d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/java/basic/expected.json @@ -0,0 +1,35 @@ +[ + { + "analyzerId": "java", + "componentKey": "purl::pkg:maven/com/example/demo@1.0.0", + "purl": "pkg:maven/com/example/demo@1.0.0", + "name": "demo", + "version": "1.0.0", + "type": "maven", + "usedByEntrypoint": true, + "metadata": { + "artifactId": "demo", + "displayName": "Demo Library", + "groupId": "com.example", + "jarPath": "libs/demo.jar", + "manifestTitle": "Demo", + "manifestVendor": "Example Corp", + "manifestVersion": "1.0.0", + "packaging": "jar" + }, + "evidence": [ + { + "kind": "file", + "source": "MANIFEST.MF", + "locator": "libs/demo.jar!META-INF/MANIFEST.MF", + "value": "title=Demo;version=1.0.0;vendor=Example Corp" + }, + { + "kind": "file", + "source": "pom.properties", + "locator": "libs/demo.jar!META-INF/maven/com.example/demo/pom.properties", + "sha256": "c20f36aa1b9d89d28cf9ed131519ffd6287a4dac0c7cb926130496f3f8157bf1" + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/expected.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/expected.json new file mode 100644 index 00000000..15ff8223 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/expected.json @@ -0,0 +1,134 @@ +[ + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/left-pad@1.3.0", + "purl": "pkg:npm/left-pad@1.3.0", + "name": "left-pad", + "version": "1.3.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-LEFTPAD", + "path": "packages/app/node_modules/left-pad", + "resolved": "https://registry.example/left-pad-1.3.0.tgz" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/left-pad/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/lib@2.0.1", + "purl": "pkg:npm/lib@2.0.1", + "name": "lib", + "version": "2.0.1", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-LIB", + "path": "packages/lib", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "workspaceLink": "packages/app/node_modules/lib", + "workspaceMember": "true", + "workspaceRoot": "packages/lib" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/lib/package.json" + }, + { + "kind": "file", + "source": "package.json", + "locator": "packages/lib/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/root-workspace@1.0.0", + "purl": "pkg:npm/root-workspace@1.0.0", + "name": "root-workspace", + "version": "1.0.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "path": ".", + "private": "true" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/shared@3.1.4", + "purl": "pkg:npm/shared@3.1.4", + "name": "shared", + "version": 
"3.1.4", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-SHARED", + "path": "packages/shared", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "workspaceLink": "packages/app/node_modules/shared", + "workspaceMember": "true", + "workspaceRoot": "packages/shared", + "workspaceTargets": "packages/lib" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/shared/package.json" + }, + { + "kind": "file", + "source": "package.json", + "locator": "packages/shared/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/workspace-app@1.0.0", + "purl": "pkg:npm/workspace-app@1.0.0", + "name": "workspace-app", + "version": "1.0.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "installScripts": "true", + "path": "packages/app", + "policyHint.installLifecycle": "postinstall", + "script.postinstall": "node scripts/setup.js", + "workspaceMember": "true", + "workspaceRoot": "packages/app", + "workspaceTargets": "packages/lib;packages/shared" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/package.json" + }, + { + "kind": "metadata", + "source": "package.json:scripts", + "locator": "packages/app/package.json#scripts.postinstall", + "value": "node scripts/setup.js", + "sha256": "f9ae4e4c9313857d1acc31947cee9984232cbefe93c8a56c718804744992728a" + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package-lock.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package-lock.json new file mode 100644 index 00000000..435ce6bb --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package-lock.json @@ -0,0 +1,49 @@ +{ + "name": "root-workspace", + "version": "1.0.0", + "lockfileVersion": 3, + "packages": { + "": { + "name": "root-workspace", + "version": "1.0.0", + "private": true, + "workspaces": [ + "packages/*" + ] + }, + "packages/app": { + "name": "workspace-app", + "version": "1.0.0" + }, + "packages/lib": { + "name": "lib", + "version": "2.0.1", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "integrity": "sha512-LIB" + }, + "packages/shared": { + "name": "shared", + "version": "3.1.4", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "integrity": "sha512-SHARED" + }, + "packages/app/node_modules/lib": { + "name": "lib", + "version": "2.0.1", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "integrity": "sha512-LIB" + }, + "packages/app/node_modules/shared": { + "name": "shared", + "version": "3.1.4", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "integrity": "sha512-SHARED" + }, + "packages/app/node_modules/left-pad": { + "name": "left-pad", + "version": "1.3.0", + "resolved": "https://registry.example/left-pad-1.3.0.tgz", + "integrity": "sha512-LEFTPAD" + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package.json new file mode 100644 index 00000000..9fef5fcd --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/package.json @@ -0,0 +1,10 @@ +{ + "name": "root-workspace", + "version": "1.0.0", + "private": true, + "workspaces": [ + "packages/app", + "packages/lib", + "packages/shared" + ] +} diff --git 
a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json new file mode 100644 index 00000000..94c0027d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json @@ -0,0 +1,5 @@ +{ + "name": "left-pad", + "version": "1.3.0", + "main": "index.js" +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json new file mode 100644 index 00000000..dbc701c2 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json @@ -0,0 +1,5 @@ +{ + "name": "lib", + "version": "2.0.1", + "main": "index.js" +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json new file mode 100644 index 00000000..d60be9d1 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json @@ -0,0 +1,5 @@ +{ + "name": "shared", + "version": "3.1.4", + "main": "index.js" +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/package.json new file mode 100644 index 00000000..42c39700 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/package.json @@ -0,0 +1,11 @@ +{ + "name": "workspace-app", + "version": "1.0.0", + "dependencies": { + "lib": "workspace:../lib", + "shared": "workspace:../shared" + }, + "scripts": { + "postinstall": "node scripts/setup.js" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js new file mode 100644 index 00000000..55078d87 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js @@ -0,0 +1 @@ +console.log('setup'); diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json new file mode 100644 index 00000000..29b7eb9b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json @@ -0,0 +1,7 @@ +{ + "name": "lib", + "version": "2.0.1", + "dependencies": { + "left-pad": "1.3.0" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json new file mode 100644 index 00000000..c72da176 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json @@ -0,0 +1,7 @@ +{ + "name": "shared", + "version": "3.1.4", + "dependencies": { + "lib": 
"workspace:../lib" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs new file mode 100644 index 00000000..72921567 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs @@ -0,0 +1,46 @@ +using StellaOps.Scanner.Analyzers.Lang; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Harness; + +public static class LanguageAnalyzerTestHarness +{ + public static async Task RunToJsonAsync(string fixturePath, IEnumerable analyzers, CancellationToken cancellationToken = default, LanguageUsageHints? usageHints = null) + { + if (string.IsNullOrWhiteSpace(fixturePath)) + { + throw new ArgumentException("Fixture path is required", nameof(fixturePath)); + } + + var engine = new LanguageAnalyzerEngine(analyzers ?? Array.Empty()); + var context = new LanguageAnalyzerContext(fixturePath, TimeProvider.System, usageHints); + var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); + return result.ToJson(indent: true); + } + + public static async Task AssertDeterministicAsync(string fixturePath, string goldenPath, IEnumerable analyzers, CancellationToken cancellationToken = default, LanguageUsageHints? usageHints = null) + { + var actual = await RunToJsonAsync(fixturePath, analyzers, cancellationToken, usageHints).ConfigureAwait(false); + var expected = await File.ReadAllTextAsync(goldenPath, cancellationToken).ConfigureAwait(false); + + // Normalize newlines for portability. + actual = NormalizeLineEndings(actual).TrimEnd(); + expected = NormalizeLineEndings(expected).TrimEnd(); + + if (!string.Equals(expected, actual, StringComparison.Ordinal)) + { + var actualPath = goldenPath + ".actual"; + var directory = Path.GetDirectoryName(actualPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + await File.WriteAllTextAsync(actualPath, actual, cancellationToken).ConfigureAwait(false); + } + + Assert.Equal(expected, actual); + } + + private static string NormalizeLineEndings(string value) + => value.Replace("\r\n", "\n", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Java/JavaLanguageAnalyzerTests.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Java/JavaLanguageAnalyzerTests.cs new file mode 100644 index 00000000..579889d2 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Java/JavaLanguageAnalyzerTests.cs @@ -0,0 +1,33 @@ +using StellaOps.Scanner.Analyzers.Lang.Java; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Java; + +public sealed class JavaLanguageAnalyzerTests +{ + [Fact] + public async Task ExtractsMavenArtifactFromJarAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var jarPath = JavaFixtureBuilder.CreateSampleJar(root); + var usageHints = new LanguageUsageHints(new[] { jarPath }); + var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() }; + var goldenPath = TestPaths.ResolveFixture("java", "basic", "expected.json"); + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath: root, + goldenPath: goldenPath, + analyzers: analyzers, + cancellationToken: cancellationToken, + usageHints: usageHints); + } + finally + { + TestPaths.SafeDelete(root); + } + } +} diff 
--git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Node/NodeLanguageAnalyzerTests.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Node/NodeLanguageAnalyzerTests.cs new file mode 100644 index 00000000..0d811202 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/Node/NodeLanguageAnalyzerTests.cs @@ -0,0 +1,27 @@ +using StellaOps.Scanner.Analyzers.Lang.Node; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.Node; + +public sealed class NodeLanguageAnalyzerTests +{ + [Fact] + public async Task WorkspaceFixtureProducesDeterministicOutputAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "node", "workspaces"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new NodeLanguageAnalyzer() + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj b/src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj new file mode 100644 index 00000000..da32bafe --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj @@ -0,0 +1,46 @@ + + + net10.0 + preview + enable + enable + true + false + Exe + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs new file mode 100644 index 00000000..25c3aeda --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs @@ -0,0 +1,46 @@ +using System.IO.Compression; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +public static class JavaFixtureBuilder +{ + public static string CreateSampleJar(string rootDirectory, string relativePath = "libs/demo.jar") + { + ArgumentNullException.ThrowIfNull(rootDirectory); + ArgumentException.ThrowIfNullOrEmpty(relativePath); + + var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar)); + Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); + + using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None); + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false); + + var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero); + + var pomEntry = archive.CreateEntry("META-INF/maven/com.example/demo/pom.properties", CompressionLevel.NoCompression); + pomEntry.LastWriteTime = timestamp; + using (var writer = new StreamWriter(pomEntry.Open(), Encoding.UTF8, leaveOpen: false)) + { + writer.WriteLine("# Test pom.properties"); + writer.WriteLine("groupId=com.example"); + writer.WriteLine("artifactId=demo"); + writer.WriteLine("version=1.0.0"); + writer.WriteLine("name=Demo Library"); + writer.WriteLine("packaging=jar"); + } + + var manifestEntry = archive.CreateEntry("META-INF/MANIFEST.MF", CompressionLevel.NoCompression); + manifestEntry.LastWriteTime = timestamp; + using (var writer = new StreamWriter(manifestEntry.Open(), Encoding.UTF8, leaveOpen: false)) + { + writer.WriteLine("Manifest-Version: 1.0"); + 
writer.WriteLine("Implementation-Title: Demo"); + writer.WriteLine("Implementation-Version: 1.0.0"); + writer.WriteLine("Implementation-Vendor: Example Corp"); + writer.WriteLine(); + } + + return jarPath; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs b/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs new file mode 100644 index 00000000..e9234fcd --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs @@ -0,0 +1,53 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +public static class TestPaths +{ + public static string ResolveFixture(params string[] segments) + { + var baseDirectory = AppContext.BaseDirectory; + var parts = new List { baseDirectory }; + parts.AddRange(new[] { "Fixtures" }); + parts.AddRange(segments); + return Path.GetFullPath(Path.Combine(parts.ToArray())); + } + + public static string CreateTemporaryDirectory() + { + var root = Path.Combine(AppContext.BaseDirectory, "tmp", Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(root); + return root; + } + + public static void SafeDelete(string directory) + { + if (string.IsNullOrWhiteSpace(directory) || !Directory.Exists(directory)) + { + return; + } + + try + { + Directory.Delete(directory, recursive: true); + } + catch + { + // Swallow cleanup exceptions to avoid masking test failures. + } + } + + public static string ResolveProjectRoot() + { + var directory = AppContext.BaseDirectory; + while (!string.IsNullOrEmpty(directory)) + { + if (File.Exists(Path.Combine(directory, "StellaOps.Scanner.Analyzers.Lang.Tests.csproj"))) + { + return directory; + } + + directory = Path.GetDirectoryName(directory) ?? string.Empty; + } + + throw new InvalidOperationException("Unable to locate project root."); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json b/src/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json new file mode 100644 index 00000000..86c7ea05 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/AGENTS.md b/src/StellaOps.Scanner.Analyzers.Lang/AGENTS.md new file mode 100644 index 00000000..6f0546a8 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/AGENTS.md @@ -0,0 +1,33 @@ +# StellaOps.Scanner.Analyzers.Lang — Agent Charter + +## Role +Deliver deterministic language ecosystem analyzers that run inside Scanner Workers, emit component evidence for SBOM assembly, and package as restart-time plug-ins. + +## Scope +- Shared analyzer abstractions for installed application ecosystems (Java, Node.js, Python, Go, .NET, Rust). +- Evidence helpers that map on-disk artefacts to canonical component identities (purl/bin sha) with provenance and usage flags. +- File-system traversal, metadata parsing, and normalization for language-specific package formats. +- Plug-in bootstrap, manifest authoring, and DI registration so Workers load analyzers at start-up. + +## Out of Scope +- OS package analyzers, native link graph, or EntryTrace plug-ins (handled by other guilds). +- SBOM composition, diffing, or signing (owned by Emit/Diff/Signer groups). +- Policy adjudication or vulnerability joins. + +## Expectations +- Deterministic output: identical inputs → identical component ordering and hashes. +- Memory discipline: streaming walkers, avoid loading entire trees; reuse buffers. 
+- Cancellation-aware and timeboxed per layer. +- Enrich telemetry (counters + timings) via Scanner.Core primitives. +- Update `TASKS.md` as work progresses (TODO → DOING → DONE/BLOCKED). + +## Dependencies +- Scanner.Core contracts + observability helpers. +- Scanner.Worker analyzer dispatcher. +- Upcoming Scanner.Emit models for SBOM assembly. +- Plugin host infrastructure under `StellaOps.Plugin`. + +## Testing & Artifacts +- Determinism harness with golden fixtures under `Fixtures/`. +- Microbench benchmarks recorded per language where feasible. +- Plugin manifests stored under `plugins/scanner/analyzers/lang/` with cosign workflow documented. diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs new file mode 100644 index 00000000..2301946b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs @@ -0,0 +1,24 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +/// +/// Contract implemented by language ecosystem analyzers. Analyzers must be deterministic, +/// cancellation-aware, and refrain from mutating shared state. +/// +public interface ILanguageAnalyzer +{ + /// + /// Stable identifier (e.g., java, node). + /// + string Id { get; } + + /// + /// Human-readable display name for diagnostics. + /// + string DisplayName { get; } + + /// + /// Executes the analyzer against the resolved filesystem. + /// + ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken); +} + diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs new file mode 100644 index 00000000..53c5361d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs @@ -0,0 +1,16 @@ +namespace StellaOps.Scanner.Analyzers.Lang.Internal; + +internal static class LanguageAnalyzerJson +{ + public static JsonSerializerOptions CreateDefault(bool indent = false) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = indent, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + options.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase)); + return options; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs new file mode 100644 index 00000000..e12c051d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs @@ -0,0 +1,67 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +public sealed class LanguageAnalyzerContext +{ + public LanguageAnalyzerContext(string rootPath, TimeProvider timeProvider, LanguageUsageHints? usageHints = null, IServiceProvider? services = null) + { + if (string.IsNullOrWhiteSpace(rootPath)) + { + throw new ArgumentException("Root path is required", nameof(rootPath)); + } + + RootPath = Path.GetFullPath(rootPath); + if (!Directory.Exists(RootPath)) + { + throw new DirectoryNotFoundException($"Root path '{RootPath}' does not exist."); + } + + TimeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + UsageHints = usageHints ?? LanguageUsageHints.Empty; + Services = services; + } + + public string RootPath { get; } + + public TimeProvider TimeProvider { get; } + + public LanguageUsageHints UsageHints { get; } + + public IServiceProvider? 
Services { get; } + + public bool TryGetService([NotNullWhen(true)] out T? service) where T : class + { + if (Services is null) + { + service = null; + return false; + } + + service = Services.GetService(typeof(T)) as T; + return service is not null; + } + + public string ResolvePath(ReadOnlySpan relative) + { + if (relative.IsEmpty) + { + return RootPath; + } + + var relativeString = new string(relative); + var combined = Path.Combine(RootPath, relativeString); + return Path.GetFullPath(combined); + } + + public string GetRelativePath(string absolutePath) + { + if (string.IsNullOrWhiteSpace(absolutePath)) + { + return string.Empty; + } + + var relative = Path.GetRelativePath(RootPath, absolutePath); + return OperatingSystem.IsWindows() + ? relative.Replace('\\', '/') + : relative; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs new file mode 100644 index 00000000..5f3028bf --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs @@ -0,0 +1,59 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +public sealed class LanguageAnalyzerEngine +{ + private readonly IReadOnlyList _analyzers; + + public LanguageAnalyzerEngine(IEnumerable analyzers) + { + if (analyzers is null) + { + throw new ArgumentNullException(nameof(analyzers)); + } + + _analyzers = analyzers + .Where(static analyzer => analyzer is not null) + .Distinct(new AnalyzerIdComparer()) + .OrderBy(static analyzer => analyzer.Id, StringComparer.Ordinal) + .ToArray(); + } + + public IReadOnlyList Analyzers => _analyzers; + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(context); + + var builder = new LanguageAnalyzerResultBuilder(); + var writer = new LanguageComponentWriter(builder); + + foreach (var analyzer in _analyzers) + { + cancellationToken.ThrowIfCancellationRequested(); + await analyzer.AnalyzeAsync(context, writer, cancellationToken).ConfigureAwait(false); + } + + return builder.Build(); + } + + private sealed class AnalyzerIdComparer : IEqualityComparer + { + public bool Equals(ILanguageAnalyzer? x, ILanguageAnalyzer? y) + { + if (ReferenceEquals(x, y)) + { + return true; + } + + if (x is null || y is null) + { + return false; + } + + return string.Equals(x.Id, y.Id, StringComparison.Ordinal); + } + + public int GetHashCode(ILanguageAnalyzer obj) + => obj?.Id is null ? 0 : StringComparer.Ordinal.GetHashCode(obj.Id); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs new file mode 100644 index 00000000..24be405a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs @@ -0,0 +1,111 @@ +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Analyzers.Lang; + +public sealed class LanguageAnalyzerResult +{ + private readonly ImmutableArray _components; + + internal LanguageAnalyzerResult(IEnumerable components) + { + _components = components + .OrderBy(static record => record.ComponentKey, StringComparer.Ordinal) + .ThenBy(static record => record.AnalyzerId, StringComparer.Ordinal) + .ToImmutableArray(); + } + + public IReadOnlyList Components => _components; + + public ImmutableArray ToComponentRecords(string analyzerId, string? 
layerDigest = null) + => LanguageComponentMapper.ToComponentRecords(analyzerId, _components, layerDigest); + + public LayerComponentFragment ToLayerFragment(string analyzerId, string? layerDigest = null) + => LanguageComponentMapper.ToLayerFragment(analyzerId, _components, layerDigest); + + public IReadOnlyList ToSnapshots() + => _components.Select(static component => component.ToSnapshot()).ToImmutableArray(); + + public string ToJson(bool indent = true) + { + var snapshots = ToSnapshots(); + var options = Internal.LanguageAnalyzerJson.CreateDefault(indent); + return JsonSerializer.Serialize(snapshots, options); + } +} + +internal sealed class LanguageAnalyzerResultBuilder +{ + private readonly Dictionary _records = new(StringComparer.Ordinal); + private readonly object _sync = new(); + + public void Add(LanguageComponentRecord record) + { + ArgumentNullException.ThrowIfNull(record); + + lock (_sync) + { + if (_records.TryGetValue(record.ComponentKey, out var existing)) + { + existing.Merge(record); + return; + } + + _records[record.ComponentKey] = record; + } + } + + public void AddRange(IEnumerable records) + { + foreach (var record in records ?? Array.Empty()) + { + Add(record); + } + } + + public LanguageAnalyzerResult Build() + { + lock (_sync) + { + return new LanguageAnalyzerResult(_records.Values.ToArray()); + } + } +} + +public sealed class LanguageComponentWriter +{ + private readonly LanguageAnalyzerResultBuilder _builder; + + internal LanguageComponentWriter(LanguageAnalyzerResultBuilder builder) + { + _builder = builder ?? throw new ArgumentNullException(nameof(builder)); + } + + public void Add(LanguageComponentRecord record) + => _builder.Add(record); + + public void AddRange(IEnumerable records) + => _builder.AddRange(records); + + public void AddFromPurl( + string analyzerId, + string purl, + string name, + string? version, + string type, + IEnumerable>? metadata = null, + IEnumerable? evidence = null, + bool usedByEntrypoint = false) + => Add(LanguageComponentRecord.FromPurl(analyzerId, purl, name, version, type, metadata, evidence, usedByEntrypoint)); + + public void AddFromExplicitKey( + string analyzerId, + string componentKey, + string? purl, + string name, + string? version, + string type, + IEnumerable>? metadata = null, + IEnumerable? evidence = null, + bool usedByEntrypoint = false) + => Add(LanguageComponentRecord.FromExplicitKey(analyzerId, componentKey, purl, name, version, type, metadata, evidence, usedByEntrypoint)); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs new file mode 100644 index 00000000..8104627b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs @@ -0,0 +1,18 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +public enum LanguageEvidenceKind +{ + File, + Metadata, + Derived, +} + +public sealed record LanguageComponentEvidence( + LanguageEvidenceKind Kind, + string Source, + string Locator, + string? Value, + string? 
Sha256) +{ + public string ComparisonKey => string.Join('|', Kind, Source, Locator, Value, Sha256); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs new file mode 100644 index 00000000..976ae2f2 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs @@ -0,0 +1,223 @@ +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Analyzers.Lang; + +/// +/// Helpers converting language analyzer component records into canonical scanner component models. +/// +public static class LanguageComponentMapper +{ + private const string LayerHashPrefix = "stellaops:lang:"; + private const string MetadataPrefix = "stellaops.lang"; + + /// + /// Computes a deterministic synthetic layer digest for the supplied analyzer identifier. + /// + public static string ComputeLayerDigest(string analyzerId) + { + ArgumentException.ThrowIfNullOrWhiteSpace(analyzerId); + + var payload = $"{LayerHashPrefix}{analyzerId.Trim().ToLowerInvariant()}"; + var bytes = Encoding.UTF8.GetBytes(payload); + var hash = SHA256.HashData(bytes); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } + + /// + /// Projects language component records into a deterministic set of component records. + /// + public static ImmutableArray ToComponentRecords( + string analyzerId, + IEnumerable components, + string? layerDigest = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(analyzerId); + ArgumentNullException.ThrowIfNull(components); + + var effectiveLayer = string.IsNullOrWhiteSpace(layerDigest) + ? ComputeLayerDigest(analyzerId) + : layerDigest!; + + var builder = ImmutableArray.CreateBuilder(); + foreach (var record in components.OrderBy(static component => component.ComponentKey, StringComparer.Ordinal)) + { + builder.Add(CreateComponentRecord(analyzerId, effectiveLayer, record)); + } + + return builder.ToImmutable(); + } + + /// + /// Creates a layer component fragment using the supplied component records. + /// + public static LayerComponentFragment ToLayerFragment( + string analyzerId, + IEnumerable components, + string? layerDigest = null) + { + var componentRecords = ToComponentRecords(analyzerId, components, layerDigest); + if (componentRecords.IsEmpty) + { + return LayerComponentFragment.Create(ComputeLayerDigest(analyzerId), componentRecords); + } + + return LayerComponentFragment.Create(componentRecords[0].LayerDigest, componentRecords); + } + + private static ComponentRecord CreateComponentRecord( + string analyzerId, + string layerDigest, + LanguageComponentRecord record) + { + ArgumentNullException.ThrowIfNull(record); + + var identity = ComponentIdentity.Create( + key: ResolveIdentityKey(record), + name: record.Name, + version: record.Version, + purl: record.Purl, + componentType: record.Type); + + var evidence = MapEvidence(record); + var metadata = BuildMetadata(analyzerId, record); + var usage = record.UsedByEntrypoint + ? 
ComponentUsage.Create(usedByEntrypoint: true) + : ComponentUsage.Unused; + + return new ComponentRecord + { + Identity = identity, + LayerDigest = layerDigest, + Evidence = evidence, + Dependencies = ImmutableArray.Empty, + Metadata = metadata, + Usage = usage, + }; + } + + private static ImmutableArray MapEvidence(LanguageComponentRecord record) + { + var builder = ImmutableArray.CreateBuilder(); + foreach (var item in record.Evidence) + { + if (item is null) + { + continue; + } + + var kind = item.Kind switch + { + LanguageEvidenceKind.File => "file", + LanguageEvidenceKind.Metadata => "metadata", + LanguageEvidenceKind.Derived => "derived", + _ => "unknown", + }; + + var value = string.IsNullOrWhiteSpace(item.Locator) ? item.Source : item.Locator; + if (string.IsNullOrWhiteSpace(value)) + { + value = kind; + } + + builder.Add(new ComponentEvidence + { + Kind = kind, + Value = value, + Source = string.IsNullOrWhiteSpace(item.Source) ? null : item.Source, + }); + } + + return builder.Count == 0 + ? ImmutableArray.Empty + : builder.ToImmutable(); + } + + private static ComponentMetadata? BuildMetadata(string analyzerId, LanguageComponentRecord record) + { + var properties = new SortedDictionary(StringComparer.Ordinal) + { + [$"{MetadataPrefix}.analyzerId"] = analyzerId + }; + + var licenseList = new List(); + + foreach (var pair in record.Metadata) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + if (!string.IsNullOrWhiteSpace(pair.Value)) + { + var value = pair.Value.Trim(); + properties[$"{MetadataPrefix}.meta.{pair.Key}"] = value; + + if (IsLicenseKey(pair.Key) && value.Length > 0) + { + foreach (var candidate in value.Split(new[] { ',', ';' }, StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries)) + { + if (candidate.Length > 0) + { + licenseList.Add(candidate); + } + } + } + } + } + + var evidenceIndex = 0; + foreach (var evidence in record.Evidence) + { + if (evidence is null) + { + continue; + } + + var prefix = $"{MetadataPrefix}.evidence.{evidenceIndex}"; + if (!string.IsNullOrWhiteSpace(evidence.Value)) + { + properties[$"{prefix}.value"] = evidence.Value.Trim(); + } + + if (!string.IsNullOrWhiteSpace(evidence.Sha256)) + { + properties[$"{prefix}.sha256"] = evidence.Sha256.Trim(); + } + + evidenceIndex++; + } + + IReadOnlyList? licenses = null; + if (licenseList.Count > 0) + { + licenses = licenseList + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static license => license, StringComparer.Ordinal) + .ToArray(); + } + + return new ComponentMetadata + { + Licenses = licenses, + Properties = properties.Count == 0 ? 
null : properties, + }; + } + + private static string ResolveIdentityKey(LanguageComponentRecord record) + { + var key = record.ComponentKey; + if (key.StartsWith("purl::", StringComparison.Ordinal)) + { + return key[6..]; + } + + return key; + } + + private static bool IsLicenseKey(string key) + => key.Contains("license", StringComparison.OrdinalIgnoreCase); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs new file mode 100644 index 00000000..5062b708 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs @@ -0,0 +1,219 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +public sealed class LanguageComponentRecord +{ + private readonly SortedDictionary _metadata; + private readonly SortedDictionary _evidence; + + private LanguageComponentRecord( + string analyzerId, + string componentKey, + string? purl, + string name, + string? version, + string type, + IEnumerable> metadata, + IEnumerable evidence, + bool usedByEntrypoint) + { + AnalyzerId = analyzerId ?? throw new ArgumentNullException(nameof(analyzerId)); + ComponentKey = componentKey ?? throw new ArgumentNullException(nameof(componentKey)); + Purl = string.IsNullOrWhiteSpace(purl) ? null : purl.Trim(); + Name = name ?? throw new ArgumentNullException(nameof(name)); + Version = string.IsNullOrWhiteSpace(version) ? null : version.Trim(); + Type = string.IsNullOrWhiteSpace(type) ? throw new ArgumentException("Type is required", nameof(type)) : type.Trim(); + UsedByEntrypoint = usedByEntrypoint; + + _metadata = new SortedDictionary(StringComparer.Ordinal); + foreach (var entry in metadata ?? Array.Empty>()) + { + if (string.IsNullOrWhiteSpace(entry.Key)) + { + continue; + } + + _metadata[entry.Key.Trim()] = entry.Value; + } + + _evidence = new SortedDictionary(StringComparer.Ordinal); + foreach (var evidenceItem in evidence ?? Array.Empty()) + { + if (evidenceItem is null) + { + continue; + } + + _evidence[evidenceItem.ComparisonKey] = evidenceItem; + } + } + + public string AnalyzerId { get; } + + public string ComponentKey { get; } + + public string? Purl { get; } + + public string Name { get; } + + public string? Version { get; } + + public string Type { get; } + + public bool UsedByEntrypoint { get; private set; } + + public IReadOnlyDictionary Metadata => _metadata; + + public IReadOnlyCollection Evidence => _evidence.Values; + + public static LanguageComponentRecord FromPurl( + string analyzerId, + string purl, + string name, + string? version, + string type, + IEnumerable>? metadata = null, + IEnumerable? evidence = null, + bool usedByEntrypoint = false) + { + if (string.IsNullOrWhiteSpace(purl)) + { + throw new ArgumentException("purl is required", nameof(purl)); + } + + var key = $"purl::{purl.Trim()}"; + return new LanguageComponentRecord( + analyzerId, + key, + purl, + name, + version, + type, + metadata ?? Array.Empty>(), + evidence ?? Array.Empty(), + usedByEntrypoint); + } + + public static LanguageComponentRecord FromExplicitKey( + string analyzerId, + string componentKey, + string? purl, + string name, + string? version, + string type, + IEnumerable>? metadata = null, + IEnumerable? 
evidence = null, + bool usedByEntrypoint = false) + { + if (string.IsNullOrWhiteSpace(componentKey)) + { + throw new ArgumentException("Component key is required", nameof(componentKey)); + } + + return new LanguageComponentRecord( + analyzerId, + componentKey.Trim(), + purl, + name, + version, + type, + metadata ?? Array.Empty>(), + evidence ?? Array.Empty(), + usedByEntrypoint); + } + + internal void Merge(LanguageComponentRecord other) + { + ArgumentNullException.ThrowIfNull(other); + + if (!ComponentKey.Equals(other.ComponentKey, StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Cannot merge component '{ComponentKey}' with '{other.ComponentKey}'."); + } + + UsedByEntrypoint |= other.UsedByEntrypoint; + + foreach (var entry in other._metadata) + { + if (!_metadata.TryGetValue(entry.Key, out var existing) || string.IsNullOrEmpty(existing)) + { + _metadata[entry.Key] = entry.Value; + } + } + + foreach (var evidenceItem in other._evidence) + { + _evidence[evidenceItem.Key] = evidenceItem.Value; + } + } + + public LanguageComponentSnapshot ToSnapshot() + { + return new LanguageComponentSnapshot + { + AnalyzerId = AnalyzerId, + ComponentKey = ComponentKey, + Purl = Purl, + Name = Name, + Version = Version, + Type = Type, + UsedByEntrypoint = UsedByEntrypoint, + Metadata = _metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal), + Evidence = _evidence.Values.Select(static item => new LanguageComponentEvidenceSnapshot + { + Kind = item.Kind, + Source = item.Source, + Locator = item.Locator, + Value = item.Value, + Sha256 = item.Sha256, + }).ToArray(), + }; + } +} + +public sealed class LanguageComponentSnapshot +{ + [JsonPropertyName("analyzerId")] + public string AnalyzerId { get; set; } = string.Empty; + + [JsonPropertyName("componentKey")] + public string ComponentKey { get; set; } = string.Empty; + + [JsonPropertyName("purl")] + public string? Purl { get; set; } + + [JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + [JsonPropertyName("version")] + public string? Version { get; set; } + + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + [JsonPropertyName("usedByEntrypoint")] + public bool UsedByEntrypoint { get; set; } + + [JsonPropertyName("metadata")] + public IDictionary Metadata { get; set; } = new Dictionary(StringComparer.Ordinal); + + [JsonPropertyName("evidence")] + public IReadOnlyList Evidence { get; set; } = Array.Empty(); +} + +public sealed class LanguageComponentEvidenceSnapshot +{ + [JsonPropertyName("kind")] + public LanguageEvidenceKind Kind { get; set; } + + [JsonPropertyName("source")] + public string Source { get; set; } = string.Empty; + + [JsonPropertyName("locator")] + public string Locator { get; set; } = string.Empty; + + [JsonPropertyName("value")] + public string? Value { get; set; } + + [JsonPropertyName("sha256")] + public string? Sha256 { get; set; } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs new file mode 100644 index 00000000..347900f5 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs @@ -0,0 +1,49 @@ +namespace StellaOps.Scanner.Analyzers.Lang; + +public sealed class LanguageUsageHints +{ + private static readonly StringComparer Comparer = OperatingSystem.IsWindows() + ? 
StringComparer.OrdinalIgnoreCase + : StringComparer.Ordinal; + + private readonly ImmutableHashSet _usedPaths; + + public static LanguageUsageHints Empty { get; } = new(Array.Empty()); + + public LanguageUsageHints(IEnumerable usedPaths) + { + if (usedPaths is null) + { + throw new ArgumentNullException(nameof(usedPaths)); + } + + _usedPaths = usedPaths + .Select(Normalize) + .Where(static path => path.Length > 0) + .ToImmutableHashSet(Comparer); + } + + public bool IsPathUsed(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return false; + } + + var normalized = Normalize(path); + return _usedPaths.Contains(normalized); + } + + private static string Normalize(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return string.Empty; + } + + var full = Path.GetFullPath(path); + return OperatingSystem.IsWindows() + ? full.Replace('\\', '/').TrimEnd('/') + : full; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs b/src/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs new file mode 100644 index 00000000..d5f42b51 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs @@ -0,0 +1,11 @@ +global using System; +global using System.Collections.Concurrent; +global using System.Collections.Generic; +global using System.Collections.Immutable; +global using System.Diagnostics.CodeAnalysis; +global using System.IO; +global using System.Linq; +global using System.Text.Json; +global using System.Text.Json.Serialization; +global using System.Threading; +global using System.Threading.Tasks; diff --git a/src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md b/src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md new file mode 100644 index 00000000..de76b8a6 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md @@ -0,0 +1,114 @@ +# StellaOps Scanner — Language Analyzer Implementation Plan (2025Q4) + +> **Goal.** Deliver best-in-class language analyzers that outperform competitors on fidelity, determinism, and offline readiness while integrating tightly with Scanner Worker orchestration and SBOM composition. + +All sprints below assume prerequisites from SP10-G2 (core scaffolding + Java analyzer) are complete. Each sprint is sized for a focused guild (≈1–1.5 weeks) and produces definitive gates for downstream teams (Emit, Policy, Scheduler). + +--- + +## Sprint LA1 — Node Analyzer & Workspace Intelligence (Tasks 10-302, 10-307, 10-308, 10-309 subset) *(DOING — 2025-10-19)* +- **Scope:** Resolve hoisted `node_modules`, PNPM structures, Yarn Berry Plug'n'Play, symlinked workspaces, and detect security-sensitive scripts. +- **Deliverables:** + - `StellaOps.Scanner.Analyzers.Lang.Node` plug-in with manifest + DI registration. + - Deterministic walker supporting >100 k modules with streaming JSON parsing. + - Workspace graph persisted as analyzer metadata (`package.json` provenance + symlink target proofs). +- **Acceptance Metrics:** + - 10 k module fixture scans <1.8 s on 4 vCPU (p95). + - Memory ceiling <220 MB (tracked via deterministic benchmark harness). + - All symlink targets canonicalized; path traversal guarded. +- **Gate Artifacts:** + - `Fixtures/lang/node/**` golden outputs. + - Analyzer benchmark CSV + flamegraph (commit under `bench/Scanner.Analyzers`). + - Worker integration sample enabling Node analyzer via manifest. 
+- **Progress (2025-10-19):** Module walker with package-lock/yarn/pnpm resolution, workspace attribution, integrity metadata, and deterministic fixture harness committed; Node tasks 10-302A/B marked DONE. Shared component mapper + canonical result harness landed, closing tasks 10-307/308. Script metadata & telemetry (10-302C) emit policy hints, hashed evidence, and feed `scanner_analyzer_node_scripts_total` into Worker OpenTelemetry pipeline. + +## Sprint LA2 — Python Analyzer & Entry Point Attribution (Tasks 10-303, 10-307, 10-308, 10-309 subset) +- **Scope:** Parse `*.dist-info`, `RECORD` hashes, entry points, and pip-installed editable packages; integrate usage hints from EntryTrace. +- **Deliverables:** + - `StellaOps.Scanner.Analyzers.Lang.Python` plug-in. + - RECORD hash validation with optional Zip64 support for `.whl` caches. + - Entry-point mapping into `UsageFlags` for Emit stage. +- **Acceptance Metrics:** + - Hash verification throughput ≥75 MB/s sustained with streaming reader. + - False-positive rate for editable installs <1 % on curated fixtures. + - Determinism check across CPython 3.8–3.12 generated metadata. +- **Gate Artifacts:** + - Golden fixtures for `site-packages`, virtualenv, and layered pip caches. + - Usage hint propagation tests (EntryTrace → analyzer → SBOM). + - Metrics counters (`scanner_analyzer_python_components_total`) documented. + +## Sprint LA3 — Go Analyzer & Build Info Synthesis (Tasks 10-304, 10-307, 10-308, 10-309 subset) +- **Scope:** Extract Go build metadata from `.note.go.buildid`, embedded module info, and fallback to `bin:{sha256}`; surface VCS provenance. +- **Deliverables:** + - `StellaOps.Scanner.Analyzers.Lang.Go` plug-in. + - DWARF-lite parser to enrich component origin (commit hash + dirty flag) when available. + - Shared hash cache to dedupe repeated binaries across layers. +- **Acceptance Metrics:** + - Analyzer latency ≤400 µs per binary (hot cache) / ≤2 ms (cold). + - Provenance coverage ≥95 % on representative Go fixture suite. + - Zero allocations in happy path beyond pooled buffers (validated via BenchmarkDotNet). +- **Gate Artifacts:** + - Benchmarks vs competitor open-source tool (Trivy or Syft) demonstrating faster metadata extraction. + - Documentation snippet explaining VCS metadata fields for Policy team. + +## Sprint LA4 — .NET Analyzer & RID Variants (Tasks 10-305, 10-307, 10-308, 10-309 subset) +- **Scope:** Parse `*.deps.json`, `runtimeconfig.json`, assembly metadata, and RID-specific assets; correlate with native dependencies. +- **Deliverables:** + - `StellaOps.Scanner.Analyzers.Lang.DotNet` plug-in. + - Strong-name + Authenticode optional verification when offline cert bundle provided. + - RID-aware component grouping with fallback to `bin:{sha256}` for self-contained apps. +- **Acceptance Metrics:** + - Multi-target app fixture processed <1.2 s; memory <250 MB. + - RID variant collapse reduces component explosion by ≥40 % vs naive listing. + - All security metadata (signing Publisher, timestamp) surfaced deterministically. +- **Gate Artifacts:** + - Signed .NET sample apps (framework-dependent & self-contained) under `samples/scanner/lang/dotnet/`. + - Tests verifying dual runtimeconfig merge logic. + - Guidance for Policy on license propagation from NuGet metadata. 
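To make the LA4 deps.json/RID discussion concrete, the sketch below groups runtime packages by RID, which is the collapse the "≥40 % vs naive listing" metric measures. It is a minimal illustration only, assuming the documented `*.deps.json` layout (`"targets"` → `"<tfm>[/<rid>]"` → `"Name/Version"`); `DepsJsonSketch` and the `"any"` bucket for portable targets are made-up names, not the analyzer's API.

```csharp
// Sketch only: assumed deps.json shape, not the StellaOps .NET analyzer implementation.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

static class DepsJsonSketch
{
    public static IReadOnlyDictionary<string, List<string>> GroupPackagesByRid(string depsJsonPath)
    {
        var byRid = new Dictionary<string, List<string>>(StringComparer.Ordinal);

        using var doc = JsonDocument.Parse(File.ReadAllText(depsJsonPath));
        if (!doc.RootElement.TryGetProperty("targets", out var targets))
        {
            return byRid;
        }

        foreach (var target in targets.EnumerateObject())
        {
            // ".NETCoreApp,Version=v8.0/linux-x64" → RID follows '/'; portable targets carry none.
            var slash = target.Name.IndexOf('/');
            var rid = slash >= 0 ? target.Name[(slash + 1)..] : "any";

            if (!byRid.TryGetValue(rid, out var packages))
            {
                byRid[rid] = packages = new List<string>();
            }

            foreach (var package in target.Value.EnumerateObject())
            {
                packages.Add(package.Name); // entries look like "PackageName/1.2.3"
            }
        }

        return byRid;
    }
}
```

Grouping by RID first, then deduplicating identical package sets across RIDs, is what keeps self-contained apps from exploding into one component per asset.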
+ +## Sprint LA5 — Rust Analyzer & Binary Fingerprinting (Tasks 10-306, 10-307, 10-308, 10-309 subset) +- **Scope:** Detect crates via metadata in `.fingerprint`, Cargo.lock fragments, or embedded `rustc` markers; robust fallback to binary hash classification. +- **Deliverables:** + - `StellaOps.Scanner.Analyzers.Lang.Rust` plug-in. + - Symbol table heuristics capable of attributing stripped binaries by leveraging `.comment` and section names without violating determinism. + - Quiet-provenance flags to differentiate heuristics from hard evidence. +- **Acceptance Metrics:** + - Accurate crate attribution ≥85 % on curated Cargo workspace fixtures. + - Heuristic fallback clearly labeled; no false “certain” claims. + - Analyzer completes <1 s on 500 binary corpus. +- **Gate Artifacts:** + - Fixtures covering cargo workspaces, binaries with embedded metadata stripped. + - ADR documenting heuristic boundaries + risk mitigations. + +## Sprint LA6 — Shared Evidence Enhancements & Worker Integration (Tasks 10-307, 10-308, 10-309 finalization) +- **Scope:** Finalize shared helpers, deterministic harness expansion, Worker/Emit wiring, and macro benchmarks. +- **Deliverables:** + - Consolidated `LanguageComponentWriter` extensions for license, vulnerability hints, and usage propagation. + - Worker dispatcher loading plug-ins via manifest registry + health checks. + - Combined analyzer benchmark suite executed in CI with regression thresholds. +- **Acceptance Metrics:** + - Worker executes mixed analyzer suite (Java+Node+Python+Go+.NET+Rust) within SLA: warm scan <6 s, cold <25 s. + - CI determinism guard catches output drift (>0 diff tolerance) across all fixtures. + - Telemetry coverage: each analyzer emits timing + component counters. +- **Gate Artifacts:** + - `SPRINTS_LANG_IMPLEMENTATION_PLAN.md` progress log updated (this file). + - `bench/Scanner.Analyzers/lang-matrix.csv` recorded + referenced in docs. + - Ops notes for packaging plug-ins into Offline Kit. + +--- + +## Cross-Sprint Considerations +- **Security:** All analyzers must enforce path canonicalization, guard against zip-slip, and expose provenance classifications (`observed`, `heuristic`, `attested`). +- **Offline-first:** No network calls; rely on cached metadata and optional offline bundles (license texts, signature roots). +- **Determinism:** Normalise timestamps to `0001-01-01T00:00:00Z` when persisting synthetic data; sort collections by stable keys. +- **Benchmarking:** Extend `bench/Scanner.Analyzers` to compare against open-source scanners (Syft/Trivy) and document performance wins. +- **Hand-offs:** Emit guild requires consistent component schemas; Policy needs license + provenance metadata; Scheduler depends on usage flags for ImpactIndex. + +## Tracking & Reporting +- Update `TASKS.md` per sprint (TODO → DOING → DONE) with date stamps. +- Log sprint summaries in `docs/updates/` once each sprint lands. +- Use module-specific CI pipeline to run analyzer suites nightly (determinism + perf). + +--- + +**Next Action:** Start Sprint LA1 (Node Analyzer) — move tasks 10-302, 10-307, 10-308, 10-309 → DOING and spin up fixtures + benchmarks. 
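The cross-sprint security requirement above (path canonicalization plus zip-slip guarding) can be illustrated with a guard along these lines. This is a minimal sketch: `SafePath.ResolveUnderRoot` is a hypothetical helper name, not a shared analyzer API.

```csharp
// Minimal zip-slip / path-canonicalization guard, illustrative only.
using System;
using System.IO;

static class SafePath
{
    // Resolves 'relative' under 'root' and rejects any entry that would escape the
    // root after canonicalization (e.g. "../../etc/passwd" or an absolute path).
    public static string ResolveUnderRoot(string root, string relative)
    {
        var canonicalRoot = Path.TrimEndingDirectorySeparator(Path.GetFullPath(root));
        var candidate = Path.GetFullPath(Path.Combine(canonicalRoot, relative));

        if (!string.Equals(candidate, canonicalRoot, StringComparison.Ordinal) &&
            !candidate.StartsWith(canonicalRoot + Path.DirectorySeparatorChar, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Entry '{relative}' escapes the analysis root.");
        }

        return candidate;
    }
}
```

Canonicalizing both sides before the ordinal prefix check keeps the guard deterministic across platforms and layered filesystem roots.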
diff --git a/src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj b/src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj new file mode 100644 index 00000000..927ea5cc --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj @@ -0,0 +1,21 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.Lang/TASKS.md b/src/StellaOps.Scanner.Analyzers.Lang/TASKS.md new file mode 100644 index 00000000..95629b1c --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.Lang/TASKS.md @@ -0,0 +1,13 @@ +# Language Analyzer Task Board + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-LANG-10-301 | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-CORE-09-501, SCANNER-WORKER-09-203 | Java analyzer emitting deterministic `pkg:maven` components using pom.properties / MANIFEST evidence. | Java analyzer extracts coordinates+version+licenses with provenance; golden fixtures deterministic; microbenchmark meets target. | +| SCANNER-ANALYZERS-LANG-10-302 | DOING (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Node analyzer resolving workspaces/symlinks into `pkg:npm` identities. | Node analyzer handles symlinks/workspaces; outputs sorted components; determinism harness covers hoisted deps. | +| SCANNER-ANALYZERS-LANG-10-303 | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Python analyzer consuming `*.dist-info` metadata and RECORD hashes. | Analyzer binds METADATA + RECORD evidence, includes entry points, determinism fixtures stable. | +| SCANNER-ANALYZERS-LANG-10-304 | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Go analyzer leveraging buildinfo for `pkg:golang` components. | Buildinfo parser emits module path/version + vcs metadata; binaries without buildinfo downgraded gracefully. | +| SCANNER-ANALYZERS-LANG-10-305 | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | .NET analyzer parsing `*.deps.json`, assembly metadata, and RID variants. | Analyzer merges deps.json + assembly info; dedupes per RID; determinism verified. | +| SCANNER-ANALYZERS-LANG-10-306 | TODO | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Rust analyzer detecting crate provenance or falling back to `bin:{sha256}`. | Analyzer emits `pkg:cargo` when metadata present; falls back to binary hash; fixtures cover both paths. | +| SCANNER-ANALYZERS-LANG-10-307 | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-CORE-09-501 | Shared language evidence helpers + usage flag propagation. | Shared abstractions implemented; analyzers reuse helpers; evidence includes usage hints; unit tests cover canonical ordering. | +| SCANNER-ANALYZERS-LANG-10-308 | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Determinism + fixture harness for language analyzers. | Harness executes analyzers against fixtures; golden JSON stored; CI helper ensures stable hashes. | +| SCANNER-ANALYZERS-LANG-10-309 | DOING (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301..308 | Package language analyzers as restart-time plug-ins (manifest + host registration). | Plugin manifests authored under `plugins/scanner/analyzers/lang`; Worker loads via DI; restart required flag enforced; tests confirm manifest integrity. 
| diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs new file mode 100644 index 00000000..fa19331b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.Apk; + +public sealed class ApkAnalyzerPlugin : IOSAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.OS.Apk"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new ApkPackageAnalyzer(loggerFactory.CreateLogger()); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs new file mode 100644 index 00000000..9ef94039 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs @@ -0,0 +1,203 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading; + +namespace StellaOps.Scanner.Analyzers.OS.Apk; + +internal sealed class ApkDatabaseParser +{ + public IReadOnlyList Parse(Stream stream, CancellationToken cancellationToken) + { + var packages = new List(); + var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 4096, leaveOpen: true); + + var current = new ApkPackageEntry(); + string? currentDirectory = "/"; + string? pendingDigest = null; + bool pendingConfig = false; + + string? line; + while ((line = reader.ReadLine()) != null) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(line)) + { + CommitCurrent(); + current = new ApkPackageEntry(); + currentDirectory = "/"; + pendingDigest = null; + pendingConfig = false; + continue; + } + + if (line.Length < 2) + { + continue; + } + + var key = line[0]; + var value = line.Length > 2 ? line[2..] 
: string.Empty; + + switch (key) + { + case 'C': + current.Channel = value; + break; + case 'P': + current.Name = value; + break; + case 'V': + current.Version = value; + break; + case 'A': + current.Architecture = value; + break; + case 'S': + current.InstalledSize = value; + break; + case 'I': + current.PackageSize = value; + break; + case 'T': + current.Description = value; + break; + case 'U': + current.Url = value; + break; + case 'L': + current.License = value; + break; + case 'o': + current.Origin = value; + break; + case 'm': + current.Maintainer = value; + break; + case 't': + current.BuildTime = value; + break; + case 'c': + current.Checksum = value; + break; + case 'D': + current.Depends.AddRange(SplitList(value)); + break; + case 'p': + current.Provides.AddRange(SplitList(value)); + break; + case 'F': + currentDirectory = NormalizeDirectory(value); + current.Files.Add(new ApkFileEntry(currentDirectory, true, false, null)); + break; + case 'R': + if (currentDirectory is null) + { + currentDirectory = "/"; + } + + var fullPath = CombinePath(currentDirectory, value); + current.Files.Add(new ApkFileEntry(fullPath, false, pendingConfig, pendingDigest)); + pendingDigest = null; + pendingConfig = false; + break; + case 'Z': + pendingDigest = string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + break; + case 'a': + pendingConfig = value.Contains("cfg", StringComparison.OrdinalIgnoreCase); + break; + default: + current.Metadata[key.ToString()] = value; + break; + } + } + + CommitCurrent(); + return packages; + + void CommitCurrent() + { + if (!string.IsNullOrWhiteSpace(current.Name) && + !string.IsNullOrWhiteSpace(current.Version) && + !string.IsNullOrWhiteSpace(current.Architecture)) + { + packages.Add(current); + } + } + } + + private static IEnumerable SplitList(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + yield break; + } + + foreach (var token in value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + yield return token; + } + } + + private static string NormalizeDirectory(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "/"; + } + + var path = value.Trim(); + if (!path.StartsWith('/')) + { + path = "/" + path; + } + + if (!path.EndsWith('/')) + { + path += "/"; + } + + return path.Replace("//", "/"); + } + + private static string CombinePath(string directory, string relative) + { + if (string.IsNullOrWhiteSpace(relative)) + { + return directory.TrimEnd('/'); + } + + if (!directory.EndsWith('/')) + { + directory += "/"; + } + + return (directory + relative.TrimStart('/')).Replace("//", "/"); + } +} + +internal sealed class ApkPackageEntry +{ + public string? Channel { get; set; } + public string? Name { get; set; } + public string? Version { get; set; } + public string? Architecture { get; set; } + public string? InstalledSize { get; set; } + public string? PackageSize { get; set; } + public string? Description { get; set; } + public string? Url { get; set; } + public string? License { get; set; } + public string? Origin { get; set; } + public string? Maintainer { get; set; } + public string? BuildTime { get; set; } + public string? Checksum { get; set; } + public List Depends { get; } = new(); + public List Provides { get; } = new(); + public List Files { get; } = new(); + public Dictionary Metadata { get; } = new(StringComparer.Ordinal); +} + +internal sealed record ApkFileEntry(string Path, bool IsDirectory, bool IsConfig, string? 
Digest); diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs new file mode 100644 index 00000000..7d05380b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs @@ -0,0 +1,106 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; + +namespace StellaOps.Scanner.Analyzers.OS.Apk; + +internal sealed class ApkPackageAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(System.Array.Empty()); + + private readonly ApkDatabaseParser _parser = new(); + + public ApkPackageAnalyzer(ILogger logger) + : base(logger) + { + } + + public override string AnalyzerId => "apk"; + + protected override ValueTask> ExecuteCoreAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken) + { + var installedPath = Path.Combine(context.RootPath, "lib", "apk", "db", "installed"); + if (!File.Exists(installedPath)) + { + Logger.LogInformation("Apk installed database not found at {Path}; skipping analyzer.", installedPath); + return ValueTask.FromResult>(EmptyPackages); + } + + using var stream = File.OpenRead(installedPath); + var entries = _parser.Parse(stream, cancellationToken); + + var records = new List(entries.Count); + foreach (var entry in entries) + { + if (string.IsNullOrWhiteSpace(entry.Name) || + string.IsNullOrWhiteSpace(entry.Version) || + string.IsNullOrWhiteSpace(entry.Architecture)) + { + continue; + } + + var versionParts = PackageVersionParser.ParseApkVersion(entry.Version); + var purl = PackageUrlBuilder.BuildAlpine(entry.Name, entry.Version, entry.Architecture); + + var vendorMetadata = new Dictionary(StringComparer.Ordinal) + { + ["origin"] = entry.Origin, + ["description"] = entry.Description, + ["homepage"] = entry.Url, + ["maintainer"] = entry.Maintainer, + ["checksum"] = entry.Checksum, + ["buildTime"] = entry.BuildTime, + }; + + foreach (var pair in entry.Metadata) + { + vendorMetadata[$"apk:{pair.Key}"] = pair.Value; + } + + var files = new List(entry.Files.Count); + foreach (var file in entry.Files) + { + files.Add(new OSPackageFileEvidence( + file.Path, + layerDigest: null, + sha256: file.Digest, + sizeBytes: null, + isConfigFile: file.IsConfig)); + } + + var cveHints = CveHintExtractor.Extract( + string.Join(' ', entry.Depends), + string.Join(' ', entry.Provides)); + + var record = new OSPackageRecord( + AnalyzerId, + purl, + entry.Name, + versionParts.BaseVersion, + entry.Architecture, + PackageEvidenceSource.ApkDatabase, + epoch: null, + release: versionParts.Release, + sourcePackage: entry.Origin, + license: entry.License, + cveHints: cveHints, + provides: entry.Provides, + depends: entry.Depends, + files: files, + vendorMetadata: vendorMetadata); + + records.Add(record); + } + + records.Sort(); + return ValueTask.FromResult>(records); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs b/src/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d0ddbf86 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + 
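// Internal parser and analyzer types are exposed to the shared OS analyzer test assembly declared below.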
+[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Tests")] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj b/src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj new file mode 100644 index 00000000..51bd740a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj @@ -0,0 +1,15 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json b/src/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json new file mode 100644 index 00000000..aad798dd --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.apk", + "displayName": "StellaOps Alpine APK Analyzer", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Apk.dll" + }, + "capabilities": [ + "os-analyzer", + "apk" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "apk" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs new file mode 100644 index 00000000..6e6dd53d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.Dpkg; + +public sealed class DpkgAnalyzerPlugin : IOSAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.OS.Dpkg"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new DpkgPackageAnalyzer(loggerFactory.CreateLogger()); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs new file mode 100644 index 00000000..1b56e5e1 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs @@ -0,0 +1,267 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; + +namespace StellaOps.Scanner.Analyzers.OS.Dpkg; + +internal sealed class DpkgPackageAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(System.Array.Empty()); + + private readonly DpkgStatusParser _parser = new(); + + public DpkgPackageAnalyzer(ILogger logger) + : base(logger) + { + } + + public override string AnalyzerId => "dpkg"; + + protected override ValueTask> ExecuteCoreAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken) + { + var statusPath = Path.Combine(context.RootPath, "var", "lib", "dpkg", "status"); + if (!File.Exists(statusPath)) + { + Logger.LogInformation("dpkg status file not found at {Path}; 
skipping analyzer.", statusPath); + return ValueTask.FromResult>(EmptyPackages); + } + + using var stream = File.OpenRead(statusPath); + var entries = _parser.Parse(stream, cancellationToken); + + var infoDirectory = Path.Combine(context.RootPath, "var", "lib", "dpkg", "info"); + var records = new List(); + + foreach (var entry in entries) + { + if (!IsInstalled(entry.Status)) + { + continue; + } + + if (string.IsNullOrWhiteSpace(entry.Name) || string.IsNullOrWhiteSpace(entry.Version) || string.IsNullOrWhiteSpace(entry.Architecture)) + { + continue; + } + + var versionParts = PackageVersionParser.ParseDebianVersion(entry.Version); + var sourceName = ParseSource(entry.Source) ?? entry.Name; + var distribution = entry.Origin; + if (distribution is null && entry.Metadata.TryGetValue("origin", out var originValue)) + { + distribution = originValue; + } + distribution ??= "debian"; + + var purl = PackageUrlBuilder.BuildDebian(distribution!, entry.Name, entry.Version, entry.Architecture); + + var vendorMetadata = new Dictionary(StringComparer.Ordinal) + { + ["source"] = entry.Source, + ["homepage"] = entry.Homepage, + ["maintainer"] = entry.Maintainer, + ["origin"] = entry.Origin, + ["priority"] = entry.Priority, + ["section"] = entry.Section, + }; + + foreach (var kvp in entry.Metadata) + { + vendorMetadata[$"dpkg:{kvp.Key}"] = kvp.Value; + } + + var dependencies = entry.Depends.Concat(entry.PreDepends).ToArray(); + var provides = entry.Provides.ToArray(); + + var fileEvidence = BuildFileEvidence(infoDirectory, entry, cancellationToken); + + var cveHints = CveHintExtractor.Extract(entry.Description, string.Join(' ', dependencies), string.Join(' ', provides)); + + var record = new OSPackageRecord( + AnalyzerId, + purl, + entry.Name, + versionParts.UpstreamVersion, + entry.Architecture, + PackageEvidenceSource.DpkgStatus, + epoch: versionParts.Epoch, + release: versionParts.Revision, + sourcePackage: sourceName, + license: entry.License, + cveHints: cveHints, + provides: provides, + depends: dependencies, + files: fileEvidence, + vendorMetadata: vendorMetadata); + + records.Add(record); + } + + records.Sort(); + return ValueTask.FromResult>(records); + } + + private static bool IsInstalled(string? status) + => status?.Contains("install ok installed", System.StringComparison.OrdinalIgnoreCase) == true; + + private static string? ParseSource(string? sourceField) + { + if (string.IsNullOrWhiteSpace(sourceField)) + { + return null; + } + + var parts = sourceField.Split(' ', 2, System.StringSplitOptions.TrimEntries | System.StringSplitOptions.RemoveEmptyEntries); + return parts.Length == 0 ? 
null : parts[0]; + } + + private static IReadOnlyList BuildFileEvidence(string infoDirectory, DpkgPackageEntry entry, CancellationToken cancellationToken) + { + if (!Directory.Exists(infoDirectory)) + { + return Array.Empty(); + } + + var files = new Dictionary(StringComparer.Ordinal); + void EnsureFile(string path) + { + if (!files.TryGetValue(path, out _)) + { + files[path] = new FileEvidenceBuilder(path); + } + } + + foreach (var conffile in entry.Conffiles) + { + var normalized = conffile.Path.Trim(); + if (string.IsNullOrWhiteSpace(normalized)) + { + continue; + } + + EnsureFile(normalized); + files[normalized].IsConfig = true; + if (!string.IsNullOrWhiteSpace(conffile.Checksum)) + { + files[normalized].Digests["md5"] = conffile.Checksum.Trim(); + } + } + + foreach (var candidate in GetInfoFileCandidates(entry.Name!, entry.Architecture!)) + { + var listPath = Path.Combine(infoDirectory, candidate + ".list"); + if (File.Exists(listPath)) + { + foreach (var line in File.ReadLines(listPath)) + { + cancellationToken.ThrowIfCancellationRequested(); + var trimmed = line.Trim(); + if (string.IsNullOrWhiteSpace(trimmed)) + { + continue; + } + + EnsureFile(trimmed); + } + } + + var confFilePath = Path.Combine(infoDirectory, candidate + ".conffiles"); + if (File.Exists(confFilePath)) + { + foreach (var line in File.ReadLines(confFilePath)) + { + cancellationToken.ThrowIfCancellationRequested(); + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(' ', System.StringSplitOptions.RemoveEmptyEntries | System.StringSplitOptions.TrimEntries); + if (parts.Length == 0) + { + continue; + } + + var path = parts[0]; + EnsureFile(path); + files[path].IsConfig = true; + if (parts.Length >= 2) + { + files[path].Digests["md5"] = parts[1]; + } + } + } + + var md5sumsPath = Path.Combine(infoDirectory, candidate + ".md5sums"); + if (File.Exists(md5sumsPath)) + { + foreach (var line in File.ReadLines(md5sumsPath)) + { + cancellationToken.ThrowIfCancellationRequested(); + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(' ', 2, System.StringSplitOptions.RemoveEmptyEntries | System.StringSplitOptions.TrimEntries); + if (parts.Length != 2) + { + continue; + } + + var hash = parts[0]; + var path = parts[1]; + EnsureFile(path); + files[path].Digests["md5"] = hash; + } + } + } + + if (files.Count == 0) + { + return Array.Empty(); + } + + var evidence = files.Values + .Select(builder => builder.ToEvidence()) + .OrderBy(e => e) + .ToArray(); + + return new ReadOnlyCollection(evidence); + } + + private static IEnumerable GetInfoFileCandidates(string packageName, string architecture) + { + yield return packageName + ":" + architecture; + yield return packageName; + } + + private sealed class FileEvidenceBuilder + { + public FileEvidenceBuilder(string path) + { + Path = path; + } + + public string Path { get; } + + public bool IsConfig { get; set; } + + public Dictionary Digests { get; } = new(StringComparer.OrdinalIgnoreCase); + + public OSPackageFileEvidence ToEvidence() + { + return new OSPackageFileEvidence(Path, isConfigFile: IsConfig, digests: Digests); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs new file mode 100644 index 00000000..79f0c2ae --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs @@ -0,0 +1,253 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading; + 
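// The dpkg status database (var/lib/dpkg/status) is a sequence of RFC 822-style stanzas
// separated by blank lines; continuation lines begin with whitespace. Illustrative example
// (field values below are made up) of the input this parser consumes:
//
//   Package: openssl
//   Status: install ok installed
//   Version: 3.0.11-1~deb12u2
//   Architecture: amd64
//   Description: Secure Sockets Layer toolkit
//    This continuation line belongs to Description.
//   Conffiles:
//    /etc/ssl/openssl.cnf 7df26c55a1bcfc0d4d4a9f0b5077d1c9
//
// Continuation lines are folded into the preceding field; a blank line ends the entry.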
+namespace StellaOps.Scanner.Analyzers.OS.Dpkg; + +internal sealed class DpkgStatusParser +{ + public IReadOnlyList Parse(Stream stream, CancellationToken cancellationToken) + { + var packages = new List(); + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 4096, leaveOpen: true); + + var current = new DpkgPackageEntry(); + string? currentField = null; + + string? line; + while ((line = reader.ReadLine()) != null) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(line)) + { + CommitField(); + CommitPackage(); + current = new DpkgPackageEntry(); + currentField = null; + continue; + } + + if (char.IsWhiteSpace(line, 0)) + { + var continuation = line.Trim(); + if (currentField is not null) + { + current.AppendContinuation(currentField, continuation); + } + continue; + } + + var separator = line.IndexOf(':'); + if (separator <= 0) + { + continue; + } + + CommitField(); + + var fieldName = line[..separator]; + var value = line[(separator + 1)..].TrimStart(); + + currentField = fieldName; + current.SetField(fieldName, value); + } + + CommitField(); + CommitPackage(); + + return packages; + + void CommitField() + { + if (currentField is not null) + { + current.FieldCompleted(currentField); + } + } + + void CommitPackage() + { + if (current.IsValid) + { + packages.Add(current); + } + } + } +} + +internal sealed class DpkgPackageEntry +{ + private readonly StringBuilder _descriptionBuilder = new(); + private readonly Dictionary _metadata = new(StringComparer.OrdinalIgnoreCase); + private string? _currentMultilineField; + + public string? Name { get; private set; } + public string? Version { get; private set; } + public string? Architecture { get; private set; } + public string? Status { get; private set; } + public string? Source { get; private set; } + public string? Description { get; private set; } + public string? Homepage { get; private set; } + public string? Maintainer { get; private set; } + public string? Origin { get; private set; } + public string? Priority { get; private set; } + public string? Section { get; private set; } + public string? 
License { get; private set; } + public List Depends { get; } = new(); + public List PreDepends { get; } = new(); + public List Provides { get; } = new(); + public List Recommends { get; } = new(); + public List Suggests { get; } = new(); + public List Replaces { get; } = new(); + public List Conffiles { get; } = new(); + + public IReadOnlyDictionary Metadata => _metadata; + + public bool IsValid => !string.IsNullOrWhiteSpace(Name) + && !string.IsNullOrWhiteSpace(Version) + && !string.IsNullOrWhiteSpace(Architecture) + && !string.IsNullOrWhiteSpace(Status); + + public void SetField(string fieldName, string value) + { + switch (fieldName) + { + case "Package": + Name = value; + break; + case "Version": + Version = value; + break; + case "Architecture": + Architecture = value; + break; + case "Status": + Status = value; + break; + case "Source": + Source = value; + break; + case "Description": + _descriptionBuilder.Clear(); + _descriptionBuilder.Append(value); + Description = _descriptionBuilder.ToString(); + _currentMultilineField = fieldName; + break; + case "Homepage": + Homepage = value; + break; + case "Maintainer": + Maintainer = value; + break; + case "Origin": + Origin = value; + break; + case "Priority": + Priority = value; + break; + case "Section": + Section = value; + break; + case "License": + License = value; + break; + case "Depends": + Depends.AddRange(ParseRelations(value)); + break; + case "Pre-Depends": + PreDepends.AddRange(ParseRelations(value)); + break; + case "Provides": + Provides.AddRange(ParseRelations(value)); + break; + case "Recommends": + Recommends.AddRange(ParseRelations(value)); + break; + case "Suggests": + Suggests.AddRange(ParseRelations(value)); + break; + case "Replaces": + Replaces.AddRange(ParseRelations(value)); + break; + case "Conffiles": + _currentMultilineField = fieldName; + if (!string.IsNullOrWhiteSpace(value)) + { + AddConffile(value); + } + break; + default: + _metadata[fieldName] = value; + break; + } + } + + public void AppendContinuation(string fieldName, string continuation) + { + if (string.Equals(fieldName, "Description", StringComparison.OrdinalIgnoreCase)) + { + if (_descriptionBuilder.Length > 0) + { + _descriptionBuilder.AppendLine(); + } + + _descriptionBuilder.Append(continuation); + Description = _descriptionBuilder.ToString(); + _currentMultilineField = fieldName; + return; + } + + if (string.Equals(fieldName, "Conffiles", StringComparison.OrdinalIgnoreCase)) + { + AddConffile(continuation); + _currentMultilineField = fieldName; + return; + } + + if (_metadata.TryGetValue(fieldName, out var existing) && existing is not null) + { + _metadata[fieldName] = $"{existing}{Environment.NewLine}{continuation}"; + } + else + { + _metadata[fieldName] = continuation; + } + } + + public void FieldCompleted(string fieldName) + { + if (string.Equals(fieldName, _currentMultilineField, StringComparison.OrdinalIgnoreCase)) + { + _currentMultilineField = null; + } + } + + private void AddConffile(string value) + { + var tokens = value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (tokens.Length >= 1) + { + var path = tokens[0]; + var checksum = tokens.Length >= 2 ? tokens[1] : null; + Conffiles.Add(new DpkgConffileEntry(path, checksum)); + } + } + + private static IEnumerable ParseRelations(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + yield break; + } + + foreach (var segment in value.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + yield return segment; + } + } +} + +internal sealed record DpkgConffileEntry(string Path, string? Checksum); diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d0ddbf86 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Tests")] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj new file mode 100644 index 00000000..51bd740a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj @@ -0,0 +1,15 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json new file mode 100644 index 00000000..a4126885 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.dpkg", + "displayName": "StellaOps Debian dpkg Analyzer", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Dpkg.dll" + }, + "capabilities": [ + "os-analyzer", + "dpkg" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "dpkg" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs new file mode 100644 index 00000000..05e3e119 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; +using System.Threading; +using StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm; + +internal interface IRpmDatabaseReader +{ + IReadOnlyList ReadHeaders(string rootPath, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs new file mode 100644 index 00000000..615323af --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs @@ -0,0 +1,86 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +internal sealed class RpmHeader +{ + public RpmHeader( + string name, + string version, + string architecture, + string? release, + string? epoch, + string? summary, + string? description, + string? license, + string? sourceRpm, + string? url, + string? vendor, + long? buildTime, + long? 
installTime, + IReadOnlyList provides, + IReadOnlyList provideVersions, + IReadOnlyList requires, + IReadOnlyList requireVersions, + IReadOnlyList files, + IReadOnlyList changeLogs, + IReadOnlyDictionary metadata) + { + Name = name; + Version = version; + Architecture = architecture; + Release = release; + Epoch = epoch; + Summary = summary; + Description = description; + License = license; + SourceRpm = sourceRpm; + Url = url; + Vendor = vendor; + BuildTime = buildTime; + InstallTime = installTime; + Provides = provides; + ProvideVersions = provideVersions; + Requires = requires; + RequireVersions = requireVersions; + Files = files; + ChangeLogs = changeLogs; + Metadata = metadata; + } + + public string Name { get; } + public string Version { get; } + public string Architecture { get; } + public string? Release { get; } + public string? Epoch { get; } + public string? Summary { get; } + public string? Description { get; } + public string? License { get; } + public string? SourceRpm { get; } + public string? Url { get; } + public string? Vendor { get; } + public long? BuildTime { get; } + public long? InstallTime { get; } + public IReadOnlyList Provides { get; } + public IReadOnlyList ProvideVersions { get; } + public IReadOnlyList Requires { get; } + public IReadOnlyList RequireVersions { get; } + public IReadOnlyList Files { get; } + public IReadOnlyList ChangeLogs { get; } + public IReadOnlyDictionary Metadata { get; } +} + +internal sealed class RpmFileEntry +{ + public RpmFileEntry(string path, bool isConfig, IReadOnlyDictionary digests) + { + Path = path; + IsConfig = isConfig; + Digests = digests; + } + + public string Path { get; } + public bool IsConfig { get; } + public IReadOnlyDictionary Digests { get; } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs new file mode 100644 index 00000000..6dc0612d --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs @@ -0,0 +1,479 @@ +using System; +using System.Buffers.Binary; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +internal sealed class RpmHeaderParser +{ + private const uint HeaderMagic = 0x8eade8ab; + private const int RpmFileConfigFlag = 1; + + public RpmHeader Parse(ReadOnlySpan buffer) + { + if (buffer.Length < 16) + { + throw new InvalidOperationException("RPM header buffer too small."); + } + + var reader = new HeaderReader(buffer); + var magic = reader.ReadUInt32(); + if (magic != HeaderMagic) + { + throw new InvalidOperationException("Invalid RPM header magic."); + } + + reader.ReadByte(); // version + reader.ReadByte(); // reserved + reader.ReadUInt16(); // reserved + + var indexCount = reader.ReadInt32(); + var storeSize = reader.ReadInt32(); + + if (indexCount < 0 || storeSize < 0) + { + throw new InvalidOperationException("Corrupt RPM header lengths."); + } + + var entries = new IndexEntry[indexCount]; + for (var i = 0; i < indexCount; i++) + { + var tag = reader.ReadInt32(); + var type = (RpmDataType)reader.ReadInt32(); + var offset = reader.ReadInt32(); + var count = reader.ReadInt32(); + entries[i] = new IndexEntry(tag, type, offset, count); + } + + var store = reader.ReadBytes(storeSize); + + for (var i = 0; i < entries.Length; i++) + { + var current = entries[i]; + var nextOffset = i + 1 < entries.Length ? 
entries[i + 1].Offset : storeSize; + var length = Math.Max(0, nextOffset - current.Offset); + current.SetLength(length); + entries[i] = current; + } + + var values = new Dictionary(entries.Length); + foreach (var entry in entries) + { + if (entry.Offset < 0 || entry.Offset + entry.Length > store.Length) + { + continue; + } + + var slice = store.Slice(entry.Offset, entry.Length); + values[entry.Tag] = entry.Type switch + { + RpmDataType.Null => null, + RpmDataType.Char => slice.ToArray(), + RpmDataType.Int8 => ReadSByteArray(slice, entry.Count), + RpmDataType.Int16 => ReadInt16Array(slice, entry.Count), + RpmDataType.Int32 => ReadInt32Array(slice, entry.Count), + RpmDataType.Int64 => ReadInt64Array(slice, entry.Count), + RpmDataType.String => ReadString(slice), + RpmDataType.Bin => slice.ToArray(), + RpmDataType.StringArray => ReadStringArray(slice, entry.Count), + RpmDataType.I18NString => ReadStringArray(slice, entry.Count), + _ => null, + }; + } + + var name = RequireString(values, RpmTags.Name); + var version = RequireString(values, RpmTags.Version); + var arch = GetString(values, RpmTags.Arch) ?? "noarch"; + var release = GetString(values, RpmTags.Release); + var epoch = GetEpoch(values); + var summary = GetString(values, RpmTags.Summary); + var description = GetString(values, RpmTags.Description); + var license = GetString(values, RpmTags.License); + var sourceRpm = GetString(values, RpmTags.SourceRpm); + var url = GetString(values, RpmTags.Url); + var vendor = GetString(values, RpmTags.Vendor); + var buildTime = GetFirstInt64(values, RpmTags.BuildTime); + var installTime = GetFirstInt64(values, RpmTags.InstallTime); + var provides = GetStringArray(values, RpmTags.ProvideName); + var provideVersions = GetStringArray(values, RpmTags.ProvideVersion); + var requires = GetStringArray(values, RpmTags.RequireName); + var requireVersions = GetStringArray(values, RpmTags.RequireVersion); + var changeLogs = GetStringArray(values, RpmTags.ChangeLogText); + + var fileEntries = BuildFiles(values); + + var metadata = new SortedDictionary(StringComparer.Ordinal) + { + ["summary"] = summary, + ["description"] = description, + ["vendor"] = vendor, + ["url"] = url, + ["packager"] = GetString(values, RpmTags.Packager), + ["group"] = GetString(values, RpmTags.Group), + ["buildHost"] = GetString(values, RpmTags.BuildHost), + ["size"] = GetFirstInt64(values, RpmTags.Size)?.ToString(System.Globalization.CultureInfo.InvariantCulture), + ["buildTime"] = buildTime?.ToString(System.Globalization.CultureInfo.InvariantCulture), + ["installTime"] = installTime?.ToString(System.Globalization.CultureInfo.InvariantCulture), + ["os"] = GetString(values, RpmTags.Os), + }; + + return new RpmHeader( + name, + version, + arch, + release, + epoch, + summary, + description, + license, + sourceRpm, + url, + vendor, + buildTime, + installTime, + provides, + provideVersions, + requires, + requireVersions, + fileEntries, + changeLogs, + new ReadOnlyDictionary(metadata)); + } + + private static IReadOnlyList BuildFiles(Dictionary values) + { + var directories = GetStringArray(values, RpmTags.DirNames); + var basenames = GetStringArray(values, RpmTags.BaseNames); + var dirIndexes = GetInt32Array(values, RpmTags.DirIndexes); + var fileFlags = GetInt32Array(values, RpmTags.FileFlags); + var fileMd5 = GetStringArray(values, RpmTags.FileMd5); + var fileDigests = GetStringArray(values, RpmTags.FileDigests); + var digestAlgorithm = GetFirstInt32(values, RpmTags.FileDigestAlgorithm) ?? 
1; + + if (basenames.Count == 0) + { + return Array.Empty(); + } + + var result = new List(basenames.Count); + for (var i = 0; i < basenames.Count; i++) + { + var dirIndex = dirIndexes.Count > i ? dirIndexes[i] : 0; + var directory = directories.Count > dirIndex ? directories[dirIndex] : "/"; + if (!directory.EndsWith('/')) + { + directory += "/"; + } + + var fullPath = (directory + basenames[i]).Replace("//", "/"); + var isConfig = fileFlags.Count > i && (fileFlags[i] & RpmFileConfigFlag) == RpmFileConfigFlag; + + var digests = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (fileDigests.Count > i && !string.IsNullOrWhiteSpace(fileDigests[i])) + { + digests[ResolveDigestName(digestAlgorithm)] = fileDigests[i]; + } + else if (fileMd5.Count > i && !string.IsNullOrWhiteSpace(fileMd5[i])) + { + digests["md5"] = fileMd5[i]; + } + + result.Add(new RpmFileEntry(fullPath, isConfig, new ReadOnlyDictionary(digests))); + } + + return new ReadOnlyCollection(result); + } + + private static string ResolveDigestName(int algorithm) + => algorithm switch + { + 1 => "md5", + 2 => "sha1", + 8 => "sha256", + 9 => "sha384", + 10 => "sha512", + _ => "md5", + }; + + private static string RequireString(Dictionary values, int tag) + { + var value = GetString(values, tag); + if (string.IsNullOrWhiteSpace(value)) + { + throw new InvalidOperationException($"Required RPM tag {tag} missing."); + } + + return value; + } + + private static string? GetString(Dictionary values, int tag) + { + if (!values.TryGetValue(tag, out var value) || value is null) + { + return null; + } + + return value switch + { + string s => s, + string[] array when array.Length > 0 => array[0], + byte[] bytes => Encoding.UTF8.GetString(bytes).TrimEnd('\0'), + _ => value.ToString(), + }; + } + + private static IReadOnlyList GetStringArray(Dictionary values, int tag) + { + if (!values.TryGetValue(tag, out var value) || value is null) + { + return Array.Empty(); + } + + return value switch + { + string[] array => array, + string s => new[] { s }, + _ => Array.Empty(), + }; + } + + private static IReadOnlyList GetInt32Array(Dictionary values, int tag) + { + if (!values.TryGetValue(tag, out var value) || value is null) + { + return Array.Empty(); + } + + return value switch + { + int[] array => array, + int i => new[] { i }, + _ => Array.Empty(), + }; + } + + private static long? GetFirstInt64(Dictionary values, int tag) + { + if (!values.TryGetValue(tag, out var value) || value is null) + { + return null; + } + + return value switch + { + long[] array when array.Length > 0 => array[0], + long l => l, + int[] ints when ints.Length > 0 => ints[0], + int i => i, + _ => null, + }; + } + + private static int? GetFirstInt32(Dictionary values, int tag) + { + if (!values.TryGetValue(tag, out var value) || value is null) + { + return null; + } + + return value switch + { + int[] array when array.Length > 0 => array[0], + int i => i, + _ => null, + }; + } + + private static string? 
GetEpoch(Dictionary values) + { + if (!values.TryGetValue(RpmTags.Epoch, out var value) || value is null) + { + return null; + } + + return value switch + { + int i when i > 0 => i.ToString(System.Globalization.CultureInfo.InvariantCulture), + int[] array when array.Length > 0 => array[0].ToString(System.Globalization.CultureInfo.InvariantCulture), + string s => s, + _ => null, + }; + } + + private static sbyte[] ReadSByteArray(ReadOnlySpan slice, int count) + { + if (count <= 0) + { + return Array.Empty(); + } + + var result = new sbyte[count]; + for (var i = 0; i < count && i < slice.Length; i++) + { + result[i] = unchecked((sbyte)slice[i]); + } + + return result; + } + + private static short[] ReadInt16Array(ReadOnlySpan slice, int count) + { + if (count <= 0) + { + return Array.Empty(); + } + + var result = new short[count]; + for (var i = 0; i < count && (i * 2 + 2) <= slice.Length; i++) + { + result[i] = unchecked((short)BinaryPrimitives.ReadInt16BigEndian(slice[(i * 2)..])); + } + + return result; + } + + private static int[] ReadInt32Array(ReadOnlySpan slice, int count) + { + if (count <= 0) + { + return Array.Empty(); + } + + var result = new int[count]; + for (var i = 0; i < count && (i * 4 + 4) <= slice.Length; i++) + { + result[i] = BinaryPrimitives.ReadInt32BigEndian(slice[(i * 4)..]); + } + + return result; + } + + private static long[] ReadInt64Array(ReadOnlySpan slice, int count) + { + if (count <= 0) + { + return Array.Empty(); + } + + var result = new long[count]; + for (var i = 0; i < count && (i * 8 + 8) <= slice.Length; i++) + { + result[i] = BinaryPrimitives.ReadInt64BigEndian(slice[(i * 8)..]); + } + + return result; + } + + private static string ReadString(ReadOnlySpan slice) + { + var zero = slice.IndexOf((byte)0); + if (zero >= 0) + { + slice = slice[..zero]; + } + + return Encoding.UTF8.GetString(slice); + } + + private static string[] ReadStringArray(ReadOnlySpan slice, int count) + { + if (count <= 0) + { + return Array.Empty(); + } + + var list = new List(count); + var span = slice; + for (var i = 0; i < count && span.Length > 0; i++) + { + var zero = span.IndexOf((byte)0); + if (zero < 0) + { + list.Add(Encoding.UTF8.GetString(span).TrimEnd('\0')); + break; + } + + var value = Encoding.UTF8.GetString(span[..zero]); + list.Add(value); + span = span[(zero + 1)..]; + } + + return list.ToArray(); + } + + private struct IndexEntry + { + public IndexEntry(int tag, RpmDataType type, int offset, int count) + { + Tag = tag; + Type = type; + Offset = offset; + Count = count; + Length = 0; + } + + public int Tag { get; } + public RpmDataType Type { get; } + public int Offset { get; } + public int Count { get; } + public int Length { readonly get; private set; } + public void SetLength(int length) => Length = length; + } + + private enum RpmDataType + { + Null = 0, + Char = 1, + Int8 = 2, + Int16 = 3, + Int32 = 4, + Int64 = 5, + String = 6, + Bin = 7, + StringArray = 8, + I18NString = 9, + } + + private ref struct HeaderReader + { + private readonly ReadOnlySpan _buffer; + private int _offset; + + public HeaderReader(ReadOnlySpan buffer) + { + _buffer = buffer; + _offset = 0; + } + + public uint ReadUInt32() + { + var value = BinaryPrimitives.ReadUInt32BigEndian(_buffer[_offset..]); + _offset += 4; + return value; + } + + public int ReadInt32() => (int)ReadUInt32(); + + public ushort ReadUInt16() + { + var value = BinaryPrimitives.ReadUInt16BigEndian(_buffer[_offset..]); + _offset += 2; + return value; + } + + public byte ReadByte() + { + return _buffer[_offset++]; + } + 
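        // Index entries and integer values in the rpm header store are big-endian (network byte
        // order), which is why the reads above go through BinaryPrimitives.Read*BigEndian;
        // string values in the store are NUL-terminated UTF-8, trimmed by ReadString/ReadStringArray.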
+ public ReadOnlySpan ReadBytes(int length) + { + var slice = _buffer.Slice(_offset, length); + _offset += length; + return slice; + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs new file mode 100644 index 00000000..f4ddb4b8 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs @@ -0,0 +1,36 @@ +namespace StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +internal static class RpmTags +{ + public const int Name = 1000; + public const int Version = 1001; + public const int Release = 1002; + public const int Epoch = 1003; + public const int Summary = 1004; + public const int Description = 1005; + public const int BuildTime = 1006; + public const int InstallTime = 1008; + public const int Size = 1009; + public const int Vendor = 1011; + public const int License = 1014; + public const int Packager = 1015; + public const int BuildHost = 1007; + public const int Group = 1016; + public const int Url = 1020; + public const int Os = 1021; + public const int Arch = 1022; + public const int SourceRpm = 1044; + public const int ProvideName = 1047; + public const int ProvideVersion = 1048; + public const int RequireName = 1049; + public const int RequireVersion = 1050; + public const int DirNames = 1098; + public const int ChangeLogText = 1082; + public const int DirIndexes = 1116; + public const int BaseNames = 1117; + public const int FileFlags = 1037; + public const int FileSizes = 1028; + public const int FileMd5 = 1035; + public const int FileDigests = 1146; + public const int FileDigestAlgorithm = 5011; +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d0ddbf86 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Tests")] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs new file mode 100644 index 00000000..26e2cc07 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs @@ -0,0 +1,23 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm; + +public sealed class RpmAnalyzerPlugin : IOSAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.OS.Rpm"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + var loggerFactory = services.GetRequiredService(); + return new RpmPackageAnalyzer( + loggerFactory.CreateLogger(), + new RpmDatabaseReader(loggerFactory.CreateLogger())); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs new file mode 100644 index 00000000..e0ba2df3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs @@ -0,0 +1,122 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using Microsoft.Data.Sqlite; +using Microsoft.Extensions.Logging; +using 
StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm; + +internal sealed class RpmDatabaseReader : IRpmDatabaseReader +{ + private readonly ILogger _logger; + private readonly RpmHeaderParser _parser = new(); + + public RpmDatabaseReader(ILogger logger) + { + _logger = logger; + } + + public IReadOnlyList ReadHeaders(string rootPath, CancellationToken cancellationToken) + { + var sqlitePath = ResolveSqlitePath(rootPath); + if (sqlitePath is null) + { + _logger.LogWarning("rpmdb.sqlite not found under root {RootPath}; rpm analyzer will skip.", rootPath); + return Array.Empty(); + } + + var headers = new List(); + try + { + var connectionString = new SqliteConnectionStringBuilder + { + DataSource = sqlitePath, + Mode = SqliteOpenMode.ReadOnly, + }.ToString(); + + using var connection = new SqliteConnection(connectionString); + connection.Open(); + + using var command = connection.CreateCommand(); + command.CommandText = "SELECT * FROM Packages"; + + using var reader = command.ExecuteReader(); + while (reader.Read()) + { + cancellationToken.ThrowIfCancellationRequested(); + var blob = ExtractHeaderBlob(reader); + if (blob is null) + { + continue; + } + + try + { + headers.Add(_parser.Parse(blob)); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse RPM header record (pkgKey={PkgKey}).", TryGetPkgKey(reader)); + } + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Unable to read rpmdb.sqlite at {Path}.", sqlitePath); + return Array.Empty(); + } + + return headers; + } + + private static string? ResolveSqlitePath(string rootPath) + { + var candidates = new[] + { + Path.Combine(rootPath, "var", "lib", "rpm", "rpmdb.sqlite"), + Path.Combine(rootPath, "usr", "lib", "sysimage", "rpm", "rpmdb.sqlite"), + }; + + foreach (var candidate in candidates) + { + if (File.Exists(candidate)) + { + return candidate; + } + } + + return null; + } + + private static byte[]? ExtractHeaderBlob(SqliteDataReader reader) + { + for (var i = 0; i < reader.FieldCount; i++) + { + if (reader.GetFieldType(i) == typeof(byte[])) + { + return reader.GetFieldValue(i); + } + } + + return null; + } + + private static object? 
TryGetPkgKey(SqliteDataReader reader) + { + try + { + var ordinal = reader.GetOrdinal("pkgKey"); + if (ordinal >= 0) + { + return reader.GetValue(ordinal); + } + } + catch + { + } + + return null; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs new file mode 100644 index 00000000..818771ac --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs @@ -0,0 +1,134 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Analyzers; +using StellaOps.Scanner.Analyzers.OS.Helpers; +using StellaOps.Scanner.Analyzers.OS.Rpm.Internal; + +namespace StellaOps.Scanner.Analyzers.OS.Rpm; + +internal sealed class RpmPackageAnalyzer : OsPackageAnalyzerBase +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(Array.Empty()); + + private readonly IRpmDatabaseReader _reader; + + public RpmPackageAnalyzer(ILogger logger) + : this(logger, null) + { + } + + internal RpmPackageAnalyzer(ILogger logger, IRpmDatabaseReader? reader) + : base(logger) + { + _reader = reader ?? new RpmDatabaseReader(logger); + } + + public override string AnalyzerId => "rpm"; + + protected override ValueTask> ExecuteCoreAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken) + { + var headers = _reader.ReadHeaders(context.RootPath, cancellationToken); + if (headers.Count == 0) + { + return ValueTask.FromResult>(EmptyPackages); + } + + var records = new List(headers.Count); + foreach (var header in headers) + { + try + { + var purl = PackageUrlBuilder.BuildRpm(header.Name, header.Epoch, header.Version, header.Release, header.Architecture); + + var vendorMetadata = new Dictionary(StringComparer.Ordinal) + { + ["summary"] = header.Summary, + ["description"] = header.Description, + ["vendor"] = header.Vendor, + ["url"] = header.Url, + ["sourceRpm"] = header.SourceRpm, + ["buildTime"] = header.BuildTime?.ToString(CultureInfo.InvariantCulture), + ["installTime"] = header.InstallTime?.ToString(CultureInfo.InvariantCulture), + }; + + foreach (var kvp in header.Metadata) + { + vendorMetadata[$"rpm:{kvp.Key}"] = kvp.Value; + } + + var provides = ComposeRelations(header.Provides, header.ProvideVersions); + var requires = ComposeRelations(header.Requires, header.RequireVersions); + + var files = new List(header.Files.Count); + foreach (var file in header.Files) + { + IDictionary? 
digests = null; + if (file.Digests.Count > 0) + { + digests = new Dictionary(file.Digests, StringComparer.OrdinalIgnoreCase); + } + + files.Add(new OSPackageFileEvidence(file.Path, isConfigFile: file.IsConfig, digests: digests)); + } + + var cveHints = CveHintExtractor.Extract( + header.Description, + string.Join('\n', header.ChangeLogs)); + + var record = new OSPackageRecord( + AnalyzerId, + purl, + header.Name, + header.Version, + header.Architecture, + PackageEvidenceSource.RpmDatabase, + epoch: header.Epoch, + release: header.Release, + sourcePackage: header.SourceRpm, + license: header.License, + cveHints: cveHints, + provides: provides, + depends: requires, + files: files, + vendorMetadata: vendorMetadata); + + records.Add(record); + } + catch (Exception ex) + { + Logger.LogWarning(ex, "Failed to convert RPM header for package {Name}.", header.Name); + } + } + + records.Sort(); + return ValueTask.FromResult>(records); + } + + private static IReadOnlyList ComposeRelations(IReadOnlyList names, IReadOnlyList versions) + { + if (names.Count == 0) + { + return Array.Empty(); + } + + var result = new string[names.Count]; + for (var i = 0; i < names.Count; i++) + { + var version = versions.Count > i ? versions[i] : null; + result[i] = string.IsNullOrWhiteSpace(version) + ? names[i] + : $"{names[i]} = {version}"; + } + + return result; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj b/src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj new file mode 100644 index 00000000..d693e33c --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json b/src/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json new file mode 100644 index 00000000..ae12ba03 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json @@ -0,0 +1,19 @@ +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzers.os.rpm", + "displayName": "StellaOps RPM Analyzer", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.OS.Rpm.dll" + }, + "capabilities": [ + "os-analyzer", + "rpm" + ], + "metadata": { + "org.stellaops.analyzer.kind": "os", + "org.stellaops.analyzer.id": "rpm" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed new file mode 100644 index 00000000..5f676897 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed @@ -0,0 +1,23 @@ +C:Q1 +P:busybox +V:1.37.0-r0 +A:x86_64 +S:4096 +I:8192 +T:BusyBox utility set +U:https://busybox.net/ +L:GPL-2.0-only +o:busybox +m:Stella Ops +t:1729286400 +c:deadbeef12345678 +D:musl>=1.2.5-r0 ssl_client +p:/bin/sh +F:bin +R:busybox +Z:0f1e2d3c4b5a6978ffeeddccbbaa9988 +F:etc +R:profile +Z:11223344556677889900aabbccddeeff +a:0:0:0644 + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles new file mode 100644 index 00000000..6d5aadc6 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles @@ -0,0 +1 @@ +/etc/bash.bashrc abcdef1234567890 diff --git 
a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list new file mode 100644 index 00000000..9e6cf885 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list @@ -0,0 +1,3 @@ +/bin/bash +/etc/bash.bashrc +/usr/share/doc/bash/changelog.Debian.gz diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums new file mode 100644 index 00000000..e03cceab --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums @@ -0,0 +1,2 @@ +0123456789abcdef0123456789abcdef /bin/bash +abcdef1234567890abcdef1234567890 /etc/bash.bashrc diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status new file mode 100644 index 00000000..c3cdf87a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status @@ -0,0 +1,15 @@ +Package: bash +Status: install ok installed +Priority: important +Section: shells +Installed-Size: 1024 +Maintainer: Debian Developers +Architecture: amd64 +Version: 5.2.21-2 +Source: bash (5.2.21-2) +Homepage: https://www.gnu.org/software/bash/ +Description: GNU Bourne Again Shell + This is the GNU Bourne Again Shell. +Conffiles: + /etc/bash.bashrc abcdef1234567890 + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json new file mode 100644 index 00000000..a78aa55f --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json @@ -0,0 +1,74 @@ +[ + { + "analyzerId": "apk", + "durationMilliseconds": 0, + "packageCount": 1, + "fileEvidenceCount": 4, + "warnings": [], + "packages": [ + { + "packageUrl": "pkg:alpine/busybox@1.37.0-r0?arch=x86_64", + "name": "busybox", + "version": "1.37.0", + "architecture": "x86_64", + "epoch": null, + "release": "r0", + "sourcePackage": "busybox", + "license": "GPL-2.0-only", + "evidenceSource": "ApkDatabase", + "cveHints": [], + "provides": [ + "/bin/sh" + ], + "depends": [ + "musl\u003E=1.2.5-r0", + "ssl_client" + ], + "files": [ + { + "path": "/bin/", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": false, + "digests": {} + }, + { + "path": "/bin/busybox", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": false, + "digests": {} + }, + { + "path": "/etc/", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": false, + "digests": {} + }, + { + "path": "/etc/profile", + "layerDigest": null, + "sha256": "0f1e2d3c4b5a6978ffeeddccbbaa9988", + "sizeBytes": null, + "isConfigFile": false, + "digests": { + "sha256": "0f1e2d3c4b5a6978ffeeddccbbaa9988" + } + } + ], + "vendorMetadata": { + "buildTime": "1729286400", + "checksum": "deadbeef12345678", + "description": "BusyBox utility set", + "homepage": "https://busybox.net/", + "maintainer": "Stella Ops \u003Cops@stella-ops.org\u003E", + "origin": "busybox" + } + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json new file mode 100644 index 00000000..7688d54c --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json @@ 
-0,0 +1,64 @@ +[ + { + "analyzerId": "dpkg", + "durationMilliseconds": 0, + "packageCount": 1, + "fileEvidenceCount": 3, + "warnings": [], + "packages": [ + { + "packageUrl": "pkg:deb/debian/bash@5.2.21-2?arch=amd64", + "name": "bash", + "version": "5.2.21", + "architecture": "amd64", + "epoch": null, + "release": "2", + "sourcePackage": "bash", + "license": null, + "evidenceSource": "DpkgStatus", + "cveHints": [], + "provides": [], + "depends": [], + "files": [ + { + "path": "/bin/bash", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": false, + "digests": { + "md5": "0123456789abcdef0123456789abcdef" + } + }, + { + "path": "/etc/bash.bashrc", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": true, + "digests": { + "md5": "abcdef1234567890abcdef1234567890" + } + }, + { + "path": "/usr/share/doc/bash/changelog.Debian.gz", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": false, + "digests": {} + } + ], + "vendorMetadata": { + "dpkg:Installed-Size": "1024", + "homepage": "https://www.gnu.org/software/bash/", + "maintainer": "Debian Developers \u003Cdebian-devel@lists.debian.org\u003E", + "origin": null, + "priority": "important", + "section": "shells", + "source": "bash (5.2.21-2)" + } + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json new file mode 100644 index 00000000..3045dd96 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json @@ -0,0 +1,64 @@ +[ + { + "analyzerId": "rpm", + "durationMilliseconds": 0, + "packageCount": 1, + "fileEvidenceCount": 2, + "warnings": [], + "packages": [ + { + "packageUrl": "pkg:rpm/openssl-libs@1:3.2.1-8.el9?arch=x86_64", + "name": "openssl-libs", + "version": "3.2.1", + "architecture": "x86_64", + "epoch": "1", + "release": "8.el9", + "sourcePackage": "openssl-3.2.1-8.el9.src.rpm", + "license": "OpenSSL", + "evidenceSource": "RpmDatabase", + "cveHints": [ + "CVE-2025-1234" + ], + "provides": [ + "libcrypto.so.3()(64bit)", + "openssl-libs" + ], + "depends": [ + "glibc(x86-64) \u003E= 2.34" + ], + "files": [ + { + "path": "/etc/pki/tls/openssl.cnf", + "layerDigest": null, + "sha256": null, + "sizeBytes": null, + "isConfigFile": true, + "digests": { + "md5": "c0ffee" + } + }, + { + "path": "/usr/lib64/libcrypto.so.3", + "layerDigest": null, + "sha256": "abc123", + "sizeBytes": null, + "isConfigFile": false, + "digests": { + "sha256": "abc123" + } + } + ], + "vendorMetadata": { + "buildTime": null, + "description": null, + "installTime": null, + "rpm:summary": "TLS toolkit", + "sourceRpm": "openssl-3.2.1-8.el9.src.rpm", + "summary": "TLS toolkit", + "url": null, + "vendor": null + } + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs b/src/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs new file mode 100644 index 00000000..d564eec7 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs @@ -0,0 +1,76 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Scanner.Analyzers.OS.Mapping; +using StellaOps.Scanner.Core.Contracts; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Tests.Mapping; + +public class OsComponentMapperTests +{ + [Fact] + public void ToLayerFragments_ProducesDeterministicComponents() + { + var package = new OSPackageRecord( + analyzerId: "apk", + 
packageUrl: "pkg:alpine/busybox@1.37.0-r0?arch=x86_64", + name: "busybox", + version: "1.37.0", + architecture: "x86_64", + evidenceSource: PackageEvidenceSource.ApkDatabase, + release: "r0", + sourcePackage: "busybox", + license: "GPL-2.0-only", + depends: new[] { "musl>=1.2.5-r0", "ssl_client" }, + files: new[] + { + new OSPackageFileEvidence("/bin/busybox", sha256: "abc123", isConfigFile: false), + new OSPackageFileEvidence("/etc/profile", isConfigFile: true, digests: new Dictionary { ["md5"] = "deadbeef" }), + }, + vendorMetadata: new Dictionary + { + ["homepage"] = "https://busybox.net/", + }); + + var result = new OSPackageAnalyzerResult( + analyzerId: "apk", + packages: ImmutableArray.Create(package), + telemetry: new OSAnalyzerTelemetry(System.TimeSpan.Zero, 1, 2)); + + var fragments = OsComponentMapper.ToLayerFragments(new[] { result }); + + Assert.Single(fragments); + var fragment = fragments[0]; + Assert.StartsWith("sha256:", fragment.LayerDigest); + Assert.Single(fragment.Components); + + var component = fragment.Components[0]; + Assert.Equal(fragment.LayerDigest, component.LayerDigest); + Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Key); + Assert.Equal("busybox", component.Identity.Name); + Assert.Equal("1.37.0", component.Identity.Version); + Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Purl); + Assert.Equal("os-package", component.Identity.ComponentType); + Assert.Equal("busybox", component.Identity.Group); + Assert.Collection(component.Evidence, + evidence => + { + Assert.Equal("file", evidence.Kind); + Assert.Equal("/bin/busybox", evidence.Value); + Assert.Equal("abc123", evidence.Source); + }, + evidence => + { + Assert.Equal("config-file", evidence.Kind); + Assert.Equal("/etc/profile", evidence.Value); + Assert.Null(evidence.Source); + }); + Assert.Equal(new[] { "musl>=1.2.5-r0", "ssl_client" }, component.Dependencies); + Assert.False(component.Usage.UsedByEntrypoint); + Assert.NotNull(component.Metadata); + Assert.Equal(new[] { "GPL-2.0-only" }, component.Metadata!.Licenses); + Assert.Contains("stellaops.os.analyzer", component.Metadata.Properties!.Keys); + Assert.Equal("apk", component.Metadata.Properties!["stellaops.os.analyzer"]); + Assert.Equal("https://busybox.net/", component.Metadata.Properties!["vendor.homepage"]); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs b/src/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs new file mode 100644 index 00000000..3c8ffc36 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs @@ -0,0 +1,137 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scanner.Analyzers.OS; +using StellaOps.Scanner.Analyzers.OS.Apk; +using StellaOps.Scanner.Analyzers.OS.Dpkg; +using StellaOps.Scanner.Analyzers.OS.Rpm; +using StellaOps.Scanner.Analyzers.OS.Rpm.Internal; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Tests; + +public sealed class OsAnalyzerDeterminismTests +{ + [Fact] + public async Task ApkAnalyzerMatchesGolden() + { + using var fixture = FixtureManager.UseFixture("apk", out var rootPath); + var analyzer = new ApkPackageAnalyzer(NullLogger.Instance); + var context = CreateContext(rootPath); + + var result = await 
analyzer.AnalyzeAsync(context, CancellationToken.None); + var snapshot = SnapshotSerializer.Serialize(new[] { result }); + GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("apk.json")); + } + + [Fact] + public async Task DpkgAnalyzerMatchesGolden() + { + using var fixture = FixtureManager.UseFixture("dpkg", out var rootPath); + var analyzer = new DpkgPackageAnalyzer(NullLogger.Instance); + var context = CreateContext(rootPath); + + var result = await analyzer.AnalyzeAsync(context, CancellationToken.None); + var snapshot = SnapshotSerializer.Serialize(new[] { result }); + GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("dpkg.json")); + } + + [Fact] + public async Task RpmAnalyzerMatchesGolden() + { + var headers = new[] + { + CreateRpmHeader( + name: "openssl-libs", + version: "3.2.1", + architecture: "x86_64", + release: "8.el9", + epoch: "1", + license: "OpenSSL", + sourceRpm: "openssl-3.2.1-8.el9.src.rpm", + provides: new[] { "libcrypto.so.3()(64bit)", "openssl-libs" }, + requires: new[] { "glibc(x86-64) >= 2.34" }, + files: new[] + { + new RpmFileEntry("/usr/lib64/libcrypto.so.3", false, new Dictionary { ["sha256"] = "abc123" }), + new RpmFileEntry("/etc/pki/tls/openssl.cnf", true, new Dictionary { ["md5"] = "c0ffee" }) + }, + changeLogs: new[] { "Resolves: CVE-2025-1234" }, + metadata: new Dictionary { ["summary"] = "TLS toolkit" }) + }; + + var reader = new StubRpmDatabaseReader(headers); + var analyzer = new RpmPackageAnalyzer( + NullLogger.Instance, + reader); + + var context = CreateContext("/tmp/nonexistent"); + var result = await analyzer.AnalyzeAsync(context, CancellationToken.None); + var snapshot = SnapshotSerializer.Serialize(new[] { result }); + GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("rpm.json")); + } + + private static OSPackageAnalyzerContext CreateContext(string rootPath) + { + var metadata = new Dictionary + { + [ScanMetadataKeys.RootFilesystemPath] = rootPath + }; + + return new OSPackageAnalyzerContext(rootPath, workspacePath: null, TimeProvider.System, NullLoggerFactory.Instance.CreateLogger("os-analyzer-tests"), metadata); + } + + private static RpmHeader CreateRpmHeader( + string name, + string version, + string architecture, + string? release, + string? epoch, + string? license, + string? sourceRpm, + IReadOnlyList provides, + IReadOnlyList requires, + IReadOnlyList files, + IReadOnlyList changeLogs, + IReadOnlyDictionary metadata) + { + return new RpmHeader( + name, + version, + architecture, + release, + epoch, + metadata.TryGetValue("summary", out var summary) ? summary : null, + metadata.TryGetValue("description", out var description) ? description : null, + license, + sourceRpm, + metadata.TryGetValue("url", out var url) ? url : null, + metadata.TryGetValue("vendor", out var vendor) ? 
vendor : null, + buildTime: null, + installTime: null, + provides, + provideVersions: provides.Select(_ => string.Empty).ToArray(), + requires, + requireVersions: requires.Select(_ => string.Empty).ToArray(), + files, + changeLogs, + metadata); + } + + private sealed class StubRpmDatabaseReader : IRpmDatabaseReader + { + private readonly IReadOnlyList _headers; + + public StubRpmDatabaseReader(IReadOnlyList headers) + { + _headers = headers; + } + + public IReadOnlyList ReadHeaders(string rootPath, CancellationToken cancellationToken) + => _headers; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj b/src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj new file mode 100644 index 00000000..8af1cde3 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj @@ -0,0 +1,28 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs new file mode 100644 index 00000000..bf949add --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs @@ -0,0 +1,75 @@ +using System; +using System.IO; + +namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities; + +internal static class FixtureManager +{ + public static IDisposable UseFixture(string name, out string rootPath) + { + var basePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", name); + if (!Directory.Exists(basePath)) + { + throw new DirectoryNotFoundException($"Fixture '{name}' was not found at '{basePath}'."); + } + + var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-os-fixture", name, Guid.NewGuid().ToString("n")); + CopyDirectory(basePath, tempRoot); + rootPath = tempRoot; + return new Disposable(() => DeleteDirectory(tempRoot)); + } + + public static string GetGoldenPath(string name) + => Path.Combine(AppContext.BaseDirectory, "Fixtures", "goldens", name); + + private static void CopyDirectory(string source, string destination) + { + Directory.CreateDirectory(destination); + foreach (var file in Directory.GetFiles(source, "*", SearchOption.AllDirectories)) + { + var relative = Path.GetRelativePath(source, file); + var target = Path.Combine(destination, relative); + Directory.CreateDirectory(Path.GetDirectoryName(target)!); + File.Copy(file, target); + } + } + + private static void DeleteDirectory(string path) + { + if (!Directory.Exists(path)) + { + return; + } + + try + { + Directory.Delete(path, recursive: true); + } + catch + { + // best-effort cleanup + } + } + + private sealed class Disposable : IDisposable + { + private readonly Action _dispose; + private bool _disposed; + + public Disposable(Action dispose) + { + _dispose = dispose; + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _disposed = true; + _dispose(); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs new file mode 100644 index 00000000..1ef631c8 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs @@ -0,0 +1,41 @@ +using System; +using System.IO; +using Xunit; + +namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities; + +internal static class GoldenAssert +{ + private const string UpdateEnvironmentVariable = 
"UPDATE_OS_ANALYZER_FIXTURES"; + + public static void MatchSnapshot(string snapshot, string goldenPath) + { + var directory = Path.GetDirectoryName(goldenPath); + if (!string.IsNullOrWhiteSpace(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + snapshot = Normalize(snapshot); + + if (!File.Exists(goldenPath)) + { + File.WriteAllText(goldenPath, snapshot); + return; + } + + if (ShouldUpdate()) + { + File.WriteAllText(goldenPath, snapshot); + } + + var expected = Normalize(File.ReadAllText(goldenPath)); + Assert.Equal(expected.TrimEnd(), snapshot.TrimEnd()); + } + + private static bool ShouldUpdate() + => string.Equals(Environment.GetEnvironmentVariable(UpdateEnvironmentVariable), "1", StringComparison.OrdinalIgnoreCase); + + private static string Normalize(string value) + => value.Replace("\r\n", "\n"); +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs new file mode 100644 index 00000000..29d15c96 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Scanner.Analyzers.OS; + +namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities; + +internal static class SnapshotSerializer +{ + private static readonly JsonSerializerOptions Options = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = true, + Converters = + { + new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) + } + }; + + public static string Serialize(IEnumerable results) + { + var ordered = results + .OrderBy(r => r.AnalyzerId, StringComparer.OrdinalIgnoreCase) + .Select(result => new AnalyzerSnapshot + { + AnalyzerId = result.AnalyzerId, + PackageCount = result.Telemetry.PackageCount, + FileEvidenceCount = result.Telemetry.FileEvidenceCount, + DurationMilliseconds = 0, + Warnings = result.Warnings.Select(w => new WarningSnapshot(w.Code, w.Message)).ToArray(), + Packages = result.Packages + .OrderBy(p => p, Comparer.Default) + .Select(p => new PackageSnapshot + { + PackageUrl = p.PackageUrl, + Name = p.Name, + Version = p.Version, + Architecture = p.Architecture, + Epoch = p.Epoch, + Release = p.Release, + SourcePackage = p.SourcePackage, + License = p.License, + EvidenceSource = p.EvidenceSource.ToString(), + CveHints = p.CveHints, + Provides = p.Provides, + Depends = p.Depends, + Files = p.Files.Select(f => new FileSnapshot + { + Path = f.Path, + LayerDigest = f.LayerDigest, + Sha256 = f.Sha256, + SizeBytes = f.SizeBytes, + IsConfigFile = f.IsConfigFile, + Digests = f.Digests.OrderBy(kv => kv.Key, StringComparer.OrdinalIgnoreCase).ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.OrdinalIgnoreCase) + }).ToArray(), + VendorMetadata = p.VendorMetadata.OrderBy(kv => kv.Key, StringComparer.Ordinal).ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.Ordinal) + }).ToArray() + }) + .ToArray(); + + return JsonSerializer.Serialize(ordered, Options); + } + + private sealed record AnalyzerSnapshot + { + public string AnalyzerId { get; init; } = string.Empty; + public double DurationMilliseconds { get; init; } + public int PackageCount { get; init; } + public int FileEvidenceCount { get; init; } + public IReadOnlyList Warnings { get; init; } = Array.Empty(); + public IReadOnlyList Packages { 
get; init; } = Array.Empty(); + } + + private sealed record WarningSnapshot(string Code, string Message); + + private sealed record PackageSnapshot + { + public string PackageUrl { get; init; } = string.Empty; + public string Name { get; init; } = string.Empty; + public string Version { get; init; } = string.Empty; + public string Architecture { get; init; } = string.Empty; + public string? Epoch { get; init; } + public string? Release { get; init; } + public string? SourcePackage { get; init; } + public string? License { get; init; } + public string EvidenceSource { get; init; } = string.Empty; + public IReadOnlyList CveHints { get; init; } = Array.Empty(); + public IReadOnlyList Provides { get; init; } = Array.Empty(); + public IReadOnlyList Depends { get; init; } = Array.Empty(); + public IReadOnlyList Files { get; init; } = Array.Empty(); + public IReadOnlyDictionary VendorMetadata { get; init; } = new Dictionary(); + } + + private sealed record FileSnapshot + { + public string Path { get; init; } = string.Empty; + public string? LayerDigest { get; init; } + public string? Sha256 { get; init; } + public long? SizeBytes { get; init; } + public bool? IsConfigFile { get; init; } + public IReadOnlyDictionary Digests { get; init; } = new Dictionary(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/AGENTS.md b/src/StellaOps.Scanner.Analyzers.OS/AGENTS.md new file mode 100644 index 00000000..b5426899 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/AGENTS.md @@ -0,0 +1,40 @@ +# AGENTS +## Role +Design and ship deterministic Linux operating-system analyzers that transform container root filesystems into canonical package evidence for SBOM emission. + +## Scope +- Provide shared helpers for reading apk, dpkg, and rpm metadata and emitting normalized package identities with provenance. +- Implement analyzer plug-ins for Alpine (apk), Debian (dpkg), and RPM-based distributions that operate on extracted rootfs snapshots. +- Enrich package records with vendor-origin metadata (source packages, declared licenses, CVE hints) and evidence linking files to packages. +- Expose restart-time plug-in manifests so the Scanner.Worker can load analyzers in offline or air-gapped environments. +- Supply deterministic fixtures and a regression harness that verifies analyzer outputs remain stable across runs. + +## Participants +- `StellaOps.Scanner.Core` for shared contracts, observability, and plug-in catalog guardrails. +- `StellaOps.Scanner.Worker` which executes analyzers inside the scan pipeline. +- `StellaOps.Scanner.Cache` (future) for layer cache integration; analyzers must be cache-aware via deterministic inputs/outputs. +- `StellaOps.Scanner.Emit` and `StellaOps.Scanner.Diff` rely on analyzer outputs to build SBOMs and change reports. + +## Interfaces & Contracts +- Analyzers implement `IOSPackageAnalyzer` (defined in this module) and register via plug-in manifests; they must be restart-time only. +- Input rootfs paths are read-only; analyzers must never mutate files and must tolerate missing metadata gracefully. +- Package records emit canonical purls (`pkg:alpine`, `pkg:deb`, `pkg:rpm`) plus NEVRA/EVR details, source package identifiers, declared licenses, and evidence (file lists with layer attribution placeholders). +- Outputs must be deterministic: ordering is lexicographic, timestamps removed or normalized, hashes (SHA256) calculated when required. + +## In/Out of Scope +In scope: +- Linux apk/dpkg/rpm analyzers, shared helpers, plug-in manifests, deterministic regression harness. 
+ +Out of scope: +- Windows MSI/SxS analyzers, native (ELF) analyzers, language analyzers, EntryTrace pipeline, or SBOM assembly logic (handled by other guilds). + +## Observability & Security Expectations +- Emit structured logs with correlation/job identifiers provided by `StellaOps.Scanner.Core`. +- Surface metrics for package counts, elapsed time, and cache hits (metrics hooks stubbed until Cache module lands). +- Do not perform outbound network calls; operate entirely on provided filesystem snapshot. +- Validate plug-in manifests via `IPluginCatalogGuard` to enforce restart-only loading. + +## Tests +- `StellaOps.Scanner.Analyzers.OS.Tests` hosts regression tests with canned rootfs fixtures to verify determinism. +- Fixtures store expected analyzer outputs under `Fixtures/` with golden JSON (normalized, sorted). +- Tests cover apk/dpkg/rpm analyzers, shared helper edge cases, and plug-in catalog enforcement. diff --git a/src/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs b/src/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs new file mode 100644 index 00000000..fda45b69 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs @@ -0,0 +1,24 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Scanner.Analyzers.OS.Abstractions; + +/// +/// Represents a deterministic analyzer capable of extracting operating-system package +/// evidence from a container root filesystem snapshot. +/// +public interface IOSPackageAnalyzer +{ + /// + /// Gets the identifier used for logging and manifest composition (e.g. apk, dpkg). + /// + string AnalyzerId { get; } + + /// + /// Executes the analyzer against the provided context, producing a deterministic set of packages. + /// + /// Analysis context surfaced by the worker. + /// Cancellation token propagated from the orchestration pipeline. + /// A result describing discovered packages, metadata, and telemetry. + ValueTask AnalyzeAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs b/src/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs new file mode 100644 index 00000000..db8f73ec --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs @@ -0,0 +1,41 @@ +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Analyzers.OS.Abstractions; + +namespace StellaOps.Scanner.Analyzers.OS.Analyzers; + +public abstract class OsPackageAnalyzerBase : IOSPackageAnalyzer +{ + protected OsPackageAnalyzerBase(ILogger logger) + { + Logger = logger ?? 
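To make the `IOSPackageAnalyzer` contract concrete, here is a hypothetical no-op implementation. This is a sketch only, assuming `AnalyzeAsync` returns `ValueTask<OSPackageAnalyzerResult>` as `OsPackageAnalyzerBase` does; production analyzers derive from that base class instead of implementing the interface directly.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Analyzers.OS;
using StellaOps.Scanner.Analyzers.OS.Abstractions;

// Hypothetical analyzer illustrating the contract: read-only access to the rootfs,
// deterministic output, telemetry describing the work performed.
internal sealed class NoopPackageAnalyzer : IOSPackageAnalyzer
{
    public string AnalyzerId => "noop";

    public ValueTask<OSPackageAnalyzerResult> AnalyzeAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        cancellationToken.ThrowIfCancellationRequested();

        // A real analyzer would walk context.RootPath here and emit sorted package records.
        var telemetry = new OSAnalyzerTelemetry(TimeSpan.Zero, 0, 0);
        return ValueTask.FromResult(new OSPackageAnalyzerResult("noop", Array.Empty<OSPackageRecord>(), telemetry));
    }
}
```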
throw new ArgumentNullException(nameof(logger)); + } + + public abstract string AnalyzerId { get; } + + protected ILogger Logger { get; } + + public async ValueTask AnalyzeAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var stopwatch = Stopwatch.StartNew(); + var packages = await ExecuteCoreAsync(context, cancellationToken).ConfigureAwait(false); + stopwatch.Stop(); + + var packageCount = packages.Count; + var fileEvidenceCount = 0; + foreach (var package in packages) + { + fileEvidenceCount += package.Files.Count; + } + + var telemetry = new OSAnalyzerTelemetry(stopwatch.Elapsed, packageCount, fileEvidenceCount); + return new OSPackageAnalyzerResult(AnalyzerId, packages, telemetry); + } + + protected abstract ValueTask> ExecuteCoreAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs b/src/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs new file mode 100644 index 00000000..5d27551f --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Text.RegularExpressions; + +namespace StellaOps.Scanner.Analyzers.OS.Helpers; + +public static class CveHintExtractor +{ + private static readonly Regex CveRegex = new(@"CVE-\d{4}-\d{4,7}", RegexOptions.IgnoreCase | RegexOptions.Compiled); + + public static IReadOnlyList Extract(params string?[] inputs) + { + if (inputs is { Length: > 0 }) + { + var set = new SortedSet(StringComparer.OrdinalIgnoreCase); + foreach (var input in inputs) + { + if (string.IsNullOrWhiteSpace(input)) + { + continue; + } + + foreach (Match match in CveRegex.Matches(input)) + { + set.Add(match.Value.ToUpperInvariant()); + } + } + + if (set.Count > 0) + { + return new ReadOnlyCollection(set.ToArray()); + } + } + + return Array.Empty(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs b/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs new file mode 100644 index 00000000..f81f4fb7 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs @@ -0,0 +1,56 @@ +using System; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.OS.Helpers; + +public static class PackageUrlBuilder +{ + public static string BuildAlpine(string name, string version, string architecture) + => $"pkg:alpine/{Escape(name)}@{Escape(version)}?arch={EscapeQuery(architecture)}"; + + public static string BuildDebian(string distribution, string name, string version, string architecture) + { + var distro = string.IsNullOrWhiteSpace(distribution) ? "debian" : distribution.Trim().ToLowerInvariant(); + return $"pkg:deb/{Escape(distro)}/{Escape(name)}@{Escape(version)}?arch={EscapeQuery(architecture)}"; + } + + public static string BuildRpm(string name, string? epoch, string version, string? release, string architecture) + { + var versionComponent = string.IsNullOrWhiteSpace(epoch) + ? Escape(version) + : $"{Escape(epoch)}:{Escape(version)}"; + + var releaseComponent = string.IsNullOrWhiteSpace(release) + ? 
string.Empty + : $"-{Escape(release!)}"; + + return $"pkg:rpm/{Escape(name)}@{versionComponent}{releaseComponent}?arch={EscapeQuery(architecture)}"; + } + + private static string Escape(string value) + { + ArgumentException.ThrowIfNullOrWhiteSpace(value); + return Uri.EscapeDataString(value.Trim()); + } + + private static string EscapeQuery(string value) + { + ArgumentException.ThrowIfNullOrWhiteSpace(value); + var trimmed = value.Trim(); + var builder = new StringBuilder(trimmed.Length); + foreach (var ch in trimmed) + { + if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || (ch >= '0' && ch <= '9') || ch == '-' || ch == '_' || ch == '.' || ch == '~') + { + builder.Append(ch); + } + else + { + builder.Append('%'); + builder.Append(((int)ch).ToString("X2")); + } + } + + return builder.ToString(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs b/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs new file mode 100644 index 00000000..93f085af --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs @@ -0,0 +1,57 @@ +using System; +using System.Text.RegularExpressions; + +namespace StellaOps.Scanner.Analyzers.OS.Helpers; + +public static class PackageVersionParser +{ + private static readonly Regex DebianVersionRegex = new(@"^(?<epoch>\d+):(?<version>.+)$", RegexOptions.Compiled); + private static readonly Regex DebianRevisionRegex = new(@"^(?<base>.+?)(?<revision>-[^-]+)?$", RegexOptions.Compiled); + private static readonly Regex ApkVersionRegex = new(@"^(?<version>.+?)(?:-(?<release>r\d+))?$", RegexOptions.Compiled); + + public static DebianVersionParts ParseDebianVersion(string version) + { + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + var trimmed = version.Trim(); + string? epoch = null; + string baseVersion = trimmed; + + var epochMatch = DebianVersionRegex.Match(trimmed); + if (epochMatch.Success) + { + epoch = epochMatch.Groups["epoch"].Value; + baseVersion = epochMatch.Groups["version"].Value; + } + + string? revision = null; + var revisionMatch = DebianRevisionRegex.Match(baseVersion); + if (revisionMatch.Success && revisionMatch.Groups["revision"].Success) + { + revision = revisionMatch.Groups["revision"].Value.TrimStart('-'); + baseVersion = revisionMatch.Groups["base"].Value; + } + + return new DebianVersionParts(epoch, baseVersion, revision, trimmed); + } + + public static ApkVersionParts ParseApkVersion(string version) + { + ArgumentException.ThrowIfNullOrWhiteSpace(version); + var match = ApkVersionRegex.Match(version.Trim()); + if (!match.Success) + { + return new ApkVersionParts(null, version.Trim()); + } + + var release = match.Groups["release"].Success ? match.Groups["release"].Value : null; + return new ApkVersionParts(release, match.Groups["version"].Value); + } +} + +public sealed record DebianVersionParts(string? Epoch, string UpstreamVersion, string? Revision, string Original) +{ + public string ForPackageUrl => Epoch is null ? Original : $"{Epoch}:{UpstreamVersion}{(Revision is null ? string.Empty : "-" + Revision)}"; +} + +public sealed record ApkVersionParts(string?
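As a quick sanity check, the helpers above line up with the fixture values used elsewhere in this change. A sketch, not part of the patch; expected values are shown as comments:

```csharp
using StellaOps.Scanner.Analyzers.OS.Helpers;

var deb = PackageVersionParser.ParseDebianVersion("5.2.21-2");
// deb.Epoch == null, deb.UpstreamVersion == "5.2.21", deb.Revision == "2"

var apk = PackageVersionParser.ParseApkVersion("1.37.0-r0");
// apk.BaseVersion == "1.37.0", apk.Release == "r0"

var purl = PackageUrlBuilder.BuildRpm("openssl-libs", "1", "3.2.1", "8.el9", "x86_64");
// "pkg:rpm/openssl-libs@1:3.2.1-8.el9?arch=x86_64" (matches Fixtures/goldens/rpm.json)
```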
Release, string BaseVersion); diff --git a/src/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs b/src/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs new file mode 100644 index 00000000..24099d25 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs @@ -0,0 +1,173 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Analyzers.OS.Mapping; + +public static class OsComponentMapper +{ + private const string ComponentType = "os-package"; + + public static ImmutableArray ToLayerFragments(IEnumerable results) + { + ArgumentNullException.ThrowIfNull(results); + + var builder = ImmutableArray.CreateBuilder(); + foreach (var result in results) + { + if (result is null || string.IsNullOrWhiteSpace(result.AnalyzerId)) + { + continue; + } + + var layerDigest = ComputeLayerDigest(result.AnalyzerId); + var components = BuildComponentRecords(result.AnalyzerId, layerDigest, result.Packages); + if (components.IsEmpty) + { + continue; + } + + builder.Add(LayerComponentFragment.Create(layerDigest, components)); + } + + return builder.ToImmutable(); + } + + private static ImmutableArray BuildComponentRecords( + string analyzerId, + string layerDigest, + IEnumerable packages) + { + var records = ImmutableArray.CreateBuilder(); + foreach (var package in packages ?? Enumerable.Empty()) + { + records.Add(ToComponentRecord(analyzerId, layerDigest, package)); + } + + return records.ToImmutable(); + } + + private static ComponentRecord ToComponentRecord(string analyzerId, string layerDigest, OSPackageRecord package) + { + var identity = ComponentIdentity.Create( + key: package.PackageUrl, + name: package.Name, + version: package.Version, + purl: package.PackageUrl, + componentType: ComponentType, + group: package.SourcePackage); + + var evidence = package.Files.Select(file => + new ComponentEvidence + { + Kind = file.IsConfigFile is true ? "config-file" : "file", + Value = file.Path, + Source = ResolvePrimaryDigest(file), + }).ToImmutableArray(); + + var dependencies = package.Depends.Count == 0 + ? ImmutableArray.Empty + : ImmutableArray.CreateRange(package.Depends); + + var metadata = BuildMetadata(analyzerId, package); + + return new ComponentRecord + { + Identity = identity, + LayerDigest = layerDigest, + Evidence = evidence, + Dependencies = dependencies, + Metadata = metadata, + Usage = ComponentUsage.Unused, + }; + } + + private static ComponentMetadata? 
BuildMetadata(string analyzerId, OSPackageRecord package) + { + var properties = new SortedDictionary(StringComparer.Ordinal) + { + ["stellaops.os.analyzer"] = analyzerId, + ["stellaops.os.architecture"] = package.Architecture, + ["stellaops.os.evidenceSource"] = package.EvidenceSource.ToString(), + }; + + if (!string.IsNullOrWhiteSpace(package.SourcePackage)) + { + properties["stellaops.os.sourcePackage"] = package.SourcePackage!; + } + + if (package.CveHints.Count > 0) + { + properties["stellaops.os.cveHints"] = string.Join(",", package.CveHints); + } + + if (package.Provides.Count > 0) + { + properties["stellaops.os.provides"] = string.Join(",", package.Provides); + } + + foreach (var pair in package.VendorMetadata) + { + if (string.IsNullOrWhiteSpace(pair.Key) || string.IsNullOrWhiteSpace(pair.Value)) + { + continue; + } + + properties[$"vendor.{pair.Key}"] = pair.Value!.Trim(); + } + + foreach (var file in package.Files) + { + foreach (var digest in file.Digests) + { + if (string.IsNullOrWhiteSpace(digest.Value)) + { + continue; + } + + properties[$"digest.{digest.Key}.{NormalizePathKey(file.Path)}"] = digest.Value.Trim(); + } + } + + IReadOnlyList? licenses = null; + if (!string.IsNullOrWhiteSpace(package.License)) + { + licenses = new[] { package.License!.Trim() }; + } + + return new ComponentMetadata + { + Licenses = licenses, + Properties = properties.Count == 0 ? null : properties, + }; + } + + private static string NormalizePathKey(string path) + => path.Replace('/', '_').Replace('\\', '_').Trim('_'); + + private static string? ResolvePrimaryDigest(OSPackageFileEvidence file) + { + if (!string.IsNullOrWhiteSpace(file.Sha256)) + { + return file.Sha256; + } + + if (file.Digests.TryGetValue("sha256", out var sha256) && !string.IsNullOrWhiteSpace(sha256)) + { + return sha256; + } + + return null; + } + + private static string ComputeLayerDigest(string analyzerId) + { + var normalized = $"stellaops:os:{analyzerId.Trim().ToLowerInvariant()}"; + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(normalized)); + return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}"; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs new file mode 100644 index 00000000..f7238317 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs @@ -0,0 +1,13 @@ +using System; + +namespace StellaOps.Scanner.Analyzers.OS; + +public sealed record AnalyzerWarning(string Code, string Message) +{ + public static AnalyzerWarning From(string code, string message) + { + ArgumentException.ThrowIfNullOrWhiteSpace(code); + ArgumentException.ThrowIfNullOrWhiteSpace(message); + return new AnalyzerWarning(code.Trim(), message.Trim()); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs new file mode 100644 index 00000000..e769064a --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs @@ -0,0 +1,5 @@ +using System; + +namespace StellaOps.Scanner.Analyzers.OS; + +public sealed record OSAnalyzerTelemetry(TimeSpan Duration, int PackageCount, int FileEvidenceCount); diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs new file mode 100644 index 00000000..aa3810ff --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs @@ -0,0 +1,59 @@ +using System; +using 
System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scanner.Analyzers.OS; + +/// +/// Carries the immutable context shared across analyzer executions for a given scan job. +/// +public sealed class OSPackageAnalyzerContext +{ + private static readonly IReadOnlyDictionary EmptyMetadata = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); + + public OSPackageAnalyzerContext( + string rootPath, + string? workspacePath, + TimeProvider timeProvider, + ILogger logger, + IReadOnlyDictionary? metadata = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(rootPath); + RootPath = Path.GetFullPath(rootPath); + WorkspacePath = string.IsNullOrWhiteSpace(workspacePath) ? null : Path.GetFullPath(workspacePath!); + TimeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + Logger = logger ?? throw new ArgumentNullException(nameof(logger)); + Metadata = metadata is null or { Count: 0 } + ? EmptyMetadata + : new ReadOnlyDictionary(new Dictionary(metadata, StringComparer.Ordinal)); + } + + /// + /// Gets the absolute path to the reconstructed root filesystem of the scanned image/layer set. + /// + public string RootPath { get; } + + /// + /// Gets the absolute path to a writable workspace root the analyzer may use for transient state (optional). + /// The sandbox guarantees cleanup post-run. + /// + public string? WorkspacePath { get; } + + /// + /// Gets the time provider aligned with the scanner's deterministic clock. + /// + public TimeProvider TimeProvider { get; } + + /// + /// Gets the structured logger scoped to the analyzer execution. + /// + public ILogger Logger { get; } + + /// + /// Gets metadata forwarded by prior pipeline stages (image digest, layer digests, tenant, etc.). + /// + public IReadOnlyDictionary Metadata { get; } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs new file mode 100644 index 00000000..6646b851 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs @@ -0,0 +1,40 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; + +namespace StellaOps.Scanner.Analyzers.OS; + +public sealed class OSPackageAnalyzerResult +{ + private static readonly IReadOnlyList EmptyPackages = + new ReadOnlyCollection(Array.Empty()); + + private static readonly IReadOnlyList EmptyWarnings = + new ReadOnlyCollection(Array.Empty()); + + public OSPackageAnalyzerResult( + string analyzerId, + IEnumerable? packages, + OSAnalyzerTelemetry telemetry, + IEnumerable? warnings = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(analyzerId); + AnalyzerId = analyzerId.Trim(); + Packages = packages is null + ? EmptyPackages + : new ReadOnlyCollection(packages.ToArray()); + Telemetry = telemetry; + Warnings = warnings is null + ? 
EmptyWarnings + : new ReadOnlyCollection(warnings.ToArray()); + } + + public string AnalyzerId { get; } + + public IReadOnlyList Packages { get; } + + public OSAnalyzerTelemetry Telemetry { get; } + + public IReadOnlyList Warnings { get; } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs new file mode 100644 index 00000000..fa36904b --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; + +namespace StellaOps.Scanner.Analyzers.OS; + +public sealed class OSPackageFileEvidence : IComparable +{ + public OSPackageFileEvidence( + string path, + string? layerDigest = null, + string? sha256 = null, + long? sizeBytes = null, + bool? isConfigFile = null, + IDictionary? digests = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + Path = Normalize(path); + LayerDigest = NormalizeDigest(layerDigest); + var digestMap = digests is null + ? new SortedDictionary(StringComparer.OrdinalIgnoreCase) + : new SortedDictionary(digests, StringComparer.OrdinalIgnoreCase); + + if (!string.IsNullOrWhiteSpace(sha256)) + { + digestMap["sha256"] = NormalizeHash(sha256)!; + } + + Digests = new ReadOnlyDictionary(digestMap); + Sha256 = Digests.TryGetValue("sha256", out var normalizedSha256) ? normalizedSha256 : null; + SizeBytes = sizeBytes; + IsConfigFile = isConfigFile; + } + + public string Path { get; } + + public string? LayerDigest { get; } + + public string? Sha256 { get; } + + public IReadOnlyDictionary Digests { get; } + + public long? SizeBytes { get; } + + public bool? IsConfigFile { get; } + + public int CompareTo(OSPackageFileEvidence? other) + { + if (other is null) + { + return 1; + } + + return string.CompareOrdinal(Path, other.Path); + } + + public override string ToString() + => $"{Path} ({SizeBytes?.ToString("N0", CultureInfo.InvariantCulture) ?? "?"} bytes)"; + + private static string Normalize(string path) + { + var trimmed = path.Trim(); + if (!trimmed.StartsWith('/')) + { + trimmed = "/" + trimmed; + } + + return trimmed.Replace('\\', '/'); + } + + private static string? NormalizeDigest(string? digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var trimmed = digest.Trim(); + if (!trimmed.Contains(':', StringComparison.Ordinal)) + { + return trimmed.ToLowerInvariant(); + } + + var parts = trimmed.Split(':', 2, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + return parts.Length == 2 + ? $"{parts[0].ToLowerInvariant()}:{parts[1].ToLowerInvariant()}" + : trimmed.ToLowerInvariant(); + } + + private static string? NormalizeHash(string? 
hash) + { + if (string.IsNullOrWhiteSpace(hash)) + { + return null; + } + + return hash.Trim().ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs new file mode 100644 index 00000000..5faaaeb4 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; + +namespace StellaOps.Scanner.Analyzers.OS; + +public sealed class OSPackageRecord : IComparable +{ + private static readonly IReadOnlyList EmptyList = + new ReadOnlyCollection(Array.Empty()); + + private static readonly IReadOnlyList EmptyFiles = + new ReadOnlyCollection(Array.Empty()); + + private static readonly IReadOnlyDictionary EmptyMetadata = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); + + public OSPackageRecord( + string analyzerId, + string packageUrl, + string name, + string version, + string architecture, + PackageEvidenceSource evidenceSource, + string? epoch = null, + string? release = null, + string? sourcePackage = null, + string? license = null, + IEnumerable? cveHints = null, + IEnumerable? provides = null, + IEnumerable? depends = null, + IEnumerable? files = null, + IDictionary? vendorMetadata = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(analyzerId); + ArgumentException.ThrowIfNullOrWhiteSpace(packageUrl); + ArgumentException.ThrowIfNullOrWhiteSpace(name); + ArgumentException.ThrowIfNullOrWhiteSpace(version); + ArgumentException.ThrowIfNullOrWhiteSpace(architecture); + + AnalyzerId = analyzerId.Trim(); + PackageUrl = packageUrl.Trim(); + Name = name.Trim(); + Version = version.Trim(); + Architecture = architecture.Trim(); + EvidenceSource = evidenceSource; + Epoch = string.IsNullOrWhiteSpace(epoch) ? null : epoch.Trim(); + Release = string.IsNullOrWhiteSpace(release) ? null : release.Trim(); + SourcePackage = string.IsNullOrWhiteSpace(sourcePackage) ? null : sourcePackage.Trim(); + License = string.IsNullOrWhiteSpace(license) ? null : license.Trim(); + CveHints = AsReadOnlyList(cveHints); + Provides = AsReadOnlyList(provides); + Depends = AsReadOnlyList(depends); + Files = files is null + ? EmptyFiles + : new ReadOnlyCollection(files.OrderBy(f => f).ToArray()); + VendorMetadata = vendorMetadata is null or { Count: 0 } + ? EmptyMetadata + : new ReadOnlyDictionary( + new SortedDictionary(vendorMetadata, StringComparer.Ordinal)); + } + + public string AnalyzerId { get; } + + public string PackageUrl { get; } + + public string Name { get; } + + public string Version { get; } + + public string Architecture { get; } + + public string? Epoch { get; } + + public string? Release { get; } + + public string? SourcePackage { get; } + + public string? License { get; } + + public IReadOnlyList CveHints { get; } + + public IReadOnlyList Provides { get; } + + public IReadOnlyList Depends { get; } + + public IReadOnlyList Files { get; } + + public IReadOnlyDictionary VendorMetadata { get; } + + public PackageEvidenceSource EvidenceSource { get; } + + public int CompareTo(OSPackageRecord? 
other) + { + if (other is null) + { + return 1; + } + + var cmp = string.CompareOrdinal(PackageUrl, other.PackageUrl); + if (cmp != 0) + { + return cmp; + } + + cmp = string.CompareOrdinal(Name, other.Name); + if (cmp != 0) + { + return cmp; + } + + cmp = string.CompareOrdinal(Version, other.Version); + if (cmp != 0) + { + return cmp; + } + + return string.CompareOrdinal(Architecture, other.Architecture); + } + + private static IReadOnlyList AsReadOnlyList(IEnumerable? values) + { + if (values is null) + { + return EmptyList; + } + + var buffer = values + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToArray(); + + return buffer.Length == 0 ? EmptyList : new ReadOnlyCollection(buffer); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs b/src/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs new file mode 100644 index 00000000..f0ad3f37 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs @@ -0,0 +1,9 @@ +namespace StellaOps.Scanner.Analyzers.OS; + +public enum PackageEvidenceSource +{ + Unknown = 0, + ApkDatabase, + DpkgStatus, + RpmDatabase, +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs b/src/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs new file mode 100644 index 00000000..d2332714 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs @@ -0,0 +1,16 @@ +using System; +using StellaOps.Plugin; +using StellaOps.Scanner.Analyzers.OS.Abstractions; + +namespace StellaOps.Scanner.Analyzers.OS.Plugin; + +/// +/// Represents a restart-time plug-in that publishes a single . +/// +public interface IOSAnalyzerPlugin : IAvailabilityPlugin +{ + /// + /// Creates the analyzer instance bound to the host service provider. + /// + IOSPackageAnalyzer CreateAnalyzer(IServiceProvider services); +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs b/src/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs new file mode 100644 index 00000000..0f29bf33 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Linq; +using System.Reflection; +using Microsoft.Extensions.Logging; +using StellaOps.Plugin; +using StellaOps.Plugin.Hosting; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Core.Security; + +namespace StellaOps.Scanner.Analyzers.OS.Plugin; + +public sealed class OsAnalyzerPluginCatalog +{ + private readonly ILogger _logger; + private readonly IPluginCatalogGuard _guard; + private readonly ConcurrentDictionary _assemblies = new(StringComparer.OrdinalIgnoreCase); + private IReadOnlyList _plugins = Array.Empty(); + + public OsAnalyzerPluginCatalog(IPluginCatalogGuard guard, ILogger logger) + { + _guard = guard ?? throw new ArgumentNullException(nameof(guard)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public IReadOnlyCollection Plugins => _plugins; + + public void LoadFromDirectory(string directory, bool seal = true) + { + ArgumentException.ThrowIfNullOrWhiteSpace(directory); + var fullDirectory = Path.GetFullPath(directory); + + var options = new PluginHostOptions + { + PluginsDirectory = fullDirectory, + EnsureDirectoryExists = false, + RecursiveSearch = false, + }; + options.SearchPatterns.Add("StellaOps.Scanner.Analyzers.*.dll"); + + var result = PluginHost.LoadPlugins(options, _logger); + if (result.Plugins.Count == 0) + { + _logger.LogWarning("No OS analyzer plug-ins discovered under '{Directory}'.", fullDirectory); + } + + foreach (var descriptor in result.Plugins) + { + try + { + _guard.EnsureRegistrationAllowed(descriptor.AssemblyPath); + _assemblies[descriptor.AssemblyPath] = descriptor.Assembly; + _logger.LogInformation("Registered OS analyzer plug-in assembly '{Assembly}' from '{Path}'.", + descriptor.Assembly.FullName, + descriptor.AssemblyPath); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to register analyzer plug-in '{Path}'.", descriptor.AssemblyPath); + } + } + + RefreshPluginList(); + + if (seal) + { + _guard.Seal(); + } + } + + public IReadOnlyList CreateAnalyzers(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + + if (_plugins.Count == 0) + { + _logger.LogWarning("No OS analyzer plug-ins available; scanning will skip OS package extraction."); + return Array.Empty(); + } + + var analyzers = new List(_plugins.Count); + foreach (var plugin in _plugins) + { + if (!IsPluginAvailable(plugin, services)) + { + continue; + } + + try + { + var analyzer = plugin.CreateAnalyzer(services); + if (analyzer is null) + { + continue; + } + + analyzers.Add(analyzer); + } + catch (Exception ex) + { + _logger.LogError(ex, "Analyzer plug-in '{Plugin}' failed to create analyzer instance.", plugin.Name); + } + } + + if (analyzers.Count == 0) + { + _logger.LogWarning("All OS analyzer plug-ins were unavailable."); + return Array.Empty(); + } + + analyzers.Sort(static (a, b) => string.CompareOrdinal(a.AnalyzerId, b.AnalyzerId)); + return new ReadOnlyCollection(analyzers); + } + + private void RefreshPluginList() + { + var assemblies = _assemblies.Values.ToArray(); + var plugins = PluginLoader.LoadPlugins(assemblies); + _plugins = plugins is IReadOnlyList list + ? 
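As a hedged sketch of how a Worker host might drive the catalog, the snippet below uses only `LoadFromDirectory` and `CreateAnalyzers` from the class above; the `pluginCatalogGuard`, `logger`, `serviceProvider` instances and the plug-in path are assumptions, not part of this patch.

```csharp
// Sketch only — composition details are assumptions.
var catalog = new OsAnalyzerPluginCatalog(pluginCatalogGuard, logger);

// Restart-time: discover StellaOps.Scanner.Analyzers.*.dll and seal the guard afterwards.
catalog.LoadFromDirectory("plugins/scanner/analyzers/os");

// Per scan: instantiate only the analyzers whose plug-ins report themselves available.
var analyzers = catalog.CreateAnalyzers(serviceProvider);
```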
list + : new ReadOnlyCollection(plugins.ToArray()); + } + + private static bool IsPluginAvailable(IOSAnalyzerPlugin plugin, IServiceProvider services) + { + try + { + return plugin.IsAvailable(services); + } + catch + { + return false; + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs b/src/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..d0ddbf86 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.OS.Tests")] diff --git a/src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj b/src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj new file mode 100644 index 00000000..fdff7fdd --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + diff --git a/src/StellaOps.Scanner.Analyzers.OS/TASKS.md b/src/StellaOps.Scanner.Analyzers.OS/TASKS.md new file mode 100644 index 00000000..b81b3b90 --- /dev/null +++ b/src/StellaOps.Scanner.Analyzers.OS/TASKS.md @@ -0,0 +1,11 @@ +# OS Analyzer Task Board + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-OS-10-201 | DONE (2025-10-19) | OS Analyzer Guild | Scanner Core contracts | Alpine/apk analyzer emitting deterministic package components with provenance evidence. | Analyzer reads `/lib/apk/db/installed`, emits deterministic `pkg:alpine` components with provenance, license, and file evidence; snapshot tests cover fixture. | +| SCANNER-ANALYZERS-OS-10-202 | DONE (2025-10-19) | OS Analyzer Guild | Shared helpers (204) | Debian/dpkg analyzer mapping packages to canonical `pkg:deb` identities with evidence and normalized metadata. | Analyzer parses `status` + `info/*.list`/`md5sums`, outputs normalized packages with config flags and provenance evidence. | +| SCANNER-ANALYZERS-OS-10-203 | DONE (2025-10-19) | OS Analyzer Guild | Shared helpers (204) | RPM analyzer capturing EVR/NEVRA, declared file lists, provenance metadata. | SQLite rpmdb reader parses headers, reconstructs NEVRA, provides/requires, file evidence, and vendor metadata for fixtures. | +| SCANNER-ANALYZERS-OS-10-204 | DONE (2025-10-19) | OS Analyzer Guild | — | Build shared OS evidence helpers for package identity normalization, file attribution, and metadata enrichment used by analyzers. | Shared helpers deliver analyzer base context, PURL builders, CVE hint extraction, and file evidence model reused across plugins. | +| SCANNER-ANALYZERS-OS-10-205 | DONE (2025-10-19) | OS Analyzer Guild | Shared helpers (204) | Vendor metadata enrichment (source packages, declared licenses, CVE hints). | Apk/dpkg/rpm analyzers populate source, license, maintainer, URLs, and CVE hints; metadata stored deterministically. | +| SCANNER-ANALYZERS-OS-10-206 | DONE (2025-10-19) | QA + OS Analyzer Guild | 201–205 | Determinism harness + fixtures for OS analyzers (warm/cold runs). | xUnit snapshot harness with fixtures + goldens ensures byte-stable JSON; helper normalizes newlines and supports env-based regen. | +| SCANNER-ANALYZERS-OS-10-207 | DONE (2025-10-19) | OS Analyzer Guild + DevOps | 201–206 | Package OS analyzers as restart-time plug-ins (manifest + host registration). 
| Build targets copy analyzer DLLs/manifests to `plugins/scanner/analyzers/os/`; Worker dispatcher loads via restart-only plugin guard. | diff --git a/src/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs b/src/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs new file mode 100644 index 00000000..a1ea8701 --- /dev/null +++ b/src/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs @@ -0,0 +1,140 @@ +using System.Text; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Scanner.Cache; +using StellaOps.Scanner.Cache.Abstractions; +using StellaOps.Scanner.Cache.FileCas; +using StellaOps.Scanner.Cache.LayerCache; +using Xunit; + +namespace StellaOps.Scanner.Cache.Tests; + +public sealed class LayerCacheRoundTripTests : IAsyncLifetime +{ + private readonly string _rootPath; + private readonly FakeTimeProvider _timeProvider; + private readonly IOptions _options; + private readonly LayerCacheStore _layerCache; + private readonly FileContentAddressableStore _fileCas; + + public LayerCacheRoundTripTests() + { + _rootPath = Path.Combine(Path.GetTempPath(), "stellaops-cache-tests", Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(_rootPath); + + _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); + + var optionsValue = new ScannerCacheOptions + { + RootPath = _rootPath, + LayerTtl = TimeSpan.FromHours(1), + FileTtl = TimeSpan.FromHours(2), + MaxBytes = 512 * 1024, // 512 KiB + WarmBytesThreshold = 256 * 1024, + ColdBytesThreshold = 400 * 1024, + MaintenanceInterval = TimeSpan.FromMinutes(5) + }; + + _options = Options.Create(optionsValue); + _layerCache = new LayerCacheStore(_options, NullLogger.Instance, _timeProvider); + _fileCas = new FileContentAddressableStore(_options, NullLogger.Instance, _timeProvider); + } + + [Fact] + public async Task RoundTrip_Succeeds_And_Respects_Ttl_And_ImportExport() + { + var layerDigest = "sha256:abcd1234"; + var metadata = new Dictionary + { + ["image"] = "ghcr.io/stella/sample:1", + ["schema"] = "1.0" + }; + + using var inventoryStream = CreateStream("inventory" + Environment.NewLine + "component:libfoo" + Environment.NewLine); + using var usageStream = CreateStream("usage" + Environment.NewLine + "component:bin" + Environment.NewLine); + + var request = new LayerCachePutRequest( + layerDigest, + architecture: "linux/amd64", + mediaType: "application/vnd.oci.image.layer.v1.tar", + metadata, + new List + { + new("inventory.cdx.json", inventoryStream, "application/json"), + new("usage.cdx.json", usageStream, "application/json") + }); + + var stored = await _layerCache.PutAsync(request, CancellationToken.None); + stored.LayerDigest.Should().Be(layerDigest); + stored.Artifacts.Should().ContainKey("inventory.cdx.json"); + stored.TotalSizeBytes.Should().BeGreaterThan(0); + + var cached = await _layerCache.TryGetAsync(layerDigest, CancellationToken.None); + cached.Should().NotBeNull(); + cached!.Metadata.Should().ContainKey("image"); + + await using (var artifact = await _layerCache.OpenArtifactAsync(layerDigest, "inventory.cdx.json", CancellationToken.None)) + { + artifact.Should().NotBeNull(); + using var reader = new StreamReader(artifact!, Encoding.UTF8); + var content = await reader.ReadToEndAsync(); + content.Should().Contain("component:libfoo"); + } + + // Store file CAS entry and validate export/import lifecycle. 
+ var casHash = "sha256:" + new string('f', 64); + using var casStream = CreateStream("some-cas-content"); + await _fileCas.PutAsync(new FileCasPutRequest(casHash, casStream), CancellationToken.None); + + var exportPath = Path.Combine(_rootPath, "export"); + var exportCount = await _fileCas.ExportAsync(exportPath, CancellationToken.None); + exportCount.Should().Be(1); + + await _fileCas.RemoveAsync(casHash, CancellationToken.None); + (await _fileCas.TryGetAsync(casHash, CancellationToken.None)).Should().BeNull(); + + var importCount = await _fileCas.ImportAsync(exportPath, CancellationToken.None); + importCount.Should().Be(1); + var imported = await _fileCas.TryGetAsync(casHash, CancellationToken.None); + imported.Should().NotBeNull(); + imported!.RelativePath.Should().EndWith("content.bin"); + + // TTL eviction + _timeProvider.Advance(TimeSpan.FromHours(2)); + await _layerCache.EvictExpiredAsync(CancellationToken.None); + (await _layerCache.TryGetAsync(layerDigest, CancellationToken.None)).Should().BeNull(); + + // Compaction removes CAS entry once over threshold. + // Force compaction by writing a large entry. + using var largeStream = CreateStream(new string('x', 400_000)); + var largeHash = "sha256:" + new string('e', 64); + await _fileCas.PutAsync(new FileCasPutRequest(largeHash, largeStream), CancellationToken.None); + _timeProvider.Advance(TimeSpan.FromMinutes(1)); + await _fileCas.CompactAsync(CancellationToken.None); + (await _fileCas.TryGetAsync(casHash, CancellationToken.None)).Should().BeNull(); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + try + { + if (Directory.Exists(_rootPath)) + { + Directory.Delete(_rootPath, recursive: true); + } + } + catch + { + // Ignored – best effort cleanup. + } + + return Task.CompletedTask; + } + + private static MemoryStream CreateStream(string content) + => new(Encoding.UTF8.GetBytes(content)); +} diff --git a/src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj b/src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj new file mode 100644 index 00000000..37fe45f6 --- /dev/null +++ b/src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj @@ -0,0 +1,25 @@ + + + net10.0 + enable + enable + false + + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Cache/AGENTS.md b/src/StellaOps.Scanner.Cache/AGENTS.md new file mode 100644 index 00000000..b0882d49 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/AGENTS.md @@ -0,0 +1,15 @@ +# StellaOps.Scanner.Cache — Agent Charter + +## Mission +Provide deterministic, offline-friendly caching primitives for scanner layers and file content so warm scans complete in <5 s and cache reuse remains reproducible across deployments. + +## Responsibilities +- Implement layer cache keyed by layer digest, retaining analyzer metadata and provenance per architecture §3.3. +- Deliver file content-addressable storage (CAS) with deduplication, TTL enforcement, and offline import/export hooks. +- Expose structured metrics, health probes, and configuration toggles for cache sizing, eviction, and warm/cold thresholds. +- Coordinate invalidation workflows (layer purge, TTL expiry, diff invalidation) while keeping deterministic logs and telemetry. + +## Interfaces & Dependencies +- Relies on `StackExchange.Redis` via `StellaOps.DependencyInjection` bindings for cache state. +- Coordinates with `StellaOps.Scanner.Storage` object store when persisting immutable artifacts. 
+- Targets `net10.0` preview SDK and follows scanner coding standards from `docs/18_CODING_STANDARDS.md`. diff --git a/src/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs b/src/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs new file mode 100644 index 00000000..9463bf5d --- /dev/null +++ b/src/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs @@ -0,0 +1,48 @@ +using System.IO; + +namespace StellaOps.Scanner.Cache.Abstractions; + +public interface IFileContentAddressableStore +{ + ValueTask TryGetAsync(string sha256, CancellationToken cancellationToken = default); + + Task PutAsync(FileCasPutRequest request, CancellationToken cancellationToken = default); + + Task RemoveAsync(string sha256, CancellationToken cancellationToken = default); + + Task EvictExpiredAsync(CancellationToken cancellationToken = default); + + Task ExportAsync(string destinationDirectory, CancellationToken cancellationToken = default); + + Task ImportAsync(string sourceDirectory, CancellationToken cancellationToken = default); + + Task CompactAsync(CancellationToken cancellationToken = default); +} + +public sealed record FileCasEntry( + string Sha256, + long SizeBytes, + DateTimeOffset CreatedAt, + DateTimeOffset LastAccessed, + string RelativePath); + +public sealed class FileCasPutRequest +{ + public string Sha256 { get; } + + public Stream Content { get; } + + public bool LeaveOpen { get; } + + public FileCasPutRequest(string sha256, Stream content, bool leaveOpen = false) + { + if (string.IsNullOrWhiteSpace(sha256)) + { + throw new ArgumentException("SHA-256 identifier must be provided.", nameof(sha256)); + } + + Sha256 = sha256; + Content = content ?? throw new ArgumentNullException(nameof(content)); + LeaveOpen = leaveOpen; + } +} diff --git a/src/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs b/src/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs new file mode 100644 index 00000000..8c200a23 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs @@ -0,0 +1,18 @@ +using System.IO; + +namespace StellaOps.Scanner.Cache.Abstractions; + +public interface ILayerCacheStore +{ + ValueTask TryGetAsync(string layerDigest, CancellationToken cancellationToken = default); + + Task PutAsync(LayerCachePutRequest request, CancellationToken cancellationToken = default); + + Task RemoveAsync(string layerDigest, CancellationToken cancellationToken = default); + + Task EvictExpiredAsync(CancellationToken cancellationToken = default); + + Task OpenArtifactAsync(string layerDigest, string artifactName, CancellationToken cancellationToken = default); + + Task CompactAsync(CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs b/src/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs new file mode 100644 index 00000000..0c40e597 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs @@ -0,0 +1,28 @@ +namespace StellaOps.Scanner.Cache.Abstractions; + +/// +/// Represents cached metadata for a single layer digest. +/// +public sealed record LayerCacheEntry( + string LayerDigest, + string Architecture, + string MediaType, + DateTimeOffset CachedAt, + DateTimeOffset LastAccessed, + long TotalSizeBytes, + IReadOnlyDictionary Artifacts, + IReadOnlyDictionary Metadata) +{ + public bool IsExpired(DateTimeOffset utcNow, TimeSpan ttl) + => utcNow - CachedAt >= ttl; +} + +/// +/// Points to a cached artifact stored on disk. 
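A minimal usage sketch against the `IFileContentAddressableStore` contract above; `cas`, `contentHashHex`, `extractedFilePath`, `ct`, and the export path are placeholders, and the concrete store implementation appears later in this diff.

```csharp
// Sketch only — `cas` is an IFileContentAddressableStore resolved from the host.
var digest = "sha256:" + contentHashHex;                 // hex hash computed by the caller
await using var payload = File.OpenRead(extractedFilePath);
var entry = await cas.PutAsync(new FileCasPutRequest(digest, payload, leaveOpen: true), ct);

// Subsequent scans dedupe by hash; a null result means a miss or an expired TTL.
var cached = await cas.TryGetAsync(digest, ct);

// Offline kits can round-trip the whole store.
await cas.ExportAsync("/exports/scanner-cas", ct);
await cas.ImportAsync("/exports/scanner-cas", ct);
```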
+/// +public sealed record LayerCacheArtifactReference( + string Name, + string RelativePath, + string ContentType, + long SizeBytes, + bool IsImmutable = false); diff --git a/src/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs b/src/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs new file mode 100644 index 00000000..04d5a0c9 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs @@ -0,0 +1,93 @@ +using System.IO; + +namespace StellaOps.Scanner.Cache.Abstractions; + +/// +/// Describes layer cache content to be stored. +/// +public sealed class LayerCachePutRequest +{ + public string LayerDigest { get; } + + public string Architecture { get; } + + public string MediaType { get; } + + public IReadOnlyDictionary Metadata { get; } + + public IReadOnlyList Artifacts { get; } + + public LayerCachePutRequest( + string layerDigest, + string architecture, + string mediaType, + IReadOnlyDictionary metadata, + IReadOnlyList artifacts) + { + if (string.IsNullOrWhiteSpace(layerDigest)) + { + throw new ArgumentException("Layer digest must be provided.", nameof(layerDigest)); + } + + if (string.IsNullOrWhiteSpace(architecture)) + { + throw new ArgumentException("Architecture must be provided.", nameof(architecture)); + } + + if (string.IsNullOrWhiteSpace(mediaType)) + { + throw new ArgumentException("Media type must be provided.", nameof(mediaType)); + } + + Metadata = metadata ?? throw new ArgumentNullException(nameof(metadata)); + Artifacts = artifacts ?? throw new ArgumentNullException(nameof(artifacts)); + if (artifacts.Count == 0) + { + throw new ArgumentException("At least one artifact must be supplied.", nameof(artifacts)); + } + + LayerDigest = layerDigest; + Architecture = architecture; + MediaType = mediaType; + } +} + +/// +/// Stream payload for a cached artifact. +/// +public sealed class LayerCacheArtifactContent +{ + public string Name { get; } + + public Stream Content { get; } + + public string ContentType { get; } + + public bool Immutable { get; } + + public bool LeaveOpen { get; } + + public LayerCacheArtifactContent( + string name, + Stream content, + string contentType, + bool immutable = false, + bool leaveOpen = false) + { + if (string.IsNullOrWhiteSpace(name)) + { + throw new ArgumentException("Artifact name must be provided.", nameof(name)); + } + + if (string.IsNullOrWhiteSpace(contentType)) + { + throw new ArgumentException("Content type must be provided.", nameof(contentType)); + } + + Name = name; + Content = content ?? 
throw new ArgumentNullException(nameof(content)); + ContentType = contentType; + Immutable = immutable; + LeaveOpen = leaveOpen; + } +} diff --git a/src/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs b/src/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs new file mode 100644 index 00000000..0a15166b --- /dev/null +++ b/src/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs @@ -0,0 +1,481 @@ +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Cache.Abstractions; + +namespace StellaOps.Scanner.Cache.FileCas; + +public sealed class FileContentAddressableStore : IFileContentAddressableStore +{ + private const string MetadataFileName = "meta.json"; + private const string ContentFileName = "content.bin"; + + private readonly ScannerCacheOptions _options; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly JsonSerializerOptions _jsonOptions; + private readonly SemaphoreSlim _initializationLock = new(1, 1); + private volatile bool _initialised; + + public FileContentAddressableStore( + IOptions options, + ILogger logger, + TimeProvider? timeProvider = null) + { + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _jsonOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = false + }; + } + + public async ValueTask TryGetAsync(string sha256, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sha256); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + var entryDirectory = GetEntryDirectory(sha256); + var metadataPath = Path.Combine(entryDirectory, MetadataFileName); + if (!File.Exists(metadataPath)) + { + ScannerCacheMetrics.RecordFileCasMiss(sha256); + return null; + } + + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + await RemoveDirectoryAsync(entryDirectory).ConfigureAwait(false); + ScannerCacheMetrics.RecordFileCasMiss(sha256); + return null; + } + + var now = _timeProvider.GetUtcNow(); + if (IsExpired(metadata, now)) + { + ScannerCacheMetrics.RecordFileCasEviction(sha256); + await RemoveDirectoryAsync(entryDirectory).ConfigureAwait(false); + return null; + } + + metadata.LastAccessed = now; + await WriteMetadataAsync(metadataPath, metadata, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordFileCasHit(sha256); + + return new FileCasEntry( + metadata.Sha256, + metadata.SizeBytes, + metadata.CreatedAt, + metadata.LastAccessed, + GetRelativeContentPath(metadata.Sha256)); + } + + public async Task PutAsync(FileCasPutRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + var sha = request.Sha256; + var directory = GetEntryDirectory(sha); + Directory.CreateDirectory(directory); + + var contentPath = Path.Combine(directory, ContentFileName); + await using (var destination = new FileStream(contentPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, FileOptions.Asynchronous)) + { + await request.Content.CopyToAsync(destination, cancellationToken).ConfigureAwait(false); + await destination.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + 
if (!request.LeaveOpen) + { + request.Content.Dispose(); + } + + var now = _timeProvider.GetUtcNow(); + var sizeBytes = new FileInfo(contentPath).Length; + var metadata = new FileCasMetadata + { + Sha256 = NormalizeHash(sha), + CreatedAt = now, + LastAccessed = now, + SizeBytes = sizeBytes + }; + + var metadataPath = Path.Combine(directory, MetadataFileName); + await WriteMetadataAsync(metadataPath, metadata, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordFileCasBytes(sizeBytes); + + await CompactAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Stored CAS entry {Sha256} ({SizeBytes} bytes)", sha, sizeBytes); + return new FileCasEntry(metadata.Sha256, metadata.SizeBytes, metadata.CreatedAt, metadata.LastAccessed, GetRelativeContentPath(metadata.Sha256)); + } + + public async Task RemoveAsync(string sha256, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sha256); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + var directory = GetEntryDirectory(sha256); + if (!Directory.Exists(directory)) + { + return false; + } + + await RemoveDirectoryAsync(directory).ConfigureAwait(false); + ScannerCacheMetrics.RecordFileCasEviction(sha256); + return true; + } + + public async Task EvictExpiredAsync(CancellationToken cancellationToken = default) + { + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + if (_options.FileTtl <= TimeSpan.Zero) + { + return 0; + } + + var now = _timeProvider.GetUtcNow(); + var evicted = 0; + + foreach (var metadataPath in EnumerateMetadataFiles()) + { + cancellationToken.ThrowIfCancellationRequested(); + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + continue; + } + + if (IsExpired(metadata, now)) + { + var directory = Path.GetDirectoryName(metadataPath)!; + await RemoveDirectoryAsync(directory).ConfigureAwait(false); + ScannerCacheMetrics.RecordFileCasEviction(metadata.Sha256); + evicted++; + } + } + + if (evicted > 0) + { + _logger.LogInformation("Evicted {Count} CAS entries due to TTL", evicted); + } + + return evicted; + } + + public async Task ExportAsync(string destinationDirectory, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(destinationDirectory); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + Directory.CreateDirectory(destinationDirectory); + var exported = 0; + + foreach (var entryDirectory in EnumerateEntryDirectories()) + { + cancellationToken.ThrowIfCancellationRequested(); + var hash = Path.GetFileName(entryDirectory); + if (hash is null) + { + continue; + } + + var target = Path.Combine(destinationDirectory, hash); + if (Directory.Exists(target)) + { + continue; + } + + CopyDirectory(entryDirectory, target); + exported++; + } + + _logger.LogInformation("Exported {Count} CAS entries to {Destination}", exported, destinationDirectory); + return exported; + } + + public async Task ImportAsync(string sourceDirectory, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(sourceDirectory); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + if (!Directory.Exists(sourceDirectory)) + { + return 0; + } + + var imported = 0; + foreach (var directory in Directory.EnumerateDirectories(sourceDirectory)) + { + cancellationToken.ThrowIfCancellationRequested(); + var metadataPath = Path.Combine(directory, 
MetadataFileName); + if (!File.Exists(metadataPath)) + { + continue; + } + + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + continue; + } + + var destination = GetEntryDirectory(metadata.Sha256); + if (Directory.Exists(destination)) + { + // Only overwrite if the source is newer. + var existingMetadataPath = Path.Combine(destination, MetadataFileName); + var existing = await ReadMetadataAsync(existingMetadataPath, cancellationToken).ConfigureAwait(false); + if (existing is not null && existing.CreatedAt >= metadata.CreatedAt) + { + continue; + } + + await RemoveDirectoryAsync(destination).ConfigureAwait(false); + } + + CopyDirectory(directory, destination); + imported++; + } + + if (imported > 0) + { + _logger.LogInformation("Imported {Count} CAS entries from {Source}", imported, sourceDirectory); + } + + return imported; + } + + public async Task CompactAsync(CancellationToken cancellationToken = default) + { + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + if (_options.MaxBytes <= 0) + { + return 0; + } + + var entries = new List<(FileCasMetadata Metadata, string Directory)>(); + long totalBytes = 0; + + foreach (var metadataPath in EnumerateMetadataFiles()) + { + cancellationToken.ThrowIfCancellationRequested(); + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + continue; + } + + var directory = Path.GetDirectoryName(metadataPath)!; + entries.Add((metadata, directory)); + totalBytes += metadata.SizeBytes; + } + + if (totalBytes <= Math.Min(_options.ColdBytesThreshold > 0 ? _options.ColdBytesThreshold : long.MaxValue, _options.MaxBytes)) + { + return 0; + } + + entries.Sort((left, right) => DateTimeOffset.Compare(left.Metadata.LastAccessed, right.Metadata.LastAccessed)); + var target = _options.WarmBytesThreshold > 0 ? 
_options.WarmBytesThreshold : _options.MaxBytes / 2; + var removed = 0; + + foreach (var entry in entries) + { + if (totalBytes <= target) + { + break; + } + + await RemoveDirectoryAsync(entry.Directory).ConfigureAwait(false); + totalBytes -= entry.Metadata.SizeBytes; + removed++; + ScannerCacheMetrics.RecordFileCasEviction(entry.Metadata.Sha256); + } + + if (removed > 0) + { + _logger.LogInformation("Compacted CAS store, removed {Count} entries", removed); + } + + return removed; + } + + private async Task EnsureInitialisedAsync(CancellationToken cancellationToken) + { + if (_initialised) + { + return; + } + + await _initializationLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_initialised) + { + return; + } + + Directory.CreateDirectory(_options.FileCasDirectoryPath); + _initialised = true; + } + finally + { + _initializationLock.Release(); + } + } + + private IEnumerable EnumerateMetadataFiles() + { + if (!Directory.Exists(_options.FileCasDirectoryPath)) + { + yield break; + } + + foreach (var file in Directory.EnumerateFiles(_options.FileCasDirectoryPath, MetadataFileName, SearchOption.AllDirectories)) + { + yield return file; + } + } + + private IEnumerable EnumerateEntryDirectories() + { + if (!Directory.Exists(_options.FileCasDirectoryPath)) + { + yield break; + } + + foreach (var directory in Directory.EnumerateDirectories(_options.FileCasDirectoryPath, "*", SearchOption.AllDirectories)) + { + if (File.Exists(Path.Combine(directory, MetadataFileName))) + { + yield return directory; + } + } + } + + private async Task ReadMetadataAsync(string metadataPath, CancellationToken cancellationToken) + { + try + { + await using var stream = new FileStream(metadataPath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.Asynchronous | FileOptions.SequentialScan); + return await JsonSerializer.DeserializeAsync(stream, _jsonOptions, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is IOException or JsonException) + { + _logger.LogWarning(ex, "Failed to read CAS metadata from {Path}", metadataPath); + return null; + } + } + + private async Task WriteMetadataAsync(string metadataPath, FileCasMetadata metadata, CancellationToken cancellationToken) + { + var tempFile = Path.Combine(Path.GetDirectoryName(metadataPath)!, $"{Guid.NewGuid():N}.tmp"); + await using (var stream = new FileStream(tempFile, FileMode.Create, FileAccess.Write, FileShare.None, 4096, FileOptions.Asynchronous)) + { + await JsonSerializer.SerializeAsync(stream, metadata, _jsonOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + File.Move(tempFile, metadataPath, overwrite: true); + } + + private static string GetRelativeContentPath(string sha256) + { + var normalized = NormalizeHash(sha256); + return Path.Combine(NormalizedPrefix(normalized, 0, 2), NormalizedPrefix(normalized, 2, 2), normalized, ContentFileName); + } + + private string GetEntryDirectory(string sha256) + { + var normalized = NormalizeHash(sha256); + return Path.Combine( + _options.FileCasDirectoryPath, + NormalizedPrefix(normalized, 0, 2), + NormalizedPrefix(normalized, 2, 2), + normalized); + } + + private static string NormalizeHash(string sha256) + { + if (string.IsNullOrWhiteSpace(sha256)) + { + return "unknown"; + } + + var hash = sha256.Contains(':', StringComparison.Ordinal) ? sha256[(sha256.IndexOf(':') + 1)..] 
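To make the on-disk layout concrete, a small worked example of the fan-out performed by `GetEntryDirectory` and the prefix helpers nearby; the digest is invented (real hashes are 64 hex chars) and the root assumes the default `cache/scanner/cas`.

```csharp
// "sha256:AB12CD34" → algorithm prefix stripped, lower-cased, sharded by two 2-char prefixes:
//   cache/scanner/cas/ab/12/ab12cd34/meta.json
//   cache/scanner/cas/ab/12/ab12cd34/content.bin
// so lookups address a blob directly instead of scanning one flat directory.
var entryDirectory = GetEntryDirectory("sha256:AB12CD34");   // => .../cas/ab/12/ab12cd34
```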
: sha256; + return hash.ToLowerInvariant(); + } + + private static string NormalizedPrefix(string hash, int offset, int length) + { + if (hash.Length <= offset) + { + return "00"; + } + + if (hash.Length < offset + length) + { + length = hash.Length - offset; + } + + return hash.Substring(offset, length); + } + + private bool IsExpired(FileCasMetadata metadata, DateTimeOffset now) + { + if (_options.FileTtl <= TimeSpan.Zero) + { + return false; + } + + return now - metadata.CreatedAt >= _options.FileTtl; + } + + private static void CopyDirectory(string sourceDir, string destDir) + { + Directory.CreateDirectory(destDir); + foreach (var file in Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories)) + { + var relative = Path.GetRelativePath(sourceDir, file); + var destination = Path.Combine(destDir, relative); + var parent = Path.GetDirectoryName(destination); + if (!string.IsNullOrEmpty(parent)) + { + Directory.CreateDirectory(parent); + } + File.Copy(file, destination, overwrite: true); + } + } + + private Task RemoveDirectoryAsync(string directory) + { + if (!Directory.Exists(directory)) + { + return Task.CompletedTask; + } + + try + { + Directory.Delete(directory, recursive: true); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + _logger.LogWarning(ex, "Failed to delete CAS directory {Directory}", directory); + } + + return Task.CompletedTask; + } + + private sealed class FileCasMetadata + { + public string Sha256 { get; set; } = string.Empty; + + public DateTimeOffset CreatedAt { get; set; } + + public DateTimeOffset LastAccessed { get; set; } + + public long SizeBytes { get; set; } + } +} diff --git a/src/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs b/src/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs new file mode 100644 index 00000000..3d254db1 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs @@ -0,0 +1,27 @@ +using StellaOps.Scanner.Cache.Abstractions; + +namespace StellaOps.Scanner.Cache.FileCas; + +internal sealed class NullFileContentAddressableStore : IFileContentAddressableStore +{ + public ValueTask TryGetAsync(string sha256, CancellationToken cancellationToken = default) + => ValueTask.FromResult(null); + + public Task PutAsync(FileCasPutRequest request, CancellationToken cancellationToken = default) + => Task.FromException(new InvalidOperationException("File CAS is disabled via configuration.")); + + public Task RemoveAsync(string sha256, CancellationToken cancellationToken = default) + => Task.FromResult(false); + + public Task EvictExpiredAsync(CancellationToken cancellationToken = default) + => Task.FromResult(0); + + public Task ExportAsync(string destinationDirectory, CancellationToken cancellationToken = default) + => Task.FromResult(0); + + public Task ImportAsync(string sourceDirectory, CancellationToken cancellationToken = default) + => Task.FromResult(0); + + public Task CompactAsync(CancellationToken cancellationToken = default) + => Task.FromResult(0); +} diff --git a/src/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs b/src/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs new file mode 100644 index 00000000..1dab98c7 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs @@ -0,0 +1,480 @@ +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Cache.Abstractions; + +namespace StellaOps.Scanner.Cache.LayerCache; + +public 
sealed class LayerCacheStore : ILayerCacheStore +{ + private const string MetadataFileName = "meta.json"; + + private readonly ScannerCacheOptions _options; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly JsonSerializerOptions _jsonOptions; + private readonly SemaphoreSlim _initializationLock = new(1, 1); + private volatile bool _initialised; + + public LayerCacheStore( + IOptions options, + ILogger logger, + TimeProvider? timeProvider = null) + { + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _jsonOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = false + }; + } + + public async ValueTask TryGetAsync(string layerDigest, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + var directory = GetLayerDirectory(layerDigest); + if (!Directory.Exists(directory)) + { + _logger.LogTrace("Layer cache miss: directory {Directory} not found for {LayerDigest}", directory, layerDigest); + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + var metadataPath = Path.Combine(directory, MetadataFileName); + if (!File.Exists(metadataPath)) + { + _logger.LogDebug("Layer cache metadata missing at {Path} for {LayerDigest}; removing directory", metadataPath, layerDigest); + await RemoveInternalAsync(directory, layerDigest, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + await RemoveInternalAsync(directory, layerDigest, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + var now = _timeProvider.GetUtcNow(); + if (IsExpired(metadata, now)) + { + _logger.LogDebug("Layer cache entry {LayerDigest} expired at {Expiration}", metadata.LayerDigest, metadata.CachedAt + _options.LayerTtl); + await RemoveInternalAsync(directory, layerDigest, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerEviction(layerDigest); + return null; + } + + metadata.LastAccessed = now; + await WriteMetadataAsync(metadataPath, metadata, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerHit(layerDigest); + + return Map(metadata); + } + + public async Task PutAsync(LayerCachePutRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + var digest = request.LayerDigest; + var directory = GetLayerDirectory(digest); + var metadataPath = Path.Combine(directory, MetadataFileName); + + if (Directory.Exists(directory)) + { + _logger.LogDebug("Replacing existing layer cache entry for {LayerDigest}", digest); + await RemoveInternalAsync(directory, digest, cancellationToken).ConfigureAwait(false); + } + + Directory.CreateDirectory(directory); + + var artifactMetadata = new Dictionary(StringComparer.OrdinalIgnoreCase); + long totalSize = 0; + + foreach (var artifact in request.Artifacts) + { + cancellationToken.ThrowIfCancellationRequested(); + var fileName = SanitizeArtifactName(artifact.Name); + var relativePath = 
Path.Combine("artifacts", fileName); + var artifactDirectory = Path.Combine(directory, "artifacts"); + Directory.CreateDirectory(artifactDirectory); + var filePath = Path.Combine(directory, relativePath); + + await using (var target = new FileStream(filePath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, FileOptions.Asynchronous)) + { + await artifact.Content.CopyToAsync(target, cancellationToken).ConfigureAwait(false); + await target.FlushAsync(cancellationToken).ConfigureAwait(false); + totalSize += target.Length; + } + + if (!artifact.LeaveOpen) + { + artifact.Content.Dispose(); + } + + var sizeBytes = new FileInfo(filePath).Length; + + artifactMetadata[artifact.Name] = new LayerCacheArtifactMetadata + { + Name = artifact.Name, + ContentType = artifact.ContentType, + RelativePath = relativePath, + SizeBytes = sizeBytes, + Immutable = artifact.Immutable + }; + } + + var now = _timeProvider.GetUtcNow(); + var metadata = new LayerCacheMetadata + { + LayerDigest = digest, + Architecture = request.Architecture, + MediaType = request.MediaType, + CachedAt = now, + LastAccessed = now, + Metadata = new Dictionary(request.Metadata, StringComparer.Ordinal), + Artifacts = artifactMetadata, + SizeBytes = totalSize + }; + + await WriteMetadataAsync(metadataPath, metadata, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerBytes(totalSize); + + await CompactAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogInformation("Cached layer {LayerDigest} with {ArtifactCount} artifacts ({SizeBytes} bytes)", digest, artifactMetadata.Count, totalSize); + return Map(metadata); + } + + public async Task RemoveAsync(string layerDigest, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + var directory = GetLayerDirectory(layerDigest); + await RemoveInternalAsync(directory, layerDigest, cancellationToken).ConfigureAwait(false); + } + + public async Task EvictExpiredAsync(CancellationToken cancellationToken = default) + { + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + if (_options.LayerTtl <= TimeSpan.Zero) + { + return 0; + } + + var now = _timeProvider.GetUtcNow(); + var evicted = 0; + + foreach (var metadataPath in EnumerateMetadataFiles()) + { + cancellationToken.ThrowIfCancellationRequested(); + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + continue; + } + + if (IsExpired(metadata, now)) + { + var directory = Path.GetDirectoryName(metadataPath)!; + await RemoveInternalAsync(directory, metadata.LayerDigest, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerEviction(metadata.LayerDigest); + evicted++; + } + } + + if (evicted > 0) + { + _logger.LogInformation("Evicted {Count} expired layer cache entries", evicted); + } + + return evicted; + } + + public async Task OpenArtifactAsync(string layerDigest, string artifactName, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest); + ArgumentException.ThrowIfNullOrWhiteSpace(artifactName); + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + + var directory = GetLayerDirectory(layerDigest); + var metadataPath = Path.Combine(directory, MetadataFileName); + if (!File.Exists(metadataPath)) + { + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + var metadata = await 
ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + if (!metadata.Artifacts.TryGetValue(artifactName, out var artifact)) + { + _logger.LogDebug("Layer cache artifact {Artifact} missing for {LayerDigest}", artifactName, layerDigest); + return null; + } + + var filePath = Path.Combine(directory, artifact.RelativePath); + if (!File.Exists(filePath)) + { + _logger.LogDebug("Layer cache file {FilePath} not found for artifact {Artifact}", filePath, artifactName); + await RemoveInternalAsync(directory, layerDigest, cancellationToken).ConfigureAwait(false); + ScannerCacheMetrics.RecordLayerMiss(layerDigest); + return null; + } + + metadata.LastAccessed = _timeProvider.GetUtcNow(); + await WriteMetadataAsync(metadataPath, metadata, cancellationToken).ConfigureAwait(false); + return new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, 81920, FileOptions.Asynchronous | FileOptions.SequentialScan); + } + + public async Task CompactAsync(CancellationToken cancellationToken = default) + { + await EnsureInitialisedAsync(cancellationToken).ConfigureAwait(false); + if (_options.MaxBytes <= 0) + { + return 0; + } + + var entries = new List<(LayerCacheMetadata Metadata, string Directory)>(); + long totalBytes = 0; + + foreach (var metadataPath in EnumerateMetadataFiles()) + { + cancellationToken.ThrowIfCancellationRequested(); + var metadata = await ReadMetadataAsync(metadataPath, cancellationToken).ConfigureAwait(false); + if (metadata is null) + { + continue; + } + + var directory = Path.GetDirectoryName(metadataPath)!; + entries.Add((metadata, directory)); + totalBytes += metadata.SizeBytes; + } + + if (totalBytes <= Math.Min(_options.ColdBytesThreshold > 0 ? _options.ColdBytesThreshold : long.MaxValue, _options.MaxBytes)) + { + return 0; + } + + entries.Sort((left, right) => DateTimeOffset.Compare(left.Metadata.LastAccessed, right.Metadata.LastAccessed)); + var targetBytes = _options.WarmBytesThreshold > 0 ? 
_options.WarmBytesThreshold : _options.MaxBytes / 2; + var removed = 0; + + foreach (var entry in entries) + { + if (totalBytes <= targetBytes) + { + break; + } + + await RemoveInternalAsync(entry.Directory, entry.Metadata.LayerDigest, cancellationToken).ConfigureAwait(false); + totalBytes -= entry.Metadata.SizeBytes; + removed++; + ScannerCacheMetrics.RecordLayerEviction(entry.Metadata.LayerDigest); + _logger.LogInformation("Evicted layer {LayerDigest} during compaction (remaining bytes: {Bytes})", entry.Metadata.LayerDigest, totalBytes); + } + + return removed; + } + + private async Task EnsureInitialisedAsync(CancellationToken cancellationToken) + { + if (_initialised) + { + return; + } + + await _initializationLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_initialised) + { + return; + } + + Directory.CreateDirectory(_options.LayersDirectoryPath); + _initialised = true; + } + finally + { + _initializationLock.Release(); + } + } + + private IEnumerable EnumerateMetadataFiles() + { + if (!Directory.Exists(_options.LayersDirectoryPath)) + { + yield break; + } + + foreach (var file in Directory.EnumerateFiles(_options.LayersDirectoryPath, MetadataFileName, SearchOption.AllDirectories)) + { + yield return file; + } + } + + private async Task ReadMetadataAsync(string metadataPath, CancellationToken cancellationToken) + { + try + { + await using var stream = new FileStream(metadataPath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.Asynchronous | FileOptions.SequentialScan); + return await JsonSerializer.DeserializeAsync(stream, _jsonOptions, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is IOException or JsonException) + { + _logger.LogWarning(ex, "Failed to load layer cache metadata from {Path}", metadataPath); + return null; + } + } + + private async Task WriteMetadataAsync(string metadataPath, LayerCacheMetadata metadata, CancellationToken cancellationToken) + { + var tempFile = Path.Combine(Path.GetDirectoryName(metadataPath)!, $"{Guid.NewGuid():N}.tmp"); + await using (var stream = new FileStream(tempFile, FileMode.Create, FileAccess.Write, FileShare.None, 4096, FileOptions.Asynchronous)) + { + await JsonSerializer.SerializeAsync(stream, metadata, _jsonOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + File.Move(tempFile, metadataPath, overwrite: true); + } + + private Task RemoveInternalAsync(string directory, string layerDigest, CancellationToken cancellationToken) + { + if (!Directory.Exists(directory)) + { + return Task.CompletedTask; + } + + try + { + Directory.Delete(directory, recursive: true); + _logger.LogDebug("Removed layer cache entry {LayerDigest}", layerDigest); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException) + { + _logger.LogWarning(ex, "Failed to delete layer cache directory {Directory}", directory); + } + + return Task.CompletedTask; + } + + private bool IsExpired(LayerCacheMetadata metadata, DateTimeOffset now) + { + if (_options.LayerTtl <= TimeSpan.Zero) + { + return false; + } + + if (metadata.CachedAt == default) + { + return false; + } + + return now - metadata.CachedAt >= _options.LayerTtl; + } + + private LayerCacheEntry Map(LayerCacheMetadata metadata) + { + var artifacts = metadata.Artifacts?.ToDictionary( + pair => pair.Key, + pair => new LayerCacheArtifactReference( + pair.Value.Name, + pair.Value.RelativePath, + pair.Value.ContentType, + pair.Value.SizeBytes, + 
pair.Value.Immutable), + StringComparer.OrdinalIgnoreCase) + ?? new Dictionary(StringComparer.OrdinalIgnoreCase); + + return new LayerCacheEntry( + metadata.LayerDigest, + metadata.Architecture, + metadata.MediaType, + metadata.CachedAt, + metadata.LastAccessed, + metadata.SizeBytes, + artifacts, + metadata.Metadata is null + ? new Dictionary(StringComparer.Ordinal) + : new Dictionary(metadata.Metadata, StringComparer.Ordinal)); + } + + private string GetLayerDirectory(string layerDigest) + { + var safeDigest = SanitizeDigest(layerDigest); + return Path.Combine(_options.LayersDirectoryPath, safeDigest); + } + + private static string SanitizeArtifactName(string name) + { + var fileName = Path.GetFileName(name); + return string.IsNullOrWhiteSpace(fileName) ? "artifact" : fileName; + } + + private static string SanitizeDigest(string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return "unknown"; + } + + var hash = digest.Contains(':', StringComparison.Ordinal) + ? digest[(digest.IndexOf(':') + 1)..] + : digest; + + var buffer = new char[hash.Length]; + var count = 0; + foreach (var ch in hash) + { + buffer[count++] = char.IsLetterOrDigit(ch) ? char.ToLowerInvariant(ch) : '_'; + } + + return new string(buffer, 0, count); + } + + private sealed class LayerCacheMetadata + { + public string LayerDigest { get; set; } = string.Empty; + + public string Architecture { get; set; } = string.Empty; + + public string MediaType { get; set; } = string.Empty; + + public DateTimeOffset CachedAt { get; set; } + + public DateTimeOffset LastAccessed { get; set; } + + public Dictionary? Metadata { get; set; } + + public Dictionary Artifacts { get; set; } = new(StringComparer.OrdinalIgnoreCase); + + public long SizeBytes { get; set; } + + } + + private sealed class LayerCacheArtifactMetadata + { + public string Name { get; set; } = string.Empty; + + public string RelativePath { get; set; } = string.Empty; + + public string ContentType { get; set; } = string.Empty; + + public long SizeBytes { get; set; } + + public bool Immutable { get; set; } + } +} diff --git a/src/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs b/src/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs new file mode 100644 index 00000000..f5967ab2 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs @@ -0,0 +1,85 @@ +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Cache.Abstractions; + +namespace StellaOps.Scanner.Cache.Maintenance; + +public sealed class ScannerCacheMaintenanceService : BackgroundService +{ + private readonly ILayerCacheStore _layerCache; + private readonly IFileContentAddressableStore _fileCas; + private readonly IOptions _options; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + + public ScannerCacheMaintenanceService( + ILayerCacheStore layerCache, + IFileContentAddressableStore fileCas, + IOptions options, + ILogger logger, + TimeProvider? timeProvider = null) + { + _layerCache = layerCache ?? throw new ArgumentNullException(nameof(layerCache)); + _fileCas = fileCas ?? throw new ArgumentNullException(nameof(fileCas)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var settings = _options.Value; + if (!settings.Enabled) + { + _logger.LogInformation("Scanner cache disabled; maintenance loop will not start."); + return; + } + + if (!settings.EnableAutoEviction) + { + _logger.LogInformation("Scanner cache automatic eviction disabled by configuration."); + return; + } + + var interval = settings.MaintenanceInterval > TimeSpan.Zero + ? settings.MaintenanceInterval + : TimeSpan.FromMinutes(15); + + _logger.LogInformation("Scanner cache maintenance loop started with interval {Interval}", interval); + + await RunMaintenanceAsync(stoppingToken).ConfigureAwait(false); + + using var timer = new PeriodicTimer(interval, _timeProvider); + while (await timer.WaitForNextTickAsync(stoppingToken).ConfigureAwait(false)) + { + await RunMaintenanceAsync(stoppingToken).ConfigureAwait(false); + } + } + + private async Task RunMaintenanceAsync(CancellationToken cancellationToken) + { + try + { + var layerExpired = await _layerCache.EvictExpiredAsync(cancellationToken).ConfigureAwait(false); + var layerCompacted = await _layerCache.CompactAsync(cancellationToken).ConfigureAwait(false); + var casExpired = await _fileCas.EvictExpiredAsync(cancellationToken).ConfigureAwait(false); + var casCompacted = await _fileCas.CompactAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogDebug( + "Scanner cache maintenance tick complete (layers expired={LayersExpired}, layers compacted={LayersCompacted}, cas expired={CasExpired}, cas compacted={CasCompacted})", + layerExpired, + layerCompacted, + casExpired, + casCompacted); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + // Shutting down; ignore. 
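// The `when` filter above scopes this branch to host shutdown; any other OperationCanceledException falls through to the generic handler below and is logged as a failed maintenance tick.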
+ } + catch (Exception ex) + { + _logger.LogError(ex, "Scanner cache maintenance tick failed"); + } + } +} diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs b/src/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs new file mode 100644 index 00000000..129d1453 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs @@ -0,0 +1,43 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Scanner.Cache; + +public static class ScannerCacheMetrics +{ + public const string MeterName = "StellaOps.Scanner.Cache"; + + private static readonly Meter Meter = new(MeterName, "1.0.0"); + + private static readonly Counter LayerHits = Meter.CreateCounter("scanner.layer_cache_hits_total"); + private static readonly Counter LayerMisses = Meter.CreateCounter("scanner.layer_cache_misses_total"); + private static readonly Counter LayerEvictions = Meter.CreateCounter("scanner.layer_cache_evictions_total"); + private static readonly Histogram LayerBytes = Meter.CreateHistogram("scanner.layer_cache_bytes"); + private static readonly Counter FileCasHits = Meter.CreateCounter("scanner.file_cas_hits_total"); + private static readonly Counter FileCasMisses = Meter.CreateCounter("scanner.file_cas_misses_total"); + private static readonly Counter FileCasEvictions = Meter.CreateCounter("scanner.file_cas_evictions_total"); + private static readonly Histogram FileCasBytes = Meter.CreateHistogram("scanner.file_cas_bytes"); + + public static void RecordLayerHit(string layerDigest) + => LayerHits.Add(1, new KeyValuePair("layer", layerDigest)); + + public static void RecordLayerMiss(string layerDigest) + => LayerMisses.Add(1, new KeyValuePair("layer", layerDigest)); + + public static void RecordLayerEviction(string layerDigest) + => LayerEvictions.Add(1, new KeyValuePair("layer", layerDigest)); + + public static void RecordLayerBytes(long bytes) + => LayerBytes.Record(bytes); + + public static void RecordFileCasHit(string sha256) + => FileCasHits.Add(1, new KeyValuePair("sha256", sha256)); + + public static void RecordFileCasMiss(string sha256) + => FileCasMisses.Add(1, new KeyValuePair("sha256", sha256)); + + public static void RecordFileCasEviction(string sha256) + => FileCasEvictions.Add(1, new KeyValuePair("sha256", sha256)); + + public static void RecordFileCasBytes(long bytes) + => FileCasBytes.Record(bytes); +} diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheOptions.cs b/src/StellaOps.Scanner.Cache/ScannerCacheOptions.cs new file mode 100644 index 00000000..427b0ccb --- /dev/null +++ b/src/StellaOps.Scanner.Cache/ScannerCacheOptions.cs @@ -0,0 +1,40 @@ +using System.IO; + +namespace StellaOps.Scanner.Cache; + +public sealed class ScannerCacheOptions +{ + private const long DefaultMaxBytes = 5L * 1024 * 1024 * 1024; // 5 GiB + + public bool Enabled { get; set; } = true; + + public string RootPath { get; set; } = Path.Combine("cache", "scanner"); + + public string LayersDirectoryName { get; set; } = "layers"; + + public string FileCasDirectoryName { get; set; } = "cas"; + + public TimeSpan LayerTtl { get; set; } = TimeSpan.FromDays(45); + + public TimeSpan FileTtl { get; set; } = TimeSpan.FromDays(30); + + public long MaxBytes { get; set; } = DefaultMaxBytes; + + public long WarmBytesThreshold { get; set; } = DefaultMaxBytes / 5; // 20 % + + public long ColdBytesThreshold { get; set; } = (DefaultMaxBytes * 4) / 5; // 80 % + + public bool EnableAutoEviction { get; set; } = true; + + public TimeSpan MaintenanceInterval { get; set; } = TimeSpan.FromMinutes(15); + + public bool EnableFileCas { 
get; set; } = true; + + public string? ImportDirectory { get; set; } + + public string? ExportDirectory { get; set; } + + public string LayersDirectoryPath => Path.Combine(RootPath, LayersDirectoryName); + + public string FileCasDirectoryPath => Path.Combine(RootPath, FileCasDirectoryName); +} diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs b/src/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs new file mode 100644 index 00000000..ff7a8f59 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs @@ -0,0 +1,51 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Cache.Abstractions; +using StellaOps.Scanner.Cache.FileCas; +using StellaOps.Scanner.Cache.LayerCache; +using StellaOps.Scanner.Cache.Maintenance; + +namespace StellaOps.Scanner.Cache; + +public static class ScannerCacheServiceCollectionExtensions +{ + public static IServiceCollection AddScannerCache( + this IServiceCollection services, + IConfiguration configuration, + string sectionName = "scanner:cache") + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddOptions() + .Bind(configuration.GetSection(sectionName)) + .Validate(options => !string.IsNullOrWhiteSpace(options.RootPath), "scanner:cache:rootPath must be configured"); + + services.TryAddSingleton(TimeProvider.System); + + services.TryAddSingleton(); + services.TryAddSingleton(sp => + { + var options = sp.GetRequiredService>(); + var timeProvider = sp.GetService() ?? TimeProvider.System; + var loggerFactory = sp.GetRequiredService(); + + if (!options.Value.EnableFileCas) + { + return new NullFileContentAddressableStore(); + } + + return new FileContentAddressableStore( + options, + loggerFactory.CreateLogger(), + timeProvider); + }); + + services.AddHostedService(); + + return services; + } +} diff --git a/src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj b/src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj new file mode 100644 index 00000000..bcfd5152 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj @@ -0,0 +1,19 @@ + + + net10.0 + enable + enable + false + + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Cache/TASKS.md b/src/StellaOps.Scanner.Cache/TASKS.md new file mode 100644 index 00000000..283fe459 --- /dev/null +++ b/src/StellaOps.Scanner.Cache/TASKS.md @@ -0,0 +1,10 @@ +# Scanner Cache Task Board (Sprint 10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-CACHE-10-101 | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-WORKER-09-201 | Implement layer cache store keyed by layer digest with metadata retention aligned with architecture §3.3 object layout. | Layer cache API supports get/put/delete by digest; metadata persisted with deterministic serialization; warm lookup covered by tests. | +| SCANNER-CACHE-10-102 | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Build file CAS with dedupe, TTL enforcement, and offline import/export hooks for offline kit workflows. | CAS stores content by SHA-256, enforces TTL policy, import/export commands documented and exercised in tests. 
| +| SCANNER-CACHE-10-103 | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Expose cache metrics/logging and configuration toggles for warm/cold thresholds. | Metrics counters/gauges emitted; options validated; logs include correlation IDs; configuration doc references settings. | +| SCANNER-CACHE-10-104 | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Implement cache invalidation workflows (layer delete, TTL expiry, diff invalidation). | Invalidation API implemented with deterministic eviction; tests cover TTL expiry + explicit delete; logs instrumented. | + +> Update statuses to DONE once acceptance criteria and tests/documentation are delivered. diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs b/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs new file mode 100644 index 00000000..1d6d684e --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs @@ -0,0 +1,93 @@ +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Core.Tests.Contracts; + +public sealed class ComponentGraphBuilderTests +{ + [Fact] + public void Build_AggregatesComponentsAcrossLayers() + { + var layer1 = LayerComponentFragment.Create("sha256:layer1", new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/a", "a", "1.0.0"), + LayerDigest = "sha256:layer1", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/package.json")), + Dependencies = ImmutableArray.Create("pkg:npm/x"), + Usage = ComponentUsage.Create(false), + Metadata = new ComponentMetadata + { + Scope = "runtime", + }, + } + }); + + var layer2 = LayerComponentFragment.Create("sha256:layer2", new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/a", "a", "1.0.0"), + LayerDigest = "sha256:layer2", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/index.js")), + Dependencies = ImmutableArray.Create("pkg:npm/y"), + Usage = ComponentUsage.Create(true, new[] { "/app/start.sh" }), + }, + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/b", "b", "2.0.0"), + LayerDigest = "sha256:layer2", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/b/package.json")), + } + }); + + var graph = ComponentGraphBuilder.Build(new[] { layer1, layer2 }); + + Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, graph.Layers.Select(layer => layer.LayerDigest)); + Assert.Equal(new[] { "pkg:npm/a", "pkg:npm/b" }, graph.Components.Select(component => component.Identity.Key)); + + var componentA = graph.ComponentMap["pkg:npm/a"]; + Assert.Equal("sha256:layer1", componentA.FirstLayerDigest); + Assert.Equal("sha256:layer2", componentA.LastLayerDigest); + Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, componentA.LayerDigests); + Assert.True(componentA.Usage.UsedByEntrypoint); + Assert.Contains("/app/start.sh", componentA.Usage.Entrypoints); + Assert.Equal(new[] { "pkg:npm/x", "pkg:npm/y" }, componentA.Dependencies); + Assert.Equal("runtime", componentA.Metadata?.Scope); + Assert.Equal(2, componentA.Evidence.Length); + + var componentB = graph.ComponentMap["pkg:npm/b"]; + Assert.Equal("sha256:layer2", componentB.FirstLayerDigest); + Assert.Null(componentB.LastLayerDigest); + Assert.Single(componentB.LayerDigests, "sha256:layer2"); + Assert.False(componentB.Usage.UsedByEntrypoint); + } + + [Fact] + public void 
Build_DeterministicOrdering() + { + var fragments = new[] + { + LayerComponentFragment.Create("sha256:layer1", new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/c", "c"), + LayerDigest = "sha256:layer1", + }, + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/a", "a"), + LayerDigest = "sha256:layer1", + } + }) + }; + + var graph1 = ComponentGraphBuilder.Build(fragments); + var graph2 = ComponentGraphBuilder.Build(fragments); + + Assert.Equal(graph1.Components.Select(c => c.Identity.Key), graph2.Components.Select(c => c.Identity.Key)); + } +} diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs b/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs new file mode 100644 index 00000000..d6492a6c --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs @@ -0,0 +1,85 @@ +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Serialization; + +namespace StellaOps.Scanner.Core.Tests.Contracts; + +public sealed class ComponentModelsTests +{ + [Fact] + public void ComponentIdentity_Create_Trimmed() + { + var identity = ComponentIdentity.Create(" pkg:npm/foo ", " Foo ", " 1.0.0 ", " pkg:npm/foo@1.0.0 ", " library ", " group "); + + Assert.Equal("pkg:npm/foo", identity.Key); + Assert.Equal("Foo", identity.Name); + Assert.Equal("1.0.0", identity.Version); + Assert.Equal("pkg:npm/foo@1.0.0", identity.Purl); + Assert.Equal("library", identity.ComponentType); + Assert.Equal("group", identity.Group); + } + + [Fact] + public void ComponentUsage_Create_SortsEntrypoints() + { + var usage = ComponentUsage.Create(true, new[] { "/app/start.sh", "/app/start.sh", "/bin/init", " ", null! 
}); + + Assert.True(usage.UsedByEntrypoint); + Assert.Equal(new[] { "/app/start.sh", "/bin/init" }, usage.Entrypoints); + } + + [Fact] + public void LayerComponentFragment_Create_SortsComponents() + { + var compB = new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/b", "b"), + LayerDigest = "sha256:layer2", + }; + + var compA = new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/a", "a"), + LayerDigest = "sha256:layer2", + }; + + var fragment = LayerComponentFragment.Create("sha256:layer2", new[] { compB, compA }); + + Assert.Equal("sha256:layer2", fragment.LayerDigest); + Assert.Equal(new[] { compA.Identity.Key, compB.Identity.Key }, fragment.Components.Select(c => c.Identity.Key)); + } + + [Fact] + public void ComponentRecord_Serializes_WithScannerDefaults() + { + var record = new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/test", "test", "1.0.0"), + LayerDigest = "sha256:layer", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/package.json")), + Dependencies = ImmutableArray.Create("pkg:npm/dep"), + Usage = ComponentUsage.Create(true, new[] { "/app/start.sh" }), + Metadata = new ComponentMetadata + { + Scope = "runtime", + Licenses = new[] { "MIT" }, + Properties = new Dictionary + { + ["source"] = "package-lock.json", + }, + }, + }; + + var json = JsonSerializer.Serialize(record, ScannerJsonOptions.Default); + var deserialized = JsonSerializer.Deserialize(json, ScannerJsonOptions.Default); + + Assert.NotNull(deserialized); + Assert.Equal(record.Identity.Key, deserialized!.Identity.Key); + Assert.Equal(record.Metadata?.Scope, deserialized.Metadata?.Scope); + Assert.True(deserialized.Usage.UsedByEntrypoint); + Assert.Equal(record.Usage.Entrypoints.AsSpan(), deserialized.Usage.Entrypoints.AsSpan()); + } +} diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs b/src/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs new file mode 100644 index 00000000..ec3536a5 --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs @@ -0,0 +1,130 @@ +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Serialization; +using StellaOps.Scanner.Core.Utility; +using Xunit; + +namespace StellaOps.Scanner.Core.Tests.Contracts; + +public sealed class ScannerCoreContractsTests +{ + private static readonly JsonSerializerOptions Options = ScannerJsonOptions.CreateDefault(); + private static readonly ScanJobId SampleJobId = ScanJobId.From(Guid.Parse("8f4cc9c5-8245-4b9d-9b4f-5ae049631b7d")); + private static readonly DateTimeOffset SampleCreatedAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero).AddTicks(1_234_560); + + [Fact] + public void ScanJob_RoundTripMatchesGoldenFixture() + { + var job = CreateSampleJob(); + + var json = JsonSerializer.Serialize(job, Options); + var expected = LoadFixture("scan-job.json"); + Assert.Equal(expected, json); + + var deserialized = JsonSerializer.Deserialize(expected, Options); + Assert.NotNull(deserialized); + Assert.Equal(job.Id, deserialized!.Id); + Assert.Equal(job.ImageDigest, deserialized.ImageDigest); + Assert.Equal(job.CorrelationId, deserialized.CorrelationId); + Assert.Equal(job.Metadata, deserialized.Metadata); + Assert.Equal(job.Failure?.Message, deserialized.Failure?.Message); + Assert.Equal(job.Failure?.Details, deserialized.Failure?.Details); + } + + [Fact] + public void ScanProgressEvent_RoundTripMatchesGoldenFixture() + { + var progress = 
CreateSampleProgressEvent(); + + var json = JsonSerializer.Serialize(progress, Options); + var expected = LoadFixture("scan-progress-event.json"); + Assert.Equal(expected, json); + + var deserialized = JsonSerializer.Deserialize(expected, Options); + Assert.NotNull(deserialized); + Assert.Equal(progress.JobId, deserialized!.JobId); + Assert.Equal(progress.Stage, deserialized.Stage); + Assert.Equal(progress.Kind, deserialized.Kind); + Assert.Equal(progress.Sequence, deserialized.Sequence); + Assert.Equal(progress.Error?.Details, deserialized.Error?.Details); + } + + [Fact] + public void ScannerError_RoundTripMatchesGoldenFixture() + { + var error = CreateSampleError(); + + var json = JsonSerializer.Serialize(error, Options); + var expected = LoadFixture("scanner-error.json"); + Assert.Equal(expected, json); + + var deserialized = JsonSerializer.Deserialize(expected, Options); + Assert.NotNull(deserialized); + Assert.Equal(error.Code, deserialized!.Code); + Assert.Equal(error.Severity, deserialized.Severity); + Assert.Equal(error.Details, deserialized.Details); + } + + private static ScanJob CreateSampleJob() + { + var updatedAt = SampleCreatedAt.AddSeconds(5); + var correlationId = ScannerIdentifiers.CreateCorrelationId(SampleJobId, nameof(ScanStage.AnalyzeOperatingSystem)); + + return new ScanJob( + SampleJobId, + ScanJobStatus.Running, + "registry.example.com/stellaops/scanner:1.2.3", + "SHA256:ABCDEF", + SampleCreatedAt, + updatedAt, + correlationId, + "tenant-a", + new Dictionary + { + ["requestId"] = "req-1234", + ["source"] = "ci" + }, + CreateSampleError()); + } + + private static ScanProgressEvent CreateSampleProgressEvent() + { + return new ScanProgressEvent( + SampleJobId, + ScanStage.AnalyzeOperatingSystem, + ScanProgressEventKind.Warning, + sequence: 3, + timestamp: SampleCreatedAt.AddSeconds(1), + percentComplete: 42.5, + message: "OS analyzer reported missing packages", + attributes: new Dictionary + { + ["package"] = "openssl", + ["version"] = "1.1.1w" + }, + error: CreateSampleError()); + } + + private static ScannerError CreateSampleError() + { + return new ScannerError( + ScannerErrorCode.AnalyzerFailure, + ScannerErrorSeverity.Error, + "Analyzer failed to parse layer", + SampleCreatedAt, + retryable: false, + details: new Dictionary + { + ["layerDigest"] = "sha256:deadbeef", + ["attempt"] = "1" + }, + stage: nameof(ScanStage.AnalyzeOperatingSystem), + component: "os-analyzer"); + } + + private static string LoadFixture(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName); + return File.ReadAllText(path).Trim(); + } +} diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json b/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json new file mode 100644 index 00000000..37ae5361 --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json @@ -0,0 +1 @@ +{"id":"8f4cc9c582454b9d9b4f5ae049631b7d","status":"running","imageReference":"registry.example.com/stellaops/scanner:1.2.3","imageDigest":"sha256:abcdef","createdAt":"2025-10-18T14:30:15.123456+00:00","updatedAt":"2025-10-18T14:30:20.123456+00:00","correlationId":"scan-analyzeoperatingsystem-8f4cc9c582454b9d9b4f5ae049631b7d","tenantId":"tenant-a","metadata":{"requestId":"req-1234","source":"ci"},"failure":{"code":"analyzerFailure","severity":"error","message":"Analyzer failed to parse 
layer","timestamp":"2025-10-18T14:30:15.123456+00:00","retryable":false,"stage":"AnalyzeOperatingSystem","component":"os-analyzer","details":{"layerDigest":"sha256:deadbeef","attempt":"1"}}} diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json b/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json new file mode 100644 index 00000000..2577f3c5 --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json @@ -0,0 +1 @@ +{"jobId":"8f4cc9c582454b9d9b4f5ae049631b7d","stage":"analyzeOperatingSystem","kind":"warning","sequence":3,"timestamp":"2025-10-18T14:30:16.123456+00:00","percentComplete":42.5,"message":"OS analyzer reported missing packages","attributes":{"package":"openssl","version":"1.1.1w"},"error":{"code":"analyzerFailure","severity":"error","message":"Analyzer failed to parse layer","timestamp":"2025-10-18T14:30:15.123456+00:00","retryable":false,"stage":"AnalyzeOperatingSystem","component":"os-analyzer","details":{"layerDigest":"sha256:deadbeef","attempt":"1"}}} diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json b/src/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json new file mode 100644 index 00000000..1ee0bcf6 --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json @@ -0,0 +1 @@ +{"code":"analyzerFailure","severity":"error","message":"Analyzer failed to parse layer","timestamp":"2025-10-18T14:30:15.123456+00:00","retryable":false,"stage":"AnalyzeOperatingSystem","component":"os-analyzer","details":{"layerDigest":"sha256:deadbeef","attempt":"1"}} diff --git a/src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs b/src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs new file mode 100644 index 00000000..0f830365 --- /dev/null +++ b/src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs @@ -0,0 +1,103 @@ +using System.Collections.Generic; +using System.Diagnostics; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Observability; +using StellaOps.Scanner.Core.Utility; +using Xunit; + +namespace StellaOps.Scanner.Core.Tests.Observability; + +public sealed class ScannerLogExtensionsPerformanceTests +{ + private const double ThresholdMicroseconds = 5.0; + private const int WarmupIterations = 5_000; + private const int MeasuredIterations = 200_000; + private static readonly DateTimeOffset Timestamp = ScannerTimestamps.Normalize(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); + private static readonly string Stage = nameof(ScanStage.AnalyzeOperatingSystem); + private static readonly string Component = "os-analyzer"; + + [Fact] + public void BeginScanScope_CompletesWithinThreshold() + { + using var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => false)); + var logger = factory.CreateLogger("ScannerPerformance"); + var job = CreateScanJob(); + + var microseconds = Measure(() => logger.BeginScanScope(job, Stage, Component)); + + Assert.True(microseconds <= ThresholdMicroseconds, $"Expected BeginScanScope to stay ≤ {ThresholdMicroseconds} µs but measured {microseconds:F3} µs."); + } + + [Fact] + public void BeginProgressScope_CompletesWithinThreshold() + { + using var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => false)); + var logger = factory.CreateLogger("ScannerPerformance"); + var progress = CreateProgressEvent(); + + var microseconds = Measure(() => 
logger.BeginProgressScope(progress, Component)); + + Assert.True(microseconds <= ThresholdMicroseconds, $"Expected BeginProgressScope to stay ≤ {ThresholdMicroseconds} µs but measured {microseconds:F3} µs."); + } + + private static double Measure(Func scopeFactory) + { + for (var i = 0; i < WarmupIterations; i++) + { + using var scope = scopeFactory(); + } + + GC.Collect(); + GC.WaitForPendingFinalizers(); + GC.Collect(); + + var stopwatch = Stopwatch.StartNew(); + for (var i = 0; i < MeasuredIterations; i++) + { + using var scope = scopeFactory(); + } + + stopwatch.Stop(); + + return stopwatch.Elapsed.TotalSeconds * 1_000_000 / MeasuredIterations; + } + + private static ScanJob CreateScanJob() + { + var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "perf"); + var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, Stage, Component); + + return new ScanJob( + jobId, + ScanJobStatus.Running, + "registry.example.com/stellaops/scanner:1.2.3", + "sha256:abcdef", + Timestamp, + Timestamp, + correlationId, + "tenant-a", + new Dictionary(StringComparer.Ordinal) + { + ["requestId"] = "req-perf" + }); + } + + private static ScanProgressEvent CreateProgressEvent() + { + var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "perf"); + + return new ScanProgressEvent( + jobId, + ScanStage.AnalyzeOperatingSystem, + ScanProgressEventKind.Progress, + sequence: 42, + Timestamp, + percentComplete: 10.5, + message: "performance check", + attributes: new Dictionary(StringComparer.Ordinal) + { + ["sample"] = "true" + }); + } +} diff --git a/src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs b/src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs index 1bc72b99..ef43bb6c 100644 --- a/src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs +++ b/src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs @@ -2,9 +2,9 @@ using System.Collections.Generic; using System.IdentityModel.Tokens.Jwt; using System.Security.Cryptography; using Microsoft.Extensions.Time.Testing; -using Microsoft.Extensions.Options; using Microsoft.IdentityModel.Tokens; -using StellaOps.Scanner.Core.Security; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Security.Dpop; using Xunit; namespace StellaOps.Scanner.Core.Tests.Security; diff --git a/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj b/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj index 6875005c..4c94d15d 100644 --- a/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj +++ b/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj @@ -9,4 +9,7 @@ + + + diff --git a/src/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs b/src/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs new file mode 100644 index 00000000..e334271f --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs @@ -0,0 +1,242 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Scanner.Core.Contracts; + +public sealed record ComponentGraph +{ + public required ImmutableArray Layers { get; init; } + + public required ImmutableArray Components { get; init; } + + public ImmutableDictionary ComponentMap { get; init; } = ImmutableDictionary.Empty; + + public bool TryGetComponent(string key, out AggregatedComponent component) + => 
ComponentMap.TryGetValue(key, out component!); +} + +public static class ComponentGraphBuilder +{ + public static ComponentGraph Build(IEnumerable fragments) + { + ArgumentNullException.ThrowIfNull(fragments); + + var orderedLayers = fragments + .Where(static fragment => !string.IsNullOrWhiteSpace(fragment.LayerDigest)) + .Select(NormalizeFragment) + .ToImmutableArray(); + + var accumulators = new Dictionary(StringComparer.Ordinal); + + foreach (var fragment in orderedLayers) + { + foreach (var component in fragment.Components) + { + var key = component.Identity.Key; + if (string.IsNullOrWhiteSpace(key)) + { + continue; + } + + if (!accumulators.TryGetValue(key, out var accumulator)) + { + accumulator = new ComponentAccumulator(component.Identity); + accumulators.Add(key, accumulator); + } + + accumulator.Include(component, fragment.LayerDigest); + } + } + + var components = accumulators.Values + .Select(static accumulator => accumulator.ToAggregatedComponent()) + .OrderBy(static component => component.Identity.Key, StringComparer.Ordinal) + .ToImmutableArray(); + + var map = components.ToImmutableDictionary(static component => component.Identity.Key, StringComparer.Ordinal); + + return new ComponentGraph + { + Layers = orderedLayers, + Components = components, + ComponentMap = map, + }; + } + + private static LayerComponentFragment NormalizeFragment(LayerComponentFragment fragment) + { + if (fragment.Components.All(component => string.Equals(component.LayerDigest, fragment.LayerDigest, StringComparison.Ordinal))) + { + return fragment; + } + + var normalizedComponents = fragment.Components + .Select(component => component.LayerDigest.Equals(fragment.LayerDigest, StringComparison.Ordinal) + ? component + : component with { LayerDigest = fragment.LayerDigest }) + .ToImmutableArray(); + + return fragment with { Components = normalizedComponents }; + } + + private sealed class ComponentAccumulator + { + private readonly ComponentIdentity _identity; + private readonly SortedSet _layers = new(StringComparer.Ordinal); + private readonly SortedSet _dependencies = new(StringComparer.Ordinal); + private readonly Dictionary _evidence = new(); + private ComponentUsage _usage = ComponentUsage.Unused; + private ComponentMetadata? _metadata; + private string? _firstLayer; + private string? _lastLayer; + + public ComponentAccumulator(ComponentIdentity identity) + { + _identity = identity; + } + + public void Include(ComponentRecord record, string layerDigest) + { + _layers.Add(layerDigest); + _dependencies.UnionWith(record.Dependencies); + + foreach (var evidence in record.Evidence) + { + var key = new EvidenceKey(evidence.Kind, evidence.Value, evidence.Source); + _evidence[key] = evidence; + } + + if (record.Metadata is not null) + { + _metadata = _metadata is null + ? 
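// The first fragment that reports metadata seeds the aggregate; later fragments are merged field-by-field via MergeMetadata.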
record.Metadata + : MergeMetadata(_metadata, record.Metadata); + } + + if (record.Usage.UsedByEntrypoint || _usage.UsedByEntrypoint) + { + var entrypoints = record.Usage.EntryPointsOrEmpty(); + var existing = _usage.EntryPointsOrEmpty(); + var builder = ImmutableSortedSet.CreateBuilder(StringComparer.Ordinal); + builder.UnionWith(existing); + builder.UnionWith(entrypoints); + _usage = new ComponentUsage(true, builder.ToImmutableArray()); + } + else if (!_usage.UsedByEntrypoint && record.Usage.UsedByEntrypoint) + { + _usage = record.Usage; + } + else if (_usage is { UsedByEntrypoint: false } && record.Usage is { UsedByEntrypoint: false } && record.Usage.Entrypoints.Length > 0) + { + _usage = new ComponentUsage(false, record.Usage.Entrypoints); + } + + if (_firstLayer is null) + { + _firstLayer = layerDigest; + } + else if (!StringComparer.Ordinal.Equals(_firstLayer, layerDigest)) + { + _lastLayer = layerDigest; + } + } + + public AggregatedComponent ToAggregatedComponent() + { + return new AggregatedComponent + { + Identity = _identity, + FirstLayerDigest = _firstLayer ?? string.Empty, + LastLayerDigest = _lastLayer, + LayerDigests = _layers.ToImmutableArray(), + Evidence = _evidence.Values + .OrderBy(static evidence => evidence.Kind, StringComparer.Ordinal) + .ThenBy(static evidence => evidence.Value, StringComparer.Ordinal) + .ThenBy(static evidence => evidence.Source, StringComparer.Ordinal) + .ToImmutableArray(), + Dependencies = _dependencies.ToImmutableArray(), + Metadata = _metadata, + Usage = _usage, + }; + } + } + + private static ComponentMetadata MergeMetadata(ComponentMetadata existing, ComponentMetadata incoming) + { + var scope = existing.Scope ?? incoming.Scope; + + var licenses = MergeLists(existing.Licenses, incoming.Licenses); + var properties = MergeDictionary(existing.Properties, incoming.Properties); + + return new ComponentMetadata + { + Scope = scope, + Licenses = licenses, + Properties = properties, + }; + } + + private static IReadOnlyList? MergeLists(IReadOnlyList? left, IReadOnlyList? right) + { + if ((left is null || left.Count == 0) && (right is null || right.Count == 0)) + { + return null; + } + + var builder = ImmutableSortedSet.CreateBuilder(StringComparer.Ordinal); + if (left is not null) + { + builder.UnionWith(left.Where(static item => !string.IsNullOrWhiteSpace(item))); + } + + if (right is not null) + { + builder.UnionWith(right.Where(static item => !string.IsNullOrWhiteSpace(item))); + } + + return builder.ToImmutableArray(); + } + + private static IReadOnlyDictionary? MergeDictionary(IReadOnlyDictionary? left, IReadOnlyDictionary? right) + { + if ((left is null || left.Count == 0) && (right is null || right.Count == 0)) + { + return null; + } + + var builder = ImmutableSortedDictionary.CreateBuilder(StringComparer.Ordinal); + if (left is not null) + { + foreach (var (key, value) in left) + { + if (!string.IsNullOrWhiteSpace(key) && value is not null) + { + builder[key] = value; + } + } + } + + if (right is not null) + { + foreach (var (key, value) in right) + { + if (!string.IsNullOrWhiteSpace(key) && value is not null) + { + builder[key] = value; + } + } + } + + return builder.ToImmutable(); + } + + private readonly record struct EvidenceKey(string Kind, string Value, string? 
Source); +} + +internal static class ComponentUsageExtensions +{ + public static ImmutableArray EntryPointsOrEmpty(this ComponentUsage usage) + => usage.Entrypoints; +} diff --git a/src/StellaOps.Scanner.Core/Contracts/ComponentModels.cs b/src/StellaOps.Scanner.Core/Contracts/ComponentModels.cs new file mode 100644 index 00000000..2ac0f5ac --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ComponentModels.cs @@ -0,0 +1,278 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.Core.Contracts; + +/// +/// Canonical identifier for a component discovered during analysis. +/// +public sealed record ComponentIdentity +{ + [JsonPropertyName("key")] + public string Key { get; init; } = string.Empty; + + [JsonPropertyName("name")] + public string Name { get; init; } = string.Empty; + + [JsonPropertyName("version")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Version { get; init; } + = null; + + [JsonPropertyName("purl")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Purl { get; init; } + = null; + + [JsonPropertyName("type")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ComponentType { get; init; } + = null; + + [JsonPropertyName("group")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Group { get; init; } + = null; + + public static ComponentIdentity Create(string key, string name, string? version = null, string? purl = null, string? componentType = null, string? group = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(key); + ArgumentException.ThrowIfNullOrWhiteSpace(name); + + key = key.Trim(); + name = name.Trim(); + version = version?.Trim(); + purl = purl?.Trim(); + componentType = componentType?.Trim(); + group = group?.Trim(); + + return new ComponentIdentity + { + Key = key, + Name = name, + Version = version, + Purl = purl, + ComponentType = componentType, + Group = group, + }; + } +} + +/// +/// Evidence associated with a component (e.g., file path, manifest origin). +/// +public sealed record ComponentEvidence +{ + [JsonPropertyName("kind")] + public string Kind { get; init; } = string.Empty; + + [JsonPropertyName("value")] + public string Value { get; init; } = string.Empty; + + [JsonPropertyName("source")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Source { get; init; } + = null; + + public static ComponentEvidence FromPath(string path) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + return new ComponentEvidence { Kind = "file", Value = path }; + } +} + +/// +/// Optional metadata describing dependency relationships or classification. +/// +public sealed record ComponentMetadata +{ + [JsonPropertyName("scope")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Scope { get; init; } + = null; + + [JsonPropertyName("licenses")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? Licenses { get; init; } + = null; + + [JsonPropertyName("properties")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Properties { get; init; } + = null; +} + +/// +/// Represents a single component discovered within a layer fragment. 
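/// Records are produced per layer by the analyzers and merged across layers by ComponentGraphBuilder.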
+/// +public sealed record ComponentRecord +{ + [JsonPropertyName("identity")] + public ComponentIdentity Identity { get; init; } = ComponentIdentity.Create("unknown", "unknown"); + + [JsonPropertyName("layerDigest")] + public string LayerDigest { get; init; } = string.Empty; + + [JsonPropertyName("evidence")] + public ImmutableArray Evidence { get; init; } = ImmutableArray.Empty; + + [JsonPropertyName("dependencies")] + public ImmutableArray Dependencies { get; init; } = ImmutableArray.Empty; + + [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ComponentMetadata? Metadata { get; init; } + = null; + + [JsonPropertyName("usage")] + public ComponentUsage Usage { get; init; } = ComponentUsage.Unused; + + public ComponentRecord WithUsage(ComponentUsage usage) + => this with { Usage = usage }; + + public ComponentRecord WithLayer(string layerDigest) + => this with { LayerDigest = layerDigest }; +} + +/// +/// Usage annotations (derived from EntryTrace or other signals). +/// +public sealed record ComponentUsage +{ + public static ComponentUsage Unused { get; } = new(false, ImmutableArray.Empty); + + public ComponentUsage(bool usedByEntrypoint, ImmutableArray entrypoints) + { + UsedByEntrypoint = usedByEntrypoint; + Entrypoints = entrypoints.IsDefault ? ImmutableArray.Empty : entrypoints; + } + + [JsonPropertyName("usedByEntrypoint")] + public bool UsedByEntrypoint { get; init; } + = false; + + [JsonPropertyName("entrypoints")] + public ImmutableArray Entrypoints { get; init; } + = ImmutableArray.Empty; + + public static ComponentUsage Create(bool usedByEntrypoint, IEnumerable? entrypoints = null) + { + if (entrypoints is null) + { + return new ComponentUsage(usedByEntrypoint, ImmutableArray.Empty); + } + + var builder = ImmutableSortedSet.CreateBuilder(StringComparer.Ordinal); + foreach (var entry in entrypoints) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + builder.Add(entry.Trim()); + } + + if (builder.Count == 0) + { + return new ComponentUsage(usedByEntrypoint, ImmutableArray.Empty); + } + + var arrayBuilder = ImmutableArray.CreateBuilder(builder.Count); + foreach (var entry in builder) + { + if (!string.IsNullOrEmpty(entry)) + { + arrayBuilder.Add(entry!); + } + } + + return new ComponentUsage(usedByEntrypoint, arrayBuilder.ToImmutable()); + } +} + +/// +/// Convenience helpers for component collections. +/// +public static class ComponentModelExtensions +{ + public static ImmutableArray Normalize(this IEnumerable? components) + { + if (components is null) + { + return ImmutableArray.Empty; + } + + return ImmutableArray.CreateRange(components); + } +} + +/// +/// Components introduced by a specific layer. +/// +public sealed record LayerComponentFragment +{ + [JsonPropertyName("layerDigest")] + public string LayerDigest { get; init; } = string.Empty; + + [JsonPropertyName("components")] + public ImmutableArray Components { get; init; } = ImmutableArray.Empty; + + public static LayerComponentFragment Create(string layerDigest, IEnumerable? components) + { + ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest); + var list = components is null + ? 
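// A null component set is treated as empty; otherwise the records are materialised and sorted by identity key so fragment output stays deterministic.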
ImmutableArray.Empty + : ImmutableArray.CreateRange(components); + + if (!list.IsEmpty) + { + list = list + .OrderBy(component => component.Identity.Key, StringComparer.Ordinal) + .ToImmutableArray(); + } + + return new LayerComponentFragment + { + LayerDigest = layerDigest, + Components = list, + }; + } +} + +/// +/// Aggregated component spanning the complete image (all layers). +/// +public sealed record AggregatedComponent +{ + [JsonPropertyName("identity")] + public ComponentIdentity Identity { get; init; } = ComponentIdentity.Create("unknown", "unknown"); + + [JsonPropertyName("firstLayerDigest")] + public string FirstLayerDigest { get; init; } = string.Empty; + + [JsonPropertyName("lastLayerDigest")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? LastLayerDigest { get; init; } + = null; + + [JsonPropertyName("layerDigests")] + public ImmutableArray LayerDigests { get; init; } = ImmutableArray.Empty; + + [JsonPropertyName("evidence")] + public ImmutableArray Evidence { get; init; } = ImmutableArray.Empty; + + [JsonPropertyName("dependencies")] + public ImmutableArray Dependencies { get; init; } = ImmutableArray.Empty; + + [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ComponentMetadata? Metadata { get; init; } + = null; + + [JsonPropertyName("usage")] + public ComponentUsage Usage { get; init; } = ComponentUsage.Unused; +} diff --git a/src/StellaOps.Scanner.Core/Contracts/SbomView.cs b/src/StellaOps.Scanner.Core/Contracts/SbomView.cs new file mode 100644 index 00000000..9c107f7b --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/SbomView.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Scanner.Core.Contracts; + +public enum SbomView +{ + Inventory, + Usage, +} diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs new file mode 100644 index 00000000..3a4d22b0 --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Scanner.Core.Contracts; + +public static class ScanAnalysisKeys +{ + public const string OsPackageAnalyzers = "analysis.os.packages"; + + public const string OsComponentFragments = "analysis.os.fragments"; + + public const string LayerComponentFragments = "analysis.layers.fragments"; +} diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs new file mode 100644 index 00000000..45511d26 --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace StellaOps.Scanner.Core.Contracts; + +public sealed class ScanAnalysisStore +{ + private readonly ConcurrentDictionary _items = new(StringComparer.OrdinalIgnoreCase); + + public void Set(string key, T value) + { + ArgumentException.ThrowIfNullOrWhiteSpace(key); + ArgumentNullException.ThrowIfNull(value); + _items[key] = value!; + } + + public bool TryGet(string key, out T value) + { + ArgumentException.ThrowIfNullOrWhiteSpace(key); + + if (_items.TryGetValue(key, out var stored) && stored is T typed) + { + value = typed; + return true; + } + + value = default!; + return false; + } + + public IReadOnlyDictionary Snapshot() + => new ReadOnlyDictionary(new Dictionary(_items, StringComparer.OrdinalIgnoreCase)); +} diff --git 
a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs new file mode 100644 index 00000000..a518c5c5 --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs @@ -0,0 +1,41 @@ +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Scanner.Core.Contracts; + +public static class ScanAnalysisStoreExtensions +{ + public static ImmutableArray GetLayerFragments(this ScanAnalysisStore store) + { + ArgumentNullException.ThrowIfNull(store); + + if (store.TryGet>(ScanAnalysisKeys.LayerComponentFragments, out var fragments) && !fragments.IsDefault) + { + return fragments; + } + + return ImmutableArray.Empty; + } + + public static ImmutableArray AppendLayerFragments(this ScanAnalysisStore store, IEnumerable fragments) + { + ArgumentNullException.ThrowIfNull(store); + ArgumentNullException.ThrowIfNull(fragments); + + var newFragments = fragments.ToImmutableArray(); + if (newFragments.IsDefaultOrEmpty) + { + return store.GetLayerFragments(); + } + + if (store.TryGet>(ScanAnalysisKeys.LayerComponentFragments, out var existing) && !existing.IsDefaultOrEmpty) + { + var combined = existing.AddRange(newFragments); + store.Set(ScanAnalysisKeys.LayerComponentFragments, combined); + return combined; + } + + store.Set(ScanAnalysisKeys.LayerComponentFragments, newFragments); + return newFragments; + } +} diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs b/src/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs new file mode 100644 index 00000000..e4be4da6 --- /dev/null +++ b/src/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs @@ -0,0 +1,7 @@ +namespace StellaOps.Scanner.Core.Contracts; + +public static class ScanMetadataKeys +{ + public const string RootFilesystemPath = "scanner.rootfs.path"; + public const string WorkspacePath = "scanner.workspace.path"; +} diff --git a/src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs b/src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs index 833c93ee..9419f8d5 100644 --- a/src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs @@ -1,7 +1,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; using StellaOps.Auth.Client; +using StellaOps.Auth.Security.Dpop; namespace StellaOps.Scanner.Core.Security; diff --git a/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj b/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj index ee9ecfa5..e889a8d5 100644 --- a/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj +++ b/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj @@ -9,10 +9,9 @@ - - + diff --git a/src/StellaOps.Scanner.Core/TASKS.md b/src/StellaOps.Scanner.Core/TASKS.md index 55eeac1e..57fdfaeb 100644 --- a/src/StellaOps.Scanner.Core/TASKS.md +++ b/src/StellaOps.Scanner.Core/TASKS.md @@ -2,6 +2,6 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCANNER-CORE-09-501 | DONE (2025-10-18) | Scanner Core Guild | — | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. | DTOs serialize deterministically, helpers produce reproducible IDs/timestamps, tests cover round-trips and hash derivation. 
| -| SCANNER-CORE-09-502 | DONE (2025-10-18) | Scanner Core Guild | SCANNER-CORE-09-501 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. | Logging/metrics helpers allocate minimally, correlation IDs stable, ActivitySource emitted; tests assert determinism. | +| SCANNER-CORE-09-501 | DONE (2025-10-19) | Scanner Core Guild | — | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4.
2025-10-19: Added golden fixtures + `ScannerCoreContractsTests` to lock canonical JSON.
2025-10-19: Published canonical JSON snippet + acceptance notes in `docs/scanner-core-contracts.md`. | DTOs serialize deterministically, helpers produce reproducible IDs/timestamps, tests cover round-trips and hash derivation. | +| SCANNER-CORE-09-502 | DONE (2025-10-19) | Scanner Core Guild | SCANNER-CORE-09-501 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker.
2025-10-19: Verified progress scope serialisation via new fixtures/tests.
2025-10-19: Added `ScannerLogExtensionsPerformanceTests` to enforce ≤ 5 µs scope overhead + documented micro-bench results. | Logging/metrics helpers allocate minimally, correlation IDs stable, ActivitySource emitted; tests assert determinism. | | SCANNER-CORE-09-503 | DONE (2025-10-18) | Scanner Core Guild | SCANNER-CORE-09-501, SCANNER-CORE-09-502 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. | Authority helpers cache tokens, DPoP validator rejects invalid proofs, plug-in guard prevents runtime additions; tests cover happy/error paths. | diff --git a/src/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs b/src/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs new file mode 100644 index 00000000..1602f2a4 --- /dev/null +++ b/src/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs @@ -0,0 +1,295 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Diff; +using StellaOps.Scanner.Core.Utility; +using Xunit; + +namespace StellaOps.Scanner.Diff.Tests; + +public sealed class ComponentDifferTests +{ + [Fact] + public void Compute_CapturesAddedRemovedAndChangedComponents() + { + var oldFragments = new[] + { + LayerComponentFragment.Create("sha256:layer1", new[] + { + CreateComponent( + "pkg:npm/a", + version: "1.0.0", + layer: "sha256:layer1", + usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }), + evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }), + CreateComponent("pkg:npm/b", version: "2.0.0", layer: "sha256:layer1", scope: "runtime"), + }), + LayerComponentFragment.Create("sha256:layer1b", new[] + { + CreateComponent( + "pkg:npm/a", + version: "1.0.0", + layer: "sha256:layer1b", + usage: ComponentUsage.Create(true, new[] { "/app/start.sh" })), + CreateComponent("pkg:npm/d", version: "0.9.0", layer: "sha256:layer1b"), + }) + }; + + var newFragments = new[] + { + LayerComponentFragment.Create("sha256:layer2", new[] + { + CreateComponent( + "pkg:npm/a", + version: "1.1.0", + layer: "sha256:layer2", + usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }), + evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }), + }), + LayerComponentFragment.Create("sha256:layer3", new[] + { + CreateComponent( + "pkg:npm/b", + version: "2.0.0", + layer: "sha256:layer3", + usage: ComponentUsage.Create(true, new[] { "/app/init.sh" }), + scope: "runtime"), + CreateComponent("pkg:npm/c", version: "3.0.0", layer: "sha256:layer3"), + }) + }; + + var oldGraph = ComponentGraphBuilder.Build(oldFragments); + var newGraph = ComponentGraphBuilder.Build(newFragments); + + var request = new ComponentDiffRequest + { + OldGraph = oldGraph, + NewGraph = newGraph, + GeneratedAt = new DateTimeOffset(2025, 10, 19, 10, 0, 0, TimeSpan.Zero), + View = SbomView.Inventory, + OldImageDigest = "sha256:old", + NewImageDigest = "sha256:new", + }; + + var differ = new ComponentDiffer(); + var document = differ.Compute(request); + + Assert.Equal(SbomView.Inventory, document.View); + Assert.Equal("sha256:old", document.OldImageDigest); + Assert.Equal("sha256:new", document.NewImageDigest); + Assert.Equal(1, document.Summary.Added); + Assert.Equal(1, document.Summary.Removed); + Assert.Equal(1, document.Summary.VersionChanged); + Assert.Equal(1, document.Summary.MetadataChanged); + + Assert.Equal(new[] { "sha256:layer2", "sha256:layer3", 
"sha256:layer1b" }, document.Layers.Select(layer => layer.LayerDigest)); + + var layerGroups = document.Layers.ToDictionary(layer => layer.LayerDigest); + Assert.True(layerGroups.ContainsKey("sha256:layer2"), "Expected layer2 group present"); + Assert.True(layerGroups.ContainsKey("sha256:layer3"), "Expected layer3 group present"); + Assert.True(layerGroups.ContainsKey("sha256:layer1b"), "Expected layer1b group present"); + + var addedChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.Added); + Assert.Equal("pkg:npm/c", addedChange.ComponentKey); + Assert.NotNull(addedChange.NewComponent); + + var versionChange = layerGroups["sha256:layer2"].Changes.Single(change => change.Kind == ComponentChangeKind.VersionChanged); + Assert.Equal("pkg:npm/a", versionChange.ComponentKey); + Assert.Equal("sha256:layer1b", versionChange.RemovingLayer); + Assert.Equal("sha256:layer2", versionChange.IntroducingLayer); + Assert.Equal("1.1.0", versionChange.NewComponent!.Identity.Version); + + var metadataChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.MetadataChanged); + Assert.True(metadataChange.NewComponent!.Usage.UsedByEntrypoint); + Assert.False(metadataChange.OldComponent!.Usage.UsedByEntrypoint); + Assert.Equal("sha256:layer3", metadataChange.IntroducingLayer); + Assert.Equal("sha256:layer1", metadataChange.RemovingLayer); + + var removedChange = layerGroups["sha256:layer1b"].Changes.Single(change => change.Kind == ComponentChangeKind.Removed); + Assert.Equal("pkg:npm/d", removedChange.ComponentKey); + Assert.Equal("sha256:layer1b", removedChange.RemovingLayer); + Assert.Null(removedChange.IntroducingLayer); + + var json = DiffJsonSerializer.Serialize(document); + using var parsed = JsonDocument.Parse(json); + var root = parsed.RootElement; + Assert.Equal("inventory", root.GetProperty("view").GetString()); + Assert.Equal("2025-10-19T10:00:00.000000Z", root.GetProperty("generatedAt").GetString()); + Assert.Equal("sha256:old", root.GetProperty("oldImageDigest").GetString()); + Assert.Equal("sha256:new", root.GetProperty("newImageDigest").GetString()); + + var summaryJson = root.GetProperty("summary"); + Assert.Equal(1, summaryJson.GetProperty("added").GetInt32()); + Assert.Equal(1, summaryJson.GetProperty("removed").GetInt32()); + Assert.Equal(1, summaryJson.GetProperty("versionChanged").GetInt32()); + Assert.Equal(1, summaryJson.GetProperty("metadataChanged").GetInt32()); + + var layersJson = root.GetProperty("layers"); + Assert.Equal(3, layersJson.GetArrayLength()); + + var layer2Json = layersJson[0]; + Assert.Equal("sha256:layer2", layer2Json.GetProperty("layerDigest").GetString()); + var layer2Changes = layer2Json.GetProperty("changes"); + Assert.Equal(1, layer2Changes.GetArrayLength()); + var versionChangeJson = layer2Changes.EnumerateArray().Single(); + Assert.Equal("versionChanged", versionChangeJson.GetProperty("kind").GetString()); + Assert.Equal("pkg:npm/a", versionChangeJson.GetProperty("componentKey").GetString()); + Assert.Equal("sha256:layer2", versionChangeJson.GetProperty("introducingLayer").GetString()); + Assert.Equal("sha256:layer1b", versionChangeJson.GetProperty("removingLayer").GetString()); + Assert.Equal("1.1.0", versionChangeJson.GetProperty("newComponent").GetProperty("identity").GetProperty("version").GetString()); + + var layer3Json = layersJson[1]; + Assert.Equal("sha256:layer3", layer3Json.GetProperty("layerDigest").GetString()); + var layer3Changes = 
layer3Json.GetProperty("changes"); + Assert.Equal(2, layer3Changes.GetArrayLength()); + var layer3ChangeArray = layer3Changes.EnumerateArray().ToArray(); + var metadataChangeJson = layer3ChangeArray[0]; + Assert.Equal("metadataChanged", metadataChangeJson.GetProperty("kind").GetString()); + Assert.Equal("pkg:npm/b", metadataChangeJson.GetProperty("componentKey").GetString()); + Assert.Equal("sha256:layer3", metadataChangeJson.GetProperty("introducingLayer").GetString()); + Assert.Equal("sha256:layer1", metadataChangeJson.GetProperty("removingLayer").GetString()); + Assert.True(metadataChangeJson.GetProperty("newComponent").GetProperty("usage").GetProperty("usedByEntrypoint").GetBoolean()); + Assert.False(metadataChangeJson.GetProperty("oldComponent").GetProperty("usage").GetProperty("usedByEntrypoint").GetBoolean()); + + var addedJson = layer3ChangeArray[1]; + Assert.Equal("added", addedJson.GetProperty("kind").GetString()); + Assert.Equal("pkg:npm/c", addedJson.GetProperty("componentKey").GetString()); + Assert.Equal("sha256:layer3", addedJson.GetProperty("introducingLayer").GetString()); + Assert.False(addedJson.TryGetProperty("removingLayer", out _)); + + var removedLayerJson = layersJson[2]; + Assert.Equal("sha256:layer1b", removedLayerJson.GetProperty("layerDigest").GetString()); + var removedChanges = removedLayerJson.GetProperty("changes"); + Assert.Equal(1, removedChanges.GetArrayLength()); + var removedJson = removedChanges.EnumerateArray().Single(); + Assert.Equal("removed", removedJson.GetProperty("kind").GetString()); + Assert.Equal("pkg:npm/d", removedJson.GetProperty("componentKey").GetString()); + Assert.Equal("sha256:layer1b", removedJson.GetProperty("removingLayer").GetString()); + Assert.False(removedJson.TryGetProperty("introducingLayer", out _)); + } + + [Fact] + public void Compute_UsageView_FiltersComponents() + { + var oldFragments = new[] + { + LayerComponentFragment.Create("sha256:base", new[] + { + CreateComponent("pkg:npm/a", "1", "sha256:base", usage: ComponentUsage.Create(false)), + }) + }; + + var newFragments = new[] + { + LayerComponentFragment.Create("sha256:new", new[] + { + CreateComponent("pkg:npm/a", "1", "sha256:new", usage: ComponentUsage.Create(false)), + CreateComponent("pkg:npm/b", "1", "sha256:new", usage: ComponentUsage.Create(true, new[] { "/entry" })), + }) + }; + + var request = new ComponentDiffRequest + { + OldGraph = ComponentGraphBuilder.Build(oldFragments), + NewGraph = ComponentGraphBuilder.Build(newFragments), + View = SbomView.Usage, + GeneratedAt = DateTimeOffset.UtcNow, + }; + + var differ = new ComponentDiffer(); + var document = differ.Compute(request); + + Assert.Single(document.Layers); + var layer = document.Layers[0]; + Assert.Single(layer.Changes); + Assert.Equal(ComponentChangeKind.Added, layer.Changes[0].Kind); + Assert.Equal("pkg:npm/b", layer.Changes[0].ComponentKey); + + var json = DiffJsonSerializer.Serialize(document); + using var parsed = JsonDocument.Parse(json); + Assert.Equal("usage", parsed.RootElement.GetProperty("view").GetString()); + Assert.Equal(1, parsed.RootElement.GetProperty("summary").GetProperty("added").GetInt32()); + Assert.False(parsed.RootElement.TryGetProperty("oldImageDigest", out _)); + Assert.False(parsed.RootElement.TryGetProperty("newImageDigest", out _)); + } + + [Fact] + public void Compute_MetadataChange_WhenEvidenceDiffers() + { + var oldFragments = new[] + { + LayerComponentFragment.Create("sha256:underlay", new[] + { + CreateComponent( + "pkg:npm/a", + version: "1.0.0", + layer: 
"sha256:underlay", + usage: ComponentUsage.Create(false), + evidence: new[] { ComponentEvidence.FromPath("/workspace/package-lock.json") }), + }), + }; + + var newFragments = new[] + { + LayerComponentFragment.Create("sha256:overlay", new[] + { + CreateComponent( + "pkg:npm/a", + version: "1.0.0", + layer: "sha256:overlay", + usage: ComponentUsage.Create(false), + evidence: new[] + { + ComponentEvidence.FromPath("/workspace/package-lock.json"), + ComponentEvidence.FromPath("/workspace/yarn.lock"), + }), + }), + }; + + var request = new ComponentDiffRequest + { + OldGraph = ComponentGraphBuilder.Build(oldFragments), + NewGraph = ComponentGraphBuilder.Build(newFragments), + GeneratedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), + }; + + var differ = new ComponentDiffer(); + var document = differ.Compute(request); + + Assert.Equal(0, document.Summary.Added); + Assert.Equal(0, document.Summary.Removed); + Assert.Equal(0, document.Summary.VersionChanged); + Assert.Equal(1, document.Summary.MetadataChanged); + + var layer = Assert.Single(document.Layers); + Assert.Equal("sha256:overlay", layer.LayerDigest); + + var change = Assert.Single(layer.Changes); + Assert.Equal(ComponentChangeKind.MetadataChanged, change.Kind); + Assert.Equal("sha256:overlay", change.IntroducingLayer); + Assert.Equal("sha256:underlay", change.RemovingLayer); + Assert.Equal(2, change.NewComponent!.Evidence.Length); + Assert.Equal(1, change.OldComponent!.Evidence.Length); + } + + private static ComponentRecord CreateComponent( + string key, + string version, + string layer, + ComponentUsage? usage = null, + string? scope = null, + IEnumerable? evidence = null) + { + return new ComponentRecord + { + Identity = ComponentIdentity.Create(key, key.Split('/', 2)[^1], version, purl: key, componentType: "library"), + LayerDigest = layer, + Usage = usage ?? ComponentUsage.Unused, + Metadata = scope is null ? null : new ComponentMetadata { Scope = scope }, + Evidence = evidence is null ? ImmutableArray.Empty : ImmutableArray.CreateRange(evidence), + }; + } +} diff --git a/src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj b/src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj new file mode 100644 index 00000000..c9e84dd0 --- /dev/null +++ b/src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj @@ -0,0 +1,11 @@ + + + net10.0 + enable + enable + + + + + + diff --git a/src/StellaOps.Scanner.Diff/AGENTS.md b/src/StellaOps.Scanner.Diff/AGENTS.md new file mode 100644 index 00000000..ddddbdae --- /dev/null +++ b/src/StellaOps.Scanner.Diff/AGENTS.md @@ -0,0 +1,20 @@ +# StellaOps.Scanner.Diff — Agent Charter + +## Mission +Deliver deterministic image-to-image component diffs grouped by layer with provenance signals that power policy previews, UI surfacing, and downstream scheduling. + +## Responsibilities +- Maintain diff computation pipelines for inventory and usage SBOM views. +- Ensure ordering, hashing, and serialization are stable across runs and hosts. +- Capture layer provenance, usage flags, and supporting evidence for every change. +- Provide JSON artifacts and helper APIs consumed by the Scanner WebService, Worker, CLI, and UI. + +## Interfaces & Dependencies +- Consumes normalized component fragments emitted by analyzers and usage signals from EntryTrace. +- Emits diff models used by `StellaOps.Scanner.WebService` and persisted by `StellaOps.Scanner.Storage`. +- Shares deterministic primitives from `StellaOps.Scanner.Core` once extended with component contracts. 
+ +## Testing Expectations +- Golden diff fixtures for add/remove/version-change flows. +- Determinism checks comparing shuffled inputs. +- Layer attribution regression tests to guard provenance correctness. diff --git a/src/StellaOps.Scanner.Diff/ComponentDiffModels.cs b/src/StellaOps.Scanner.Diff/ComponentDiffModels.cs new file mode 100644 index 00000000..3b57e456 --- /dev/null +++ b/src/StellaOps.Scanner.Diff/ComponentDiffModels.cs @@ -0,0 +1,109 @@ +using System; +using System.Collections.Immutable; +using System.Text.Json.Serialization; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Diff; + +public enum ComponentChangeKind +{ + Added, + Removed, + VersionChanged, + MetadataChanged, +} + +public sealed record ComponentDiffRequest +{ + public required ComponentGraph OldGraph { get; init; } + + public required ComponentGraph NewGraph { get; init; } + + public SbomView View { get; init; } = SbomView.Inventory; + + public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow; + + public string? OldImageDigest { get; init; } + = null; + + public string? NewImageDigest { get; init; } + = null; +} + +public sealed record ComponentChange +{ + [JsonPropertyName("kind")] + public ComponentChangeKind Kind { get; init; } + + [JsonPropertyName("componentKey")] + public string ComponentKey { get; init; } = string.Empty; + + [JsonPropertyName("introducingLayer")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? IntroducingLayer { get; init; } + = null; + + [JsonPropertyName("removingLayer")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RemovingLayer { get; init; } + = null; + + [JsonPropertyName("oldComponent")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public AggregatedComponent? OldComponent { get; init; } + = null; + + [JsonPropertyName("newComponent")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public AggregatedComponent? NewComponent { get; init; } + = null; +} + +public sealed record LayerDiff +{ + [JsonPropertyName("layerDigest")] + public string LayerDigest { get; init; } = string.Empty; + + [JsonPropertyName("changes")] + public ImmutableArray Changes { get; init; } = ImmutableArray.Empty; +} + +public sealed record DiffSummary +{ + [JsonPropertyName("added")] + public int Added { get; init; } + + [JsonPropertyName("removed")] + public int Removed { get; init; } + + [JsonPropertyName("versionChanged")] + public int VersionChanged { get; init; } + + [JsonPropertyName("metadataChanged")] + public int MetadataChanged { get; init; } +} + +public sealed record ComponentDiffDocument +{ + [JsonPropertyName("generatedAt")] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("view")] + public SbomView View { get; init; } + + [JsonPropertyName("oldImageDigest")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? OldImageDigest { get; init; } + = null; + + [JsonPropertyName("newImageDigest")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? 
NewImageDigest { get; init; } + = null; + + [JsonPropertyName("summary")] + public DiffSummary Summary { get; init; } = new(); + + [JsonPropertyName("layers")] + public ImmutableArray Layers { get; init; } = ImmutableArray.Empty; +} diff --git a/src/StellaOps.Scanner.Diff/ComponentDiffer.cs b/src/StellaOps.Scanner.Diff/ComponentDiffer.cs new file mode 100644 index 00000000..17806e58 --- /dev/null +++ b/src/StellaOps.Scanner.Diff/ComponentDiffer.cs @@ -0,0 +1,377 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Utility; + +namespace StellaOps.Scanner.Diff; + +public sealed class ComponentDiffer +{ + private static readonly StringComparer Ordinal = StringComparer.Ordinal; + private const string UnknownLayerKey = "(unknown)"; + + public ComponentDiffDocument Compute(ComponentDiffRequest request) + { + ArgumentNullException.ThrowIfNull(request); + + var generatedAt = ScannerTimestamps.Normalize(request.GeneratedAt); + var oldComponents = ToDictionary(FilterComponents(request.OldGraph, request.View)); + var newComponents = ToDictionary(FilterComponents(request.NewGraph, request.View)); + var layerOrder = BuildLayerOrder(request.OldGraph, request.NewGraph); + + var changes = new List(); + var counters = new DiffCounters(); + + foreach (var (key, newComponent) in newComponents) + { + if (!oldComponents.TryGetValue(key, out var oldComponent)) + { + changes.Add(new ComponentChange + { + Kind = ComponentChangeKind.Added, + ComponentKey = key, + IntroducingLayer = GetIntroducingLayer(newComponent), + NewComponent = newComponent, + }); + counters.Added++; + continue; + } + + var change = CompareComponents(oldComponent, newComponent, key); + if (change is not null) + { + changes.Add(change); + counters.Register(change.Kind); + } + + oldComponents.Remove(key); + } + + foreach (var (key, oldComponent) in oldComponents) + { + changes.Add(new ComponentChange + { + Kind = ComponentChangeKind.Removed, + ComponentKey = key, + RemovingLayer = GetRemovingLayer(oldComponent), + OldComponent = oldComponent, + }); + counters.Removed++; + } + + var layerGroups = changes + .GroupBy(ResolveLayerKey, Ordinal) + .OrderBy(group => layerOrder.TryGetValue(group.Key, out var position) ? position : int.MaxValue) + .ThenBy(static group => group.Key, Ordinal) + .Select(group => new LayerDiff + { + LayerDigest = group.Key, + Changes = group + .OrderBy(change => change.ComponentKey, Ordinal) + .ThenBy(change => change.Kind) + .ThenBy(change => change.NewComponent?.Identity.Version ?? change.OldComponent?.Identity.Version ?? string.Empty, Ordinal) + .ToImmutableArray(), + }) + .ToImmutableArray(); + + var document = new ComponentDiffDocument + { + GeneratedAt = generatedAt, + View = request.View, + OldImageDigest = request.OldImageDigest, + NewImageDigest = request.NewImageDigest, + Summary = counters.ToSummary(), + Layers = layerGroups, + }; + + return document; + } + + private static ComponentChange? 
CompareComponents(AggregatedComponent oldComponent, AggregatedComponent newComponent, string key) + { + var versionChanged = !string.Equals(oldComponent.Identity.Version, newComponent.Identity.Version, StringComparison.Ordinal); + if (versionChanged) + { + return new ComponentChange + { + Kind = ComponentChangeKind.VersionChanged, + ComponentKey = key, + IntroducingLayer = GetIntroducingLayer(newComponent), + RemovingLayer = GetRemovingLayer(oldComponent), + OldComponent = oldComponent, + NewComponent = newComponent, + }; + } + + var metadataChanged = HasMetadataChanged(oldComponent, newComponent); + if (!metadataChanged) + { + return null; + } + + return new ComponentChange + { + Kind = ComponentChangeKind.MetadataChanged, + ComponentKey = key, + IntroducingLayer = GetIntroducingLayer(newComponent), + RemovingLayer = GetRemovingLayer(oldComponent), + OldComponent = oldComponent, + NewComponent = newComponent, + }; + } + + private static bool HasMetadataChanged(AggregatedComponent oldComponent, AggregatedComponent newComponent) + { + if (!string.Equals(oldComponent.Identity.Name, newComponent.Identity.Name, StringComparison.Ordinal)) + { + return true; + } + + if (!string.Equals(oldComponent.Identity.ComponentType, newComponent.Identity.ComponentType, StringComparison.Ordinal)) + { + return true; + } + + if (!string.Equals(oldComponent.Identity.Group, newComponent.Identity.Group, StringComparison.Ordinal)) + { + return true; + } + + if (!string.Equals(oldComponent.Identity.Purl, newComponent.Identity.Purl, StringComparison.Ordinal)) + { + return true; + } + + if (!oldComponent.Dependencies.SequenceEqual(newComponent.Dependencies, Ordinal)) + { + return true; + } + + if (!oldComponent.LayerDigests.SequenceEqual(newComponent.LayerDigests, Ordinal)) + { + return true; + } + + if (!oldComponent.Evidence.SequenceEqual(newComponent.Evidence)) + { + return true; + } + + if (UsageChanged(oldComponent.Usage, newComponent.Usage)) + { + return true; + } + + if (!MetadataEquals(oldComponent.Metadata, newComponent.Metadata)) + { + return true; + } + + return false; + } + + private static bool UsageChanged(ComponentUsage oldUsage, ComponentUsage newUsage) + { + if (oldUsage.UsedByEntrypoint != newUsage.UsedByEntrypoint) + { + return true; + } + + return !oldUsage.Entrypoints.SequenceEqual(newUsage.Entrypoints, Ordinal); + } + + private static bool MetadataEquals(ComponentMetadata? left, ComponentMetadata? right) + { + if (left is null && right is null) + { + return true; + } + + if (left is null || right is null) + { + return false; + } + + if (!string.Equals(left.Scope, right.Scope, StringComparison.Ordinal)) + { + return false; + } + + if (!SequenceEqual(left.Licenses, right.Licenses)) + { + return false; + } + + if (!DictionaryEqual(left.Properties, right.Properties)) + { + return false; + } + + return true; + } + + private static bool SequenceEqual(IReadOnlyList? left, IReadOnlyList? right) + { + if (left is null && right is null) + { + return true; + } + + if (left is null || right is null) + { + return false; + } + + if (left.Count != right.Count) + { + return false; + } + + for (var i = 0; i < left.Count; i++) + { + if (!string.Equals(left[i], right[i], StringComparison.Ordinal)) + { + return false; + } + } + + return true; + } + + private static bool DictionaryEqual(IReadOnlyDictionary? left, IReadOnlyDictionary? 
right) + { + if (left is null && right is null) + { + return true; + } + + if (left is null || right is null) + { + return false; + } + + if (left.Count != right.Count) + { + return false; + } + + foreach (var (key, value) in left) + { + if (!right.TryGetValue(key, out var rightValue)) + { + return false; + } + + if (!string.Equals(value, rightValue, StringComparison.Ordinal)) + { + return false; + } + } + + return true; + } + + private static Dictionary ToDictionary(ImmutableArray components) + { + var dictionary = new Dictionary(components.Length, Ordinal); + foreach (var component in components) + { + dictionary[component.Identity.Key] = component; + } + + return dictionary; + } + + private static ImmutableArray FilterComponents(ComponentGraph graph, SbomView view) + { + if (view == SbomView.Usage) + { + return graph.Components.Where(static component => component.Usage.UsedByEntrypoint).ToImmutableArray(); + } + + return graph.Components; + } + + private static Dictionary BuildLayerOrder(ComponentGraph oldGraph, ComponentGraph newGraph) + { + var order = new Dictionary(Ordinal); + var index = 0; + + foreach (var layer in newGraph.Layers) + { + AddLayer(order, layer.LayerDigest, ref index); + } + + foreach (var layer in oldGraph.Layers) + { + AddLayer(order, layer.LayerDigest, ref index); + } + + return order; + } + + private static void AddLayer(IDictionary order, string? layerDigest, ref int index) + { + var normalized = NormalizeLayer(layerDigest); + if (normalized is null || order.ContainsKey(normalized)) + { + return; + } + + order[normalized] = index++; + } + + private static string ResolveLayerKey(ComponentChange change) + => NormalizeLayer(change.IntroducingLayer) ?? NormalizeLayer(change.RemovingLayer) ?? UnknownLayerKey; + + private static string? GetIntroducingLayer(AggregatedComponent component) + => NormalizeLayer(component.FirstLayerDigest); + + private static string? GetRemovingLayer(AggregatedComponent component) + { + var layer = component.LastLayerDigest ?? component.FirstLayerDigest; + return NormalizeLayer(layer); + } + + private static string? NormalizeLayer(string? 
layer) + { + if (string.IsNullOrWhiteSpace(layer)) + { + return null; + } + + return layer; + } + + private sealed class DiffCounters + { + public int Added; + public int Removed; + public int VersionChanged; + public int MetadataChanged; + + public void Register(ComponentChangeKind kind) + { + switch (kind) + { + case ComponentChangeKind.VersionChanged: + VersionChanged++; + break; + case ComponentChangeKind.MetadataChanged: + MetadataChanged++; + break; + } + } + + public DiffSummary ToSummary() + => new() + { + Added = Added, + Removed = Removed, + VersionChanged = VersionChanged, + MetadataChanged = MetadataChanged, + }; + } +} diff --git a/src/StellaOps.Scanner.Diff/DiffJsonSerializer.cs b/src/StellaOps.Scanner.Diff/DiffJsonSerializer.cs new file mode 100644 index 00000000..6df297e8 --- /dev/null +++ b/src/StellaOps.Scanner.Diff/DiffJsonSerializer.cs @@ -0,0 +1,10 @@ +using System.Text.Json; +using StellaOps.Scanner.Core.Serialization; + +namespace StellaOps.Scanner.Diff; + +public static class DiffJsonSerializer +{ + public static string Serialize(ComponentDiffDocument document) + => JsonSerializer.Serialize(document, ScannerJsonOptions.Default); +} diff --git a/src/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj b/src/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj new file mode 100644 index 00000000..3f7c0a76 --- /dev/null +++ b/src/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj @@ -0,0 +1,12 @@ + + + net10.0 + enable + enable + true + + + + + + diff --git a/src/StellaOps.Scanner.Diff/TASKS.md b/src/StellaOps.Scanner.Diff/TASKS.md new file mode 100644 index 00000000..ef74a909 --- /dev/null +++ b/src/StellaOps.Scanner.Diff/TASKS.md @@ -0,0 +1,7 @@ +# Scanner Diff Task Board (Sprint 10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-DIFF-10-501 | DONE (2025-10-19) | Diff Guild | SCANNER-CORE-09-501 | Build component differ tracking add/remove/version changes with deterministic ordering. | Diff engine produces deterministic results across runs; unit tests cover add/remove/version scenarios. | +| SCANNER-DIFF-10-502 | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-501 | Attribute diffs to introducing/removing layers including provenance evidence. | Layer attribution stored on every change; tests validate provenance with synthetic layer stacks. | +| SCANNER-DIFF-10-503 | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-502 | Produce JSON diff output for inventory vs usage views aligned with API contract. | JSON serializer emits stable ordering; golden fixture captured; API contract documented. 
| diff --git a/src/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs b/src/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs new file mode 100644 index 00000000..f2c9cfe1 --- /dev/null +++ b/src/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs @@ -0,0 +1,148 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Emit.Composition; +using Xunit; + +namespace StellaOps.Scanner.Emit.Tests.Composition; + +public sealed class CycloneDxComposerTests +{ + [Fact] + public void Compose_ProducesInventoryAndUsageArtifacts() + { + var request = BuildRequest(); + var composer = new CycloneDxComposer(); + + var result = composer.Compose(request); + + Assert.NotNull(result.Inventory); + Assert.StartsWith("urn:uuid:", result.Inventory.SerialNumber, StringComparison.Ordinal); + Assert.Equal("application/vnd.cyclonedx+json; version=1.5", result.Inventory.JsonMediaType); + Assert.Equal("application/vnd.cyclonedx+protobuf; version=1.5", result.Inventory.ProtobufMediaType); + Assert.Equal(2, result.Inventory.Components.Length); + + Assert.NotNull(result.Usage); + Assert.Equal("application/vnd.cyclonedx+json; version=1.5; view=usage", result.Usage!.JsonMediaType); + Assert.Single(result.Usage.Components); + Assert.Equal("pkg:npm/a", result.Usage.Components[0].Identity.Key); + + ValidateJson(result.Inventory.JsonBytes, expectedComponentCount: 2, expectedView: "inventory"); + ValidateJson(result.Usage.JsonBytes, expectedComponentCount: 1, expectedView: "usage"); + } + + [Fact] + public void Compose_IsDeterministic() + { + var request = BuildRequest(); + var composer = new CycloneDxComposer(); + + var first = composer.Compose(request); + var second = composer.Compose(request); + + Assert.Equal(first.Inventory.JsonSha256, second.Inventory.JsonSha256); + Assert.Equal(first.Inventory.ProtobufSha256, second.Inventory.ProtobufSha256); + Assert.Equal(first.Inventory.SerialNumber, second.Inventory.SerialNumber); + + Assert.NotNull(first.Usage); + Assert.NotNull(second.Usage); + Assert.Equal(first.Usage!.JsonSha256, second.Usage!.JsonSha256); + Assert.Equal(first.Usage.ProtobufSha256, second.Usage.ProtobufSha256); + Assert.Equal(first.Usage.SerialNumber, second.Usage.SerialNumber); + } + + private static SbomCompositionRequest BuildRequest() + { + var fragments = new[] + { + LayerComponentFragment.Create("sha256:layer1", new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/a", "component-a", "1.0.0", "pkg:npm/a@1.0.0", "library"), + LayerDigest = "sha256:layer1", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/package.json")), + Dependencies = ImmutableArray.Create("pkg:npm/b"), + Usage = ComponentUsage.Create(true, new[] { "/app/start.sh" }), + Metadata = new ComponentMetadata + { + Scope = "runtime", + Licenses = new[] { "MIT" }, + Properties = new Dictionary + { + ["stellaops:source"] = "package-lock.json", + ["stellaops.os.analyzer"] = "apk", + ["stellaops.os.architecture"] = "x86_64", + }, + }, + } + }), + LayerComponentFragment.Create("sha256:layer2", new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:npm/b", "component-b", "2.0.0", "pkg:npm/b@2.0.0", "library"), + LayerDigest = "sha256:layer2", + Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/b/package.json")), + Usage = 
ComponentUsage.Create(false), + Metadata = new ComponentMetadata + { + Scope = "development", + Properties = new Dictionary + { + ["stellaops.os.analyzer"] = "language-node", + }, + }, + } + }) + }; + + var image = new ImageArtifactDescriptor + { + ImageDigest = "sha256:1234567890abcdef", + ImageReference = "registry.example.com/app/service:1.2.3", + Repository = "registry.example.com/app/service", + Tag = "1.2.3", + Architecture = "amd64", + }; + + return SbomCompositionRequest.Create( + image, + fragments, + new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), + generatorName: "StellaOps.Scanner", + generatorVersion: "0.10.0", + properties: new Dictionary + { + ["stellaops:scanId"] = "scan-1234", + }); + } + + private static void ValidateJson(byte[] data, int expectedComponentCount, string expectedView) + { + using var document = JsonDocument.Parse(data); + var root = document.RootElement; + + Assert.True(root.TryGetProperty("metadata", out var metadata), "metadata property missing"); + var properties = metadata.GetProperty("properties"); + var viewProperty = properties.EnumerateArray() + .Single(prop => prop.GetProperty("name").GetString() == "stellaops:sbom.view"); + Assert.Equal(expectedView, viewProperty.GetProperty("value").GetString()); + + var components = root.GetProperty("components").EnumerateArray().ToArray(); + Assert.Equal(expectedComponentCount, components.Length); + + var names = components.Select(component => component.GetProperty("name").GetString()).ToArray(); + Assert.Equal(names, names.OrderBy(n => n, StringComparer.Ordinal).ToArray()); + + var firstComponentProperties = components[0].GetProperty("properties").EnumerateArray().ToDictionary( + element => element.GetProperty("name").GetString(), + element => element.GetProperty("value").GetString()); + + Assert.Equal("apk", firstComponentProperties["stellaops.os.analyzer"]); + Assert.Equal("x86_64", firstComponentProperties["stellaops.os.architecture"]); + } +} diff --git a/src/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs b/src/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs new file mode 100644 index 00000000..875e1dc3 --- /dev/null +++ b/src/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs @@ -0,0 +1,52 @@ +using System.Collections.Immutable; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Emit.Composition; +using Xunit; + +namespace StellaOps.Scanner.Emit.Tests.Composition; + +public class ScanAnalysisCompositionBuilderTests +{ + [Fact] + public void FromAnalysis_BuildsRequest_WhenFragmentsPresent() + { + var analysis = new ScanAnalysisStore(); + var fragment = LayerComponentFragment.Create( + "sha256:layer", + new[] + { + new ComponentRecord + { + Identity = ComponentIdentity.Create("pkg:test/a", "a", "1.0.0", "pkg:test/a@1.0.0", "library"), + LayerDigest = "sha256:layer", + Evidence = ImmutableArray.Empty, + Dependencies = ImmutableArray.Empty, + Metadata = null, + Usage = ComponentUsage.Unused, + } + }); + + analysis.AppendLayerFragments(new[] { fragment }); + + var request = ScanAnalysisCompositionBuilder.FromAnalysis( + analysis, + new ImageArtifactDescriptor { ImageDigest = "sha256:image" }, + DateTimeOffset.UtcNow, + generatorName: "test", + generatorVersion: "1.0.0"); + + Assert.Equal("sha256:image", request.Image.ImageDigest); + Assert.Single(request.LayerFragments); + Assert.Equal(fragment.LayerDigest, request.LayerFragments[0].LayerDigest); + } + + [Fact] + public void 
BuildComponentGraph_ReturnsEmpty_WhenNoFragments() + { + var analysis = new ScanAnalysisStore(); + var graph = ScanAnalysisCompositionBuilder.BuildComponentGraph(analysis); + + Assert.Empty(graph.Components); + Assert.Empty(graph.Layers); + } +} diff --git a/src/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs b/src/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs new file mode 100644 index 00000000..cd3076d8 --- /dev/null +++ b/src/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs @@ -0,0 +1,141 @@ +using System; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using Collections.Special; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Emit.Index; + +namespace StellaOps.Scanner.Emit.Tests.Index; + +public sealed class BomIndexBuilderTests +{ + [Fact] + public void Build_GeneratesDeterministicBinaryIndex_WithUsageBitmaps() + { + var graph = ComponentGraphBuilder.Build(new[] + { + LayerComponentFragment.Create("sha256:layer1", new[] + { + CreateComponent("pkg:npm/a", "1.0.0", "sha256:layer1", usageEntrypoints: new[] { "/app/start.sh" }), + CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer1"), + }), + LayerComponentFragment.Create("sha256:layer2", new[] + { + CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer2"), + CreateComponent("pkg:npm/c", "3.1.0", "sha256:layer2", usageEntrypoints: new[] { "/app/init.sh" }), + }), + }); + + var request = new BomIndexBuildRequest + { + ImageDigest = "sha256:image", + Graph = graph, + GeneratedAt = new DateTimeOffset(2025, 10, 19, 9, 45, 0, TimeSpan.Zero), + }; + + var builder = new BomIndexBuilder(); + var artifact = builder.Build(request); + var second = builder.Build(request); + + Assert.Equal(artifact.Sha256, second.Sha256); + Assert.Equal(artifact.Bytes, second.Bytes); + Assert.Equal(2, artifact.LayerCount); + Assert.Equal(3, artifact.ComponentCount); + Assert.Equal(2, artifact.EntrypointCount); + + using var reader = new BinaryReader(new MemoryStream(artifact.Bytes), System.Text.Encoding.UTF8, leaveOpen: false); + ValidateHeader(reader, request); + var layers = ReadTable(reader, artifact.LayerCount); + Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, layers); + + var purls = ReadTable(reader, artifact.ComponentCount); + Assert.Equal(new[] { "pkg:npm/a", "pkg:npm/b", "pkg:npm/c" }, purls); + + var componentBitmaps = ReadBitmaps(reader, artifact.ComponentCount); + Assert.Equal(new[] { new[] { 0 }, new[] { 0, 1 }, new[] { 1 } }, componentBitmaps); + + var entrypoints = ReadTable(reader, artifact.EntrypointCount); + Assert.Equal(new[] { "/app/init.sh", "/app/start.sh" }, entrypoints); + + var usageBitmaps = ReadBitmaps(reader, artifact.ComponentCount); + Assert.Equal(new[] { new[] { 1 }, Array.Empty(), new[] { 0 } }, usageBitmaps); + } + + private static void ValidateHeader(BinaryReader reader, BomIndexBuildRequest request) + { + var magic = reader.ReadBytes(7); + Assert.Equal("BOMIDX1", System.Text.Encoding.ASCII.GetString(magic)); + + var version = reader.ReadUInt16(); + Assert.Equal(1u, version); + + var flags = reader.ReadUInt16(); + Assert.Equal(0x1, flags); + + var digestLength = reader.ReadUInt16(); + var digestBytes = reader.ReadBytes(digestLength); + Assert.Equal(request.ImageDigest, System.Text.Encoding.UTF8.GetString(digestBytes)); + + var unixMicroseconds = reader.ReadInt64(); + var expectedMicroseconds = request.GeneratedAt.ToUniversalTime().ToUnixTimeMilliseconds() * 1000L; + expectedMicroseconds += request.GeneratedAt.ToUniversalTime().Ticks % 
TimeSpan.TicksPerMillisecond / 10; + Assert.Equal(expectedMicroseconds, unixMicroseconds); + + var layers = reader.ReadUInt32(); + var components = reader.ReadUInt32(); + var entrypoints = reader.ReadUInt32(); + + Assert.Equal(2u, layers); + Assert.Equal(3u, components); + Assert.Equal(2u, entrypoints); + } + + private static string[] ReadTable(BinaryReader reader, int count) + { + var values = new string[count]; + for (var i = 0; i < count; i++) + { + var length = reader.ReadUInt16(); + var bytes = reader.ReadBytes(length); + values[i] = System.Text.Encoding.UTF8.GetString(bytes); + } + + return values; + } + + private static int[][] ReadBitmaps(BinaryReader reader, int count) + { + var result = new int[count][]; + for (var i = 0; i < count; i++) + { + var length = reader.ReadUInt32(); + if (length == 0) + { + result[i] = Array.Empty(); + continue; + } + + var bytes = reader.ReadBytes((int)length); + using var ms = new MemoryStream(bytes, writable: false); + var bitmap = RoaringBitmap.Deserialize(ms); + result[i] = bitmap.ToArray(); + } + + return result; + } + + private static ComponentRecord CreateComponent(string key, string version, string layerDigest, string[]? usageEntrypoints = null) + { + var usage = usageEntrypoints is null + ? ComponentUsage.Unused + : ComponentUsage.Create(true, usageEntrypoints); + + return new ComponentRecord + { + Identity = ComponentIdentity.Create(key, key.Split('/', 2)[^1], version, key, "library"), + LayerDigest = layerDigest, + Usage = usage, + }; + } +} diff --git a/src/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs b/src/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs new file mode 100644 index 00000000..9e3149fc --- /dev/null +++ b/src/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Emit.Composition; +using StellaOps.Scanner.Emit.Index; +using StellaOps.Scanner.Emit.Packaging; + +namespace StellaOps.Scanner.Emit.Tests.Packaging; + +public sealed class ScannerArtifactPackageBuilderTests +{ + [Fact] + public void BuildPackage_ProducesDescriptorsAndManifest() + { + var fragments = new[] + { + LayerComponentFragment.Create("sha256:layer1", new[] + { + CreateComponent( + "pkg:npm/a", + "1.0.0", + "sha256:layer1", + usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }), + metadata: new Dictionary + { + ["stellaops.os.analyzer"] = "apk", + ["stellaops.os.architecture"] = "x86_64", + }), + CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer1"), + }), + LayerComponentFragment.Create("sha256:layer2", new[] + { + CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer2"), + CreateComponent("pkg:npm/c", "3.0.0", "sha256:layer2", usage: ComponentUsage.Create(true, new[] { "/app/init.sh" })), + }) + }; + + var request = SbomCompositionRequest.Create( + new ImageArtifactDescriptor + { + ImageDigest = "sha256:image", + ImageReference = "registry.example/app:latest", + Repository = "registry.example/app", + Tag = "latest", + }, + fragments, + new DateTimeOffset(2025, 10, 19, 12, 30, 0, TimeSpan.Zero), + generatorName: "StellaOps.Scanner", + generatorVersion: "0.10.0"); + + var composer = new CycloneDxComposer(); + var composition = composer.Compose(request); + + var indexBuilder = new BomIndexBuilder(); + var bomIndex = indexBuilder.Build(new BomIndexBuildRequest + { + ImageDigest = 
request.Image.ImageDigest, + Graph = composition.Graph, + GeneratedAt = request.GeneratedAt, + }); + + var packageBuilder = new ScannerArtifactPackageBuilder(); + var package = packageBuilder.Build(request.Image.ImageDigest, request.GeneratedAt, composition, bomIndex); + + Assert.Equal(5, package.Artifacts.Length); // inventory JSON+PB, usage JSON+PB, index + + var kinds = package.Manifest.Artifacts.Select(entry => entry.Kind).ToArray(); + Assert.Equal(new[] { "bom-index", "sbom-inventory", "sbom-inventory", "sbom-usage", "sbom-usage" }, kinds); + + var manifestJson = package.Manifest.ToJsonBytes(); + using var document = JsonDocument.Parse(manifestJson); + var root = document.RootElement; + Assert.Equal("sha256:image", root.GetProperty("imageDigest").GetString()); + Assert.Equal(5, root.GetProperty("artifacts").GetArrayLength()); + + var usageEntry = root.GetProperty("artifacts").EnumerateArray().First(element => element.GetProperty("kind").GetString() == "sbom-usage"); + Assert.Equal("application/vnd.cyclonedx+json; version=1.5; view=usage", usageEntry.GetProperty("mediaType").GetString()); + } + + private static ComponentRecord CreateComponent(string key, string version, string layerDigest, ComponentUsage? usage = null, IReadOnlyDictionary? metadata = null) + { + return new ComponentRecord + { + Identity = ComponentIdentity.Create(key, key.Split('/', 2)[^1], version, key, "library"), + LayerDigest = layerDigest, + Usage = usage ?? ComponentUsage.Unused, + Metadata = metadata is null + ? null + : new ComponentMetadata + { + Properties = metadata, + }, + }; + } +} diff --git a/src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj b/src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj new file mode 100644 index 00000000..f0bdf065 --- /dev/null +++ b/src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj @@ -0,0 +1,11 @@ + + + net10.0 + enable + enable + + + + + + diff --git a/src/StellaOps.Scanner.Emit/AGENTS.md b/src/StellaOps.Scanner.Emit/AGENTS.md new file mode 100644 index 00000000..09c928e2 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/AGENTS.md @@ -0,0 +1,20 @@ +# StellaOps.Scanner.Emit — Agent Charter + +## Mission +Assemble deterministic SBOM artifacts (inventory, usage, BOM index) from analyzer fragments and usage telemetry, and prepare them for storage, signing, and distribution. + +## Responsibilities +- Merge per-layer/component fragments into CycloneDX JSON/Protobuf SBOMs. +- Generate BOM index sidecars with roaring bitmap acceleration and usage flags. +- Package artifacts with stable naming, hashing, and manifests for downstream storage and attestations. +- Surface helper APIs for Scanner Worker/WebService to request compositions and exports. + +## Interfaces & Dependencies +- Consumes analyzer outputs (OS, language, native) and EntryTrace usage annotations. +- Produces artifacts persisted via `StellaOps.Scanner.Storage` and referenced by policy/report pipelines. +- Relies on deterministic primitives from `StellaOps.Scanner.Core` for timestamps, hashing, and serialization defaults. + +## Testing Expectations +- Golden SBOM and BOM index fixtures with determinism checks. +- Schema validation for CycloneDX outputs and BOM index binary layout. +- Integration tests exercising packaging helpers with in-memory storage fakes. 
diff --git a/src/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs b/src/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs new file mode 100644 index 00000000..5959d30a --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs @@ -0,0 +1,405 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using CycloneDX; +using CycloneDX.Models; +using JsonSerializer = CycloneDX.Json.Serializer; +using ProtoSerializer = CycloneDX.Protobuf.Serializer; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Utility; + +namespace StellaOps.Scanner.Emit.Composition; + +public sealed class CycloneDxComposer +{ + private static readonly Guid SerialNamespace = new("0d3a422b-6e1b-4d9b-9c35-654b706c97e8"); + + private const string InventoryMediaTypeJson = "application/vnd.cyclonedx+json; version=1.5"; + private const string UsageMediaTypeJson = "application/vnd.cyclonedx+json; version=1.5; view=usage"; + private const string InventoryMediaTypeProtobuf = "application/vnd.cyclonedx+protobuf; version=1.5"; + private const string UsageMediaTypeProtobuf = "application/vnd.cyclonedx+protobuf; version=1.5; view=usage"; + + public SbomCompositionResult Compose(SbomCompositionRequest request) + { + ArgumentNullException.ThrowIfNull(request); + if (request.LayerFragments.IsDefaultOrEmpty) + { + throw new ArgumentException("At least one layer fragment is required.", nameof(request)); + } + + var graph = ComponentGraphBuilder.Build(request.LayerFragments); + var generatedAt = ScannerTimestamps.Normalize(request.GeneratedAt); + + var inventoryArtifact = BuildArtifact( + request, + graph, + SbomView.Inventory, + graph.Components, + generatedAt, + InventoryMediaTypeJson, + InventoryMediaTypeProtobuf); + + var usageComponents = graph.Components + .Where(static component => component.Usage.UsedByEntrypoint) + .ToImmutableArray(); + + CycloneDxArtifact? usageArtifact = null; + if (!usageComponents.IsEmpty) + { + usageArtifact = BuildArtifact( + request, + graph, + SbomView.Usage, + usageComponents, + generatedAt, + UsageMediaTypeJson, + UsageMediaTypeProtobuf); + } + + return new SbomCompositionResult + { + Inventory = inventoryArtifact, + Usage = usageArtifact, + Graph = graph, + }; + } + + private CycloneDxArtifact BuildArtifact( + SbomCompositionRequest request, + ComponentGraph graph, + SbomView view, + ImmutableArray components, + DateTimeOffset generatedAt, + string jsonMediaType, + string protobufMediaType) + { + var bom = BuildBom(request, view, components, generatedAt); + var json = JsonSerializer.Serialize(bom); + var jsonBytes = Encoding.UTF8.GetBytes(json); + var protobufBytes = ProtoSerializer.Serialize(bom); + + var jsonHash = ComputeSha256(jsonBytes); + var protobufHash = ComputeSha256(protobufBytes); + + return new CycloneDxArtifact + { + View = view, + SerialNumber = bom.SerialNumber ?? 
string.Empty, + GeneratedAt = generatedAt, + Components = components, + JsonBytes = jsonBytes, + JsonSha256 = jsonHash, + JsonMediaType = jsonMediaType, + ProtobufBytes = protobufBytes, + ProtobufSha256 = protobufHash, + ProtobufMediaType = protobufMediaType, + }; + } + + private Bom BuildBom( + SbomCompositionRequest request, + SbomView view, + ImmutableArray components, + DateTimeOffset generatedAt) + { + var bom = new Bom + { + SpecVersion = SpecificationVersion.v1_4, + Version = 1, + Metadata = BuildMetadata(request, view, generatedAt), + Components = BuildComponents(components), + Dependencies = BuildDependencies(components), + }; + + var serialPayload = $"{request.Image.ImageDigest}|{view}|{ScannerTimestamps.ToIso8601(generatedAt)}"; + bom.SerialNumber = $"urn:uuid:{ScannerIdentifiers.CreateDeterministicGuid(SerialNamespace, Encoding.UTF8.GetBytes(serialPayload)).ToString("d", CultureInfo.InvariantCulture)}"; + + return bom; + } + + private static Metadata BuildMetadata(SbomCompositionRequest request, SbomView view, DateTimeOffset generatedAt) + { + var metadata = new Metadata + { + Timestamp = generatedAt.UtcDateTime, + Component = BuildMetadataComponent(request.Image), + }; + + if (!string.IsNullOrWhiteSpace(request.GeneratorName)) + { + metadata.Tools = new List + { + new() + { + Name = request.GeneratorName, + Version = request.GeneratorVersion, + } + }; + } + + if (request.AdditionalProperties is not null && request.AdditionalProperties.Count > 0) + { + metadata.Properties = request.AdditionalProperties + .Where(static pair => !string.IsNullOrWhiteSpace(pair.Key) && pair.Value is not null) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .Select(pair => new Property + { + Name = pair.Key, + Value = pair.Value, + }) + .ToList(); + } + + if (metadata.Properties is null) + { + metadata.Properties = new List(); + } + + metadata.Properties.Add(new Property + { + Name = "stellaops:sbom.view", + Value = view.ToString().ToLowerInvariant(), + }); + + return metadata; + } + + private static Component BuildMetadataComponent(ImageArtifactDescriptor image) + { + var digest = image.ImageDigest; + var digestValue = digest.Split(':', 2, StringSplitOptions.TrimEntries)[^1]; + var bomRef = $"image:{digestValue}"; + + var name = image.ImageReference ?? image.Repository ?? digest; + var component = new Component + { + BomRef = bomRef, + Type = Component.Classification.Container, + Name = name, + Version = digestValue, + Purl = BuildImagePurl(image), + Properties = new List + { + new() { Name = "stellaops:image.digest", Value = image.ImageDigest }, + }, + }; + + if (!string.IsNullOrWhiteSpace(image.ImageReference)) + { + component.Properties.Add(new Property { Name = "stellaops:image.reference", Value = image.ImageReference }); + } + + if (!string.IsNullOrWhiteSpace(image.Repository)) + { + component.Properties.Add(new Property { Name = "stellaops:image.repository", Value = image.Repository }); + } + + if (!string.IsNullOrWhiteSpace(image.Tag)) + { + component.Properties.Add(new Property { Name = "stellaops:image.tag", Value = image.Tag }); + } + + if (!string.IsNullOrWhiteSpace(image.Architecture)) + { + component.Properties.Add(new Property { Name = "stellaops:image.architecture", Value = image.Architecture }); + } + + return component; + } + + private static string? BuildImagePurl(ImageArtifactDescriptor image) + { + if (string.IsNullOrWhiteSpace(image.Repository)) + { + return null; + } + + var repo = image.Repository.Trim(); + var tag = string.IsNullOrWhiteSpace(image.Tag) ? 
null : image.Tag.Trim(); + var digest = image.ImageDigest.Trim(); + + var purlBuilder = new StringBuilder("pkg:oci/"); + purlBuilder.Append(repo.Replace("/", "%2F", StringComparison.Ordinal)); + if (!string.IsNullOrWhiteSpace(tag)) + { + purlBuilder.Append('@').Append(tag); + } + + purlBuilder.Append("?digest=").Append(Uri.EscapeDataString(digest)); + + if (!string.IsNullOrWhiteSpace(image.Architecture)) + { + purlBuilder.Append("&arch=").Append(Uri.EscapeDataString(image.Architecture.Trim())); + } + + return purlBuilder.ToString(); + } + + private static List BuildComponents(ImmutableArray components) + { + var result = new List(components.Length); + foreach (var component in components) + { + var model = new Component + { + BomRef = component.Identity.Key, + Name = component.Identity.Name, + Version = component.Identity.Version, + Purl = component.Identity.Purl, + Group = component.Identity.Group, + Type = MapClassification(component.Identity.ComponentType), + Scope = MapScope(component.Metadata?.Scope), + Properties = BuildProperties(component), + }; + + result.Add(model); + } + + return result; + } + + private static List? BuildProperties(AggregatedComponent component) + { + var properties = new List(); + + if (component.Metadata?.Properties is not null) + { + foreach (var property in component.Metadata.Properties.OrderBy(static pair => pair.Key, StringComparer.Ordinal)) + { + properties.Add(new Property + { + Name = property.Key, + Value = property.Value, + }); + } + } + + properties.Add(new Property { Name = "stellaops:firstLayerDigest", Value = component.FirstLayerDigest }); + if (component.LastLayerDigest is not null) + { + properties.Add(new Property { Name = "stellaops:lastLayerDigest", Value = component.LastLayerDigest }); + } + + if (!component.LayerDigests.IsDefaultOrEmpty) + { + properties.Add(new Property + { + Name = "stellaops:layerDigests", + Value = string.Join(",", component.LayerDigests), + }); + } + + if (component.Usage.UsedByEntrypoint) + { + properties.Add(new Property { Name = "stellaops:usage.usedByEntrypoint", Value = "true" }); + } + + if (!component.Usage.Entrypoints.IsDefaultOrEmpty && component.Usage.Entrypoints.Length > 0) + { + for (var index = 0; index < component.Usage.Entrypoints.Length; index++) + { + properties.Add(new Property + { + Name = $"stellaops:usage.entrypoint[{index}]", + Value = component.Usage.Entrypoints[index], + }); + } + } + + for (var index = 0; index < component.Evidence.Length; index++) + { + var evidence = component.Evidence[index]; + var builder = new StringBuilder(evidence.Kind); + builder.Append(':').Append(evidence.Value); + if (!string.IsNullOrWhiteSpace(evidence.Source)) + { + builder.Append('@').Append(evidence.Source); + } + + properties.Add(new Property + { + Name = $"stellaops:evidence[{index}]", + Value = builder.ToString(), + }); + } + + return properties; + } + + private static List? 
BuildDependencies(ImmutableArray components) + { + var componentKeys = components.Select(static component => component.Identity.Key).ToImmutableHashSet(StringComparer.Ordinal); + var dependencies = new List(); + + foreach (var component in components) + { + if (component.Dependencies.IsDefaultOrEmpty || component.Dependencies.Length == 0) + { + continue; + } + + var filtered = component.Dependencies.Where(componentKeys.Contains).ToArray(); + if (filtered.Length == 0) + { + continue; + } + + dependencies.Add(new Dependency + { + Ref = component.Identity.Key, + Dependencies = filtered + .Select(dependencyKey => new Dependency { Ref = dependencyKey }) + .ToList(), + }); + } + + return dependencies.Count == 0 ? null : dependencies; + } + + private static Component.Classification MapClassification(string? type) + { + if (string.IsNullOrWhiteSpace(type)) + { + return Component.Classification.Library; + } + + return type.Trim().ToLowerInvariant() switch + { + "application" => Component.Classification.Application, + "framework" => Component.Classification.Framework, + "container" => Component.Classification.Container, + "operating-system" or "os" => Component.Classification.OperationSystem, + "device" => Component.Classification.Device, + "firmware" => Component.Classification.Firmware, + "file" => Component.Classification.File, + _ => Component.Classification.Library, + }; + } + + private static Component.ComponentScope? MapScope(string? scope) + { + if (string.IsNullOrWhiteSpace(scope)) + { + return null; + } + + return scope.Trim().ToLowerInvariant() switch + { + "runtime" or "required" => Component.ComponentScope.Required, + "development" or "optional" => Component.ComponentScope.Optional, + "excluded" => Component.ComponentScope.Excluded, + _ => null, + }; + } + + private static string ComputeSha256(byte[] bytes) + { + using var sha256 = SHA256.Create(); + var hash = sha256.ComputeHash(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs b/src/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs new file mode 100644 index 00000000..63b58ef3 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Utility; + +namespace StellaOps.Scanner.Emit.Composition; + +public sealed record ImageArtifactDescriptor +{ + public string ImageDigest { get; init; } = string.Empty; + + public string? ImageReference { get; init; } + = null; + + public string? Repository { get; init; } + = null; + + public string? Tag { get; init; } + = null; + + public string? Architecture { get; init; } + = null; +} + +public sealed record SbomCompositionRequest +{ + public required ImageArtifactDescriptor Image { get; init; } + + public required ImmutableArray LayerFragments { get; init; } + + public DateTimeOffset GeneratedAt { get; init; } + = ScannerTimestamps.UtcNow(); + + public string? GeneratorName { get; init; } + = null; + + public string? GeneratorVersion { get; init; } + = null; + + public IReadOnlyDictionary? AdditionalProperties { get; init; } + = null; + + public static SbomCompositionRequest Create( + ImageArtifactDescriptor image, + IEnumerable fragments, + DateTimeOffset generatedAt, + string? generatorName = null, + string? generatorVersion = null, + IReadOnlyDictionary? 
properties = null) + { + ArgumentNullException.ThrowIfNull(image); + ArgumentNullException.ThrowIfNull(fragments); + + var normalizedImage = new ImageArtifactDescriptor + { + ImageDigest = ScannerIdentifiers.NormalizeDigest(image.ImageDigest) ?? throw new ArgumentException("Image digest is required.", nameof(image)), + ImageReference = Normalize(image.ImageReference), + Repository = Normalize(image.Repository), + Tag = Normalize(image.Tag), + Architecture = Normalize(image.Architecture), + }; + + return new SbomCompositionRequest + { + Image = normalizedImage, + LayerFragments = fragments.ToImmutableArray(), + GeneratedAt = ScannerTimestamps.Normalize(generatedAt), + GeneratorName = Normalize(generatorName), + GeneratorVersion = Normalize(generatorVersion), + AdditionalProperties = properties, + }; + } + + private static string? Normalize(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value.Trim(); + } +} diff --git a/src/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs b/src/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs new file mode 100644 index 00000000..c3984aa9 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Immutable; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Emit.Composition; + +public sealed record CycloneDxArtifact +{ + public required SbomView View { get; init; } + + public required string SerialNumber { get; init; } + + public required DateTimeOffset GeneratedAt { get; init; } + + public required ImmutableArray Components { get; init; } + + public required byte[] JsonBytes { get; init; } + + public required string JsonSha256 { get; init; } + + public required string JsonMediaType { get; init; } + + public required byte[] ProtobufBytes { get; init; } + + public required string ProtobufSha256 { get; init; } + + public required string ProtobufMediaType { get; init; } +} + +public sealed record SbomCompositionResult +{ + public required CycloneDxArtifact Inventory { get; init; } + + public CycloneDxArtifact? Usage { get; init; } + + public required ComponentGraph Graph { get; init; } +} diff --git a/src/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs b/src/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs new file mode 100644 index 00000000..b5e52d40 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Emit.Composition; + +public static class ScanAnalysisCompositionBuilder +{ + public static SbomCompositionRequest FromAnalysis( + ScanAnalysisStore analysis, + ImageArtifactDescriptor image, + DateTimeOffset generatedAt, + string? generatorName = null, + string? generatorVersion = null, + IReadOnlyDictionary? 
properties = null) + { + ArgumentNullException.ThrowIfNull(analysis); + ArgumentNullException.ThrowIfNull(image); + + var fragments = analysis.GetLayerFragments(); + if (fragments.IsDefaultOrEmpty) + { + throw new InvalidOperationException("No layer fragments recorded in analysis."); + } + + return SbomCompositionRequest.Create( + image, + fragments, + generatedAt, + generatorName, + generatorVersion, + properties); + } + + public static ComponentGraph BuildComponentGraph(ScanAnalysisStore analysis) + { + ArgumentNullException.ThrowIfNull(analysis); + + var fragments = analysis.GetLayerFragments(); + if (fragments.IsDefaultOrEmpty) + { + return new ComponentGraph + { + Layers = ImmutableArray.Empty, + Components = ImmutableArray.Empty, + ComponentMap = ImmutableDictionary.Empty, + }; + } + + return ComponentGraphBuilder.Build(fragments); + } +} diff --git a/src/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs b/src/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs new file mode 100644 index 00000000..6f66aa19 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs @@ -0,0 +1,239 @@ +using System; +using System.Buffers.Binary; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using Collections.Special; +using StellaOps.Scanner.Core.Contracts; + +namespace StellaOps.Scanner.Emit.Index; + +public sealed record BomIndexBuildRequest +{ + public required string ImageDigest { get; init; } + + public required ComponentGraph Graph { get; init; } + + public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow; +} + +public sealed record BomIndexArtifact +{ + public required byte[] Bytes { get; init; } + + public required string Sha256 { get; init; } + + public required int LayerCount { get; init; } + + public required int ComponentCount { get; init; } + + public required int EntrypointCount { get; init; } + + public string MediaType { get; init; } = "application/vnd.stellaops.bom-index.v1+binary"; +} + +public sealed class BomIndexBuilder +{ + private static readonly byte[] Magic = Encoding.ASCII.GetBytes("BOMIDX1"); + + public BomIndexArtifact Build(BomIndexBuildRequest request) + { + ArgumentNullException.ThrowIfNull(request); + if (string.IsNullOrWhiteSpace(request.ImageDigest)) + { + throw new ArgumentException("Image digest is required.", nameof(request)); + } + + var normalizedDigest = request.ImageDigest.Trim(); + var graph = request.Graph ?? 
throw new ArgumentNullException(nameof(request.Graph)); + var layers = graph.Layers.Select(layer => layer.LayerDigest).ToImmutableArray(); + var components = graph.Components; + + var layerIndex = new Dictionary(layers.Length, StringComparer.Ordinal); + for (var i = 0; i < layers.Length; i++) + { + layerIndex[layers[i]] = i; + } + + var entrypointSet = new SortedSet(StringComparer.Ordinal); + foreach (var component in components) + { + if (!component.Usage.Entrypoints.IsDefaultOrEmpty) + { + foreach (var entry in component.Usage.Entrypoints) + { + if (!string.IsNullOrWhiteSpace(entry)) + { + entrypointSet.Add(entry); + } + } + } + } + + var entrypoints = entrypointSet.ToImmutableArray(); + var entrypointIndex = new Dictionary(entrypoints.Length, StringComparer.Ordinal); + for (var i = 0; i < entrypoints.Length; i++) + { + entrypointIndex[entrypoints[i]] = i; + } + + using var buffer = new MemoryStream(); + using var writer = new BinaryWriter(buffer, Encoding.UTF8, leaveOpen: true); + + WriteHeader(writer, normalizedDigest, request.GeneratedAt, layers.Length, components.Length, entrypoints.Length); + WriteLayerTable(writer, layers); + WriteComponentTable(writer, components); + WriteComponentBitmaps(writer, components, layerIndex); + + if (entrypoints.Length > 0) + { + WriteEntrypointTable(writer, entrypoints); + WriteEntrypointBitmaps(writer, components, entrypointIndex); + } + + writer.Flush(); + var bytes = buffer.ToArray(); + var sha256 = ComputeSha256(bytes); + + return new BomIndexArtifact + { + Bytes = bytes, + Sha256 = sha256, + LayerCount = layers.Length, + ComponentCount = components.Length, + EntrypointCount = entrypoints.Length, + }; + } + + private static void WriteHeader(BinaryWriter writer, string imageDigest, DateTimeOffset generatedAt, int layerCount, int componentCount, int entrypointCount) + { + writer.Write(Magic); + writer.Write((ushort)1); // version + + var flags = (ushort)0; + if (entrypointCount > 0) + { + flags |= 0x1; + } + + writer.Write(flags); + + var digestBytes = Encoding.UTF8.GetBytes(imageDigest); + if (digestBytes.Length > ushort.MaxValue) + { + throw new InvalidOperationException("Image digest exceeds maximum length."); + } + + writer.Write((ushort)digestBytes.Length); + writer.Write(digestBytes); + + var unixMicroseconds = ToUnixMicroseconds(generatedAt); + writer.Write(unixMicroseconds); + + writer.Write((uint)layerCount); + writer.Write((uint)componentCount); + writer.Write((uint)entrypointCount); + } + + private static void WriteLayerTable(BinaryWriter writer, ImmutableArray layers) + { + foreach (var layer in layers) + { + WriteUtf8String(writer, layer); + } + } + + private static void WriteComponentTable(BinaryWriter writer, ImmutableArray components) + { + foreach (var component in components) + { + var key = component.Identity.Purl ?? component.Identity.Key; + WriteUtf8String(writer, key); + } + } + + private static void WriteComponentBitmaps(BinaryWriter writer, ImmutableArray components, IReadOnlyDictionary layerIndex) + { + foreach (var component in components) + { + var indices = component.LayerDigests + .Select(digest => layerIndex.TryGetValue(digest, out var index) ? 
index : -1) + .Where(index => index >= 0) + .Distinct() + .OrderBy(index => index) + .ToArray(); + + var bitmap = RoaringBitmap.Create(indices).Optimize(); + WriteBitmap(writer, bitmap); + } + } + + private static void WriteEntrypointTable(BinaryWriter writer, ImmutableArray entrypoints) + { + foreach (var entry in entrypoints) + { + WriteUtf8String(writer, entry); + } + } + + private static void WriteEntrypointBitmaps(BinaryWriter writer, ImmutableArray components, IReadOnlyDictionary entrypointIndex) + { + foreach (var component in components) + { + var indices = component.Usage.Entrypoints + .Where(entrypointIndex.ContainsKey) + .Select(entry => entrypointIndex[entry]) + .Distinct() + .OrderBy(index => index) + .ToArray(); + + if (indices.Length == 0) + { + writer.Write((uint)0); + continue; + } + + var bitmap = RoaringBitmap.Create(indices).Optimize(); + WriteBitmap(writer, bitmap); + } + } + + private static void WriteBitmap(BinaryWriter writer, RoaringBitmap bitmap) + { + using var ms = new MemoryStream(); + RoaringBitmap.Serialize(bitmap, ms); + var data = ms.ToArray(); + writer.Write((uint)data.Length); + writer.Write(data); + } + + private static void WriteUtf8String(BinaryWriter writer, string value) + { + var bytes = Encoding.UTF8.GetBytes(value ?? string.Empty); + if (bytes.Length > ushort.MaxValue) + { + throw new InvalidOperationException("String value exceeds maximum length supported by BOM index."); + } + + writer.Write((ushort)bytes.Length); + writer.Write(bytes); + } + + private static long ToUnixMicroseconds(DateTimeOffset timestamp) + { + var normalized = timestamp.ToUniversalTime(); + var microseconds = normalized.ToUnixTimeMilliseconds() * 1000L; + microseconds += normalized.Ticks % TimeSpan.TicksPerMillisecond / 10; + return microseconds; + } + + private static string ComputeSha256(byte[] data) + { + using var sha256 = SHA256.Create(); + var hash = sha256.ComputeHash(data); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs b/src/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs new file mode 100644 index 00000000..792ab296 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs @@ -0,0 +1,154 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Serialization; +using StellaOps.Scanner.Emit.Composition; +using StellaOps.Scanner.Emit.Index; +using StellaOps.Scanner.Storage.Catalog; + +namespace StellaOps.Scanner.Emit.Packaging; + +public sealed record ScannerArtifactDescriptor +{ + public required ArtifactDocumentType Type { get; init; } + + public required ArtifactDocumentFormat Format { get; init; } + + public required string MediaType { get; init; } + + public required ReadOnlyMemory Content { get; init; } + + public required string Sha256 { get; init; } + + public SbomView? View { get; init; } + + public long Size => Content.Length; +} + +public sealed record ScannerArtifactManifestEntry +{ + public required string Kind { get; init; } + + public required ArtifactDocumentType Type { get; init; } + + public required ArtifactDocumentFormat Format { get; init; } + + public required string MediaType { get; init; } + + public required string Sha256 { get; init; } + + public required long Size { get; init; } + + public SbomView? 
View { get; init; } +} + +public sealed record ScannerArtifactManifest +{ + public required string ImageDigest { get; init; } + + public required DateTimeOffset GeneratedAt { get; init; } + + public required ImmutableArray Artifacts { get; init; } + + public byte[] ToJsonBytes() + => JsonSerializer.SerializeToUtf8Bytes(this, ScannerJsonOptions.Default); +} + +public sealed record ScannerArtifactPackage +{ + public required ImmutableArray Artifacts { get; init; } + + public required ScannerArtifactManifest Manifest { get; init; } +} + +public sealed class ScannerArtifactPackageBuilder +{ + public ScannerArtifactPackage Build( + string imageDigest, + DateTimeOffset generatedAt, + SbomCompositionResult composition, + BomIndexArtifact bomIndex) + { + if (string.IsNullOrWhiteSpace(imageDigest)) + { + throw new ArgumentException("Image digest is required.", nameof(imageDigest)); + } + + var descriptors = new List(); + + descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxJson, composition.Inventory.JsonMediaType, composition.Inventory.JsonBytes, composition.Inventory.JsonSha256, SbomView.Inventory)); + descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxProtobuf, composition.Inventory.ProtobufMediaType, composition.Inventory.ProtobufBytes, composition.Inventory.ProtobufSha256, SbomView.Inventory)); + + if (composition.Usage is not null) + { + descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxJson, composition.Usage.JsonMediaType, composition.Usage.JsonBytes, composition.Usage.JsonSha256, SbomView.Usage)); + descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxProtobuf, composition.Usage.ProtobufMediaType, composition.Usage.ProtobufBytes, composition.Usage.ProtobufSha256, SbomView.Usage)); + } + + descriptors.Add(CreateDescriptor(ArtifactDocumentType.Index, ArtifactDocumentFormat.BomIndex, "application/vnd.stellaops.bom-index.v1+binary", bomIndex.Bytes, bomIndex.Sha256, null)); + + var manifest = new ScannerArtifactManifest + { + ImageDigest = imageDigest.Trim(), + GeneratedAt = generatedAt, + Artifacts = descriptors + .Select(ToManifestEntry) + .OrderBy(entry => entry.Kind, StringComparer.Ordinal) + .ThenBy(entry => entry.Format) + .ToImmutableArray(), + }; + + return new ScannerArtifactPackage + { + Artifacts = descriptors.ToImmutableArray(), + Manifest = manifest, + }; + } + + private static ScannerArtifactDescriptor CreateDescriptor( + ArtifactDocumentType type, + ArtifactDocumentFormat format, + string mediaType, + ReadOnlyMemory content, + string sha256, + SbomView? 
view) + { + return new ScannerArtifactDescriptor + { + Type = type, + Format = format, + MediaType = mediaType, + Content = content, + Sha256 = sha256, + View = view, + }; + } + + private static ScannerArtifactManifestEntry ToManifestEntry(ScannerArtifactDescriptor descriptor) + { + var kind = descriptor.Type switch + { + ArtifactDocumentType.Index => "bom-index", + ArtifactDocumentType.ImageBom when descriptor.View == SbomView.Usage => "sbom-usage", + ArtifactDocumentType.ImageBom => "sbom-inventory", + ArtifactDocumentType.LayerBom => "layer-sbom", + ArtifactDocumentType.Diff => "diff", + ArtifactDocumentType.Attestation => "attestation", + _ => descriptor.Type.ToString().ToLowerInvariant(), + }; + + return new ScannerArtifactManifestEntry + { + Kind = kind, + Type = descriptor.Type, + Format = descriptor.Format, + MediaType = descriptor.MediaType, + Sha256 = descriptor.Sha256, + Size = descriptor.Size, + View = descriptor.View, + }; + } +} diff --git a/src/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj b/src/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj new file mode 100644 index 00000000..07b0a674 --- /dev/null +++ b/src/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj @@ -0,0 +1,18 @@ + + + net10.0 + enable + enable + true + + + + + + + + + + + + diff --git a/src/StellaOps.Scanner.Emit/TASKS.md b/src/StellaOps.Scanner.Emit/TASKS.md index c79fefef..abc4a5d5 100644 --- a/src/StellaOps.Scanner.Emit/TASKS.md +++ b/src/StellaOps.Scanner.Emit/TASKS.md @@ -2,11 +2,11 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCANNER-EMIT-10-601 | TODO | Emit Guild | SCANNER-CACHE-10-101 | Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments with deterministic ordering. | Inventory SBOM validated against schema; fixtures confirm deterministic output. | -| SCANNER-EMIT-10-602 | TODO | Emit Guild | SCANNER-EMIT-10-601 | Compose usage SBOM leveraging EntryTrace to flag actual usage; ensure separate view toggles. | Usage SBOM tests confirm correct subset; API contract documented. | -| SCANNER-EMIT-10-603 | TODO | Emit Guild | SCANNER-EMIT-10-601 | Generate BOM index sidecar (purl table + roaring bitmap + usedByEntrypoint flag). | Index format validated; query helpers proven; stored artifacts hashed deterministically. | -| SCANNER-EMIT-10-604 | TODO | Emit Guild | SCANNER-EMIT-10-602 | Package artifacts for export + attestation (naming, compression, manifests). | Export pipeline produces deterministic file paths/hashes; integration test with storage passes. | -| SCANNER-EMIT-10-605 | TODO | Emit Guild | SCANNER-EMIT-10-603 | Emit BOM-Index sidecar schema/fixtures (`bom-index@1`) and note CRITICAL PATH for Scheduler. | Schema + fixtures in docs/artifacts/bom-index; tests `BOMIndexGoldenIsStable` green. | -| SCANNER-EMIT-10-606 | TODO | Emit Guild | SCANNER-EMIT-10-605 | Integrate EntryTrace usage flags into BOM-Index; document semantics. | Usage bits present in sidecar; integration tests with EntryTrace fixtures pass. | +| SCANNER-EMIT-10-601 | DOING (2025-10-19) | Emit Guild | SCANNER-CACHE-10-101 | Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments with deterministic ordering. | Inventory SBOM validated against schema; fixtures confirm deterministic output. | +| SCANNER-EMIT-10-602 | DOING (2025-10-19) | Emit Guild | SCANNER-EMIT-10-601 | Compose usage SBOM leveraging EntryTrace to flag actual usage; ensure separate view toggles. 
| Usage SBOM tests confirm correct subset; API contract documented. | +| SCANNER-EMIT-10-603 | DOING (2025-10-19) | Emit Guild | SCANNER-EMIT-10-601 | Generate BOM index sidecar (purl table + roaring bitmap + usedByEntrypoint flag). | Index format validated; query helpers proven; stored artifacts hashed deterministically. | +| SCANNER-EMIT-10-604 | DOING (2025-10-19) | Emit Guild | SCANNER-EMIT-10-602 | Package artifacts for export + attestation (naming, compression, manifests). | Export pipeline produces deterministic file paths/hashes; integration test with storage passes. | +| SCANNER-EMIT-10-605 | DOING (2025-10-19) | Emit Guild | SCANNER-EMIT-10-603 | Emit BOM-Index sidecar schema/fixtures (`bom-index@1`) and note CRITICAL PATH for Scheduler. | Schema + fixtures in docs/artifacts/bom-index; tests `BOMIndexGoldenIsStable` green. | +| SCANNER-EMIT-10-606 | DOING (2025-10-19) | Emit Guild | SCANNER-EMIT-10-605 | Integrate EntryTrace usage flags into BOM-Index; document semantics. | Usage bits present in sidecar; integration tests with EntryTrace fixtures pass. | | SCANNER-EMIT-17-701 | TODO | Emit Guild, Native Analyzer Guild | SCANNER-EMIT-10-602 | Record GNU build-id for ELF components and surface it in inventory/usage SBOM plus diff payloads with deterministic ordering. | Native analyzer emits buildId for every ELF executable/library, SBOM/diff fixtures updated with canonical `buildId` field, regression tests prove stability, docs call out debug-symbol lookup flow. | | SCANNER-EMIT-10-607 | TODO | Emit Guild | SCANNER-EMIT-10-604, POLICY-CORE-09-005 | Embed scoring inputs, confidence band, and `quietedBy` provenance into CycloneDX 1.6 and DSSE predicates; verify deterministic serialization. | SBOM/attestation fixtures include score, inputs, configVersion, quiet metadata; golden tests confirm canonical output. | diff --git a/src/StellaOps.Scanner.EntryTrace/TASKS.md b/src/StellaOps.Scanner.EntryTrace/TASKS.md index 3a8b94af..fe41c516 100644 --- a/src/StellaOps.Scanner.EntryTrace/TASKS.md +++ b/src/StellaOps.Scanner.EntryTrace/TASKS.md @@ -9,3 +9,8 @@ | SCANNER-ENTRYTRACE-10-405 | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-403 | Implement Node/Java launcher analyzer capturing script/jar targets including npm lifecycle wrappers. | Node/Java fixtures resolved with evidence chain; `RunParts` coverage ensures child scripts traced. | | SCANNER-ENTRYTRACE-10-406 | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-402 | Surface explainability + diagnostics for unresolved constructs and emit metrics counters. | Diagnostics catalog enumerates unknown reasons; metrics wired via `EntryTraceMetrics`; explainability doc updated. | | SCANNER-ENTRYTRACE-10-407 | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-401..406 | Package EntryTrace analyzers as restart-time plug-ins with manifest + host registration. | Plug-in manifest under `plugins/scanner/entrytrace/`; restart-only policy documented; DI extension exposes `AddEntryTraceAnalyzer`. | + +## Status Review — 2025-10-19 + +- Confirmed Wave 0 instructions for EntryTrace Guild; SCANNER-ENTRYTRACE-10-401..407 already marked complete. +- No outstanding prerequisites identified during review; readiness noted for any follow-on work. 
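
A minimal reader sketch for the `bom-index@1` layout emitted by `BomIndexBuilder` above; handy when validating golden fixtures by hand. The `BomIndexReader`/`ReadStrings` names are hypothetical, and the serialized roaring bitmaps are kept as raw byte arrays so nothing is assumed about the bitmap library's deserialization API beyond what the writer shows.

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

public static class BomIndexReader
{
    public static void Dump(Stream stream)
    {
        using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);

        // Header: magic, version, flags, image digest, timestamp, counts.
        if (Encoding.ASCII.GetString(reader.ReadBytes(7)) != "BOMIDX1")
        {
            throw new InvalidDataException("Not a bom-index@1 artifact.");
        }

        var version = reader.ReadUInt16();
        var flags = reader.ReadUInt16();              // bit 0x1 => entrypoint section present
        var digestLength = reader.ReadUInt16();
        var imageDigest = Encoding.UTF8.GetString(reader.ReadBytes(digestLength));
        var generatedAtMicros = reader.ReadInt64();   // Unix epoch microseconds
        var layerCount = reader.ReadUInt32();
        var componentCount = reader.ReadUInt32();
        var entrypointCount = reader.ReadUInt32();

        // Layer and component tables: length-prefixed UTF-8 strings (purl or key for components).
        var layers = ReadStrings(reader, layerCount);
        var components = ReadStrings(reader, componentCount);

        // One serialized roaring bitmap per component, prefixed with its byte length;
        // feed these bytes to the roaring bitmap library to recover layer indexes.
        var layerBitmaps = new Dictionary<string, byte[]>((int)componentCount, StringComparer.Ordinal);
        for (var i = 0; i < componentCount; i++)
        {
            var length = reader.ReadUInt32();
            layerBitmaps[components[i]] = reader.ReadBytes((int)length);
        }

        if ((flags & 0x1) != 0 && entrypointCount > 0)
        {
            var entrypoints = ReadStrings(reader, entrypointCount);

            // Entrypoint bitmaps follow the same per-component order; a zero length
            // means the component is not used by any entrypoint.
            var entrypointBitmaps = new Dictionary<string, byte[]>((int)componentCount, StringComparer.Ordinal);
            for (var i = 0; i < componentCount; i++)
            {
                var length = reader.ReadUInt32();
                entrypointBitmaps[components[i]] = length > 0 ? reader.ReadBytes((int)length) : Array.Empty<byte>();
            }

            Console.WriteLine($"{entrypoints.Length} entrypoints indexed.");
        }

        Console.WriteLine(
            $"{imageDigest} (v{version}, {DateTimeOffset.FromUnixTimeMilliseconds(generatedAtMicros / 1000):O}): " +
            $"{components.Length} components across {layers.Length} layers.");
    }

    private static string[] ReadStrings(BinaryReader reader, uint count)
    {
        var values = new string[count];
        for (var i = 0; i < count; i++)
        {
            var length = reader.ReadUInt16();
            values[i] = Encoding.UTF8.GetString(reader.ReadBytes(length));
        }

        return values;
    }
}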
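
A companion sketch for cross-checking a ScannerArtifactPackage: it recomputes each descriptor's SHA-256 (lowercase hex, matching ComputeSha256 above) and its size against the manifest entries. VerifyPackage is a hypothetical helper, not part of the Emit API.

using System;
using System.Linq;
using System.Security.Cryptography;

public static class ScannerArtifactPackageChecks
{
    // True when every manifest entry is backed by a descriptor with the same
    // type/format/view whose content still hashes to the recorded digest and size.
    public static bool VerifyPackage(ScannerArtifactPackage package)
    {
        foreach (var entry in package.Manifest.Artifacts)
        {
            var descriptor = package.Artifacts.FirstOrDefault(d =>
                d.Type == entry.Type && d.Format == entry.Format && d.View == entry.View);
            if (descriptor is null)
            {
                return false;
            }

            var digest = Convert.ToHexString(SHA256.HashData(descriptor.Content.Span)).ToLowerInvariant();
            if (!string.Equals(digest, entry.Sha256, StringComparison.Ordinal) || descriptor.Size != entry.Size)
            {
                return false;
            }
        }

        return true;
    }
}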
diff --git a/src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs b/src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs index e04295e5..b5ff1361 100644 --- a/src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs +++ b/src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs @@ -41,6 +41,25 @@ public sealed class QueueLeaseIntegrationTests second.Deduplicated.Should().BeTrue(); } + [Fact] + public async Task Lease_ShouldExposeTraceId_FromQueuedMessage() + { + var clock = new FakeTimeProvider(); + var queue = new InMemoryScanQueue(_options, clock); + + var payload = new byte[] { 9 }; + var message = new ScanQueueMessage("job-trace", payload) + { + TraceId = "trace-123" + }; + + await queue.EnqueueAsync(message); + + var lease = await LeaseSingleAsync(queue, consumer: "worker-trace"); + lease.Should().NotBeNull(); + lease!.TraceId.Should().Be("trace-123"); + } + [Fact] public async Task Lease_Acknowledge_ShouldRemoveFromQueue() { @@ -136,7 +155,11 @@ public sealed class QueueLeaseIntegrationTests payload: message.Payload.ToArray(), idempotencyKey: token, attempt: 1, - enqueuedAt: _timeProvider.GetUtcNow()); + enqueuedAt: _timeProvider.GetUtcNow(), + traceId: message.TraceId, + attributes: message.Attributes is null + ? new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)) + : new ReadOnlyDictionary(new Dictionary(message.Attributes, StringComparer.Ordinal))); _idempotency[token] = entry; _ready.Enqueue(entry); @@ -247,6 +270,7 @@ public sealed class QueueLeaseIntegrationTests EnqueuedAt = entry.EnqueuedAt; LeaseExpiresAt = now.Add(leaseDuration); IdempotencyKey = entry.IdempotencyKey; + TraceId = entry.TraceId; Attributes = entry.Attributes; } @@ -266,6 +290,8 @@ public sealed class QueueLeaseIntegrationTests public string? IdempotencyKey { get; } + public string? TraceId { get; } + public IReadOnlyDictionary Attributes { get; } public Task AcknowledgeAsync(CancellationToken cancellationToken = default) @@ -315,7 +341,15 @@ public sealed class QueueLeaseIntegrationTests internal sealed class QueueEntry { - public QueueEntry(string sequenceId, string jobId, byte[] payload, string idempotencyKey, int attempt, DateTimeOffset enqueuedAt) + public QueueEntry( + string sequenceId, + string jobId, + byte[] payload, + string idempotencyKey, + int attempt, + DateTimeOffset enqueuedAt, + string? traceId, + IReadOnlyDictionary attributes) { SequenceId = sequenceId; JobId = jobId; @@ -324,7 +358,8 @@ public sealed class QueueLeaseIntegrationTests Attempt = attempt; EnqueuedAt = enqueuedAt; LastLeaseAt = enqueuedAt; - Attributes = new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal)); + TraceId = traceId; + Attributes = attributes; } public string SequenceId { get; } @@ -345,6 +380,8 @@ public sealed class QueueLeaseIntegrationTests public DateTimeOffset LastLeaseAt { get; set; } + public string? TraceId { get; } + public IReadOnlyDictionary Attributes { get; } public string? DeadLetterReason { get; set; } diff --git a/src/StellaOps.Scanner.Queue/IScanQueueLease.cs b/src/StellaOps.Scanner.Queue/IScanQueueLease.cs index ecf2be29..b805d4ef 100644 --- a/src/StellaOps.Scanner.Queue/IScanQueueLease.cs +++ b/src/StellaOps.Scanner.Queue/IScanQueueLease.cs @@ -23,6 +23,8 @@ public interface IScanQueueLease string? IdempotencyKey { get; } + string? 
TraceId { get; } + IReadOnlyDictionary Attributes { get; } Task AcknowledgeAsync(CancellationToken cancellationToken = default); diff --git a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs b/src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs index b236e03f..5f4894ea 100644 --- a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs +++ b/src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs @@ -501,6 +501,10 @@ internal sealed class NatsScanQueue : IScanQueue, IAsyncDisposable ? idemValues[0] : null; + var traceId = headers.TryGetValue(QueueEnvelopeFields.TraceId, out var traceValues) && traceValues.Count > 0 + ? string.IsNullOrWhiteSpace(traceValues[0]) ? null : traceValues[0] + : null; + var enqueuedAt = headers.TryGetValue(QueueEnvelopeFields.EnqueuedAt, out var enqueuedValues) && enqueuedValues.Count > 0 && long.TryParse(enqueuedValues[0], out var unix) ? DateTimeOffset.FromUnixTimeMilliseconds(unix) @@ -535,6 +539,7 @@ internal sealed class NatsScanQueue : IScanQueue, IAsyncDisposable leaseExpires, consumer, idempotencyKey, + traceId, attributes); } @@ -597,6 +602,11 @@ internal sealed class NatsScanQueue : IScanQueue, IAsyncDisposable { "deadletter-reason", reason } }; + if (!string.IsNullOrWhiteSpace(lease.TraceId)) + { + headers.Add(QueueEnvelopeFields.TraceId, lease.TraceId!); + } + foreach (var kvp in lease.Attributes) { headers.Add(QueueEnvelopeFields.AttributePrefix + kvp.Key, kvp.Value); diff --git a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs b/src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs index 4aaed705..8f7a50ac 100644 --- a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs +++ b/src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs @@ -23,6 +23,7 @@ internal sealed class NatsScanQueueLease : IScanQueueLease DateTimeOffset leaseExpiresAt, string consumer, string? idempotencyKey, + string? traceId, IReadOnlyDictionary attributes) { _queue = queue; @@ -35,6 +36,7 @@ internal sealed class NatsScanQueueLease : IScanQueueLease LeaseExpiresAt = leaseExpiresAt; Consumer = consumer; IdempotencyKey = idempotencyKey; + TraceId = traceId; Attributes = attributes; } @@ -54,6 +56,8 @@ internal sealed class NatsScanQueueLease : IScanQueueLease public string? IdempotencyKey { get; } + public string? TraceId { get; } + public IReadOnlyDictionary Attributes { get; } internal NatsJSMsg Message => _message; diff --git a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs b/src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs index c5911af6..c43e1556 100644 --- a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs +++ b/src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs @@ -405,7 +405,7 @@ internal sealed class RedisScanQueue : IScanQueue, IAsyncDisposable { IdempotencyKey = lease.IdempotencyKey, Attributes = lease.Attributes, - TraceId = null + TraceId = lease.TraceId }; var now = _timeProvider.GetUtcNow(); @@ -463,7 +463,7 @@ internal sealed class RedisScanQueue : IScanQueue, IAsyncDisposable { IdempotencyKey = lease.IdempotencyKey, Attributes = lease.Attributes, - TraceId = null + TraceId = lease.TraceId }, now, lease.Attempt); @@ -644,7 +644,8 @@ internal sealed class RedisScanQueue : IScanQueue, IAsyncDisposable } else if (name.Equals(QueueEnvelopeFields.TraceId, StringComparison.Ordinal)) { - traceId = field.Value.ToString(); + var value = field.Value.ToString(); + traceId = string.IsNullOrWhiteSpace(value) ? 
null : value; } else if (name.StartsWith(QueueEnvelopeFields.AttributePrefix, StringComparison.Ordinal)) { @@ -674,6 +675,7 @@ internal sealed class RedisScanQueue : IScanQueue, IAsyncDisposable leaseExpires, consumer, idempotency, + traceId, attributeView); } diff --git a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs b/src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs index cab293b0..b20ac8a1 100644 --- a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs +++ b/src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs @@ -20,6 +20,7 @@ internal sealed class RedisScanQueueLease : IScanQueueLease DateTimeOffset leaseExpiresAt, string consumer, string? idempotencyKey, + string? traceId, IReadOnlyDictionary attributes) { _queue = queue; @@ -31,6 +32,7 @@ internal sealed class RedisScanQueueLease : IScanQueueLease LeaseExpiresAt = leaseExpiresAt; Consumer = consumer; IdempotencyKey = idempotencyKey; + TraceId = traceId; Attributes = attributes; } @@ -50,6 +52,8 @@ internal sealed class RedisScanQueueLease : IScanQueueLease public string? IdempotencyKey { get; } + public string? TraceId { get; } + public IReadOnlyDictionary Attributes { get; } public Task AcknowledgeAsync(CancellationToken cancellationToken = default) diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs b/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs index 785c8a05..66ad2e2f 100644 --- a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs +++ b/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs @@ -61,6 +61,76 @@ public sealed class DescriptorGeneratorTests var expectedDsse = ComputeExpectedDsse(request.ImageDigest, expectedSbomDigest, document.Provenance.Nonce); Assert.Equal(expectedDsse, document.Provenance.ExpectedDsseSha256); Assert.Equal(expectedDsse, document.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"]); + Assert.Equal(document.Provenance.Nonce, document.Artifact.Annotations["org.stellaops.provenance.nonce"]); + } + + [Fact] + public async Task CreateAsync_RepeatedInvocationsReuseDeterministicNonce() + { + await using var temp = new TempDirectory(); + var sbomPath = Path.Combine(temp.Path, "sample.cdx.json"); + await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}"); + + var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); + var generator = new DescriptorGenerator(fakeTime); + + var request = new DescriptorRequest + { + ImageDigest = "sha256:0123456789abcdef", + SbomPath = sbomPath, + SbomMediaType = "application/vnd.cyclonedx+json", + SbomFormat = "cyclonedx-json", + SbomKind = "inventory", + SbomArtifactType = "application/vnd.stellaops.sbom.layer+json", + SubjectMediaType = "application/vnd.oci.image.manifest.v1+json", + GeneratorVersion = "1.2.3", + GeneratorName = "StellaOps.Scanner.Sbomer.BuildXPlugin", + LicenseId = "lic-123", + SbomName = "sample.cdx.json", + Repository = "git.stella-ops.org/stellaops", + BuildRef = "refs/heads/main", + AttestorUri = "https://attestor.local/api/v1/provenance" + }.Validate(); + + var first = await generator.CreateAsync(request, CancellationToken.None); + var second = await generator.CreateAsync(request, CancellationToken.None); + + Assert.Equal(first.Provenance.Nonce, second.Provenance.Nonce); + Assert.Equal(first.Provenance.ExpectedDsseSha256, second.Provenance.ExpectedDsseSha256); + 
Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.nonce"], second.Artifact.Annotations["org.stellaops.provenance.nonce"]); + Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"], second.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"]); + } + + [Fact] + public async Task CreateAsync_MetadataDifferencesYieldDistinctNonce() + { + await using var temp = new TempDirectory(); + var sbomPath = Path.Combine(temp.Path, "sample.cdx.json"); + await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}"); + + var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); + var generator = new DescriptorGenerator(fakeTime); + + var baseline = new DescriptorRequest + { + ImageDigest = "sha256:0123456789abcdef", + SbomPath = sbomPath, + Repository = "git.stella-ops.org/stellaops", + BuildRef = "refs/heads/main" + }.Validate(); + + var variant = baseline with + { + BuildRef = "refs/heads/feature", + Repository = "git.stella-ops.org/stellaops/feature" + }; + variant = variant.Validate(); + + var baselineDocument = await generator.CreateAsync(baseline, CancellationToken.None); + var variantDocument = await generator.CreateAsync(variant, CancellationToken.None); + + Assert.NotEqual(baselineDocument.Provenance.Nonce, variantDocument.Provenance.Nonce); + Assert.NotEqual(baselineDocument.Provenance.ExpectedDsseSha256, variantDocument.Provenance.ExpectedDsseSha256); } private static string ComputeSha256File(string path) diff --git a/src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs b/src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs index 55cdf50d..3c1acd6d 100644 --- a/src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs +++ b/src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs @@ -11,6 +11,7 @@ using StellaOps.Scanner.Storage.ObjectStore; using StellaOps.Scanner.Storage.Repositories; using StellaOps.Scanner.Storage.Services; using Xunit; +using Microsoft.Extensions.Time.Testing; namespace StellaOps.Scanner.Storage.Tests; @@ -34,21 +35,24 @@ public sealed class StorageDualWriteFixture { var options = BuildOptions(dualWrite: true, mirrorBucket: "mirror-bucket"); var objectStore = new InMemoryArtifactObjectStore(); + var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); await InitializeMongoAsync(options); var provider = new MongoCollectionProvider(_fixture.Database, Options.Create(options)); - var artifactRepository = new ArtifactRepository(provider); - var lifecycleRepository = new LifecycleRuleRepository(provider); + var artifactRepository = new ArtifactRepository(provider, fakeTime); + var lifecycleRepository = new LifecycleRuleRepository(provider, fakeTime); var service = new ArtifactStorageService( artifactRepository, lifecycleRepository, objectStore, Options.Create(options), - NullLogger.Instance); + NullLogger.Instance, + fakeTime); var bytes = System.Text.Encoding.UTF8.GetBytes("test artifact payload"); using var stream = new MemoryStream(bytes); var expiresAt = DateTime.UtcNow.AddHours(6); + var expectedTimestamp = fakeTime.GetUtcNow().UtcDateTime; var document = await service.StoreArtifactAsync( ArtifactDocumentType.LayerBom, @@ -71,6 +75,8 @@ public sealed class StorageDualWriteFixture Assert.Equal(1, artifact.RefCount); Assert.Equal("compliance", artifact.TtlClass); Assert.True(artifact.Immutable); + Assert.Equal(expectedTimestamp, artifact.CreatedAtUtc); + Assert.Equal(expectedTimestamp, 
artifact.UpdatedAtUtc); var lifecycleCollection = _fixture.Database.GetCollection(ScannerStorageDefaults.Collections.LifecycleRules); var lifecycle = await lifecycleCollection.Find(x => x.ArtifactId == document.Id).FirstOrDefaultAsync(); @@ -78,6 +84,7 @@ public sealed class StorageDualWriteFixture Assert.Equal("compliance", lifecycle!.Class); Assert.True(lifecycle.ExpiresAtUtc.HasValue); Assert.True(lifecycle.ExpiresAtUtc.Value <= expiresAt.AddSeconds(5)); + Assert.Equal(expectedTimestamp, lifecycle.CreatedAtUtc); } [Fact] diff --git a/src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs b/src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs index a2f80846..aec0fc54 100644 --- a/src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs @@ -39,6 +39,7 @@ public static class ServiceCollectionExtensions private static void RegisterScannerStorageServices(IServiceCollection services) { + services.TryAddSingleton(TimeProvider.System); services.TryAddSingleton(CreateMongoClient); services.TryAddSingleton(CreateMongoDatabase); services.TryAddSingleton(); diff --git a/src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs b/src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs index bd25fbac..70e60f6c 100644 --- a/src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs +++ b/src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs @@ -7,10 +7,12 @@ namespace StellaOps.Scanner.Storage.Repositories; public sealed class ArtifactRepository { private readonly MongoCollectionProvider _collections; + private readonly TimeProvider _timeProvider; - public ArtifactRepository(MongoCollectionProvider collections) + public ArtifactRepository(MongoCollectionProvider collections, TimeProvider? timeProvider = null) { _collections = collections ?? throw new ArgumentNullException(nameof(collections)); + _timeProvider = timeProvider ?? TimeProvider.System; } public async Task GetAsync(string artifactId, CancellationToken cancellationToken) @@ -25,7 +27,9 @@ public sealed class ArtifactRepository public async Task UpsertAsync(ArtifactDocument document, CancellationToken cancellationToken) { ArgumentNullException.ThrowIfNull(document); - document.UpdatedAtUtc = DateTime.UtcNow; + var now = _timeProvider.GetUtcNow().UtcDateTime; + document.CreatedAtUtc = document.CreatedAtUtc == default ? 
now : document.CreatedAtUtc; + document.UpdatedAtUtc = now; var options = new ReplaceOptions { IsUpsert = true }; await _collections.Artifacts .ReplaceOneAsync(x => x.Id == document.Id, document, options, cancellationToken) @@ -37,9 +41,10 @@ public sealed class ArtifactRepository ArgumentException.ThrowIfNullOrWhiteSpace(artifactId); ArgumentNullException.ThrowIfNull(reference); + var now = _timeProvider.GetUtcNow().UtcDateTime; var update = Builders.Update .Set(x => x.Rekor, reference) - .Set(x => x.UpdatedAtUtc, DateTime.UtcNow); + .Set(x => x.UpdatedAtUtc, now); await _collections.Artifacts.UpdateOneAsync(x => x.Id == artifactId, update, cancellationToken: cancellationToken).ConfigureAwait(false); } @@ -48,9 +53,10 @@ public sealed class ArtifactRepository { ArgumentException.ThrowIfNullOrWhiteSpace(artifactId); + var now = _timeProvider.GetUtcNow().UtcDateTime; var update = Builders.Update .Inc(x => x.RefCount, delta) - .Set(x => x.UpdatedAtUtc, DateTime.UtcNow); + .Set(x => x.UpdatedAtUtc, now); var options = new FindOneAndUpdateOptions { diff --git a/src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs b/src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs index 600f504a..4163c945 100644 --- a/src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs +++ b/src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs @@ -7,16 +7,18 @@ namespace StellaOps.Scanner.Storage.Repositories; public sealed class ImageRepository { private readonly MongoCollectionProvider _collections; + private readonly TimeProvider _timeProvider; - public ImageRepository(MongoCollectionProvider collections) + public ImageRepository(MongoCollectionProvider collections, TimeProvider? timeProvider = null) { _collections = collections ?? throw new ArgumentNullException(nameof(collections)); + _timeProvider = timeProvider ?? TimeProvider.System; } public async Task UpsertAsync(ImageDocument document, CancellationToken cancellationToken) { ArgumentNullException.ThrowIfNull(document); - document.LastSeenAtUtc = DateTime.UtcNow; + document.LastSeenAtUtc = _timeProvider.GetUtcNow().UtcDateTime; var updateOptions = new ReplaceOptions { IsUpsert = true }; await _collections.Images .ReplaceOneAsync(x => x.ImageDigest == document.ImageDigest, document, updateOptions, cancellationToken) diff --git a/src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs b/src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs index f8ccb5f8..2cc6c76f 100644 --- a/src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs +++ b/src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs @@ -7,16 +7,18 @@ namespace StellaOps.Scanner.Storage.Repositories; public sealed class LayerRepository { private readonly MongoCollectionProvider _collections; + private readonly TimeProvider _timeProvider; - public LayerRepository(MongoCollectionProvider collections) + public LayerRepository(MongoCollectionProvider collections, TimeProvider? timeProvider = null) { _collections = collections ?? throw new ArgumentNullException(nameof(collections)); + _timeProvider = timeProvider ?? 
TimeProvider.System; } public async Task UpsertAsync(LayerDocument document, CancellationToken cancellationToken) { ArgumentNullException.ThrowIfNull(document); - document.LastSeenAtUtc = DateTime.UtcNow; + document.LastSeenAtUtc = _timeProvider.GetUtcNow().UtcDateTime; var options = new ReplaceOptions { IsUpsert = true }; await _collections.Layers .ReplaceOneAsync(x => x.LayerDigest == document.LayerDigest, document, options, cancellationToken) diff --git a/src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs b/src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs index 06dde535..92f9df7b 100644 --- a/src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs +++ b/src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs @@ -7,16 +7,19 @@ namespace StellaOps.Scanner.Storage.Repositories; public sealed class LifecycleRuleRepository { private readonly MongoCollectionProvider _collections; + private readonly TimeProvider _timeProvider; - public LifecycleRuleRepository(MongoCollectionProvider collections) + public LifecycleRuleRepository(MongoCollectionProvider collections, TimeProvider? timeProvider = null) { _collections = collections ?? throw new ArgumentNullException(nameof(collections)); + _timeProvider = timeProvider ?? TimeProvider.System; } public async Task UpsertAsync(LifecycleRuleDocument document, CancellationToken cancellationToken) { ArgumentNullException.ThrowIfNull(document); - document.CreatedAtUtc = document.CreatedAtUtc == default ? DateTime.UtcNow : document.CreatedAtUtc; + var now = _timeProvider.GetUtcNow().UtcDateTime; + document.CreatedAtUtc = document.CreatedAtUtc == default ? now : document.CreatedAtUtc; var options = new ReplaceOptions { IsUpsert = true }; await _collections.LifecycleRules .ReplaceOneAsync(x => x.Id == document.Id, document, options, cancellationToken) diff --git a/src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs b/src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs index f2792d2d..4c85de21 100644 --- a/src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs +++ b/src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs @@ -15,19 +15,22 @@ public sealed class ArtifactStorageService private readonly IArtifactObjectStore _objectStore; private readonly ScannerStorageOptions _options; private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; public ArtifactStorageService( ArtifactRepository artifactRepository, LifecycleRuleRepository lifecycleRuleRepository, IArtifactObjectStore objectStore, IOptions options, - ILogger logger) + ILogger logger, + TimeProvider? timeProvider = null) { _artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository)); _lifecycleRuleRepository = lifecycleRuleRepository ?? throw new ArgumentNullException(nameof(lifecycleRuleRepository)); _objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _options = (options ?? throw new ArgumentNullException(nameof(options))).Value; + _timeProvider = timeProvider ?? 
TimeProvider.System; } public async Task StoreArtifactAsync( @@ -65,6 +68,7 @@ public sealed class ArtifactStorageService await _objectStore.PutAsync(mirrorDescriptor, buffer, cancellationToken).ConfigureAwait(false); } + var now = _timeProvider.GetUtcNow().UtcDateTime; var document = new ArtifactDocument { Id = artifactId, @@ -75,8 +79,8 @@ public sealed class ArtifactStorageService SizeBytes = size, Immutable = immutable, RefCount = 1, - CreatedAtUtc = DateTime.UtcNow, - UpdatedAtUtc = DateTime.UtcNow, + CreatedAtUtc = now, + UpdatedAtUtc = now, TtlClass = ttlClass, }; @@ -90,7 +94,7 @@ public sealed class ArtifactStorageService ArtifactId = document.Id, Class = ttlClass, ExpiresAtUtc = expiresAtUtc, - CreatedAtUtc = DateTime.UtcNow, + CreatedAtUtc = now, }; await _lifecycleRuleRepository.UpsertAsync(lifecycle, cancellationToken).ConfigureAwait(false); diff --git a/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj b/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj index 4d77df47..bfc6eee3 100644 --- a/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj +++ b/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj @@ -7,8 +7,7 @@ true - - + diff --git a/src/StellaOps.Scanner.Storage/TASKS.md b/src/StellaOps.Scanner.Storage/TASKS.md index 20d9fb65..c801b667 100644 --- a/src/StellaOps.Scanner.Storage/TASKS.md +++ b/src/StellaOps.Scanner.Storage/TASKS.md @@ -5,3 +5,4 @@ | SCANNER-STORAGE-09-301 | DONE (2025-10-18) | Scanner Storage Guild | SCANNER-CORE-09-501 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. | Collections created via bootstrapper; migrations recorded; indexes enforce uniqueness + TTL; majority read/write configured. | | SCANNER-STORAGE-09-302 | DONE (2025-10-18) | Scanner Storage Guild | SCANNER-STORAGE-09-301 | MinIO layout, immutability policies, client abstraction, and configuration binding. | S3 client abstraction configurable via options; bucket/prefix defaults documented; immutability flags enforced with tests; config binding validated. | | SCANNER-STORAGE-09-303 | DONE (2025-10-18) | Scanner Storage Guild | SCANNER-STORAGE-09-301, SCANNER-STORAGE-09-302 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. | Dual-write service writes metadata + objects atomically; digest determinism covered by tests; TTL enforcement fixture passing. | +| SCANNER-STORAGE-09-304 | DONE (2025-10-19) | Scanner Storage Guild | SCANNER-STORAGE-09-303 | Adopt `TimeProvider` across storage timestamps for determinism. | Storage services/repositories use injected `TimeProvider`; tests cover timestamp determinism. 
| diff --git a/src/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs b/src/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs new file mode 100644 index 00000000..752e8ced --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs @@ -0,0 +1,71 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.WebService.Options; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class PlatformEventPublisherRegistrationTests +{ + [Fact] + public void NullPublisherRegisteredWhenEventsDisabled() + { + using var factory = new ScannerApplicationFactory(configuration => + { + configuration["scanner:events:enabled"] = "false"; + configuration["scanner:events:dsn"] = string.Empty; + }); + using var scope = factory.Services.CreateScope(); + + var publisher = scope.ServiceProvider.GetRequiredService(); + Assert.IsType(publisher); + } + + [Fact] + public void RedisPublisherRegisteredWhenEventsEnabled() + { + var originalEnabled = Environment.GetEnvironmentVariable("SCANNER__EVENTS__ENABLED"); + var originalDriver = Environment.GetEnvironmentVariable("SCANNER__EVENTS__DRIVER"); + var originalDsn = Environment.GetEnvironmentVariable("SCANNER__EVENTS__DSN"); + var originalStream = Environment.GetEnvironmentVariable("SCANNER__EVENTS__STREAM"); + var originalTimeout = Environment.GetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS"); + var originalMax = Environment.GetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH"); + + Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", "true"); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__DRIVER", "redis"); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__DSN", "localhost:6379"); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__STREAM", "stella.events.tests"); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS", "1"); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH", "100"); + + try + { + using var factory = new ScannerApplicationFactory(configuration => + { + configuration["scanner:events:enabled"] = "true"; + configuration["scanner:events:driver"] = "redis"; + configuration["scanner:events:dsn"] = "localhost:6379"; + configuration["scanner:events:stream"] = "stella.events.tests"; + configuration["scanner:events:publishTimeoutSeconds"] = "1"; + configuration["scanner:events:maxStreamLength"] = "100"; + }); + using var scope = factory.Services.CreateScope(); + + var options = scope.ServiceProvider.GetRequiredService>().Value; + Assert.True(options.Events.Enabled); + Assert.Equal("redis", options.Events.Driver); + + var publisher = scope.ServiceProvider.GetRequiredService(); + Assert.IsType(publisher); + } + finally + { + Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", originalEnabled); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__DRIVER", originalDriver); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__DSN", originalDsn); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__STREAM", originalStream); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS", originalTimeout); + Environment.SetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH", originalMax); + } + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs 
b/src/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs new file mode 100644 index 00000000..285589fc --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs @@ -0,0 +1,71 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using StellaOps.Notify.Models; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class PlatformEventSamplesTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + [Theory] + [InlineData("scanner.report.ready@1.sample.json", NotifyEventKinds.ScannerReportReady)] + [InlineData("scanner.scan.completed@1.sample.json", NotifyEventKinds.ScannerScanCompleted)] + public void PlatformEventSamplesStayCanonical(string fileName, string expectedKind) + { + var json = LoadSample(fileName); + var notifyEvent = JsonSerializer.Deserialize(json, SerializerOptions); + + Assert.NotNull(notifyEvent); + Assert.Equal(expectedKind, notifyEvent!.Kind); + Assert.NotEqual(Guid.Empty, notifyEvent.EventId); + Assert.NotNull(notifyEvent.Payload); + + AssertCanonical(json, notifyEvent); + AssertReportConsistency(notifyEvent.Payload); + } + + private static void AssertCanonical(string originalJson, NotifyEvent notifyEvent) + { + var canonicalJson = NotifyCanonicalJsonSerializer.Serialize(notifyEvent); + var originalNode = JsonNode.Parse(originalJson) ?? throw new InvalidOperationException("Sample JSON must not be null."); + var canonicalNode = JsonNode.Parse(canonicalJson) ?? throw new InvalidOperationException("Canonical JSON must not be null."); + + Assert.True(JsonNode.DeepEquals(originalNode, canonicalNode), "Platform event sample must remain canonical."); + } + + private static void AssertReportConsistency(JsonNode? 
payloadNode) + { + var payload = Assert.IsType(payloadNode); + + var reportNode = Assert.IsType(payload["report"]); + var report = reportNode.Deserialize(SerializerOptions); + Assert.NotNull(report); + + var dsseNode = Assert.IsType(payload["dsse"]); + var payloadValueNode = Assert.IsAssignableFrom(dsseNode["payload"]); + var base64Payload = payloadValueNode.GetValue(); + + var canonicalReportBytes = JsonSerializer.SerializeToUtf8Bytes(report!, SerializerOptions); + var expectedPayload = Convert.ToBase64String(canonicalReportBytes); + Assert.Equal(expectedPayload, base64Payload); + + var reportIdReference = Assert.IsAssignableFrom(payload["reportId"]).GetValue(); + Assert.Equal(report!.ReportId, reportIdReference); + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + Assert.True(File.Exists(path), $"Sample file not found at '{path}'."); + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs b/src/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs new file mode 100644 index 00000000..e4cd654b --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs @@ -0,0 +1,108 @@ +using System.Net; +using System.Net.Http.Json; +using System.Text.Json; +using System.Threading.Tasks; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class PolicyEndpointsTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + [Fact] + public async Task PolicySchemaReturnsEmbeddedSchema() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var response = await client.GetAsync("/api/v1/policy/schema"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("application/schema+json", response.Content.Headers.ContentType?.MediaType); + + var payload = await response.Content.ReadAsStringAsync(); + Assert.Contains("\"$schema\"", payload); + Assert.Contains("\"properties\"", payload); + } + + [Fact] + public async Task PolicyDiagnosticsReturnsRecommendations() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new PolicyDiagnosticsRequestDto + { + Policy = new PolicyPreviewPolicyDto + { + Content = "version: \"1.0\"\nrules: []\n", + Format = "yaml", + Actor = "tester", + Description = "empty ruleset" + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/policy/diagnostics", request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var diagnostics = await response.Content.ReadFromJsonAsync(SerializerOptions); + Assert.NotNull(diagnostics); + Assert.False(diagnostics!.Success); + Assert.True(diagnostics.ErrorCount >= 0); + Assert.NotEmpty(diagnostics.Recommendations); + } + + [Fact] + public async Task PolicyPreviewUsesProposedPolicy() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + const string policyYaml = """ +version: "1.0" +rules: + - name: Block Critical + severity: [Critical] + action: block +"""; + + var request = new PolicyPreviewRequestDto + { + ImageDigest = "sha256:abc123", + Findings = new[] + { + new PolicyPreviewFindingDto + { + Id = "finding-1", + Severity = "Critical", + Source = "NVD", + Tags = new[] { "reachability:runtime" } + } + }, + Policy = new PolicyPreviewPolicyDto + { + 
Content = policyYaml, + Format = "yaml", + Actor = "preview", + Description = "test policy" + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/policy/preview", request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var preview = await response.Content.ReadFromJsonAsync(SerializerOptions); + Assert.NotNull(preview); + Assert.True(preview!.Success); + Assert.Equal(1, preview.Changed); + var diff = Assert.Single(preview.Diffs); + Assert.Equal("finding-1", diff.Projected?.FindingId); + Assert.Equal("Blocked", diff.Projected?.Status); + Assert.Equal(PolicyScoringConfig.Default.Version, diff.Projected?.ConfigVersion); + Assert.NotNull(diff.Projected?.Inputs); + Assert.True(diff.Projected!.Inputs!.ContainsKey("severityWeight")); + Assert.Equal("NVD", diff.Projected.SourceTrust); + Assert.Equal("runtime", diff.Projected.Reachability); + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs b/src/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs new file mode 100644 index 00000000..3d9e5513 --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Claims; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Auth.Abstractions; +using StellaOps.Notify.Models; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class ReportEventDispatcherTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + [Fact] + public async Task PublishAsync_EmitsReportReadyAndScanCompleted() + { + var publisher = new RecordingEventPublisher(); + var dispatcher = new ReportEventDispatcher(publisher, TimeProvider.System, NullLogger.Instance); + var cancellationToken = CancellationToken.None; + + var request = new ReportRequestDto + { + ImageDigest = "sha256:feedface", + Findings = new[] + { + new PolicyPreviewFindingDto + { + Id = "finding-1", + Severity = "Critical", + Repository = "acme/edge/api", + Cve = "CVE-2024-9999", + Tags = new[] { "reachability:runtime", "kev:CVE-2024-9999" } + } + } + }; + + var baseline = new PolicyVerdict("finding-1", PolicyVerdictStatus.Pass, ConfigVersion: "1.0"); + var projected = new PolicyVerdict( + "finding-1", + PolicyVerdictStatus.Blocked, + Score: 47.5, + ConfigVersion: "1.0", + SourceTrust: "NVD", + Reachability: "runtime"); + + var preview = new PolicyPreviewResponse( + Success: true, + PolicyDigest: "digest-123", + RevisionId: "rev-42", + Issues: ImmutableArray.Empty, + Diffs: ImmutableArray.Create(new PolicyVerdictDiff(baseline, projected)), + ChangedCount: 1); + + var document = new ReportDocumentDto + { + ReportId = "report-abc", + ImageDigest = "sha256:feedface", + GeneratedAt = DateTimeOffset.Parse("2025-10-19T12:34:56Z"), + Verdict = "blocked", + Policy = new ReportPolicyDto + { + RevisionId = "rev-42", + Digest = "digest-123" + }, + Summary = new ReportSummaryDto + { + Total = 1, + Blocked = 1, + Warned = 0, + Ignored = 0, + Quieted = 0 + }, + Verdicts = new[] + { + new 
PolicyPreviewVerdictDto + { + FindingId = "finding-1", + Status = "Blocked", + Score = 47.5, + SourceTrust = "NVD", + Reachability = "runtime" + } + } + }; + + var envelope = new DsseEnvelopeDto + { + PayloadType = "application/vnd.stellaops.report+json", + Payload = Convert.ToBase64String(JsonSerializer.SerializeToUtf8Bytes(document, SerializerOptions)), + Signatures = new[] + { + new DsseSignatureDto { KeyId = "test-key", Algorithm = "hs256", Signature = "signature-value" } + } + }; + + var context = new DefaultHttpContext(); + context.User = new ClaimsPrincipal(new ClaimsIdentity(new[] + { + new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha") + })); + context.Request.Scheme = "https"; + context.Request.Host = new HostString("scanner.example"); + + await dispatcher.PublishAsync(request, preview, document, envelope, context, cancellationToken); + + Assert.Equal(2, publisher.Events.Count); + + var readyEvent = Assert.Single(publisher.Events, evt => evt.Kind == NotifyEventKinds.ScannerReportReady); + Assert.Equal("tenant-alpha", readyEvent.Tenant); + Assert.Equal("api", readyEvent.Scope?.Repo); + Assert.Equal("acme/edge", readyEvent.Scope?.Namespace); + Assert.Equal("sha256:feedface", readyEvent.Scope?.Digest); + Assert.NotNull(readyEvent.Payload); + Assert.Equal("fail", readyEvent.Payload?["verdict"]?.GetValue()); + Assert.Equal("report-abc", readyEvent.Payload?["reportId"]?.GetValue()); + Assert.Equal("signature-value", readyEvent.Payload?["dsse"]?["signatures"]?[0]?["signature"]?.GetValue()); + Assert.Equal(envelope.Payload, readyEvent.Payload?["dsse"]?["payload"]?.GetValue()); + Assert.Equal(1, readyEvent.Payload?["delta"]?["newCritical"]?.GetValue()); + Assert.Equal("CVE-2024-9999", readyEvent.Payload?["delta"]?["kev"]?[0]?.GetValue()); + Assert.Equal("https://scanner.example/ui/reports/report-abc", readyEvent.Payload?["links"]?["ui"]?.GetValue()); + var scanEvent = Assert.Single(publisher.Events, evt => evt.Kind == NotifyEventKinds.ScannerScanCompleted); + Assert.Equal("fail", scanEvent.Payload?["verdict"]?.GetValue()); + Assert.Equal("report-abc", scanEvent.Payload?["reportId"]?.GetValue()); + Assert.Equal("sha256:feedface", scanEvent.Payload?["digest"]?.GetValue()); + Assert.Equal("runtime", scanEvent.Payload?["findings"]?[0]?["reachability"]?.GetValue()); + Assert.Equal("report-abc", scanEvent.Payload?["report"]?["reportId"]?.GetValue()); + Assert.Equal("blocked", scanEvent.Payload?["report"]?["verdict"]?.GetValue()); + Assert.Equal(envelope.Payload, scanEvent.Payload?["dsse"]?["payload"]?.GetValue()); + Assert.Equal(NotifyEventKinds.ScannerScanCompleted, scanEvent.Kind); + } + + private sealed class RecordingEventPublisher : IPlatformEventPublisher + { + public List Events { get; } = new(); + + public Task PublishAsync(NotifyEvent @event, CancellationToken cancellationToken = default) + { + Events.Add(@event); + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs b/src/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs new file mode 100644 index 00000000..b9f301f0 --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs @@ -0,0 +1,35 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading.Tasks; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class ReportSamplesTests +{ + private static readonly JsonSerializerOptions SerializerOptions = 
new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + [Fact] + public async Task ReportSampleEnvelope_RemainsCanonical() + { + var baseDirectory = AppContext.BaseDirectory; + var repoRoot = Path.GetFullPath(Path.Combine(baseDirectory, "..", "..", "..", "..", "..")); + var path = Path.Combine(repoRoot, "samples", "api", "reports", "report-sample.dsse.json"); + Assert.True(File.Exists(path), $"Sample file not found at {path}."); + await using var stream = File.OpenRead(path); + var response = await JsonSerializer.DeserializeAsync(stream, SerializerOptions); + Assert.NotNull(response); + Assert.NotNull(response!.Report); + Assert.NotNull(response.Dsse); + + var reportBytes = JsonSerializer.SerializeToUtf8Bytes(response.Report, SerializerOptions); + var expectedPayload = Convert.ToBase64String(reportBytes); + Assert.Equal(expectedPayload, response.Dsse!.Payload); + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs b/src/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs new file mode 100644 index 00000000..49af2075 --- /dev/null +++ b/src/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs @@ -0,0 +1,136 @@ +using System.Net; +using System.Net.Http.Json; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class ReportsEndpointsTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + [Fact] + public async Task ReportsEndpointReturnsSignedEnvelope() + { + const string policyYaml = """ +version: "1.0" +rules: + - name: Block Critical + severity: [Critical] + action: block +"""; + + var hmacKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("scanner-report-hmac-key-2025!")); + + using var factory = new ScannerApplicationFactory(configuration => + { + configuration["scanner:signing:enabled"] = "true"; + configuration["scanner:signing:keyId"] = "scanner-report-signing"; + configuration["scanner:signing:algorithm"] = "hs256"; + configuration["scanner:signing:keyPem"] = hmacKey; + configuration["scanner:features:enableSignedReports"] = "true"; + }); + + var store = factory.Services.GetRequiredService(); + await store.SaveAsync( + new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "seed", "initial"), + CancellationToken.None); + + using var client = factory.CreateClient(); + + var request = new ReportRequestDto + { + ImageDigest = "sha256:deadbeef", + Findings = new[] + { + new PolicyPreviewFindingDto + { + Id = "finding-1", + Severity = "Critical", + Source = "NVD", + Tags = new[] { "reachability:runtime" } + } + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/reports", request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var raw = await response.Content.ReadAsStringAsync(); + Assert.False(string.IsNullOrWhiteSpace(raw), raw); + var payload = JsonSerializer.Deserialize(raw, SerializerOptions); + Assert.NotNull(payload); + Assert.NotNull(payload!.Report); + Assert.NotNull(payload.Dsse); + Assert.StartsWith("report-", payload.Report.ReportId, 
StringComparison.Ordinal); + Assert.Equal("blocked", payload.Report.Verdict); + + var dsse = payload.Dsse!; + Assert.Equal("application/vnd.stellaops.report+json", dsse.PayloadType); + var decodedPayload = Convert.FromBase64String(dsse.Payload); + var canonicalPayload = JsonSerializer.SerializeToUtf8Bytes(payload.Report, SerializerOptions); + var expectedBase64 = Convert.ToBase64String(canonicalPayload); + Assert.Equal(expectedBase64, dsse.Payload); + + var reportVerdict = Assert.Single(payload.Report.Verdicts); + Assert.Equal("NVD", reportVerdict.SourceTrust); + Assert.Equal("runtime", reportVerdict.Reachability); + Assert.NotNull(reportVerdict.Inputs); + Assert.True(reportVerdict.Inputs!.ContainsKey("severityWeight")); + Assert.Equal(PolicyScoringConfig.Default.Version, reportVerdict.ConfigVersion); + + var signature = Assert.Single(dsse.Signatures); + Assert.Equal("scanner-report-signing", signature.KeyId); + Assert.Equal("hs256", signature.Algorithm, ignoreCase: true); + + using var hmac = new System.Security.Cryptography.HMACSHA256(Convert.FromBase64String(hmacKey)); + var expectedSig = Convert.ToBase64String(hmac.ComputeHash(decodedPayload)); + var actualSig = signature.Signature; + Assert.True(expectedSig == actualSig, $"expected:{expectedSig}, actual:{actualSig}"); + } + + [Fact] + public async Task ReportsEndpointValidatesDigest() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new ReportRequestDto + { + ImageDigest = "", + Findings = Array.Empty() + }; + + var response = await client.PostAsJsonAsync("/api/v1/reports", request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task ReportsEndpointReturnsServiceUnavailableWhenPolicyMissing() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new ReportRequestDto + { + ImageDigest = "sha256:feedface", + Findings = new[] + { + new PolicyPreviewFindingDto { Id = "finding-1", Severity = "High" } + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/reports", request); + Assert.Equal((HttpStatusCode)StatusCodes.Status503ServiceUnavailable, response.StatusCode); + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs b/src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs index d8100dd9..fb5afc64 100644 --- a/src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs +++ b/src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs @@ -24,6 +24,7 @@ internal sealed class ScannerApplicationFactory : WebApplicationFactory ["scanner:artifactStore:bucket"] = "scanner-artifacts", ["scanner:telemetry:minimumLogLevel"] = "Information", ["scanner:telemetry:enableRequestLogging"] = "false", + ["scanner:events:enabled"] = "false", ["scanner:features:enableSignedReports"] = "false" }; @@ -58,6 +59,10 @@ internal sealed class ScannerApplicationFactory : WebApplicationFactory Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ENDPOINT", configuration["scanner:artifactStore:endpoint"]); Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ACCESSKEY", configuration["scanner:artifactStore:accessKey"]); Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__SECRETKEY", configuration["scanner:artifactStore:secretKey"]); + if (configuration.TryGetValue("scanner:events:enabled", out var eventsEnabled)) + { + Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", 
eventsEnabled); + } if (configuration.TryGetValue("scanner:authority:enabled", out var authorityEnabled)) { diff --git a/src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs b/src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs index 756f6b17..86281373 100644 --- a/src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs +++ b/src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; using System.IO; using System.Net; @@ -247,6 +248,48 @@ public sealed class ScansEndpointsTests Assert.Contains(envelope.Data.Keys, key => key == "stage"); } + [Fact] + public async Task ProgressStreamSupportsServerSentEvents() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new ScanSubmitRequest + { + Image = new ScanImageDescriptor { Reference = "ghcr.io/demo/app:3.0.0" } + }; + + var submit = await client.PostAsJsonAsync("/api/v1/scans", request); + var submitPayload = await submit.Content.ReadFromJsonAsync(); + Assert.NotNull(submitPayload); + + var response = await client.GetAsync($"/api/v1/scans/{submitPayload!.ScanId}/events", HttpCompletionOption.ResponseHeadersRead); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("text/event-stream", response.Content.Headers.ContentType?.MediaType); + + await using var stream = await response.Content.ReadAsStreamAsync(); + using var reader = new StreamReader(stream); + + var idLine = await reader.ReadLineAsync(); + var eventLine = await reader.ReadLineAsync(); + var dataLine = await reader.ReadLineAsync(); + var separator = await reader.ReadLineAsync(); + + Assert.Equal("id: 1", idLine); + Assert.Equal("event: pending", eventLine); + Assert.NotNull(dataLine); + Assert.StartsWith("data: ", dataLine, StringComparison.Ordinal); + Assert.Equal(string.Empty, separator); + + var json = dataLine!["data: ".Length..]; + var envelope = JsonSerializer.Deserialize(json, SerializerOptions); + Assert.NotNull(envelope); + Assert.Equal(submitPayload.ScanId, envelope!.ScanId); + Assert.Equal("Pending", envelope.State); + Assert.Equal(1, envelope.Sequence); + Assert.True(envelope.Timestamp.UtcDateTime <= DateTime.UtcNow); + } + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); private sealed record ProgressEnvelope( diff --git a/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj b/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj index 35a72be6..78aa462a 100644 --- a/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj +++ b/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj @@ -9,4 +9,12 @@ + + + Always + + + Always + + diff --git a/src/StellaOps.Scanner.WebService/AssemblyInfo.cs b/src/StellaOps.Scanner.WebService/AssemblyInfo.cs new file mode 100644 index 00000000..b927833e --- /dev/null +++ b/src/StellaOps.Scanner.WebService/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.WebService.Tests")] diff --git a/src/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs b/src/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs new file mode 100644 index 00000000..6baf1f80 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; 
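Editor's note: the ProgressStreamSupportsServerSentEvents test above pins the exact frame layout emitted by the progress endpoint (`id:`, `event:`, `data:`, blank-line separator). As a rough, hedged illustration of how a client could consume that stream, here is a minimal frame parser; `SseFrame` and `SseReader` are hypothetical names invented for this sketch and are not types shipped by the service.

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Threading.Tasks;

    // Illustrative frame shape; the real payload is the JSON envelope asserted in the test.
    public sealed record SseFrame(string? Id, string? Event, string Data);

    public static class SseReader
    {
        // Yields one frame per blank-line separator, mirroring the "id:/event:/data:" framing
        // asserted by the test. Multi-line data fields are joined with '\n' per the SSE convention.
        public static async IAsyncEnumerable<SseFrame> ReadFramesAsync(TextReader reader)
        {
            string? id = null, evt = null;
            var data = new List<string>();

            while (await reader.ReadLineAsync() is { } line)
            {
                if (line.Length == 0)
                {
                    if (id is not null || evt is not null || data.Count > 0)
                    {
                        yield return new SseFrame(id, evt, string.Join('\n', data));
                    }

                    id = null;
                    evt = null;
                    data.Clear();
                    continue;
                }

                if (line.StartsWith("id: ", StringComparison.Ordinal)) id = line[4..];
                else if (line.StartsWith("event: ", StringComparison.Ordinal)) evt = line[7..];
                else if (line.StartsWith("data: ", StringComparison.Ordinal)) data.Add(line[6..]);
            }
        }
    }

A caller would pass the StreamReader opened over the `/events` response and deserialize each frame's Data with the same web-default JsonSerializerOptions the test uses.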
+using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +public sealed record PolicyDiagnosticsRequestDto +{ + [JsonPropertyName("policy")] + public PolicyPreviewPolicyDto? Policy { get; init; } +} + +public sealed record PolicyDiagnosticsResponseDto +{ + [JsonPropertyName("success")] + public bool Success { get; init; } + + [JsonPropertyName("version")] + public string Version { get; init; } = string.Empty; + + [JsonPropertyName("ruleCount")] + public int RuleCount { get; init; } + + [JsonPropertyName("errorCount")] + public int ErrorCount { get; init; } + + [JsonPropertyName("warningCount")] + public int WarningCount { get; init; } + + [JsonPropertyName("generatedAt")] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("issues")] + public IReadOnlyList Issues { get; init; } = Array.Empty(); + + [JsonPropertyName("recommendations")] + public IReadOnlyList Recommendations { get; init; } = Array.Empty(); +} diff --git a/src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs b/src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs index 7c8fe368..8e85cc2b 100644 --- a/src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs +++ b/src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs @@ -95,6 +95,22 @@ public sealed record PolicyPreviewVerdictDto [JsonPropertyName("quiet")] public bool? Quiet { get; init; } + + [JsonPropertyName("unknownConfidence")] + public double? UnknownConfidence { get; init; } + + [JsonPropertyName("confidenceBand")] + public string? ConfidenceBand { get; init; } + + [JsonPropertyName("unknownAgeDays")] + public double? UnknownAgeDays { get; init; } + + [JsonPropertyName("sourceTrust")] + public string? SourceTrust { get; init; } + + [JsonPropertyName("reachability")] + public string? Reachability { get; init; } + } public sealed record PolicyPreviewPolicyDto diff --git a/src/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs b/src/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs new file mode 100644 index 00000000..7434d5ab --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs @@ -0,0 +1,122 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +public sealed record ReportRequestDto +{ + [JsonPropertyName("imageDigest")] + public string? ImageDigest { get; init; } + + [JsonPropertyName("findings")] + public IReadOnlyList? Findings { get; init; } + + [JsonPropertyName("baseline")] + public IReadOnlyList? Baseline { get; init; } +} + +public sealed record ReportResponseDto +{ + [JsonPropertyName("report")] + public ReportDocumentDto Report { get; init; } = new(); + + [JsonPropertyName("dsse")] + public DsseEnvelopeDto? 
Dsse { get; init; } +} + +public sealed record ReportDocumentDto +{ + [JsonPropertyName("reportId")] + [JsonPropertyOrder(0)] + public string ReportId { get; init; } = string.Empty; + + [JsonPropertyName("imageDigest")] + [JsonPropertyOrder(1)] + public string ImageDigest { get; init; } = string.Empty; + + [JsonPropertyName("generatedAt")] + [JsonPropertyOrder(2)] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("verdict")] + [JsonPropertyOrder(3)] + public string Verdict { get; init; } = string.Empty; + + [JsonPropertyName("policy")] + [JsonPropertyOrder(4)] + public ReportPolicyDto Policy { get; init; } = new(); + + [JsonPropertyName("summary")] + [JsonPropertyOrder(5)] + public ReportSummaryDto Summary { get; init; } = new(); + + [JsonPropertyName("verdicts")] + [JsonPropertyOrder(6)] + public IReadOnlyList Verdicts { get; init; } = Array.Empty(); + + [JsonPropertyName("issues")] + [JsonPropertyOrder(7)] + public IReadOnlyList Issues { get; init; } = Array.Empty(); +} + +public sealed record ReportPolicyDto +{ + [JsonPropertyName("revisionId")] + [JsonPropertyOrder(0)] + public string? RevisionId { get; init; } + + [JsonPropertyName("digest")] + [JsonPropertyOrder(1)] + public string? Digest { get; init; } +} + +public sealed record ReportSummaryDto +{ + [JsonPropertyName("total")] + [JsonPropertyOrder(0)] + public int Total { get; init; } + + [JsonPropertyName("blocked")] + [JsonPropertyOrder(1)] + public int Blocked { get; init; } + + [JsonPropertyName("warned")] + [JsonPropertyOrder(2)] + public int Warned { get; init; } + + [JsonPropertyName("ignored")] + [JsonPropertyOrder(3)] + public int Ignored { get; init; } + + [JsonPropertyName("quieted")] + [JsonPropertyOrder(4)] + public int Quieted { get; init; } +} + +public sealed record DsseEnvelopeDto +{ + [JsonPropertyName("payloadType")] + [JsonPropertyOrder(0)] + public string PayloadType { get; init; } = string.Empty; + + [JsonPropertyName("payload")] + [JsonPropertyOrder(1)] + public string Payload { get; init; } = string.Empty; + + [JsonPropertyName("signatures")] + [JsonPropertyOrder(2)] + public IReadOnlyList Signatures { get; init; } = Array.Empty(); +} + +public sealed record DsseSignatureDto +{ + [JsonPropertyName("keyId")] + public string KeyId { get; init; } = string.Empty; + + [JsonPropertyName("algorithm")] + public string Algorithm { get; init; } = string.Empty; + + [JsonPropertyName("signature")] + public string Signature { get; init; } = string.Empty; +} diff --git a/src/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs b/src/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs new file mode 100644 index 00000000..4dd4d849 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs @@ -0,0 +1,175 @@ +using System.Collections.Immutable; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class PolicyEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + public static void 
MapPolicyEndpoints(this RouteGroupBuilder apiGroup, string policySegment) + { + ArgumentNullException.ThrowIfNull(apiGroup); + + var policyGroup = apiGroup + .MapGroup(NormalizeSegment(policySegment)) + .WithTags("Policy"); + + policyGroup.MapGet("/schema", HandleSchemaAsync) + .WithName("scanner.policy.schema") + .Produces(StatusCodes.Status200OK) + .RequireAuthorization(ScannerPolicies.Reports) + .WithOpenApi(operation => + { + operation.Summary = "Retrieve the embedded policy JSON schema."; + operation.Description = "Returns the policy schema (`policy-schema@1`) used to validate YAML or JSON rulesets."; + return operation; + }); + + policyGroup.MapPost("/diagnostics", HandleDiagnosticsAsync) + .WithName("scanner.policy.diagnostics") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .RequireAuthorization(ScannerPolicies.Reports) + .WithOpenApi(operation => + { + operation.Summary = "Run policy diagnostics."; + operation.Description = "Accepts YAML or JSON policy content and returns normalization issues plus recommendations (ignore rules, VEX include/exclude, vendor precedence)."; + return operation; + }); + + policyGroup.MapPost("/preview", HandlePreviewAsync) + .WithName("scanner.policy.preview") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .RequireAuthorization(ScannerPolicies.Reports) + .WithOpenApi(operation => + { + operation.Summary = "Preview policy impact against findings."; + operation.Description = "Evaluates the supplied findings against the active or proposed policy, returning diffs, quieted verdicts, and actionable validation messages."; + return operation; + }); + } + + private static IResult HandleSchemaAsync(HttpContext context) + { + var schema = PolicySchemaResource.ReadSchemaJson(); + return Results.Text(schema, "application/schema+json", Encoding.UTF8); + } + + private static IResult HandleDiagnosticsAsync( + PolicyDiagnosticsRequestDto request, + TimeProvider timeProvider, + HttpContext context) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(timeProvider); + + if (request.Policy is null || string.IsNullOrWhiteSpace(request.Policy.Content)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid policy diagnostics request", + StatusCodes.Status400BadRequest, + detail: "Policy content is required for diagnostics."); + } + + var format = PolicyDtoMapper.ParsePolicyFormat(request.Policy.Format); + var binding = PolicyBinder.Bind(request.Policy.Content, format); + var diagnostics = PolicyDiagnostics.Create(binding, timeProvider); + + var response = new PolicyDiagnosticsResponseDto + { + Success = diagnostics.ErrorCount == 0, + Version = diagnostics.Version, + RuleCount = diagnostics.RuleCount, + ErrorCount = diagnostics.ErrorCount, + WarningCount = diagnostics.WarningCount, + GeneratedAt = diagnostics.GeneratedAt, + Issues = diagnostics.Issues.Select(PolicyDtoMapper.ToIssueDto).ToImmutableArray(), + Recommendations = diagnostics.Recommendations + }; + + return Json(response); + } + + private static async Task HandlePreviewAsync( + PolicyPreviewRequestDto request, + PolicyPreviewService previewService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(previewService); + + if (string.IsNullOrWhiteSpace(request.ImageDigest)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid policy preview 
request", + StatusCodes.Status400BadRequest, + detail: "imageDigest is required."); + } + + if (!request.ImageDigest.Contains(':', StringComparison.Ordinal)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid policy preview request", + StatusCodes.Status400BadRequest, + detail: "imageDigest must include algorithm prefix (e.g. sha256:...)."); + } + + if (request.Findings is not null) + { + var missingIds = request.Findings.Any(f => string.IsNullOrWhiteSpace(f.Id)); + if (missingIds) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid policy preview request", + StatusCodes.Status400BadRequest, + detail: "All findings must include an id value."); + } + } + + var domainRequest = PolicyDtoMapper.ToDomain(request); + var response = await previewService.PreviewAsync(domainRequest, cancellationToken).ConfigureAwait(false); + var payload = PolicyDtoMapper.ToDto(response); + return Json(payload); + } + + private static string NormalizeSegment(string segment) + { + if (string.IsNullOrWhiteSpace(segment)) + { + return "/policy"; + } + + var trimmed = segment.Trim('/'); + return "/" + trimmed; + } + + private static IResult Json(T value) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", Encoding.UTF8); + } +} diff --git a/src/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs b/src/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs new file mode 100644 index 00000000..ca6f9708 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs @@ -0,0 +1,266 @@ +using System.Collections.Generic; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class ReportEndpoints +{ + private const string PayloadType = "application/vnd.stellaops.report+json"; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + public static void MapReportEndpoints(this RouteGroupBuilder apiGroup, string reportsSegment) + { + ArgumentNullException.ThrowIfNull(apiGroup); + + var reports = apiGroup + .MapGroup(NormalizeSegment(reportsSegment)) + .WithTags("Reports"); + + reports.MapPost("/", HandleCreateReportAsync) + .WithName("scanner.reports.create") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status503ServiceUnavailable) + .RequireAuthorization(ScannerPolicies.Reports) + .WithOpenApi(operation => + { + operation.Summary = "Assemble a signed scan report."; + operation.Description = "Aggregates latest findings with the active policy snapshot, returning verdicts plus an optional DSSE envelope."; + return operation; + }); + } + + private static async Task HandleCreateReportAsync( + ReportRequestDto request, + PolicyPreviewService previewService, + IReportSigner signer, + TimeProvider timeProvider, + IReportEventDispatcher 
eventDispatcher, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(previewService); + ArgumentNullException.ThrowIfNull(signer); + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(eventDispatcher); + + if (string.IsNullOrWhiteSpace(request.ImageDigest)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid report request", + StatusCodes.Status400BadRequest, + detail: "imageDigest is required."); + } + + if (!request.ImageDigest.Contains(':', StringComparison.Ordinal)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid report request", + StatusCodes.Status400BadRequest, + detail: "imageDigest must include algorithm prefix (e.g. sha256:...)."); + } + + if (request.Findings is not null && request.Findings.Any(f => string.IsNullOrWhiteSpace(f.Id))) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid report request", + StatusCodes.Status400BadRequest, + detail: "All findings must include an id value."); + } + + var previewDto = new PolicyPreviewRequestDto + { + ImageDigest = request.ImageDigest, + Findings = request.Findings, + Baseline = request.Baseline, + Policy = null + }; + + var domainRequest = PolicyDtoMapper.ToDomain(previewDto) with { ProposedPolicy = null }; + var preview = await previewService.PreviewAsync(domainRequest, cancellationToken).ConfigureAwait(false); + + if (!preview.Success) + { + var issues = preview.Issues.Select(PolicyDtoMapper.ToIssueDto).ToArray(); + var extensions = new Dictionary(StringComparer.Ordinal) + { + ["issues"] = issues + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Unable to assemble report", + StatusCodes.Status503ServiceUnavailable, + detail: "No policy snapshot is available or validation failed.", + extensions: extensions); + } + + var projectedVerdicts = preview.Diffs + .Select(diff => PolicyDtoMapper.ToVerdictDto(diff.Projected)) + .ToArray(); + + var issuesDto = preview.Issues.Select(PolicyDtoMapper.ToIssueDto).ToArray(); + var summary = BuildSummary(projectedVerdicts); + var verdict = ComputeVerdict(projectedVerdicts); + var reportId = CreateReportId(request.ImageDigest!, preview.PolicyDigest); + var generatedAt = timeProvider.GetUtcNow(); + + var document = new ReportDocumentDto + { + ReportId = reportId, + ImageDigest = request.ImageDigest!, + GeneratedAt = generatedAt, + Verdict = verdict, + Policy = new ReportPolicyDto + { + RevisionId = preview.RevisionId, + Digest = preview.PolicyDigest + }, + Summary = summary, + Verdicts = projectedVerdicts, + Issues = issuesDto + }; + + var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(document, SerializerOptions); + var signature = signer.Sign(payloadBytes); + DsseEnvelopeDto? 
envelope = null; + if (signature is not null) + { + envelope = new DsseEnvelopeDto + { + PayloadType = PayloadType, + Payload = Convert.ToBase64String(payloadBytes), + Signatures = new[] + { + new DsseSignatureDto + { + KeyId = signature.KeyId, + Algorithm = signature.Algorithm, + Signature = signature.Signature + } + } + }; + } + + var response = new ReportResponseDto + { + Report = document, + Dsse = envelope + }; + + await eventDispatcher + .PublishAsync(request, preview, document, envelope, context, cancellationToken) + .ConfigureAwait(false); + + return Json(response); + } + + private static ReportSummaryDto BuildSummary(IReadOnlyList verdicts) + { + if (verdicts.Count == 0) + { + return new ReportSummaryDto { Total = 0 }; + } + + var blocked = verdicts.Count(v => string.Equals(v.Status, nameof(PolicyVerdictStatus.Blocked), StringComparison.OrdinalIgnoreCase)); + var warned = verdicts.Count(v => + string.Equals(v.Status, nameof(PolicyVerdictStatus.Warned), StringComparison.OrdinalIgnoreCase) + || string.Equals(v.Status, nameof(PolicyVerdictStatus.Deferred), StringComparison.OrdinalIgnoreCase) + || string.Equals(v.Status, nameof(PolicyVerdictStatus.RequiresVex), StringComparison.OrdinalIgnoreCase) + || string.Equals(v.Status, nameof(PolicyVerdictStatus.Escalated), StringComparison.OrdinalIgnoreCase)); + var ignored = verdicts.Count(v => string.Equals(v.Status, nameof(PolicyVerdictStatus.Ignored), StringComparison.OrdinalIgnoreCase)); + var quieted = verdicts.Count(v => v.Quiet is true); + + return new ReportSummaryDto + { + Total = verdicts.Count, + Blocked = blocked, + Warned = warned, + Ignored = ignored, + Quieted = quieted + }; + } + + private static string ComputeVerdict(IReadOnlyList verdicts) + { + if (verdicts.Count == 0) + { + return "unknown"; + } + + if (verdicts.Any(v => string.Equals(v.Status, nameof(PolicyVerdictStatus.Blocked), StringComparison.OrdinalIgnoreCase))) + { + return "blocked"; + } + + if (verdicts.Any(v => string.Equals(v.Status, nameof(PolicyVerdictStatus.Escalated), StringComparison.OrdinalIgnoreCase))) + { + return "escalated"; + } + + if (verdicts.Any(v => + string.Equals(v.Status, nameof(PolicyVerdictStatus.Warned), StringComparison.OrdinalIgnoreCase) + || string.Equals(v.Status, nameof(PolicyVerdictStatus.Deferred), StringComparison.OrdinalIgnoreCase) + || string.Equals(v.Status, nameof(PolicyVerdictStatus.RequiresVex), StringComparison.OrdinalIgnoreCase))) + { + return "warn"; + } + + return "pass"; + } + + private static string CreateReportId(string imageDigest, string policyDigest) + { + var builder = new StringBuilder(); + builder.Append(imageDigest.Trim()); + builder.Append('|'); + builder.Append(policyDigest ?? 
string.Empty); + + using var sha256 = SHA256.Create(); + var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(builder.ToString())); + var hex = Convert.ToHexString(hash.AsSpan(0, 10)).ToLowerInvariant(); + return $"report-{hex}"; + } + + private static string NormalizeSegment(string segment) + { + if (string.IsNullOrWhiteSpace(segment)) + { + return "/reports"; + } + + var trimmed = segment.Trim('/'); + return "/" + trimmed; + } + + private static IResult Json(T value) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", Encoding.UTF8); + } +} diff --git a/src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs b/src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs index 627e42c4..154aae4a 100644 --- a/src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs +++ b/src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs @@ -23,11 +23,11 @@ internal static class ScanEndpoints Converters = { new JsonStringEnumConverter() } }; - public static void MapScanEndpoints(this RouteGroupBuilder apiGroup) + public static void MapScanEndpoints(this RouteGroupBuilder apiGroup, string scansSegment) { ArgumentNullException.ThrowIfNull(apiGroup); - var scans = apiGroup.MapGroup("/scans"); + var scans = apiGroup.MapGroup(NormalizeSegment(scansSegment)); scans.MapPost("/", HandleSubmitAsync) .WithName("scanner.scans.submit") @@ -295,4 +295,15 @@ internal static class ScanEndpoints var payload = JsonSerializer.Serialize(value, SerializerOptions); return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode); } + + private static string NormalizeSegment(string segment) + { + if (string.IsNullOrWhiteSpace(segment)) + { + return "/scans"; + } + + var trimmed = segment.Trim('/'); + return "/" + trimmed; + } } diff --git a/src/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs b/src/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs new file mode 100644 index 00000000..8efca70c --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs @@ -0,0 +1,58 @@ +using System.Linq; +using System.Reflection; +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Scanner.WebService.Extensions; + +internal static class OpenApiRegistrationExtensions +{ + public static IServiceCollection AddOpenApiIfAvailable(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + var extensionType = Type.GetType("Microsoft.Extensions.DependencyInjection.OpenApiServiceCollectionExtensions, Microsoft.AspNetCore.OpenApi"); + if (extensionType is not null) + { + var method = extensionType + .GetMethods(BindingFlags.Public | BindingFlags.Static) + .FirstOrDefault(m => + string.Equals(m.Name, "AddOpenApi", StringComparison.Ordinal) && + m.GetParameters().Length == 2); + + if (method is not null) + { + var result = method.Invoke(null, new object?[] { services, null }); + if (result is IServiceCollection collection) + { + return collection; + } + } + } + + services.AddEndpointsApiExplorer(); + return services; + } + + public static WebApplication MapOpenApiIfAvailable(this WebApplication app) + { + ArgumentNullException.ThrowIfNull(app); + + var extensionType = Type.GetType("Microsoft.AspNetCore.Builder.OpenApiApplicationBuilderExtensions, Microsoft.AspNetCore.OpenApi"); + if (extensionType is not null) + { + var method = extensionType + .GetMethods(BindingFlags.Public 
| BindingFlags.Static) + .FirstOrDefault(m => + string.Equals(m.Name, "MapOpenApi", StringComparison.Ordinal) && + m.GetParameters().Length == 1); + + if (method is not null) + { + method.Invoke(null, new object?[] { app }); + } + } + + return app; + } +} diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs index e0e80bff..57ffbf17 100644 --- a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs +++ b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs @@ -60,6 +60,11 @@ public sealed class ScannerWebServiceOptions /// public ApiOptions Api { get; set; } = new(); + /// + /// Platform event emission settings. + /// + public EventsOptions Events { get; set; } = new(); + public sealed class StorageOptions { public string Driver { get; set; } = "mongo"; @@ -214,6 +219,8 @@ public sealed class ScannerWebServiceOptions public string Algorithm { get; set; } = "ed25519"; + public string? Provider { get; set; } + public string? KeyPem { get; set; } public string? KeyPemFile { get; set; } @@ -236,5 +243,24 @@ public sealed class ScannerWebServiceOptions public string ScansSegment { get; set; } = "scans"; public string ReportsSegment { get; set; } = "reports"; + + public string PolicySegment { get; set; } = "policy"; + } + + public sealed class EventsOptions + { + public bool Enabled { get; set; } + + public string Driver { get; set; } = "redis"; + + public string Dsn { get; set; } = string.Empty; + + public string Stream { get; set; } = "stella.events"; + + public double PublishTimeoutSeconds { get; set; } = 5; + + public long MaxStreamLength { get; set; } = 10000; + + public IDictionary DriverSettings { get; set; } = new Dictionary(StringComparer.OrdinalIgnoreCase); } } diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs index 476da69a..dbf61916 100644 --- a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs +++ b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.IO; namespace StellaOps.Scanner.WebService.Options; @@ -54,6 +55,28 @@ public static class ScannerWebServiceOptionsPostConfigure { signing.CertificateChainPem = ReadAllText(signing.CertificateChainPemFile!, contentRootPath); } + + options.Events ??= new ScannerWebServiceOptions.EventsOptions(); + var eventsOptions = options.Events; + eventsOptions.DriverSettings ??= new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (string.IsNullOrWhiteSpace(eventsOptions.Driver)) + { + eventsOptions.Driver = "redis"; + } + + if (string.IsNullOrWhiteSpace(eventsOptions.Stream)) + { + eventsOptions.Stream = "stella.events"; + } + + if (string.IsNullOrWhiteSpace(eventsOptions.Dsn) + && string.Equals(options.Queue?.Driver, "redis", StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrWhiteSpace(options.Queue?.Dsn)) + { + eventsOptions.Dsn = options.Queue!.Dsn; + } + } private static string ReadSecretFile(string path, string contentRootPath) diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs index 7025c91e..bf05dce4 100644 --- a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs +++ 
b/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs @@ -28,6 +28,11 @@ public static class ScannerWebServiceOptionsValidator "minio" }; + private static readonly HashSet SupportedEventDrivers = new(StringComparer.OrdinalIgnoreCase) + { + "redis" + }; + public static void Validate(ScannerWebServiceOptions options) { ArgumentNullException.ThrowIfNull(options); @@ -62,6 +67,24 @@ public static class ScannerWebServiceOptionsValidator { throw new InvalidOperationException("API basePath must be configured."); } + + if (string.IsNullOrWhiteSpace(options.Api.ScansSegment)) + { + throw new InvalidOperationException("API scansSegment must be configured."); + } + + if (string.IsNullOrWhiteSpace(options.Api.ReportsSegment)) + { + throw new InvalidOperationException("API reportsSegment must be configured."); + } + + if (string.IsNullOrWhiteSpace(options.Api.PolicySegment)) + { + throw new InvalidOperationException("API policySegment must be configured."); + } + + options.Events ??= new ScannerWebServiceOptions.EventsOptions(); + ValidateEvents(options.Events); } private static void ValidateStorage(ScannerWebServiceOptions.StorageOptions storage) @@ -143,6 +166,39 @@ public static class ScannerWebServiceOptionsValidator } } + private static void ValidateEvents(ScannerWebServiceOptions.EventsOptions eventsOptions) + { + if (!eventsOptions.Enabled) + { + return; + } + + if (!SupportedEventDrivers.Contains(eventsOptions.Driver)) + { + throw new InvalidOperationException($"Unsupported events driver '{eventsOptions.Driver}'. Supported drivers: redis."); + } + + if (string.IsNullOrWhiteSpace(eventsOptions.Dsn)) + { + throw new InvalidOperationException("Events DSN must be configured when event emission is enabled."); + } + + if (string.IsNullOrWhiteSpace(eventsOptions.Stream)) + { + throw new InvalidOperationException("Events stream must be configured when event emission is enabled."); + } + + if (eventsOptions.PublishTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Events publishTimeoutSeconds must be greater than zero."); + } + + if (eventsOptions.MaxStreamLength < 0) + { + throw new InvalidOperationException("Events maxStreamLength must be zero or greater."); + } + } + private static void ValidateTelemetry(ScannerWebServiceOptions.TelemetryOptions telemetry) { if (string.IsNullOrWhiteSpace(telemetry.MinimumLogLevel)) diff --git a/src/StellaOps.Scanner.WebService/Program.cs b/src/StellaOps.Scanner.WebService/Program.cs index 6dd41123..ed993796 100644 --- a/src/StellaOps.Scanner.WebService/Program.cs +++ b/src/StellaOps.Scanner.WebService/Program.cs @@ -15,6 +15,10 @@ using StellaOps.Auth.Client; using StellaOps.Auth.ServerIntegration; using StellaOps.Configuration; using StellaOps.Plugin.DependencyInjection; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Cryptography.Plugin.BouncyCastle; +using StellaOps.Policy; +using StellaOps.Scanner.Cache; using StellaOps.Scanner.WebService.Diagnostics; using StellaOps.Scanner.WebService.Endpoints; using StellaOps.Scanner.WebService.Extensions; @@ -64,17 +68,35 @@ builder.Host.UseSerilog((context, services, loggerConfiguration) => }); builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddScannerCache(builder.Configuration); builder.Services.AddSingleton(); builder.Services.AddHttpContextAccessor(); builder.Services.AddSingleton(); builder.Services.AddSingleton(sp => sp.GetRequiredService()); builder.Services.AddSingleton(sp => sp.GetRequiredService()); builder.Services.AddSingleton(); 
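Editor's note: the validator and post-configure changes above define the contract for the `scanner:events` settings (redis-only driver, DSN and stream required when enabled, positive publish timeout, non-negative max stream length). A minimal sketch of supplying and binding those keys follows; the values are placeholders and the binder call is the stock Microsoft.Extensions.Configuration one, not anything specific to this service.

    using System.Collections.Generic;
    using Microsoft.Extensions.Configuration;

    // Keys mirror ScannerWebServiceOptions.EventsOptions and the SCANNER__EVENTS__*
    // environment variables used by the test factory; the DSN below is a placeholder.
    var configuration = new ConfigurationBuilder()
        .AddInMemoryCollection(new Dictionary<string, string?>
        {
            ["scanner:events:enabled"] = "true",
            ["scanner:events:driver"] = "redis",           // only driver the validator accepts
            ["scanner:events:dsn"] = "localhost:6379",      // required once events are enabled
            ["scanner:events:stream"] = "stella.events",    // defaulted by post-configure when blank
            ["scanner:events:publishTimeoutSeconds"] = "5",
            ["scanner:events:maxStreamLength"] = "10000",
        })
        .Build();

    var events = new ScannerWebServiceOptions.EventsOptions();
    configuration.GetSection("scanner:events").Bind(events);
    // ScannerWebServiceOptionsValidator rejects enabled configurations that lack a DSN or stream,
    // use a non-redis driver, or carry a non-positive publish timeout.

When events are left disabled (as the test factory does via SCANNER__EVENTS__ENABLED=false), validation short-circuits and the no-op publisher is registered instead of the Redis one.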
+builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddStellaOpsCrypto(); +builder.Services.AddBouncyCastleEd25519Provider(); +builder.Services.AddSingleton(); +if (bootstrapOptions.Events is { Enabled: true } eventsOptions + && string.Equals(eventsOptions.Driver, "redis", StringComparison.OrdinalIgnoreCase)) +{ + builder.Services.AddSingleton(); +} +else +{ + builder.Services.AddSingleton(); +} +builder.Services.AddSingleton(); var pluginHostOptions = ScannerPluginHostFactory.Build(bootstrapOptions, contentRoot); builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); -builder.Services.AddEndpointsApiExplorer(); +builder.Services.AddOpenApiIfAvailable(); if (bootstrapOptions.Authority.Enabled) { @@ -241,5 +263,14 @@ if (app.Environment.IsEnvironment("Testing")) .WithName("scanner.auth-probe"); } -apiGroup.MapScanEndpoints(); +apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment); + +if (resolvedOptions.Features.EnablePolicyPreview) +{ + apiGroup.MapPolicyEndpoints(resolvedOptions.Api.PolicySegment); +} + +apiGroup.MapReportEndpoints(resolvedOptions.Api.ReportsSegment); + +app.MapOpenApiIfAvailable(); await app.RunAsync().ConfigureAwait(false); diff --git a/src/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs b/src/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs new file mode 100644 index 00000000..8cd70307 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs @@ -0,0 +1,16 @@ +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Models; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Publishes platform events to the internal bus consumed by downstream services (Notify, UI, etc.). +/// +public interface IPlatformEventPublisher +{ + /// + /// Publishes the supplied event envelope. + /// + Task PublishAsync(NotifyEvent @event, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs b/src/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs new file mode 100644 index 00000000..22d7c7df --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs @@ -0,0 +1,21 @@ +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Coordinates generation and publication of scanner-related platform events. +/// +public interface IReportEventDispatcher +{ + Task PublishAsync( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + HttpContext httpContext, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs b/src/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs new file mode 100644 index 00000000..78ce3e4a --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs @@ -0,0 +1,34 @@ +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Models; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// No-op fallback publisher used until queue adapters register a concrete implementation. 
+/// +internal sealed class NullPlatformEventPublisher : IPlatformEventPublisher +{ + private readonly ILogger _logger; + + public NullPlatformEventPublisher(ILogger logger) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public Task PublishAsync(NotifyEvent @event, CancellationToken cancellationToken = default) + { + if (@event is null) + { + throw new ArgumentNullException(nameof(@event)); + } + + if (_logger.IsEnabled(LogLevel.Debug)) + { + _logger.LogDebug("Suppressing publish for event {EventKind} (tenant {Tenant}).", @event.Kind, @event.Tenant); + } + + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs b/src/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs new file mode 100644 index 00000000..45021f90 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs @@ -0,0 +1,356 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Services; + +internal static class PolicyDtoMapper +{ + private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + + public static PolicyPreviewRequest ToDomain(PolicyPreviewRequestDto request) + { + ArgumentNullException.ThrowIfNull(request); + + var findings = BuildFindings(request.Findings); + var baseline = BuildBaseline(request.Baseline); + var proposedPolicy = ToSnapshotContent(request.Policy); + + return new PolicyPreviewRequest( + request.ImageDigest!.Trim(), + findings, + baseline, + SnapshotOverride: null, + ProposedPolicy: proposedPolicy); + } + + public static PolicyPreviewResponseDto ToDto(PolicyPreviewResponse response) + { + ArgumentNullException.ThrowIfNull(response); + + var diffs = response.Diffs.Select(ToDiffDto).ToImmutableArray(); + var issues = response.Issues.Select(ToIssueDto).ToImmutableArray(); + + return new PolicyPreviewResponseDto + { + Success = response.Success, + PolicyDigest = response.PolicyDigest, + RevisionId = response.RevisionId, + Changed = response.ChangedCount, + Diffs = diffs, + Issues = issues + }; + } + + public static PolicyPreviewIssueDto ToIssueDto(PolicyIssue issue) + { + ArgumentNullException.ThrowIfNull(issue); + + return new PolicyPreviewIssueDto + { + Code = issue.Code, + Message = issue.Message, + Severity = issue.Severity.ToString(), + Path = issue.Path + }; + } + + public static PolicyDocumentFormat ParsePolicyFormat(string? format) + => string.Equals(format, "json", StringComparison.OrdinalIgnoreCase) + ? PolicyDocumentFormat.Json + : PolicyDocumentFormat.Yaml; + + private static ImmutableArray BuildFindings(IReadOnlyList? findings) + { + if (findings is null || findings.Count == 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(findings.Count); + foreach (var finding in findings) + { + if (finding is null) + { + continue; + } + + var tags = finding.Tags is { Count: > 0 } + ? 
finding.Tags.Where(tag => !string.IsNullOrWhiteSpace(tag)) + .Select(tag => tag.Trim()) + .ToImmutableArray() + : ImmutableArray.Empty; + + var severity = ParseSeverity(finding.Severity); + var candidate = PolicyFinding.Create( + finding.Id!.Trim(), + severity, + environment: Normalize(finding.Environment), + source: Normalize(finding.Source), + vendor: Normalize(finding.Vendor), + license: Normalize(finding.License), + image: Normalize(finding.Image), + repository: Normalize(finding.Repository), + package: Normalize(finding.Package), + purl: Normalize(finding.Purl), + cve: Normalize(finding.Cve), + path: Normalize(finding.Path), + layerDigest: Normalize(finding.LayerDigest), + tags: tags); + + builder.Add(candidate); + } + + return builder.ToImmutable(); + } + + private static ImmutableArray BuildBaseline(IReadOnlyList? baseline) + { + if (baseline is null || baseline.Count == 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(baseline.Count); + foreach (var verdict in baseline) + { + if (verdict is null || string.IsNullOrWhiteSpace(verdict.FindingId)) + { + continue; + } + + var inputs = verdict.Inputs is { Count: > 0 } + ? CreateImmutableDeterministicDictionary(verdict.Inputs) + : ImmutableDictionary.Empty; + + var status = ParseVerdictStatus(verdict.Status); + builder.Add(new PolicyVerdict( + verdict.FindingId!.Trim(), + status, + verdict.RuleName, + verdict.RuleAction, + verdict.Notes, + verdict.Score ?? 0, + verdict.ConfigVersion ?? PolicyScoringConfig.Default.Version, + inputs, + verdict.QuietedBy, + verdict.Quiet ?? false, + verdict.UnknownConfidence, + verdict.ConfidenceBand, + verdict.UnknownAgeDays, + verdict.SourceTrust, + verdict.Reachability)); + } + + return builder.ToImmutable(); + } + + private static PolicyPreviewDiffDto ToDiffDto(PolicyVerdictDiff diff) + { + ArgumentNullException.ThrowIfNull(diff); + + return new PolicyPreviewDiffDto + { + FindingId = diff.Projected.FindingId, + Baseline = ToVerdictDto(diff.Baseline), + Projected = ToVerdictDto(diff.Projected), + Changed = diff.Changed + }; + } + + internal static PolicyPreviewVerdictDto ToVerdictDto(PolicyVerdict verdict) + { + ArgumentNullException.ThrowIfNull(verdict); + + IReadOnlyDictionary? 
inputs = null; + var verdictInputs = verdict.GetInputs(); + if (verdictInputs.Count > 0) + { + inputs = CreateDeterministicInputs(verdictInputs); + } + + var sourceTrust = verdict.SourceTrust; + if (string.IsNullOrWhiteSpace(sourceTrust)) + { + sourceTrust = ExtractSuffix(verdictInputs, "trustWeight."); + } + + var reachability = verdict.Reachability; + if (string.IsNullOrWhiteSpace(reachability)) + { + reachability = ExtractSuffix(verdictInputs, "reachability."); + } + + return new PolicyPreviewVerdictDto + { + FindingId = verdict.FindingId, + Status = verdict.Status.ToString(), + RuleName = verdict.RuleName, + RuleAction = verdict.RuleAction, + Notes = verdict.Notes, + Score = verdict.Score, + ConfigVersion = verdict.ConfigVersion, + Inputs = inputs, + QuietedBy = verdict.QuietedBy, + Quiet = verdict.Quiet, + UnknownConfidence = verdict.UnknownConfidence, + ConfidenceBand = verdict.ConfidenceBand, + UnknownAgeDays = verdict.UnknownAgeDays, + SourceTrust = sourceTrust, + Reachability = reachability + }; + } + + private static ImmutableDictionary CreateImmutableDeterministicDictionary(IEnumerable> inputs) + { + var sorted = CreateDeterministicInputs(inputs); + var builder = ImmutableDictionary.CreateBuilder(OrdinalIgnoreCase); + foreach (var pair in sorted) + { + builder[pair.Key] = pair.Value; + } + + return builder.ToImmutable(); + } + + private static IReadOnlyDictionary CreateDeterministicInputs(IEnumerable> inputs) + { + ArgumentNullException.ThrowIfNull(inputs); + + var dictionary = new SortedDictionary(InputKeyComparer.Instance); + foreach (var pair in inputs) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + var key = pair.Key.Trim(); + dictionary[key] = pair.Value; + } + + return dictionary; + } + + private sealed class InputKeyComparer : IComparer + { + public static InputKeyComparer Instance { get; } = new(); + + public int Compare(string? x, string? y) + { + if (ReferenceEquals(x, y)) + { + return 0; + } + + if (x is null) + { + return -1; + } + + if (y is null) + { + return 1; + } + + var px = GetPriority(x); + var py = GetPriority(y); + if (px != py) + { + return px.CompareTo(py); + } + + return string.Compare(x, y, StringComparison.Ordinal); + } + + private static int GetPriority(string key) + { + if (string.Equals(key, "reachabilityWeight", StringComparison.OrdinalIgnoreCase)) + { + return 0; + } + + if (string.Equals(key, "baseScore", StringComparison.OrdinalIgnoreCase)) + { + return 1; + } + + if (string.Equals(key, "severityWeight", StringComparison.OrdinalIgnoreCase)) + { + return 2; + } + + if (string.Equals(key, "trustWeight", StringComparison.OrdinalIgnoreCase)) + { + return 3; + } + + if (key.StartsWith("trustWeight.", StringComparison.OrdinalIgnoreCase)) + { + return 4; + } + + if (key.StartsWith("reachability.", StringComparison.OrdinalIgnoreCase)) + { + return 5; + } + + return 6; + } + } + + private static PolicySnapshotContent? ToSnapshotContent(PolicyPreviewPolicyDto? policy) + { + if (policy is null || string.IsNullOrWhiteSpace(policy.Content)) + { + return null; + } + + var format = ParsePolicyFormat(policy.Format); + return new PolicySnapshotContent( + policy.Content, + format, + policy.Actor, + Source: null, + policy.Description); + } + + private static PolicySeverity ParseSeverity(string? value) + { + if (Enum.TryParse(value, true, out var severity)) + { + return severity; + } + + return PolicySeverity.Unknown; + } + + private static PolicyVerdictStatus ParseVerdictStatus(string? 
value) + { + if (Enum.TryParse(value, true, out var status)) + { + return status; + } + + return PolicyVerdictStatus.Pass; + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + private static string? ExtractSuffix(ImmutableDictionary inputs, string prefix) + { + foreach (var key in inputs.Keys) + { + if (key.StartsWith(prefix, StringComparison.OrdinalIgnoreCase) && key.Length > prefix.Length) + { + return key.Substring(prefix.Length); + } + } + + return null; + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs b/src/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs new file mode 100644 index 00000000..f0bc67c0 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs @@ -0,0 +1,148 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StackExchange.Redis; +using StellaOps.Notify.Models; +using StellaOps.Scanner.WebService.Options; + +namespace StellaOps.Scanner.WebService.Services; + +internal sealed class RedisPlatformEventPublisher : IPlatformEventPublisher, IAsyncDisposable +{ + private readonly ScannerWebServiceOptions.EventsOptions _options; + private readonly ILogger _logger; + private readonly TimeSpan _publishTimeout; + private readonly string _streamKey; + private readonly long? _maxStreamLength; + + private readonly SemaphoreSlim _connectionGate = new(1, 1); + private IConnectionMultiplexer? _connection; + private bool _disposed; + + public RedisPlatformEventPublisher( + IOptions options, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(options); + + _options = options.Value.Events ?? throw new InvalidOperationException("Events options are required when redis publisher is registered."); + if (!_options.Enabled) + { + throw new InvalidOperationException("RedisPlatformEventPublisher requires events emission to be enabled."); + } + + if (!string.Equals(_options.Driver, "redis", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"RedisPlatformEventPublisher cannot be used with driver '{_options.Driver}'."); + } + + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _streamKey = string.IsNullOrWhiteSpace(_options.Stream) ? "stella.events" : _options.Stream; + _publishTimeout = TimeSpan.FromSeconds(_options.PublishTimeoutSeconds <= 0 ? 5 : _options.PublishTimeoutSeconds); + _maxStreamLength = _options.MaxStreamLength > 0 ? _options.MaxStreamLength : null; + } + + public async Task PublishAsync(NotifyEvent @event, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(@event); + cancellationToken.ThrowIfCancellationRequested(); + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + var payload = NotifyCanonicalJsonSerializer.Serialize(@event); + + var entries = new NameValueEntry[] + { + new("event", payload), + new("kind", @event.Kind), + new("tenant", @event.Tenant), + new("ts", @event.Ts.ToString("O")) + }; + + int? maxLength = null; + if (_maxStreamLength.HasValue) + { + var clamped = Math.Min(_maxStreamLength.Value, int.MaxValue); + maxLength = (int)clamped; + } + + var publishTask = maxLength.HasValue + ? 
database.StreamAddAsync(_streamKey, entries, maxLength: maxLength, useApproximateMaxLength: true) + : database.StreamAddAsync(_streamKey, entries); + + if (_publishTimeout > TimeSpan.Zero) + { + await publishTask.WaitAsync(_publishTimeout, cancellationToken).ConfigureAwait(false); + } + else + { + await publishTask.ConfigureAwait(false); + } + } + + private async Task GetDatabaseAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (_connection is not null && _connection.IsConnected) + { + return _connection.GetDatabase(); + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is null || !_connection.IsConnected) + { + var config = ConfigurationOptions.Parse(_options.Dsn); + config.AbortOnConnectFail = false; + + if (_options.DriverSettings.TryGetValue("clientName", out var clientName) && !string.IsNullOrWhiteSpace(clientName)) + { + config.ClientName = clientName; + } + + if (_options.DriverSettings.TryGetValue("ssl", out var sslValue) && bool.TryParse(sslValue, out var ssl)) + { + config.Ssl = ssl; + } + + _connection = await ConnectionMultiplexer.ConnectAsync(config).WaitAsync(cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Connected Redis platform event publisher to stream {Stream}.", _streamKey); + } + } + finally + { + _connectionGate.Release(); + } + + return _connection!.GetDatabase(); + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + if (_connection is not null) + { + try + { + await _connection.CloseAsync(); + } + catch (Exception ex) + { + _logger.LogDebug(ex, "Error while closing Redis platform event publisher connection."); + } + + _connection.Dispose(); + } + + _connectionGate.Dispose(); + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs b/src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs new file mode 100644 index 00000000..868db6c7 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs @@ -0,0 +1,520 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Security.Claims; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; +using StellaOps.Notify.Models; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Services; + +internal sealed class ReportEventDispatcher : IReportEventDispatcher +{ + private const string DefaultTenant = "default"; + private const string Actor = "scanner.webservice"; + + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly IPlatformEventPublisher _publisher; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public ReportEventDispatcher( + IPlatformEventPublisher publisher, + TimeProvider timeProvider, + ILogger logger) + { + _publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task PublishAsync( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + HttpContext httpContext, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(preview); + ArgumentNullException.ThrowIfNull(document); + ArgumentNullException.ThrowIfNull(httpContext); + + cancellationToken.ThrowIfCancellationRequested(); + + var now = _timeProvider.GetUtcNow(); + var tenant = ResolveTenant(httpContext); + var scope = BuildScope(request, document); + var attributes = BuildAttributes(document); + + var reportPayload = BuildReportReadyPayload(request, preview, document, envelope, httpContext); + var reportEvent = NotifyEvent.Create( + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerReportReady, + tenant: tenant, + ts: document.GeneratedAt == default ? now : document.GeneratedAt, + payload: reportPayload, + scope: scope, + actor: Actor, + attributes: attributes); + + await PublishSafelyAsync(reportEvent, document.ReportId, cancellationToken).ConfigureAwait(false); + + var scanPayload = BuildScanCompletedPayload(request, preview, document, envelope); + var scanEvent = NotifyEvent.Create( + eventId: Guid.NewGuid(), + kind: NotifyEventKinds.ScannerScanCompleted, + tenant: tenant, + ts: document.GeneratedAt == default ? now : document.GeneratedAt, + payload: scanPayload, + scope: scope, + actor: Actor, + attributes: attributes); + + await PublishSafelyAsync(scanEvent, document.ReportId, cancellationToken).ConfigureAwait(false); + } + + private async Task PublishSafelyAsync(NotifyEvent @event, string reportId, CancellationToken cancellationToken) + { + try + { + await _publisher.PublishAsync(@event, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to publish event {EventKind} for report {ReportId}.", + @event.Kind, + reportId); + } + } + + private static string ResolveTenant(HttpContext context) + { + var tenant = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant); + if (!string.IsNullOrWhiteSpace(tenant)) + { + return tenant.Trim(); + } + + if (context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerTenant)) + { + var headerValue = headerTenant.ToString(); + if (!string.IsNullOrWhiteSpace(headerValue)) + { + return headerValue.Trim(); + } + } + + return DefaultTenant; + } + + private static NotifyEventScope BuildScope(ReportRequestDto request, ReportDocumentDto document) + { + var repository = ResolveRepository(request); + var (ns, repo) = SplitRepository(repository); + + var digest = string.IsNullOrWhiteSpace(document.ImageDigest) + ? request.ImageDigest ?? string.Empty + : document.ImageDigest; + + return NotifyEventScope.Create( + @namespace: ns, + repo: string.IsNullOrWhiteSpace(repo) ? "(unknown)" : repo, + digest: string.IsNullOrWhiteSpace(digest) ? "(unknown)" : digest); + } + + private static string ResolveRepository(ReportRequestDto request) + { + if (request.Findings is { Count: > 0 }) + { + foreach (var finding in request.Findings) + { + if (!string.IsNullOrWhiteSpace(finding.Repository)) + { + return finding.Repository!.Trim(); + } + + if (!string.IsNullOrWhiteSpace(finding.Image)) + { + return finding.Image!.Trim(); + } + } + } + + return string.Empty; + } + + private static (string? 
Namespace, string Repo) SplitRepository(string repository) + { + if (string.IsNullOrWhiteSpace(repository)) + { + return (null, string.Empty); + } + + var normalized = repository.Trim(); + var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + return (null, normalized); + } + + if (segments.Length == 1) + { + return (null, segments[0]); + } + + var repo = segments[^1]; + var ns = string.Join('/', segments[..^1]); + return (ns, repo); + } + + private static IEnumerable> BuildAttributes(ReportDocumentDto document) + { + var attributes = new List>(capacity: 4) + { + new("reportId", document.ReportId) + }; + + if (!string.IsNullOrWhiteSpace(document.Policy.RevisionId)) + { + attributes.Add(new("policyRevisionId", document.Policy.RevisionId!)); + } + + if (!string.IsNullOrWhiteSpace(document.Policy.Digest)) + { + attributes.Add(new("policyDigest", document.Policy.Digest!)); + } + + attributes.Add(new("verdict", document.Verdict)); + return attributes; + } + + private static JsonObject BuildReportReadyPayload( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + HttpContext context) + { + var payload = new JsonObject + { + ["reportId"] = document.ReportId, + ["generatedAt"] = document.GeneratedAt == default + ? null + : JsonValue.Create(document.GeneratedAt), + ["verdict"] = MapVerdict(document.Verdict), + ["summary"] = JsonSerializer.SerializeToNode(document.Summary, JsonOptions), + ["delta"] = BuildDelta(preview, request), + ["links"] = BuildLinks(context, document), + ["quietedFindingCount"] = document.Summary.Quieted + }; + + payload.RemoveNulls(); + + if (envelope is not null) + { + payload["dsse"] = JsonSerializer.SerializeToNode(envelope, JsonOptions); + } + + payload["report"] = JsonSerializer.SerializeToNode(document, JsonOptions); + return payload; + } + + private static JsonObject BuildScanCompletedPayload( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope) + { + var payload = new JsonObject + { + ["reportId"] = document.ReportId, + ["digest"] = document.ImageDigest, + ["summary"] = JsonSerializer.SerializeToNode(document.Summary, JsonOptions), + ["verdict"] = MapVerdict(document.Verdict), + ["policy"] = JsonSerializer.SerializeToNode(document.Policy, JsonOptions), + ["delta"] = BuildDelta(preview, request), + ["report"] = JsonSerializer.SerializeToNode(document, JsonOptions) + }; + + if (envelope is not null) + { + payload["dsse"] = JsonSerializer.SerializeToNode(envelope, JsonOptions); + } + + payload["findings"] = BuildFindingSummaries(request); + payload.RemoveNulls(); + return payload; + } + + private static JsonArray BuildFindingSummaries(ReportRequestDto request) + { + var array = new JsonArray(); + if (request.Findings is { Count: > 0 }) + { + foreach (var finding in request.Findings) + { + if (string.IsNullOrWhiteSpace(finding.Id)) + { + continue; + } + + var summary = new JsonObject + { + ["id"] = finding.Id, + ["severity"] = finding.Severity, + ["cve"] = finding.Cve, + ["purl"] = finding.Purl, + ["reachability"] = ResolveReachability(finding.Tags) + }; + + summary.RemoveNulls(); + array.Add(summary); + } + } + + return array; + } + + private static string? ResolveReachability(IReadOnlyList? 
tags) + { + if (tags is null) + { + return null; + } + + foreach (var tag in tags) + { + if (string.IsNullOrWhiteSpace(tag)) + { + continue; + } + + if (tag.StartsWith("reachability:", StringComparison.OrdinalIgnoreCase)) + { + return tag["reachability:".Length..]; + } + } + + return null; + } + + private static JsonObject BuildDelta(PolicyPreviewResponse preview, ReportRequestDto request) + { + var delta = new JsonObject(); + if (preview.Diffs.IsDefaultOrEmpty) + { + return delta; + } + + var findings = BuildFindingsIndex(request.Findings); + var kevIds = new SortedSet(StringComparer.OrdinalIgnoreCase); + var newCritical = 0; + var newHigh = 0; + + foreach (var diff in preview.Diffs) + { + var projected = diff.Projected; + if (projected is null || string.IsNullOrWhiteSpace(projected.FindingId)) + { + continue; + } + + if (!findings.TryGetValue(projected.FindingId, out var finding)) + { + finding = null; + } + + if (IsNewlyImportant(diff)) + { + var severity = finding?.Severity; + if (string.Equals(severity, "Critical", StringComparison.OrdinalIgnoreCase)) + { + newCritical++; + } + else if (string.Equals(severity, "High", StringComparison.OrdinalIgnoreCase)) + { + newHigh++; + } + + var kevId = ResolveKevIdentifier(finding); + if (!string.IsNullOrWhiteSpace(kevId)) + { + kevIds.Add(kevId); + } + } + } + + if (newCritical > 0) + { + delta["newCritical"] = newCritical; + } + + if (newHigh > 0) + { + delta["newHigh"] = newHigh; + } + + if (kevIds.Count > 0) + { + var kev = new JsonArray(); + foreach (var id in kevIds) + { + kev.Add(id); + } + + delta["kev"] = kev; + } + + return delta; + } + + private static ImmutableDictionary BuildFindingsIndex( + IReadOnlyList? findings) + { + if (findings is null || findings.Count == 0) + { + return ImmutableDictionary.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var finding in findings) + { + if (string.IsNullOrWhiteSpace(finding.Id)) + { + continue; + } + + if (!builder.ContainsKey(finding.Id)) + { + builder.Add(finding.Id, finding); + } + } + + return builder.ToImmutable(); + } + + private static bool IsNewlyImportant(PolicyVerdictDiff diff) + { + var projected = diff.Projected.Status; + var baseline = diff.Baseline.Status; + + return projected switch + { + PolicyVerdictStatus.Blocked or PolicyVerdictStatus.Escalated + => baseline != PolicyVerdictStatus.Blocked && baseline != PolicyVerdictStatus.Escalated, + PolicyVerdictStatus.Warned or PolicyVerdictStatus.Deferred or PolicyVerdictStatus.RequiresVex + => baseline != PolicyVerdictStatus.Warned + && baseline != PolicyVerdictStatus.Deferred + && baseline != PolicyVerdictStatus.RequiresVex + && baseline != PolicyVerdictStatus.Blocked + && baseline != PolicyVerdictStatus.Escalated, + _ => false + }; + } + + private static string? ResolveKevIdentifier(PolicyPreviewFindingDto? 
finding) + { + if (finding is null) + { + return null; + } + + var tags = finding.Tags; + if (tags is not null) + { + foreach (var tag in tags) + { + if (string.IsNullOrWhiteSpace(tag)) + { + continue; + } + + if (string.Equals(tag, "kev", StringComparison.OrdinalIgnoreCase)) + { + return finding.Cve; + } + + if (tag.StartsWith("kev:", StringComparison.OrdinalIgnoreCase)) + { + var value = tag["kev:".Length..]; + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim(); + } + } + } + } + + return finding.Cve; + } + + private static JsonObject BuildLinks(HttpContext context, ReportDocumentDto document) + { + var links = new JsonObject(); + + if (context.Request.Host.HasValue) + { + var scheme = string.IsNullOrWhiteSpace(context.Request.Scheme) ? "https" : context.Request.Scheme; + var builder = new UriBuilder(scheme, context.Request.Host.Host) + { + Port = context.Request.Host.Port ?? -1, + Path = $"/ui/reports/{Uri.EscapeDataString(document.ReportId)}" + }; + links["ui"] = builder.Uri.ToString(); + } + + return links; + } + + private static string MapVerdict(string verdict) + => verdict.ToLowerInvariant() switch + { + "blocked" or "fail" => "fail", + "escalated" => "fail", + "warn" or "warned" or "deferred" or "requiresvex" => "warn", + _ => "pass" + }; +} + +internal static class ReportEventDispatcherExtensions +{ + public static void RemoveNulls(this JsonObject jsonObject) + { + if (jsonObject is null) + { + return; + } + + var keysToRemove = new List(); + foreach (var pair in jsonObject) + { + if (pair.Value is null || pair.Value.GetValueKind() == JsonValueKind.Null) + { + keysToRemove.Add(pair.Key); + } + } + + foreach (var key in keysToRemove) + { + jsonObject.Remove(key); + } + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/ReportSigner.cs b/src/StellaOps.Scanner.WebService/Services/ReportSigner.cs new file mode 100644 index 00000000..925fd1a6 --- /dev/null +++ b/src/StellaOps.Scanner.WebService/Services/ReportSigner.cs @@ -0,0 +1,263 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Cryptography; +using StellaOps.Scanner.WebService.Options; + +namespace StellaOps.Scanner.WebService.Services; + +public interface IReportSigner : IDisposable +{ + ReportSignature? Sign(ReadOnlySpan payload); +} + +public sealed class ReportSigner : IReportSigner +{ + private enum SigningMode + { + Disabled, + Provider, + Hs256 + } + + private readonly SigningMode mode; + private readonly string keyId = string.Empty; + private readonly string algorithmName = string.Empty; + private readonly ILogger logger; + private readonly ICryptoProviderRegistry cryptoRegistry; + private readonly ICryptoProvider? provider; + private readonly CryptoKeyReference? keyReference; + private readonly CryptoSignerResolution? signerResolution; + private readonly byte[]? hmacKey; + + public ReportSigner( + IOptions options, + ICryptoProviderRegistry cryptoRegistry, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(options); + this.cryptoRegistry = cryptoRegistry ?? throw new ArgumentNullException(nameof(cryptoRegistry)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + var value = options.Value ?? new ScannerWebServiceOptions(); + var features = value.Features ?? new ScannerWebServiceOptions.FeatureFlagOptions(); + var signing = value.Signing ?? 
new ScannerWebServiceOptions.SigningOptions(); + + if (!features.EnableSignedReports || !signing.Enabled) + { + mode = SigningMode.Disabled; + logger.LogInformation("Report signing disabled (feature flag or signing.enabled=false)."); + return; + } + + if (string.IsNullOrWhiteSpace(signing.KeyId)) + { + throw new InvalidOperationException("Signing keyId must be configured when signing is enabled."); + } + + var keyPem = ResolveKeyMaterial(signing); + keyId = signing.KeyId.Trim(); + + var resolvedMode = ResolveSigningMode(signing.Algorithm, out var canonicalAlgorithm, out var joseAlgorithm); + algorithmName = joseAlgorithm; + + switch (resolvedMode) + { + case SigningMode.Provider: + { + provider = ResolveProvider(signing.Provider, canonicalAlgorithm); + + var privateKey = DecodeKey(keyPem); + var reference = new CryptoKeyReference(keyId, provider.Name); + var signingKeyDescriptor = new CryptoSigningKey( + reference, + canonicalAlgorithm, + privateKey, + createdAt: DateTimeOffset.UtcNow); + + provider.UpsertSigningKey(signingKeyDescriptor); + + signerResolution = cryptoRegistry.ResolveSigner( + CryptoCapability.Signing, + canonicalAlgorithm, + reference, + provider.Name); + + keyReference = reference; + mode = SigningMode.Provider; + break; + } + case SigningMode.Hs256: + { + hmacKey = DecodeKey(keyPem); + mode = SigningMode.Hs256; + break; + } + default: + mode = SigningMode.Disabled; + break; + } + } + + public ReportSignature? Sign(ReadOnlySpan payload) + { + if (mode == SigningMode.Disabled) + { + return null; + } + + if (payload.IsEmpty) + { + throw new ArgumentException("Payload must be non-empty.", nameof(payload)); + } + + return mode switch + { + SigningMode.Provider => SignWithProvider(payload), + SigningMode.Hs256 => SignHs256(payload), + _ => null + }; + } + + private ReportSignature SignWithProvider(ReadOnlySpan payload) + { + var resolution = signerResolution ?? throw new InvalidOperationException("Signing provider has not been initialised."); + + var signature = resolution.Signer + .SignAsync(payload.ToArray()) + .ConfigureAwait(false) + .GetAwaiter() + .GetResult(); + + return new ReportSignature(keyId, algorithmName, Convert.ToBase64String(signature)); + } + + private ReportSignature SignHs256(ReadOnlySpan payload) + { + if (hmacKey is null) + { + throw new InvalidOperationException("HMAC signing has not been initialised."); + } + + using var hmac = new HMACSHA256(hmacKey); + var signature = hmac.ComputeHash(payload.ToArray()); + return new ReportSignature(keyId, algorithmName, Convert.ToBase64String(signature)); + } + + public void Dispose() + { + if (provider is not null && keyReference is not null) + { + provider.RemoveSigningKey(keyReference.KeyId); + } + } + + private ICryptoProvider ResolveProvider(string? configuredProvider, string canonicalAlgorithm) + { + if (!string.IsNullOrWhiteSpace(configuredProvider)) + { + if (!cryptoRegistry.TryResolve(configuredProvider.Trim(), out var hinted)) + { + throw new InvalidOperationException($"Configured signing provider '{configuredProvider}' is not registered."); + } + + if (!hinted.Supports(CryptoCapability.Signing, canonicalAlgorithm)) + { + throw new InvalidOperationException($"Provider '{configuredProvider}' does not support algorithm '{canonicalAlgorithm}'."); + } + + return hinted; + } + + return cryptoRegistry.ResolveOrThrow(CryptoCapability.Signing, canonicalAlgorithm); + } + + private static SigningMode ResolveSigningMode(string? 
algorithm, out string canonicalAlgorithm, out string joseAlgorithm) + { + if (string.IsNullOrWhiteSpace(algorithm)) + { + throw new InvalidOperationException("Signing algorithm must be specified when signing is enabled."); + } + + switch (algorithm.Trim().ToLowerInvariant()) + { + case "ed25519": + case "eddsa": + canonicalAlgorithm = SignatureAlgorithms.Ed25519; + joseAlgorithm = SignatureAlgorithms.EdDsa; + return SigningMode.Provider; + case "hs256": + canonicalAlgorithm = "HS256"; + joseAlgorithm = "HS256"; + return SigningMode.Hs256; + default: + throw new InvalidOperationException($"Unsupported signing algorithm '{algorithm}'."); + } + } + + private static string ResolveKeyMaterial(ScannerWebServiceOptions.SigningOptions signing) + { + if (!string.IsNullOrWhiteSpace(signing.KeyPem)) + { + return signing.KeyPem; + } + + if (!string.IsNullOrWhiteSpace(signing.KeyPemFile)) + { + try + { + return File.ReadAllText(signing.KeyPemFile); + } + catch (Exception ex) + { + throw new InvalidOperationException($"Unable to read signing key file '{signing.KeyPemFile}'.", ex); + } + } + + throw new InvalidOperationException("Signing keyPem must be configured when signing is enabled."); + } + + private static byte[] DecodeKey(string keyMaterial) + { + if (string.IsNullOrWhiteSpace(keyMaterial)) + { + throw new InvalidOperationException("Signing key material is empty."); + } + + var segments = keyMaterial.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); + var builder = new StringBuilder(); + var hadPemMarkers = false; + foreach (var segment in segments) + { + var trimmed = segment.Trim(); + if (trimmed.Length == 0) + { + continue; + } + + if (trimmed.StartsWith("-----", StringComparison.Ordinal)) + { + hadPemMarkers = true; + continue; + } + + builder.Append(trimmed); + } + + var base64 = hadPemMarkers ? builder.ToString() : keyMaterial.Trim(); + try + { + return Convert.FromBase64String(base64); + } + catch (FormatException ex) + { + throw new InvalidOperationException("Signing key must be Base64 encoded.", ex); + } + } +} + +public sealed record ReportSignature(string KeyId, string Algorithm, string Signature); diff --git a/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj b/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj index 0737bba6..ec9c1f0d 100644 --- a/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj +++ b/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj @@ -12,6 +12,7 @@ + @@ -20,5 +21,11 @@ + + + + + + diff --git a/src/StellaOps.Scanner.WebService/TASKS.md b/src/StellaOps.Scanner.WebService/TASKS.md index f920168b..86e18955 100644 --- a/src/StellaOps.Scanner.WebService/TASKS.md +++ b/src/StellaOps.Scanner.WebService/TASKS.md @@ -4,12 +4,13 @@ |----|--------|----------|------------|-------------|---------------| | SCANNER-WEB-09-101 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-CORE-09-501 | Stand up minimal API host with Authority OpTok + DPoP enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. | Host boots with configuration validation, `/healthz` and `/readyz` return 200, Authority middleware enforced in integration tests. | | SCANNER-WEB-09-102 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-WEB-09-101, SCANNER-QUEUE-09-401 | Implement `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation tokens. 
| Contract documented, e2e test posts scan request and retrieves status, cancellation token honoured. | -| SCANNER-WEB-09-103 | DOING | Scanner WebService Guild | SCANNER-WEB-09-102, SCANNER-CORE-09-502 | Emit scan progress via SSE/JSONL with correlation IDs and deterministic timestamps; document API reference. | Streaming endpoint verified in tests, timestamps formatted ISO-8601 UTC, docs updated in `docs/09_API_CLI_REFERENCE.md`. | +| SCANNER-WEB-09-103 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-WEB-09-102, SCANNER-CORE-09-502 | Emit scan progress via SSE/JSONL with correlation IDs and deterministic timestamps; document API reference. | Streaming endpoint verified in tests, timestamps formatted ISO-8601 UTC, docs updated in `docs/09_API_CLI_REFERENCE.md`. | | SCANNER-WEB-09-104 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-STORAGE-09-301, SCANNER-QUEUE-09-401 | Bind configuration for Mongo, MinIO, queue, feature flags; add startup diagnostics and fail-fast policy for missing deps. | Misconfiguration fails fast with actionable errors, configuration bound tests pass, diagnostics logged with correlation IDs. | -| SCANNER-POLICY-09-105 | DOING | Scanner WebService Guild | POLICY-CORE-09-001 | Integrate policy schema loader + diagnostics + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | Policy endpoints documented; validation surfaces actionable errors; OpenAPI schema published. | -| SCANNER-POLICY-09-106 | TODO | Scanner WebService Guild | POLICY-CORE-09-002, SCANNER-POLICY-09-105 | `/reports` verdict assembly (Feedser/Vexer/Policy merge) + signed response envelope. | Aggregated report includes policy metadata; integration test verifies signed response; docs updated. | -| SCANNER-POLICY-09-107 | TODO | Scanner WebService Guild | POLICY-CORE-09-005, SCANNER-POLICY-09-106 | Surface score inputs, config version, and `quietedBy` provenance in `/reports` response and signed payload; document schema changes. | `/reports` JSON + DSSE contain score, reachability, sourceTrust, confidenceBand, quiet provenance; contract tests updated; docs refreshed. | +| SCANNER-POLICY-09-105 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-001 | Integrate policy schema loader + diagnostics + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | Policy endpoints documented; validation surfaces actionable errors; OpenAPI schema published. | +| SCANNER-POLICY-09-106 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-002, SCANNER-POLICY-09-105 | `/reports` verdict assembly (Feedser/Vexer/Policy merge) + signed response envelope. | Aggregated report includes policy metadata; integration test verifies signed response; docs updated. | +| SCANNER-POLICY-09-107 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-005, SCANNER-POLICY-09-106 | Surface score inputs, config version, and `quietedBy` provenance in `/reports` response and signed payload; document schema changes. | `/reports` JSON + DSSE contain score, reachability, sourceTrust, confidenceBand, quiet provenance; contract tests updated; docs refreshed. | +| SCANNER-WEB-10-201 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-CACHE-10-101 | Register scanner cache services and maintenance loop within WebService host. | `AddScannerCache` wired for configuration binding; maintenance service skips when disabled; project references updated. 
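For the signed `/reports` envelope work tracked in SCANNER-POLICY-09-106/09-107 above, a minimal consumer-side sketch of the HS256 branch implemented by ReportSigner (HMAC-SHA256 over the payload bytes, Base64-encoded, alongside the configured keyId) is shown here. The key material, payload, and helper names are illustrative assumptions only; production deployments resolve the key via signing.keyPem / keyPemFile and may use the provider-backed Ed25519 path instead.

using System;
using System.Security.Cryptography;
using System.Text;

static class ReportSignatureSketch
{
    // Illustrative check mirroring ReportSigner.SignHs256: HMAC-SHA256 over the payload, Base64-encoded.
    static bool VerifyHs256(byte[] key, byte[] payload, string signatureBase64)
    {
        using var hmac = new HMACSHA256(key);
        var expected = hmac.ComputeHash(payload);
        return CryptographicOperations.FixedTimeEquals(expected, Convert.FromBase64String(signatureBase64));
    }

    static void Main()
    {
        // Hypothetical key and payload for demonstration purposes only.
        var key = Convert.FromBase64String("c2VjcmV0LXNpZ25pbmcta2V5LWZvci1kZW1v");
        var payload = Encoding.UTF8.GetBytes("{\"reportId\":\"report-123\"}");

        using var hmac = new HMACSHA256(key);
        var signature = Convert.ToBase64String(hmac.ComputeHash(payload));

        Console.WriteLine(VerifyHs256(key, payload, signature)); // True
    }
}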
| | SCANNER-RUNTIME-12-301 | TODO | Scanner WebService Guild | ZASTAVA-CORE-12-201 | Implement `/runtime/events` ingestion endpoint with validation, batching, and storage hooks per Zastava contract. | Observer fixtures POST events, data persisted and acked; invalid payloads rejected with deterministic errors. | -| SCANNER-RUNTIME-12-302 | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-301, ZASTAVA-CORE-12-201 | Implement `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. | Webhook integration test passes; responses include verdict, TTL, reasons; metrics/logging added. | -| SCANNER-EVENTS-15-201 | TODO | Scanner WebService Guild | NOTIFY-QUEUE-15-401 | Emit `scanner.report.ready` and `scanner.scan.completed` events (bus adapters + tests). | Event envelopes published to queue with schemas; fixtures committed; Notify consumption test passes. | -| SCANNER-RUNTIME-17-401 | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-301, ZASTAVA-OBS-17-005, SCANNER-EMIT-17-701 | Persist runtime build-id observations and expose them via `/runtime/events` + policy joins for debug-symbol correlation. | Mongo schema stores optional `buildId`, API/SDK responses document field, integration test resolves debug-store path using stored build-id, docs updated accordingly. | +| SCANNER-RUNTIME-12-302 | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-301, ZASTAVA-CORE-12-201 | Implement `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. Coordinate with CLI (`CLI-RUNTIME-13-008`) before GA to lock response field names/metadata. | Webhook integration test passes; responses include verdict, TTL, reasons; metrics/logging added; CLI contract review signed off. | +| SCANNER-EVENTS-15-201 | DOING (2025-10-19) | Scanner WebService Guild | NOTIFY-QUEUE-15-401 | Emit `scanner.report.ready` and `scanner.scan.completed` events (bus adapters + tests). | Event envelopes published to queue with schemas; fixtures committed; Notify consumption test passes. | +| SCANNER-RUNTIME-17-401 | TODO | Scanner WebService Guild | SCANNER-RUNTIME-12-301, ZASTAVA-OBS-17-005, SCANNER-EMIT-17-701, POLICY-RUNTIME-17-201 | Persist runtime build-id observations and expose them via `/runtime/events` + policy joins for debug-symbol correlation. | Mongo schema stores optional `buildId`, API/SDK responses document field, integration test resolves debug-store path using stored build-id, docs updated accordingly. 
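The SCANNER-EVENTS-15-201 row above corresponds to the ReportEventDispatcher added in this change. A minimal sketch of how a downstream consumer might read the `scanner.report.ready` payload it builds follows; only the field names (reportId, verdict mapped to pass/warn/fail, delta, links, quietedFindingCount) come from BuildReportReadyPayload, while the sample envelope values and the standalone program shape are hypothetical.

using System;
using System.Text.Json.Nodes;

static class ReportReadySketch
{
    // Hypothetical sample mirroring BuildReportReadyPayload; values are illustrative only.
    const string SampleEnvelope = """
    {
      "reportId": "report-123",
      "verdict": "warn",
      "quietedFindingCount": 2,
      "delta": { "newHigh": 1, "kev": ["CVE-2025-0001"] },
      "links": { "ui": "https://scanner.example/ui/reports/report-123" }
    }
    """;

    static void Main()
    {
        var payload = JsonNode.Parse(SampleEnvelope)!.AsObject();

        // MapVerdict in the dispatcher collapses policy verdicts to pass/warn/fail.
        var verdict = payload["verdict"]?.GetValue<string>() ?? "pass";
        var newHigh = payload["delta"]?["newHigh"]?.GetValue<int>() ?? 0;

        Console.WriteLine($"report {payload["reportId"]}: verdict={verdict}, newHigh={newHigh}");
    }
}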
| diff --git a/src/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs b/src/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs new file mode 100644 index 00000000..0c34c0a0 --- /dev/null +++ b/src/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs @@ -0,0 +1,121 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Worker.Options; +using StellaOps.Scanner.Worker.Processing; +using Xunit; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class LeaseHeartbeatServiceTests +{ + [Fact] + public async Task RunAsync_RespectsSafetyFactorBudget() + { + var options = new ScannerWorkerOptions + { + MaxConcurrentJobs = 1, + }; + options.Queue.HeartbeatSafetyFactor = 3.0; + options.Queue.MinHeartbeatInterval = TimeSpan.FromSeconds(5); + options.Queue.MaxHeartbeatInterval = TimeSpan.FromSeconds(60); + options.Queue.SetHeartbeatRetryDelays(Array.Empty()); + options.Queue.MaxHeartbeatJitterMilliseconds = 750; + + var optionsMonitor = new StaticOptionsMonitor(options); + using var cts = new CancellationTokenSource(); + var scheduler = new RecordingDelayScheduler(cts); + var lease = new TestJobLease(TimeSpan.FromSeconds(90)); + + var service = new LeaseHeartbeatService(TimeProvider.System, scheduler, optionsMonitor, NullLogger.Instance); + + await service.RunAsync(lease, cts.Token); + + var delay = Assert.Single(scheduler.Delays); + var expectedMax = TimeSpan.FromTicks((long)(lease.LeaseDuration.Ticks / Math.Max(3.0, options.Queue.HeartbeatSafetyFactor))); + Assert.True(delay <= expectedMax, $"Heartbeat delay {delay} should stay within safety factor budget {expectedMax}."); + Assert.True(delay >= options.Queue.MinHeartbeatInterval, $"Heartbeat delay {delay} should respect minimum interval {options.Queue.MinHeartbeatInterval}."); + } + + private sealed class RecordingDelayScheduler : IDelayScheduler + { + private readonly CancellationTokenSource _cts; + + public RecordingDelayScheduler(CancellationTokenSource cts) + { + _cts = cts ?? 
throw new ArgumentNullException(nameof(cts)); + } + + public List Delays { get; } = new(); + + public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) + { + Delays.Add(delay); + _cts.Cancel(); + return Task.CompletedTask; + } + } + + private sealed class TestJobLease : IScanJobLease + { + public TestJobLease(TimeSpan leaseDuration) + { + LeaseDuration = leaseDuration; + EnqueuedAtUtc = DateTimeOffset.UtcNow - leaseDuration; + LeasedAtUtc = DateTimeOffset.UtcNow; + } + + public string JobId { get; } = Guid.NewGuid().ToString("n"); + + public string ScanId { get; } = $"scan-{Guid.NewGuid():n}"; + + public int Attempt { get; } = 1; + + public DateTimeOffset EnqueuedAtUtc { get; } + + public DateTimeOffset LeasedAtUtc { get; } + + public TimeSpan LeaseDuration { get; } + + public IReadOnlyDictionary Metadata { get; } = new Dictionary(); + + public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } + + private sealed class StaticOptionsMonitor : IOptionsMonitor + where TOptions : class + { + private readonly TOptions _value; + + public StaticOptionsMonitor(TOptions value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + public TOptions CurrentValue => _value; + + public TOptions Get(string? name) => _value; + + public IDisposable OnChange(Action listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + + public void Dispose() + { + } + } + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs b/src/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs new file mode 100644 index 00000000..205fe633 --- /dev/null +++ b/src/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs @@ -0,0 +1,245 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Queue; +using StellaOps.Scanner.Worker.Diagnostics; +using StellaOps.Scanner.Worker.Hosting; +using StellaOps.Scanner.Worker.Options; +using StellaOps.Scanner.Worker.Processing; +using StellaOps.Scanner.Worker.Tests.TestInfrastructure; +using Xunit; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class RedisWorkerSmokeTests +{ + [Fact] + public async Task Worker_CompletesJob_ViaRedisQueue() + { + var flag = Environment.GetEnvironmentVariable("STELLAOPS_REDIS_SMOKE"); + if (string.IsNullOrWhiteSpace(flag)) + { + return; + } + + var redisConnection = Environment.GetEnvironmentVariable("STELLAOPS_REDIS_CONNECTION") ?? 
"localhost:6379"; + var streamName = $"scanner:jobs:{Guid.NewGuid():n}"; + var consumerGroup = $"worker-smoke-{Guid.NewGuid():n}"; + var configuration = BuildQueueConfiguration(redisConnection, streamName, consumerGroup); + + var queueOptions = new ScannerQueueOptions(); + configuration.GetSection("scanner:queue").Bind(queueOptions); + + var workerOptions = new ScannerWorkerOptions + { + MaxConcurrentJobs = 1, + }; + workerOptions.Queue.HeartbeatSafetyFactor = 3.0; + workerOptions.Queue.MinHeartbeatInterval = TimeSpan.FromSeconds(2); + workerOptions.Queue.MaxHeartbeatInterval = TimeSpan.FromSeconds(8); + workerOptions.Queue.SetHeartbeatRetryDelays(new[] + { + TimeSpan.FromMilliseconds(200), + TimeSpan.FromMilliseconds(500), + TimeSpan.FromSeconds(1), + }); + + var services = new ServiceCollection(); + services.AddLogging(builder => + { + builder.SetMinimumLevel(LogLevel.Debug); + builder.AddConsole(); + }); + services.AddSingleton(TimeProvider.System); + services.AddScannerQueue(configuration, "scanner:queue"); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(queueOptions); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton>(new StaticOptionsMonitor(workerOptions)); + services.AddSingleton(); + + using var provider = services.BuildServiceProvider(); + var queue = provider.GetRequiredService(); + + var jobId = $"job-{Guid.NewGuid():n}"; + var scanId = $"scan-{Guid.NewGuid():n}"; + await queue.EnqueueAsync(new ScanQueueMessage(jobId, Encoding.UTF8.GetBytes("smoke")) + { + Attributes = new Dictionary(StringComparer.Ordinal) + { + ["scanId"] = scanId, + ["queue"] = "redis", + } + }); + + var hostedService = provider.GetRequiredService(); + using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)); + + await hostedService.StartAsync(cts.Token); + + var smokeObserver = provider.GetRequiredService(); + await smokeObserver.JobCompleted.Task.WaitAsync(TimeSpan.FromSeconds(20)); + + await hostedService.StopAsync(CancellationToken.None); + } + + private static IConfiguration BuildQueueConfiguration(string connection, string stream, string consumerGroup) + { + return new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["scanner:queue:kind"] = "redis", + ["scanner:queue:defaultLeaseDuration"] = "00:00:30", + ["scanner:queue:redis:connectionString"] = connection, + ["scanner:queue:redis:streamName"] = stream, + ["scanner:queue:redis:consumerGroup"] = consumerGroup, + ["scanner:queue:redis:idempotencyKeyPrefix"] = $"{stream}:idemp:", + ["scanner:queue:redis:initializationTimeout"] = "00:00:10", + }) + .Build(); + } + + private sealed class SmokeAnalyzerDispatcher : IScanAnalyzerDispatcher + { + public ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) + { + return ValueTask.CompletedTask; + } + } + + private sealed class QueueBackedScanJobSourceDependencies + { + public QueueBackedScanJobSourceDependencies() + { + JobCompleted = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + } + + public TaskCompletionSource JobCompleted { get; } + } + + private sealed class QueueBackedScanJobSource : IScanJobSource + { + private readonly IScanQueue _queue; + private readonly ScannerQueueOptions _queueOptions; + private readonly QueueBackedScanJobSourceDependencies _deps; + private readonly TimeProvider _timeProvider; + private readonly 
string _consumerName = $"worker-smoke-{Guid.NewGuid():n}"; + + public QueueBackedScanJobSource( + IScanQueue queue, + ScannerQueueOptions queueOptions, + QueueBackedScanJobSourceDependencies deps, + TimeProvider timeProvider) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); + _deps = deps ?? throw new ArgumentNullException(nameof(deps)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public async Task TryAcquireAsync(CancellationToken cancellationToken) + { + var request = new QueueLeaseRequest(_consumerName, 1, _queueOptions.DefaultLeaseDuration); + var leases = await _queue.LeaseAsync(request, cancellationToken).ConfigureAwait(false); + if (leases.Count == 0) + { + return null; + } + + return new QueueBackedScanJobLease( + leases[0], + _queueOptions, + _deps, + _timeProvider.GetUtcNow()); + } + } + + private sealed class QueueBackedScanJobLease : IScanJobLease + { + private readonly IScanQueueLease _lease; + private readonly ScannerQueueOptions _options; + private readonly QueueBackedScanJobSourceDependencies _deps; + private readonly DateTimeOffset _leasedAt; + private readonly IReadOnlyDictionary _metadata; + + public QueueBackedScanJobLease( + IScanQueueLease lease, + ScannerQueueOptions options, + QueueBackedScanJobSourceDependencies deps, + DateTimeOffset leasedAt) + { + _lease = lease ?? throw new ArgumentNullException(nameof(lease)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _deps = deps ?? throw new ArgumentNullException(nameof(deps)); + _leasedAt = leasedAt; + + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["queue"] = _options.Kind.ToString(), + ["queue.consumer"] = lease.Consumer, + }; + + if (!string.IsNullOrWhiteSpace(lease.IdempotencyKey)) + { + metadata["job.idempotency"] = lease.IdempotencyKey; + } + + foreach (var attribute in lease.Attributes) + { + metadata[attribute.Key] = attribute.Value; + } + + _metadata = metadata; + } + + public string JobId => _lease.JobId; + + public string ScanId => _metadata.TryGetValue("scanId", out var scanId) ? 
scanId : _lease.JobId; + + public int Attempt => _lease.Attempt; + + public DateTimeOffset EnqueuedAtUtc => _lease.EnqueuedAt; + + public DateTimeOffset LeasedAtUtc => _leasedAt; + + public TimeSpan LeaseDuration => _lease.LeaseExpiresAt - _leasedAt; + + public IReadOnlyDictionary Metadata => _metadata; + + public async ValueTask RenewAsync(CancellationToken cancellationToken) + { + await _lease.RenewAsync(_options.DefaultLeaseDuration, cancellationToken).ConfigureAwait(false); + } + + public async ValueTask CompleteAsync(CancellationToken cancellationToken) + { + await _lease.AcknowledgeAsync(cancellationToken).ConfigureAwait(false); + _deps.JobCompleted.TrySetResult(); + } + + public async ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) + { + await _lease.ReleaseAsync(QueueReleaseDisposition.Retry, cancellationToken).ConfigureAwait(false); + } + + public async ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) + { + await _lease.DeadLetterAsync(reason, cancellationToken).ConfigureAwait(false); + } + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs b/src/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs new file mode 100644 index 00000000..0f49934d --- /dev/null +++ b/src/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs @@ -0,0 +1,34 @@ +using System; +using System.Linq; +using StellaOps.Scanner.Worker.Options; +using Xunit; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class ScannerWorkerOptionsValidatorTests +{ + [Fact] + public void Validate_Fails_WhenHeartbeatSafetyFactorBelowThree() + { + var options = new ScannerWorkerOptions(); + options.Queue.HeartbeatSafetyFactor = 2.5; + + var validator = new ScannerWorkerOptionsValidator(); + var result = validator.Validate(string.Empty, options); + + Assert.True(result.Failed, "Validation should fail when HeartbeatSafetyFactor < 3."); + Assert.Contains(result.Failures, failure => failure.Contains("HeartbeatSafetyFactor", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public void Validate_Succeeds_WhenHeartbeatSafetyFactorAtLeastThree() + { + var options = new ScannerWorkerOptions(); + options.Queue.HeartbeatSafetyFactor = 3.5; + + var validator = new ScannerWorkerOptionsValidator(); + var result = validator.Validate(string.Empty, options); + + Assert.True(result.Succeeded, "Validation should succeed when HeartbeatSafetyFactor >= 3."); + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj b/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj index 29c4feb2..8e22bec9 100644 --- a/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj +++ b/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj @@ -8,5 +8,6 @@ + diff --git a/src/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs b/src/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs new file mode 100644 index 00000000..fa24dae8 --- /dev/null +++ b/src/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs @@ -0,0 +1,29 @@ +using System; +using Microsoft.Extensions.Options; + +namespace StellaOps.Scanner.Worker.Tests.TestInfrastructure; + +public sealed class StaticOptionsMonitor : IOptionsMonitor + where TOptions : class +{ + private readonly TOptions _value; + + public StaticOptionsMonitor(TOptions value) + { + _value = 
value ?? throw new ArgumentNullException(nameof(value)); + } + + public TOptions CurrentValue => _value; + + public TOptions Get(string? name) => _value; + + public IDisposable OnChange(Action listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + public void Dispose() + { + } + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs b/src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs index c5d5b3fe..5615bacd 100644 --- a/src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs +++ b/src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs @@ -77,9 +77,15 @@ public sealed class WorkerBasicScanScenarioTests await worker.StartAsync(CancellationToken.None); await jobSource.LeaseIssued.Task.WaitAsync(TimeSpan.FromSeconds(5)); + await Task.Yield(); - scheduler.AdvanceBy(TimeSpan.FromSeconds(30)); - scheduler.AdvanceBy(TimeSpan.FromSeconds(30)); + var spin = 0; + while (!lease.Completed.Task.IsCompleted && spin++ < 24) + { + fakeTime.Advance(TimeSpan.FromSeconds(15)); + scheduler.AdvanceBy(TimeSpan.FromSeconds(15)); + await Task.Delay(1); + } try { diff --git a/src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs b/src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs index 58bc5027..2b085dd0 100644 --- a/src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs +++ b/src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs @@ -54,17 +54,19 @@ public static class TelemetryExtensions }); } - if (telemetry.EnableMetrics) - { - openTelemetry.WithMetrics(metrics => - { - metrics - .AddMeter(ScannerWorkerInstrumentation.MeterName) - .AddRuntimeInstrumentation() - .AddProcessInstrumentation(); - - ConfigureExporter(metrics, telemetry); - }); + if (telemetry.EnableMetrics) + { + openTelemetry.WithMetrics(metrics => + { + metrics + .AddMeter( + ScannerWorkerInstrumentation.MeterName, + "StellaOps.Scanner.Analyzers.Lang.Node") + .AddRuntimeInstrumentation() + .AddProcessInstrumentation(); + + ConfigureExporter(metrics, telemetry); + }); } } diff --git a/src/StellaOps.Scanner.Worker/Processing/OsScanAnalyzerDispatcher.cs b/src/StellaOps.Scanner.Worker/Processing/OsScanAnalyzerDispatcher.cs index 07733fed..517c956e 100644 --- a/src/StellaOps.Scanner.Worker/Processing/OsScanAnalyzerDispatcher.cs +++ b/src/StellaOps.Scanner.Worker/Processing/OsScanAnalyzerDispatcher.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.Collections.ObjectModel; using System.IO; using System.Linq; +using System.Collections.Immutable; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; @@ -10,6 +11,7 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using StellaOps.Scanner.Analyzers.OS; using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Mapping; using StellaOps.Scanner.Analyzers.OS.Plugin; using StellaOps.Scanner.Core.Contracts; using StellaOps.Scanner.Worker.Options; @@ -84,10 +86,19 @@ internal sealed class OsScanAnalyzerDispatcher : IScanAnalyzerDispatcher } } - if (results.Count > 0) + if (results.Count == 0) { - var dictionary = results.ToDictionary(result => result.AnalyzerId, StringComparer.OrdinalIgnoreCase); - context.Analysis.Set(ScanAnalysisKeys.OsPackageAnalyzers, dictionary); + return; + } + + var dictionary = results.ToDictionary(result => result.AnalyzerId, StringComparer.OrdinalIgnoreCase); + 
context.Analysis.Set(ScanAnalysisKeys.OsPackageAnalyzers, dictionary); + + var fragments = OsComponentMapper.ToLayerFragments(results); + if (!fragments.IsDefaultOrEmpty) + { + context.Analysis.AppendLayerFragments(fragments); + context.Analysis.Set(ScanAnalysisKeys.OsComponentFragments, fragments); } } diff --git a/src/StellaOps.Scanner.Worker/Program.cs b/src/StellaOps.Scanner.Worker/Program.cs index 267eb3f2..ade9f109 100644 --- a/src/StellaOps.Scanner.Worker/Program.cs +++ b/src/StellaOps.Scanner.Worker/Program.cs @@ -3,8 +3,9 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using Microsoft.Extensions.DependencyInjection.Extensions; -using StellaOps.Auth.Client; +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Auth.Client; +using StellaOps.Scanner.Cache; using StellaOps.Scanner.Analyzers.OS.Plugin; using StellaOps.Scanner.EntryTrace; using StellaOps.Scanner.Worker.Diagnostics; @@ -18,9 +19,10 @@ builder.Services.AddOptions() .BindConfiguration(ScannerWorkerOptions.SectionName) .ValidateOnStart(); -builder.Services.AddSingleton, ScannerWorkerOptionsValidator>(); -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddSingleton(); +builder.Services.AddSingleton, ScannerWorkerOptionsValidator>(); +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddScannerCache(builder.Configuration); +builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); diff --git a/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj b/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj index d4eedf0d..87d2aa51 100644 --- a/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj +++ b/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj @@ -18,5 +18,6 @@ + diff --git a/src/StellaOps.Scanner.Worker/TASKS.md b/src/StellaOps.Scanner.Worker/TASKS.md index 16f3b032..a21d474d 100644 --- a/src/StellaOps.Scanner.Worker/TASKS.md +++ b/src/StellaOps.Scanner.Worker/TASKS.md @@ -7,3 +7,4 @@ | SCANNER-WORKER-09-203 | DONE (2025-10-19) | Scanner Worker Guild | SCANNER-WORKER-09-202, SCANNER-STORAGE-09-301 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. | Deterministic stage list + `ScanProgressReporter`; `WorkerBasicScanScenario` validates ordering and cancellation propagation. | | SCANNER-WORKER-09-204 | DONE (2025-10-19) | Scanner Worker Guild | SCANNER-WORKER-09-203 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. | `ScannerWorkerMetrics` records queue/job/stage metrics; integration test asserts analyzer stage histogram entries. | | SCANNER-WORKER-09-205 | DONE (2025-10-19) | Scanner Worker Guild | SCANNER-WORKER-09-202 | Harden heartbeat jitter so lease safety margin stays ≥3× and cover with regression tests. | `LeaseHeartbeatService` clamps jitter to safety window, validator enforces ≥3 safety factor, regression tests cover heartbeat scheduling and metrics. | +| SCANNER-WORKER-10-201 | DONE (2025-10-19) | Scanner Worker Guild | SCANNER-CACHE-10-101 | Wire scanner cache services and maintenance into worker host. | `AddScannerCache` registered with worker configuration; cache maintenance hosted service runs respecting enabled/auto-evict flags. 
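SCANNER-WORKER-09-205 above and the LeaseHeartbeatServiceTests in this diff pin the lease safety margin at a factor of at least 3. A minimal sketch of that budget calculation, assuming a standalone helper rather than the production LeaseHeartbeatService (jitter clamping omitted), is:

using System;

static class HeartbeatBudgetSketch
{
    // Illustrative only: upper bound = leaseDuration / max(3, safetyFactor), then clamp to [min, max].
    static TimeSpan ComputeDelay(TimeSpan leaseDuration, double safetyFactor, TimeSpan min, TimeSpan max)
    {
        var factor = Math.Max(3.0, safetyFactor);
        var budget = TimeSpan.FromTicks((long)(leaseDuration.Ticks / factor));
        if (budget < min) return min;
        return budget > max ? max : budget;
    }

    static void Main()
    {
        // Mirrors the regression test: a 90s lease with factor 3 yields at most 30s between renewals.
        var delay = ComputeDelay(TimeSpan.FromSeconds(90), 3.0, TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(60));
        Console.WriteLine(delay); // 00:00:30
    }
}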
| diff --git a/src/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs b/src/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs new file mode 100644 index 00000000..a3827491 --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs @@ -0,0 +1,39 @@ +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class AuditRecordTests +{ + [Fact] + public void AuditRecordNormalizesMetadataAndIdentifiers() + { + var actor = new AuditActor(actorId: "user_admin", displayName: "Cluster Admin", kind: "user"); + var metadata = new[] + { + new KeyValuePair("details", "schedule paused"), + new KeyValuePair("Details", "should be overridden"), // duplicate with different casing + new KeyValuePair("reason", "maintenance"), + }; + + var record = new AuditRecord( + id: "audit_001", + tenantId: "tenant-alpha", + category: "scheduler", + action: "pause", + occurredAt: DateTimeOffset.Parse("2025-10-18T05:00:00Z"), + actor: actor, + scheduleId: "sch_001", + runId: null, + correlationId: "corr-123", + metadata: metadata, + message: "Paused via API"); + + Assert.Equal("tenant-alpha", record.TenantId); + Assert.Equal("scheduler", record.Category); + Assert.Equal(2, record.Metadata.Count); + Assert.Equal("schedule paused", record.Metadata["details"]); + Assert.Equal("maintenance", record.Metadata["reason"]); + + var json = CanonicalJsonSerializer.Serialize(record); + Assert.Contains("\"category\":\"scheduler\"", json, StringComparison.Ordinal); + Assert.Contains("\"metadata\":{\"details\":\"schedule paused\",\"reason\":\"maintenance\"}", json, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs b/src/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs new file mode 100644 index 00000000..2543f76b --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs @@ -0,0 +1,55 @@ +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class ImpactSetTests +{ + [Fact] + public void ImpactSetSortsImagesByDigest() + { + var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); + var images = new[] + { + new ImpactImage( + imageDigest: "sha256:bbbb", + registry: "registry.internal", + repository: "app/api", + namespaces: new[] { "team-a" }, + tags: new[] { "prod", "latest" }, + usedByEntrypoint: true, + labels: new Dictionary + { + ["env"] = "prod", + }), + new ImpactImage( + imageDigest: "sha256:aaaa", + registry: "registry.internal", + repository: "app/api", + namespaces: new[] { "team-a" }, + tags: new[] { "prod" }, + usedByEntrypoint: false), + }; + + var impactSet = new ImpactSet( + selector, + images, + usageOnly: true, + generatedAt: DateTimeOffset.Parse("2025-10-18T05:04:03Z"), + total: 2, + snapshotId: "snap-001"); + + Assert.Equal(SchedulerSchemaVersions.ImpactSet, impactSet.SchemaVersion); + Assert.Equal(new[] { "sha256:aaaa", "sha256:bbbb" }, impactSet.Images.Select(i => i.ImageDigest)); + Assert.True(impactSet.UsageOnly); + Assert.Equal(2, impactSet.Total); + + var json = CanonicalJsonSerializer.Serialize(impactSet); + Assert.Contains("\"snapshotId\":\"snap-001\"", json, StringComparison.Ordinal); + } + + [Fact] + public void ImpactImageRejectsInvalidDigest() + { + Assert.Throws(() => new ImpactImage("sha1:not-supported", "registry", "repo")); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs b/src/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs new file mode 100644 index 00000000..6a100154 --- /dev/null +++ 
b/src/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs @@ -0,0 +1,59 @@ +using System; +using System.IO; +using System.Text.Json; +using System.Text.Json.Nodes; +using StellaOps.Notify.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class RescanDeltaEventSampleTests +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + [Fact] + public void RescanDeltaEventSampleAlignsWithContracts() + { + const string fileName = "scheduler.rescan.delta@1.sample.json"; + var json = LoadSample(fileName); + var notifyEvent = JsonSerializer.Deserialize(json, SerializerOptions); + + Assert.NotNull(notifyEvent); + Assert.Equal(NotifyEventKinds.SchedulerRescanDelta, notifyEvent!.Kind); + Assert.NotEqual(Guid.Empty, notifyEvent.EventId); + Assert.NotNull(notifyEvent.Payload); + Assert.Null(notifyEvent.Scope); + + var payload = Assert.IsType(notifyEvent.Payload); + var scheduleId = Assert.IsAssignableFrom(payload["scheduleId"]).GetValue(); + Assert.Equal("rescan-weekly-critical", scheduleId); + + var digests = Assert.IsType(payload["impactedDigests"]); + Assert.Equal(2, digests.Count); + foreach (var digestNode in digests) + { + var digest = Assert.IsAssignableFrom(digestNode).GetValue(); + Assert.StartsWith("sha256:", digest, StringComparison.Ordinal); + } + + var summary = Assert.IsType(payload["summary"]); + Assert.Equal(0, summary["newCritical"]!.GetValue()); + Assert.Equal(1, summary["newHigh"]!.GetValue()); + Assert.Equal(4, summary["total"]!.GetValue()); + + var canonicalJson = NotifyCanonicalJsonSerializer.Serialize(notifyEvent); + var canonicalNode = JsonNode.Parse(canonicalJson) ?? throw new InvalidOperationException("Canonical JSON null."); + var sampleNode = JsonNode.Parse(json) ?? 
throw new InvalidOperationException("Sample JSON null."); + Assert.True(JsonNode.DeepEquals(sampleNode, canonicalNode), "Rescan delta event sample must remain canonical."); + } + + private static string LoadSample(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, fileName); + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Unable to locate sample '{fileName}'.", path); + } + + return File.ReadAllText(path); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs b/src/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs new file mode 100644 index 00000000..3faf0641 --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs @@ -0,0 +1,108 @@ +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class RunStateMachineTests +{ + [Fact] + public void EnsureTransition_FromQueuedToRunningSetsStartedAt() + { + var run = new Run( + id: "run-queued", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Queued, + stats: RunStats.Empty, + createdAt: DateTimeOffset.Parse("2025-10-18T03:00:00Z")); + + var transitionTime = DateTimeOffset.Parse("2025-10-18T03:05:00Z"); + + var updated = RunStateMachine.EnsureTransition( + run, + RunState.Running, + transitionTime, + mutateStats: builder => builder.SetQueued(1)); + + Assert.Equal(RunState.Running, updated.State); + Assert.Equal(transitionTime.ToUniversalTime(), updated.StartedAt); + Assert.Equal(1, updated.Stats.Queued); + Assert.Null(updated.Error); + } + + [Fact] + public void EnsureTransition_ToCompletedPopulatesFinishedAt() + { + var run = new Run( + id: "run-running", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Running, + stats: RunStats.Empty, + createdAt: DateTimeOffset.Parse("2025-10-18T03:00:00Z"), + startedAt: DateTimeOffset.Parse("2025-10-18T03:05:00Z")); + + var completedAt = DateTimeOffset.Parse("2025-10-18T03:10:00Z"); + + var updated = RunStateMachine.EnsureTransition( + run, + RunState.Completed, + completedAt, + mutateStats: builder => + { + builder.SetQueued(1); + builder.SetCompleted(1); + }); + + Assert.Equal(RunState.Completed, updated.State); + Assert.Equal(completedAt.ToUniversalTime(), updated.FinishedAt); + Assert.Equal(1, updated.Stats.Completed); + } + + [Fact] + public void EnsureTransition_ErrorRequiresMessage() + { + var run = new Run( + id: "run-running", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Running, + stats: RunStats.Empty, + createdAt: DateTimeOffset.Parse("2025-10-18T03:00:00Z"), + startedAt: DateTimeOffset.Parse("2025-10-18T03:05:00Z")); + + var timestamp = DateTimeOffset.Parse("2025-10-18T03:06:00Z"); + + var ex = Assert.Throws( + () => RunStateMachine.EnsureTransition(run, RunState.Error, timestamp)); + + Assert.Contains("requires a non-empty error message", ex.Message, StringComparison.Ordinal); + } + + [Fact] + public void Validate_ThrowsWhenTerminalWithoutFinishedAt() + { + var run = new Run( + id: "run-bad", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Completed, + stats: RunStats.Empty, + createdAt: DateTimeOffset.Parse("2025-10-18T03:00:00Z"), + startedAt: DateTimeOffset.Parse("2025-10-18T03:05:00Z")); + + Assert.Throws(() => RunStateMachine.Validate(run)); + } + + [Fact] + public void RunReasonExtension_NormalizesImpactWindow() + { + var reason = new RunReason(manualReason: "delta"); + var from = DateTimeOffset.Parse("2025-10-18T01:00:00+02:00"); + var to = 
DateTimeOffset.Parse("2025-10-18T03:30:00+02:00"); + + var updated = reason.WithImpactWindow(from, to); + + Assert.Equal(from.ToUniversalTime().ToString("O"), updated.ImpactWindowFrom); + Assert.Equal(to.ToUniversalTime().ToString("O"), updated.ImpactWindowTo); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs b/src/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs new file mode 100644 index 00000000..2198ae67 --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs @@ -0,0 +1,78 @@ +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class RunValidationTests +{ + [Fact] + public void RunStatsRejectsNegativeValues() + { + Assert.Throws(() => new RunStats(candidates: -1)); + Assert.Throws(() => new RunStats(deduped: -1)); + Assert.Throws(() => new RunStats(queued: -1)); + Assert.Throws(() => new RunStats(completed: -1)); + Assert.Throws(() => new RunStats(deltas: -1)); + Assert.Throws(() => new RunStats(newCriticals: -1)); + Assert.Throws(() => new RunStats(newHigh: -1)); + Assert.Throws(() => new RunStats(newMedium: -1)); + Assert.Throws(() => new RunStats(newLow: -1)); + } + + [Fact] + public void DeltaSummarySortsTopFindingsBySeverityThenId() + { + var summary = new DeltaSummary( + imageDigest: "sha256:0011", + newFindings: 3, + newCriticals: 1, + newHigh: 1, + newMedium: 1, + newLow: 0, + kevHits: new[] { "CVE-2025-0002", "CVE-2025-0001" }, + topFindings: new[] + { + new DeltaFinding("pkg:maven/b", "CVE-2025-0002", SeverityRank.High), + new DeltaFinding("pkg:maven/a", "CVE-2024-0001", SeverityRank.Critical), + new DeltaFinding("pkg:maven/c", "CVE-2025-0008", SeverityRank.Medium), + }, + reportUrl: "https://ui.example/reports/sha256:0011", + attestation: new DeltaAttestation(uuid: "rekor-1", verified: true), + detectedAt: DateTimeOffset.Parse("2025-10-18T00:01:02Z")); + + Assert.Equal(new[] { "pkg:maven/a", "pkg:maven/b", "pkg:maven/c" }, summary.TopFindings.Select(f => f.Purl)); + Assert.Equal(new[] { "CVE-2025-0001", "CVE-2025-0002" }, summary.KevHits); + } + + [Fact] + public void RunSerializationIncludesDeterministicOrdering() + { + var stats = new RunStats(candidates: 10, deduped: 8, queued: 8, completed: 5, deltas: 3, newCriticals: 2); + var run = new Run( + id: "run_001", + tenantId: "tenant-alpha", + trigger: RunTrigger.Feedser, + state: RunState.Running, + stats: stats, + reason: new RunReason(feedserExportId: "exp-123"), + scheduleId: "sch_001", + createdAt: DateTimeOffset.Parse("2025-10-18T01:00:00Z"), + startedAt: DateTimeOffset.Parse("2025-10-18T01:00:05Z"), + finishedAt: null, + error: null, + deltas: new[] + { + new DeltaSummary( + imageDigest: "sha256:aaa", + newFindings: 1, + newCriticals: 1, + newHigh: 0, + newMedium: 0, + newLow: 0) + }); + + var json = CanonicalJsonSerializer.Serialize(run); + Assert.Equal(SchedulerSchemaVersions.Run, run.SchemaVersion); + Assert.Contains("\"trigger\":\"feedser\"", json, StringComparison.Ordinal); + Assert.Contains("\"stats\":{\"candidates\":10,\"deduped\":8,\"queued\":8,\"completed\":5,\"deltas\":3,\"newCriticals\":2,\"newHigh\":0,\"newMedium\":0,\"newLow\":0}", json, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs b/src/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs new file mode 100644 index 00000000..8516103e --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs @@ -0,0 +1,105 @@ +using System.Text.Json; + +namespace 
StellaOps.Scheduler.Models.Tests; + +public sealed class SamplePayloadTests +{ + private static readonly string SamplesRoot = LocateSamplesRoot(); + + [Fact] + public void ScheduleSample_RoundtripsThroughCanonicalSerializer() + { + var json = ReadSample("schedule.json"); + var schedule = CanonicalJsonSerializer.Deserialize(json); + + Assert.Equal("sch_20251018a", schedule.Id); + Assert.Equal("tenant-alpha", schedule.TenantId); + + var canonical = CanonicalJsonSerializer.Serialize(schedule); + AssertJsonEquivalent(json, canonical); + } + + [Fact] + public void RunSample_RoundtripsThroughCanonicalSerializer() + { + var json = ReadSample("run.json"); + var run = CanonicalJsonSerializer.Deserialize(json); + + Assert.Equal(RunState.Running, run.State); + Assert.Equal(42, run.Stats.Deltas); + + var canonical = CanonicalJsonSerializer.Serialize(run); + AssertJsonEquivalent(json, canonical); + } + + [Fact] + public void ImpactSetSample_RoundtripsThroughCanonicalSerializer() + { + var json = ReadSample("impact-set.json"); + var impact = CanonicalJsonSerializer.Deserialize(json); + + Assert.True(impact.UsageOnly); + Assert.Single(impact.Images); + + var canonical = CanonicalJsonSerializer.Serialize(impact); + AssertJsonEquivalent(json, canonical); + } + + [Fact] + public void AuditSample_RoundtripsThroughCanonicalSerializer() + { + var json = ReadSample("audit.json"); + var audit = CanonicalJsonSerializer.Deserialize(json); + + Assert.Equal("scheduler", audit.Category); + Assert.Equal("pause", audit.Action); + + var canonical = CanonicalJsonSerializer.Serialize(audit); + AssertJsonEquivalent(json, canonical); + } + + private static string ReadSample(string fileName) + { + var path = Path.Combine(SamplesRoot, fileName); + return File.ReadAllText(path); + } + + private static string LocateSamplesRoot() + { + var current = AppContext.BaseDirectory; + while (!string.IsNullOrEmpty(current)) + { + var candidate = Path.Combine(current, "samples", "api", "scheduler"); + if (Directory.Exists(candidate)) + { + return candidate; + } + + var parent = Path.GetDirectoryName(current.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)); + if (string.Equals(parent, current, StringComparison.Ordinal)) + { + break; + } + + current = parent; + } + + throw new DirectoryNotFoundException("Unable to locate samples/api/scheduler in repository tree."); + } + + private static void AssertJsonEquivalent(string expected, string actual) + { + var normalizedExpected = NormalizeJson(expected); + var normalizedActual = NormalizeJson(actual); + Assert.Equal(normalizedExpected, normalizedActual); + } + + private static string NormalizeJson(string json) + { + using var document = JsonDocument.Parse(json); + return JsonSerializer.Serialize(document.RootElement, new JsonSerializerOptions + { + WriteIndented = false + }); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs b/src/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs new file mode 100644 index 00000000..b089a33c --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs @@ -0,0 +1,113 @@ +using System.Text.Json; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class ScheduleSerializationTests +{ + [Fact] + public void ScheduleSerialization_IsDeterministicRegardlessOfInputOrdering() + { + var selectionA = new Selector( + SelectorScope.ByNamespace, + tenantId: "tenant-alpha", + namespaces: new[] { "team-b", "team-a" }, + repositories: 
new[] { "app/service-api", "app/service-web" }, + digests: new[] { "sha256:bb", "sha256:aa" }, + includeTags: new[] { "prod", "canary" }, + labels: new[] + { + new LabelSelector("env", new[] { "prod", "staging" }), + new LabelSelector("app", new[] { "web", "api" }), + }, + resolvesTags: true); + + var selectionB = new Selector( + scope: SelectorScope.ByNamespace, + tenantId: "tenant-alpha", + namespaces: new[] { "team-a", "team-b" }, + repositories: new[] { "app/service-web", "app/service-api" }, + digests: new[] { "sha256:aa", "sha256:bb" }, + includeTags: new[] { "canary", "prod" }, + labels: new[] + { + new LabelSelector("app", new[] { "api", "web" }), + new LabelSelector("env", new[] { "staging", "prod" }), + }, + resolvesTags: true); + + var scheduleA = new Schedule( + id: "sch_001", + tenantId: "tenant-alpha", + name: "Nightly Prod", + enabled: true, + cronExpression: "0 2 * * *", + timezone: "UTC", + mode: ScheduleMode.AnalysisOnly, + selection: selectionA, + onlyIf: new ScheduleOnlyIf(lastReportOlderThanDays: 7, policyRevision: "policy@42"), + notify: new ScheduleNotify(onNewFindings: true, SeverityRank.High, includeKev: true), + limits: new ScheduleLimits(maxJobs: 1000, ratePerSecond: 25, parallelism: 4), + createdAt: DateTimeOffset.Parse("2025-10-18T23:00:00Z"), + createdBy: "svc_scheduler", + updatedAt: DateTimeOffset.Parse("2025-10-18T23:00:00Z"), + updatedBy: "svc_scheduler"); + + var scheduleB = new Schedule( + id: scheduleA.Id, + tenantId: scheduleA.TenantId, + name: scheduleA.Name, + enabled: scheduleA.Enabled, + cronExpression: scheduleA.CronExpression, + timezone: scheduleA.Timezone, + mode: scheduleA.Mode, + selection: selectionB, + onlyIf: scheduleA.OnlyIf, + notify: scheduleA.Notify, + limits: scheduleA.Limits, + createdAt: scheduleA.CreatedAt, + createdBy: scheduleA.CreatedBy, + updatedAt: scheduleA.UpdatedAt, + updatedBy: scheduleA.UpdatedBy, + subscribers: scheduleA.Subscribers); + + var jsonA = CanonicalJsonSerializer.Serialize(scheduleA); + var jsonB = CanonicalJsonSerializer.Serialize(scheduleB); + + Assert.Equal(jsonA, jsonB); + + using var doc = JsonDocument.Parse(jsonA); + var root = doc.RootElement; + Assert.Equal(SchedulerSchemaVersions.Schedule, root.GetProperty("schemaVersion").GetString()); + Assert.Equal("analysis-only", root.GetProperty("mode").GetString()); + Assert.Equal("tenant-alpha", root.GetProperty("tenantId").GetString()); + + var namespaces = root.GetProperty("selection").GetProperty("namespaces").EnumerateArray().Select(e => e.GetString()).ToArray(); + Assert.Equal(new[] { "team-a", "team-b" }, namespaces); + } + + [Theory] + [InlineData("")] + [InlineData("not-a-timezone")] + public void Schedule_ThrowsWhenTimezoneInvalid(string timezone) + { + var selection = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); + + Assert.ThrowsAny(() => new Schedule( + id: "sch_002", + tenantId: "tenant-alpha", + name: "Invalid timezone", + enabled: true, + cronExpression: "0 3 * * *", + timezone: timezone, + mode: ScheduleMode.AnalysisOnly, + selection: selection, + onlyIf: null, + notify: null, + limits: null, + createdAt: DateTimeOffset.UtcNow, + createdBy: "svc", + updatedAt: DateTimeOffset.UtcNow, + updatedBy: "svc")); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs b/src/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs new file mode 100644 index 00000000..73944f1b --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs @@ -0,0 +1,72 @@ +using 
System.Text.Json.Nodes; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class SchedulerSchemaMigrationTests +{ + [Fact] + public void UpgradeSchedule_DefaultsSchemaVersionWhenMissing() + { + var schedule = new Schedule( + id: "sch-01", + tenantId: "tenant-alpha", + name: "Nightly", + enabled: true, + cronExpression: "0 2 * * *", + timezone: "UTC", + mode: ScheduleMode.AnalysisOnly, + selection: new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"), + onlyIf: null, + notify: null, + limits: null, + createdAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z"), + createdBy: "svc-scheduler", + updatedAt: DateTimeOffset.Parse("2025-10-18T00:00:00Z"), + updatedBy: "svc-scheduler"); + + var json = JsonNode.Parse(CanonicalJsonSerializer.Serialize(schedule))!.AsObject(); + json.Remove("schemaVersion"); + + var result = SchedulerSchemaMigration.UpgradeSchedule(json); + + Assert.Equal(SchedulerSchemaVersions.Schedule, result.Value.SchemaVersion); + Assert.Equal(SchedulerSchemaVersions.Schedule, result.ToVersion); + Assert.Empty(result.Warnings); + } + + [Fact] + public void UpgradeRun_StrictModeRemovesUnknownProperties() + { + var run = new Run( + id: "run-01", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Queued, + stats: RunStats.Empty, + createdAt: DateTimeOffset.Parse("2025-10-18T01:10:00Z")); + + var json = JsonNode.Parse(CanonicalJsonSerializer.Serialize(run))!.AsObject(); + json["extraField"] = "to-be-removed"; + + var result = SchedulerSchemaMigration.UpgradeRun(json, strict: true); + + Assert.Contains(result.Warnings, warning => warning.Contains("extraField", StringComparison.Ordinal)); + } + + [Fact] + public void UpgradeImpactSet_ThrowsForUnsupportedVersion() + { + var impactSet = new ImpactSet( + selector: new Selector(SelectorScope.AllImages, "tenant-alpha"), + images: Array.Empty(), + usageOnly: false, + generatedAt: DateTimeOffset.Parse("2025-10-18T02:00:00Z")); + + var json = JsonNode.Parse(CanonicalJsonSerializer.Serialize(impactSet))!.AsObject(); + json["schemaVersion"] = "scheduler.impact-set@99"; + + var ex = Assert.Throws(() => SchedulerSchemaMigration.UpgradeImpactSet(json)); + Assert.Contains("Unsupported scheduler schema version", ex.Message, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj b/src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj new file mode 100644 index 00000000..78a3a6b2 --- /dev/null +++ b/src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj @@ -0,0 +1,18 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + Always + + + diff --git a/src/StellaOps.Scheduler.Models/AuditRecord.cs b/src/StellaOps.Scheduler.Models/AuditRecord.cs new file mode 100644 index 00000000..e46cfdb5 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/AuditRecord.cs @@ -0,0 +1,120 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Audit log entry capturing schedule/run lifecycle events. +/// +public sealed record AuditRecord +{ + public AuditRecord( + string id, + string tenantId, + string category, + string action, + DateTimeOffset occurredAt, + AuditActor actor, + string? entityId = null, + string? scheduleId = null, + string? runId = null, + string? correlationId = null, + IEnumerable>? metadata = null, + string? 
message = null) + : this( + id, + tenantId, + Validation.EnsureSimpleIdentifier(category, nameof(category)), + Validation.EnsureSimpleIdentifier(action, nameof(action)), + Validation.NormalizeTimestamp(occurredAt), + actor, + Validation.TrimToNull(entityId), + Validation.TrimToNull(scheduleId), + Validation.TrimToNull(runId), + Validation.TrimToNull(correlationId), + Validation.NormalizeMetadata(metadata), + Validation.TrimToNull(message)) + { + } + + [JsonConstructor] + public AuditRecord( + string id, + string tenantId, + string category, + string action, + DateTimeOffset occurredAt, + AuditActor actor, + string? entityId, + string? scheduleId, + string? runId, + string? correlationId, + ImmutableSortedDictionary metadata, + string? message) + { + Id = Validation.EnsureId(id, nameof(id)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + Category = Validation.EnsureSimpleIdentifier(category, nameof(category)); + Action = Validation.EnsureSimpleIdentifier(action, nameof(action)); + OccurredAt = Validation.NormalizeTimestamp(occurredAt); + Actor = actor ?? throw new ArgumentNullException(nameof(actor)); + EntityId = Validation.TrimToNull(entityId); + ScheduleId = Validation.TrimToNull(scheduleId); + RunId = Validation.TrimToNull(runId); + CorrelationId = Validation.TrimToNull(correlationId); + var materializedMetadata = metadata ?? ImmutableSortedDictionary.Empty; + Metadata = materializedMetadata.Count > 0 + ? materializedMetadata.WithComparers(StringComparer.Ordinal) + : ImmutableSortedDictionary.Empty; + Message = Validation.TrimToNull(message); + } + + public string Id { get; } + + public string TenantId { get; } + + public string Category { get; } + + public string Action { get; } + + public DateTimeOffset OccurredAt { get; } + + public AuditActor Actor { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? EntityId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ScheduleId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RunId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary Metadata { get; } = ImmutableSortedDictionary.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Message { get; } +} + +/// +/// Actor associated with an audit entry. +/// +public sealed record AuditActor +{ + public AuditActor(string actorId, string displayName, string kind) + { + ActorId = Validation.EnsureSimpleIdentifier(actorId, nameof(actorId)); + DisplayName = Validation.EnsureName(displayName, nameof(displayName)); + Kind = Validation.EnsureSimpleIdentifier(kind, nameof(kind)); + } + + public string ActorId { get; } + + public string DisplayName { get; } + + public string Kind { get; } +} diff --git a/src/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs b/src/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs new file mode 100644 index 00000000..5a7e40e3 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs @@ -0,0 +1,253 @@ +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; + +namespace StellaOps.Scheduler.Models; + +/// +/// Deterministic serializer for scheduler DTOs. 
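+/// A minimal usage sketch (illustrative only; assumes a populated <c>Schedule</c> instance named <c>schedule</c>):
+/// <code>
+/// var json = CanonicalJsonSerializer.Serialize(schedule);            // compact, property order pinned by PropertyOrder
+/// var pretty = CanonicalJsonSerializer.SerializeIndented(schedule);  // same ordering, indented for humans
+/// var copy = CanonicalJsonSerializer.Deserialize&lt;Schedule&gt;(json);
+/// </code>
+/// Two serializations of the same instance yield identical bytes, which the round-trip and
+/// determinism tests above depend on.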
+/// +public static class CanonicalJsonSerializer +{ + private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false); + private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true); + + private static readonly IReadOnlyDictionary PropertyOrder = new Dictionary + { + [typeof(Schedule)] = new[] + { + "schemaVersion", + "id", + "tenantId", + "name", + "enabled", + "cronExpression", + "timezone", + "mode", + "selection", + "onlyIf", + "notify", + "limits", + "subscribers", + "createdAt", + "createdBy", + "updatedAt", + "updatedBy", + }, + [typeof(Selector)] = new[] + { + "scope", + "tenantId", + "namespaces", + "repositories", + "digests", + "includeTags", + "labels", + "resolvesTags", + }, + [typeof(LabelSelector)] = new[] + { + "key", + "values", + }, + [typeof(ScheduleOnlyIf)] = new[] + { + "lastReportOlderThanDays", + "policyRevision", + }, + [typeof(ScheduleNotify)] = new[] + { + "onNewFindings", + "minSeverity", + "includeKev", + "includeQuietFindings", + }, + [typeof(ScheduleLimits)] = new[] + { + "maxJobs", + "ratePerSecond", + "parallelism", + "burst", + }, + [typeof(Run)] = new[] + { + "schemaVersion", + "id", + "tenantId", + "scheduleId", + "trigger", + "state", + "stats", + "reason", + "createdAt", + "startedAt", + "finishedAt", + "error", + "deltas", + }, + [typeof(RunStats)] = new[] + { + "candidates", + "deduped", + "queued", + "completed", + "deltas", + "newCriticals", + "newHigh", + "newMedium", + "newLow", + }, + [typeof(RunReason)] = new[] + { + "manualReason", + "feedserExportId", + "vexerExportId", + "cursor", + "impactWindowFrom", + "impactWindowTo", + }, + [typeof(DeltaSummary)] = new[] + { + "imageDigest", + "newFindings", + "newCriticals", + "newHigh", + "newMedium", + "newLow", + "kevHits", + "topFindings", + "reportUrl", + "attestation", + "detectedAt", + }, + [typeof(DeltaFinding)] = new[] + { + "purl", + "vulnerabilityId", + "severity", + "link", + }, + [typeof(ImpactSet)] = new[] + { + "schemaVersion", + "selector", + "images", + "usageOnly", + "generatedAt", + "total", + "snapshotId", + }, + [typeof(ImpactImage)] = new[] + { + "imageDigest", + "registry", + "repository", + "namespaces", + "tags", + "usedByEntrypoint", + "labels", + }, + [typeof(AuditRecord)] = new[] + { + "id", + "tenantId", + "category", + "action", + "occurredAt", + "actor", + "entityId", + "scheduleId", + "runId", + "correlationId", + "metadata", + "message", + }, + [typeof(AuditActor)] = new[] + { + "actorId", + "displayName", + "kind", + }, + }; + + public static string Serialize(T value) + => JsonSerializer.Serialize(value, CompactOptions); + + public static string SerializeIndented(T value) + => JsonSerializer.Serialize(value, PrettyOptions); + + public static T Deserialize(string json) + => JsonSerializer.Deserialize(json, PrettyOptions) + ?? throw new InvalidOperationException($"Unable to deserialize {typeof(T).Name}."); + + private static JsonSerializerOptions CreateOptions(bool writeIndented) + { + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DictionaryKeyPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = writeIndented, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + }; + + var resolver = options.TypeInfoResolver ?? 
new DefaultJsonTypeInfoResolver();
+        options.TypeInfoResolver = new DeterministicResolver(resolver);
+        options.Converters.Add(new ScheduleModeConverter());
+        options.Converters.Add(new SelectorScopeConverter());
+        options.Converters.Add(new RunTriggerConverter());
+        options.Converters.Add(new RunStateConverter());
+        options.Converters.Add(new SeverityRankConverter());
+        return options;
+    }
+
+    private sealed class DeterministicResolver : IJsonTypeInfoResolver
+    {
+        private readonly IJsonTypeInfoResolver _inner;
+
+        public DeterministicResolver(IJsonTypeInfoResolver inner)
+        {
+            _inner = inner ?? throw new ArgumentNullException(nameof(inner));
+        }
+
+        public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options)
+        {
+            var info = _inner.GetTypeInfo(type, options);
+            if (info is null)
+            {
+                throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'.");
+            }
+
+            if (info.Kind is JsonTypeInfoKind.Object && info.Properties.Count > 1)
+            {
+                var ordered = info.Properties
+                    .OrderBy(property => ResolveOrder(type, property.Name))
+                    .ThenBy(property => property.Name, StringComparer.Ordinal)
+                    .ToArray();
+
+                info.Properties.Clear();
+                foreach (var property in ordered)
+                {
+                    info.Properties.Add(property);
+                }
+            }
+
+            return info;
+        }
+
+        private static int ResolveOrder(Type type, string propertyName)
+        {
+            if (PropertyOrder.TryGetValue(type, out var order))
+            {
+                var index = Array.IndexOf(order, propertyName);
+                if (index >= 0)
+                {
+                    return index;
+                }
+            }
+
+            return int.MaxValue;
+        }
+    }
+}
diff --git a/src/StellaOps.Scheduler.Models/EnumConverters.cs b/src/StellaOps.Scheduler.Models/EnumConverters.cs
new file mode 100644
index 00000000..094320c9
--- /dev/null
+++ b/src/StellaOps.Scheduler.Models/EnumConverters.cs
@@ -0,0 +1,109 @@
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace StellaOps.Scheduler.Models;
+
+internal sealed class ScheduleModeConverter : HyphenatedEnumConverter<ScheduleMode>
+{
+    protected override IReadOnlyDictionary<ScheduleMode, string> Map { get; } = new Dictionary<ScheduleMode, string>
+    {
+        [ScheduleMode.AnalysisOnly] = "analysis-only",
+        [ScheduleMode.ContentRefresh] = "content-refresh",
+    };
+}
+
+internal sealed class SelectorScopeConverter : HyphenatedEnumConverter<SelectorScope>
+{
+    protected override IReadOnlyDictionary<SelectorScope, string> Map { get; } = new Dictionary<SelectorScope, string>
+    {
+        [SelectorScope.AllImages] = "all-images",
+        [SelectorScope.ByNamespace] = "by-namespace",
+        [SelectorScope.ByRepository] = "by-repo",
+        [SelectorScope.ByDigest] = "by-digest",
+        [SelectorScope.ByLabels] = "by-labels",
+    };
+}
+
+internal sealed class RunTriggerConverter : LowerCaseEnumConverter<RunTrigger>
+{
+}
+
+internal sealed class RunStateConverter : LowerCaseEnumConverter<RunState>
+{
+}
+
+internal sealed class SeverityRankConverter : LowerCaseEnumConverter<SeverityRank>
+{
+    protected override string ConvertToString(SeverityRank value)
+        => value switch
+        {
+            SeverityRank.None => "none",
+            SeverityRank.Info => "info",
+            SeverityRank.Low => "low",
+            SeverityRank.Medium => "medium",
+            SeverityRank.High => "high",
+            SeverityRank.Critical => "critical",
+            SeverityRank.Unknown => "unknown",
+            _ => throw new ArgumentOutOfRangeException(nameof(value), value, null),
+        };
+}
+
+internal abstract class HyphenatedEnumConverter<TEnum> : JsonConverter<TEnum>
+    where TEnum : struct, Enum
+{
+    private readonly Dictionary<string, TEnum> _reverse;
+
+    protected HyphenatedEnumConverter()
+    {
+        _reverse = Map.ToDictionary(static pair => pair.Value, static pair => pair.Key, StringComparer.OrdinalIgnoreCase);
+    }
+
+    protected abstract IReadOnlyDictionary<TEnum, string> Map { get; }
+
+    public override TEnum Read(ref
Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var value = reader.GetString(); + if (value is not null && _reverse.TryGetValue(value, out var parsed)) + { + return parsed; + } + + throw new JsonException($"Value '{value}' is not a valid {typeof(TEnum).Name}."); + } + + public override void Write(Utf8JsonWriter writer, TEnum value, JsonSerializerOptions options) + { + if (Map.TryGetValue(value, out var text)) + { + writer.WriteStringValue(text); + return; + } + + throw new JsonException($"Unable to serialize {typeof(TEnum).Name} value '{value}'."); + } +} + +internal class LowerCaseEnumConverter : JsonConverter + where TEnum : struct, Enum +{ + private static readonly Dictionary Reverse = Enum + .GetValues() + .ToDictionary(static value => value.ToString().ToLowerInvariant(), static value => value, StringComparer.OrdinalIgnoreCase); + + public override TEnum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var value = reader.GetString(); + if (value is not null && Reverse.TryGetValue(value, out var parsed)) + { + return parsed; + } + + throw new JsonException($"Value '{value}' is not a valid {typeof(TEnum).Name}."); + } + + public override void Write(Utf8JsonWriter writer, TEnum value, JsonSerializerOptions options) + => writer.WriteStringValue(ConvertToString(value)); + + protected virtual string ConvertToString(TEnum value) + => value.ToString().ToLowerInvariant(); +} diff --git a/src/StellaOps.Scheduler.Models/Enums.cs b/src/StellaOps.Scheduler.Models/Enums.cs new file mode 100644 index 00000000..0ee0de02 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/Enums.cs @@ -0,0 +1,67 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Execution mode for a schedule. +/// +[JsonConverter(typeof(ScheduleModeConverter))] +public enum ScheduleMode +{ + AnalysisOnly, + ContentRefresh, +} + +/// +/// Selector scope determining which filters are applied. +/// +[JsonConverter(typeof(SelectorScopeConverter))] +public enum SelectorScope +{ + AllImages, + ByNamespace, + ByRepository, + ByDigest, + ByLabels, +} + +/// +/// Source that triggered a run. +/// +[JsonConverter(typeof(RunTriggerConverter))] +public enum RunTrigger +{ + Cron, + Feedser, + Vexer, + Manual, +} + +/// +/// Lifecycle state of a scheduler run. +/// +[JsonConverter(typeof(RunStateConverter))] +public enum RunState +{ + Planning, + Queued, + Running, + Completed, + Error, + Cancelled, +} + +/// +/// Severity rankings used in scheduler payloads. +/// +[JsonConverter(typeof(SeverityRankConverter))] +public enum SeverityRank +{ + None = 0, + Info = 1, + Low = 2, + Medium = 3, + High = 4, + Critical = 5, + Unknown = 6, +} diff --git a/src/StellaOps.Scheduler.Models/ImpactSet.cs b/src/StellaOps.Scheduler.Models/ImpactSet.cs new file mode 100644 index 00000000..0c389b59 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/ImpactSet.cs @@ -0,0 +1,138 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Result from resolving impacted images for a selector. +/// +public sealed record ImpactSet +{ + public ImpactSet( + Selector selector, + IEnumerable images, + bool usageOnly, + DateTimeOffset generatedAt, + int? total = null, + string? snapshotId = null, + string? schemaVersion = null) + : this( + selector, + NormalizeImages(images), + usageOnly, + Validation.NormalizeTimestamp(generatedAt), + total ?? 
images.Count(), + Validation.TrimToNull(snapshotId), + schemaVersion) + { + } + + [JsonConstructor] + public ImpactSet( + Selector selector, + ImmutableArray images, + bool usageOnly, + DateTimeOffset generatedAt, + int total, + string? snapshotId, + string? schemaVersion = null) + { + Selector = selector ?? throw new ArgumentNullException(nameof(selector)); + Images = images.IsDefault ? ImmutableArray.Empty : images; + UsageOnly = usageOnly; + GeneratedAt = Validation.NormalizeTimestamp(generatedAt); + Total = Validation.EnsureNonNegative(total, nameof(total)); + SnapshotId = Validation.TrimToNull(snapshotId); + SchemaVersion = SchedulerSchemaVersions.EnsureImpactSet(schemaVersion); + } + + public string SchemaVersion { get; } + + public Selector Selector { get; } + + public ImmutableArray Images { get; } = ImmutableArray.Empty; + + public bool UsageOnly { get; } + + public DateTimeOffset GeneratedAt { get; } + + public int Total { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? SnapshotId { get; } + + private static ImmutableArray NormalizeImages(IEnumerable images) + { + ArgumentNullException.ThrowIfNull(images); + + return images + .Where(static image => image is not null) + .Select(static image => image!) + .OrderBy(static image => image.ImageDigest, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +/// +/// Impacted image descriptor returned from the impact index. +/// +public sealed record ImpactImage +{ + public ImpactImage( + string imageDigest, + string registry, + string repository, + IEnumerable? namespaces = null, + IEnumerable? tags = null, + bool usedByEntrypoint = false, + IEnumerable>? labels = null) + : this( + Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest)), + Validation.EnsureSimpleIdentifier(registry, nameof(registry)), + Validation.EnsureSimpleIdentifier(repository, nameof(repository)), + Validation.NormalizeStringSet(namespaces, nameof(namespaces)), + Validation.NormalizeTagPatterns(tags), + usedByEntrypoint, + Validation.NormalizeMetadata(labels)) + { + } + + [JsonConstructor] + public ImpactImage( + string imageDigest, + string registry, + string repository, + ImmutableArray namespaces, + ImmutableArray tags, + bool usedByEntrypoint, + ImmutableSortedDictionary labels) + { + ImageDigest = Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest)); + Registry = Validation.EnsureSimpleIdentifier(registry, nameof(registry)); + Repository = Validation.EnsureSimpleIdentifier(repository, nameof(repository)); + Namespaces = namespaces.IsDefault ? ImmutableArray.Empty : namespaces; + Tags = tags.IsDefault ? ImmutableArray.Empty : tags; + UsedByEntrypoint = usedByEntrypoint; + var materializedLabels = labels ?? ImmutableSortedDictionary.Empty; + Labels = materializedLabels.Count > 0 + ? 
materializedLabels.WithComparers(StringComparer.Ordinal) + : ImmutableSortedDictionary.Empty; + } + + public string ImageDigest { get; } + + public string Registry { get; } + + public string Repository { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Namespaces { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Tags { get; } = ImmutableArray.Empty; + + public bool UsedByEntrypoint { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary Labels { get; } = ImmutableSortedDictionary.Empty; +} diff --git a/src/StellaOps.Scheduler.Models/Run.cs b/src/StellaOps.Scheduler.Models/Run.cs new file mode 100644 index 00000000..e0cabf7d --- /dev/null +++ b/src/StellaOps.Scheduler.Models/Run.cs @@ -0,0 +1,378 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Execution record for a scheduler run. +/// +public sealed record Run +{ + public Run( + string id, + string tenantId, + RunTrigger trigger, + RunState state, + RunStats stats, + DateTimeOffset createdAt, + RunReason? reason = null, + string? scheduleId = null, + DateTimeOffset? startedAt = null, + DateTimeOffset? finishedAt = null, + string? error = null, + IEnumerable? deltas = null, + string? schemaVersion = null) + : this( + id, + tenantId, + trigger, + state, + stats, + reason ?? RunReason.Empty, + scheduleId, + Validation.NormalizeTimestamp(createdAt), + Validation.NormalizeTimestamp(startedAt), + Validation.NormalizeTimestamp(finishedAt), + Validation.TrimToNull(error), + NormalizeDeltas(deltas), + schemaVersion) + { + } + + [JsonConstructor] + public Run( + string id, + string tenantId, + RunTrigger trigger, + RunState state, + RunStats stats, + RunReason reason, + string? scheduleId, + DateTimeOffset createdAt, + DateTimeOffset? startedAt, + DateTimeOffset? finishedAt, + string? error, + ImmutableArray deltas, + string? schemaVersion = null) + { + Id = Validation.EnsureId(id, nameof(id)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + Trigger = trigger; + State = state; + Stats = stats ?? throw new ArgumentNullException(nameof(stats)); + Reason = reason ?? RunReason.Empty; + ScheduleId = Validation.TrimToNull(scheduleId); + CreatedAt = Validation.NormalizeTimestamp(createdAt); + StartedAt = Validation.NormalizeTimestamp(startedAt); + FinishedAt = Validation.NormalizeTimestamp(finishedAt); + Error = Validation.TrimToNull(error); + Deltas = deltas.IsDefault + ? ImmutableArray.Empty + : deltas.OrderBy(static delta => delta.ImageDigest, StringComparer.Ordinal).ToImmutableArray(); + SchemaVersion = SchedulerSchemaVersions.EnsureRun(schemaVersion); + } + + public string SchemaVersion { get; } + + public string Id { get; } + + public string TenantId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ScheduleId { get; } + + public RunTrigger Trigger { get; } + + public RunState State { get; init; } + + public RunStats Stats { get; init; } + + public RunReason Reason { get; } + + public DateTimeOffset CreatedAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? StartedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? 
FinishedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Error { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Deltas { get; } = ImmutableArray.Empty; + + private static ImmutableArray NormalizeDeltas(IEnumerable? deltas) + { + if (deltas is null) + { + return ImmutableArray.Empty; + } + + return deltas + .Where(static delta => delta is not null) + .Select(static delta => delta!) + .OrderBy(static delta => delta.ImageDigest, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +/// +/// Context describing why a run executed. +/// +public sealed record RunReason +{ + public static RunReason Empty { get; } = new(); + + public RunReason( + string? manualReason = null, + string? feedserExportId = null, + string? vexerExportId = null, + string? cursor = null) + { + ManualReason = Validation.TrimToNull(manualReason); + FeedserExportId = Validation.TrimToNull(feedserExportId); + VexerExportId = Validation.TrimToNull(vexerExportId); + Cursor = Validation.TrimToNull(cursor); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ManualReason { get; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? FeedserExportId { get; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? VexerExportId { get; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Cursor { get; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ImpactWindowFrom { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ImpactWindowTo { get; init; } +} + +/// +/// Aggregated counters for a scheduler run. +/// +public sealed record RunStats +{ + public static RunStats Empty { get; } = new(); + + public RunStats( + int candidates = 0, + int deduped = 0, + int queued = 0, + int completed = 0, + int deltas = 0, + int newCriticals = 0, + int newHigh = 0, + int newMedium = 0, + int newLow = 0) + { + Candidates = Validation.EnsureNonNegative(candidates, nameof(candidates)); + Deduped = Validation.EnsureNonNegative(deduped, nameof(deduped)); + Queued = Validation.EnsureNonNegative(queued, nameof(queued)); + Completed = Validation.EnsureNonNegative(completed, nameof(completed)); + Deltas = Validation.EnsureNonNegative(deltas, nameof(deltas)); + NewCriticals = Validation.EnsureNonNegative(newCriticals, nameof(newCriticals)); + NewHigh = Validation.EnsureNonNegative(newHigh, nameof(newHigh)); + NewMedium = Validation.EnsureNonNegative(newMedium, nameof(newMedium)); + NewLow = Validation.EnsureNonNegative(newLow, nameof(newLow)); + } + + public int Candidates { get; } = 0; + + public int Deduped { get; } = 0; + + public int Queued { get; } = 0; + + public int Completed { get; } = 0; + + public int Deltas { get; } = 0; + + public int NewCriticals { get; } = 0; + + public int NewHigh { get; } = 0; + + public int NewMedium { get; } = 0; + + public int NewLow { get; } = 0; +} + +/// +/// Snapshot of delta impact for an image processed in a run. +/// +public sealed record DeltaSummary +{ + public DeltaSummary( + string imageDigest, + int newFindings, + int newCriticals, + int newHigh, + int newMedium, + int newLow, + IEnumerable? kevHits = null, + IEnumerable? topFindings = null, + string? reportUrl = null, + DeltaAttestation? attestation = null, + DateTimeOffset? 
detectedAt = null) + : this( + imageDigest, + Validation.EnsureNonNegative(newFindings, nameof(newFindings)), + Validation.EnsureNonNegative(newCriticals, nameof(newCriticals)), + Validation.EnsureNonNegative(newHigh, nameof(newHigh)), + Validation.EnsureNonNegative(newMedium, nameof(newMedium)), + Validation.EnsureNonNegative(newLow, nameof(newLow)), + NormalizeKevHits(kevHits), + NormalizeFindings(topFindings), + Validation.TrimToNull(reportUrl), + attestation, + Validation.NormalizeTimestamp(detectedAt)) + { + } + + [JsonConstructor] + public DeltaSummary( + string imageDigest, + int newFindings, + int newCriticals, + int newHigh, + int newMedium, + int newLow, + ImmutableArray kevHits, + ImmutableArray topFindings, + string? reportUrl, + DeltaAttestation? attestation, + DateTimeOffset? detectedAt) + { + ImageDigest = Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest)); + NewFindings = Validation.EnsureNonNegative(newFindings, nameof(newFindings)); + NewCriticals = Validation.EnsureNonNegative(newCriticals, nameof(newCriticals)); + NewHigh = Validation.EnsureNonNegative(newHigh, nameof(newHigh)); + NewMedium = Validation.EnsureNonNegative(newMedium, nameof(newMedium)); + NewLow = Validation.EnsureNonNegative(newLow, nameof(newLow)); + KevHits = kevHits.IsDefault ? ImmutableArray.Empty : kevHits; + TopFindings = topFindings.IsDefault + ? ImmutableArray.Empty + : topFindings + .OrderBy(static finding => finding.Severity, SeverityRankComparer.Instance) + .ThenBy(static finding => finding.VulnerabilityId, StringComparer.Ordinal) + .ToImmutableArray(); + ReportUrl = Validation.TrimToNull(reportUrl); + Attestation = attestation; + DetectedAt = Validation.NormalizeTimestamp(detectedAt); + } + + public string ImageDigest { get; } + + public int NewFindings { get; } + + public int NewCriticals { get; } + + public int NewHigh { get; } + + public int NewMedium { get; } + + public int NewLow { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray KevHits { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray TopFindings { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ReportUrl { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DeltaAttestation? Attestation { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? DetectedAt { get; } + + private static ImmutableArray NormalizeKevHits(IEnumerable? kevHits) + => Validation.NormalizeStringSet(kevHits, nameof(kevHits)); + + private static ImmutableArray NormalizeFindings(IEnumerable? findings) + { + if (findings is null) + { + return ImmutableArray.Empty; + } + + return findings + .Where(static finding => finding is not null) + .Select(static finding => finding!) + .OrderBy(static finding => finding.Severity, SeverityRankComparer.Instance) + .ThenBy(static finding => finding.VulnerabilityId, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +/// +/// Top finding entry included in delta summaries. +/// +public sealed record DeltaFinding +{ + public DeltaFinding(string purl, string vulnerabilityId, SeverityRank severity, string? 
link = null) + { + Purl = Validation.EnsureSimpleIdentifier(purl, nameof(purl)); + VulnerabilityId = Validation.EnsureSimpleIdentifier(vulnerabilityId, nameof(vulnerabilityId)); + Severity = severity; + Link = Validation.TrimToNull(link); + } + + public string Purl { get; } + + public string VulnerabilityId { get; } + + public SeverityRank Severity { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Link { get; } +} + +/// +/// Rekor/attestation information surfaced with a delta summary. +/// +public sealed record DeltaAttestation +{ + public DeltaAttestation(string? uuid, bool? verified = null) + { + Uuid = Validation.TrimToNull(uuid); + Verified = verified; + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Uuid { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? Verified { get; } +} + +internal sealed class SeverityRankComparer : IComparer +{ + public static SeverityRankComparer Instance { get; } = new(); + + private static readonly Dictionary Order = new() + { + [SeverityRank.Critical] = 0, + [SeverityRank.High] = 1, + [SeverityRank.Unknown] = 2, + [SeverityRank.Medium] = 3, + [SeverityRank.Low] = 4, + [SeverityRank.Info] = 5, + [SeverityRank.None] = 6, + }; + + public int Compare(SeverityRank x, SeverityRank y) + => GetOrder(x).CompareTo(GetOrder(y)); + + private static int GetOrder(SeverityRank severity) + => Order.TryGetValue(severity, out var value) ? value : int.MaxValue; +} diff --git a/src/StellaOps.Scheduler.Models/RunReasonExtensions.cs b/src/StellaOps.Scheduler.Models/RunReasonExtensions.cs new file mode 100644 index 00000000..5ddd837e --- /dev/null +++ b/src/StellaOps.Scheduler.Models/RunReasonExtensions.cs @@ -0,0 +1,33 @@ +using System; +using System.Globalization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Convenience helpers for mutations. +/// +public static class RunReasonExtensions +{ + /// + /// Returns a copy of with impact window timestamps normalized to ISO-8601. + /// + public static RunReason WithImpactWindow( + this RunReason reason, + DateTimeOffset? from, + DateTimeOffset? to) + { + var normalizedFrom = Validation.NormalizeTimestamp(from); + var normalizedTo = Validation.NormalizeTimestamp(to); + + if (normalizedFrom.HasValue && normalizedTo.HasValue && normalizedFrom > normalizedTo) + { + throw new ArgumentException("Impact window start must be earlier than or equal to end."); + } + + return reason with + { + ImpactWindowFrom = normalizedFrom?.ToString("O", CultureInfo.InvariantCulture), + ImpactWindowTo = normalizedTo?.ToString("O", CultureInfo.InvariantCulture), + }; + } +} diff --git a/src/StellaOps.Scheduler.Models/RunStateMachine.cs b/src/StellaOps.Scheduler.Models/RunStateMachine.cs new file mode 100644 index 00000000..e3fc298b --- /dev/null +++ b/src/StellaOps.Scheduler.Models/RunStateMachine.cs @@ -0,0 +1,157 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Scheduler.Models; + +/// +/// Encapsulates allowed transitions and invariants. 
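+/// A minimal usage sketch (illustrative only; assumes an existing <c>run</c> currently in the Queued state
+/// and a caller-supplied timestamp <c>now</c>):
+/// <code>
+/// run = RunStateMachine.EnsureTransition(run, RunState.Running, now);
+/// run = RunStateMachine.EnsureTransition(run, RunState.Completed, now,
+///     stats => { stats.IncrementCompleted(); stats.IncrementDeltas(); });
+/// </code>
+/// Each call rejects illegal transitions, stamps StartedAt/FinishedAt where required, and re-runs
+/// Validate before returning the updated record.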
+/// +public static class RunStateMachine +{ + private static readonly IReadOnlyDictionary Adjacency = new Dictionary + { + [RunState.Planning] = new[] { RunState.Planning, RunState.Queued, RunState.Cancelled }, + [RunState.Queued] = new[] { RunState.Queued, RunState.Running, RunState.Cancelled }, + [RunState.Running] = new[] { RunState.Running, RunState.Completed, RunState.Error, RunState.Cancelled }, + [RunState.Completed] = new[] { RunState.Completed }, + [RunState.Error] = new[] { RunState.Error }, + [RunState.Cancelled] = new[] { RunState.Cancelled }, + }; + + public static bool CanTransition(RunState from, RunState to) + { + if (!Adjacency.TryGetValue(from, out var allowed)) + { + return false; + } + + return allowed.Contains(to); + } + + public static bool IsTerminal(RunState state) + => state is RunState.Completed or RunState.Error or RunState.Cancelled; + + /// + /// Applies a state transition ensuring timestamps, stats, and error contracts stay consistent. + /// + public static Run EnsureTransition( + Run run, + RunState next, + DateTimeOffset timestamp, + Action? mutateStats = null, + string? errorMessage = null) + { + ArgumentNullException.ThrowIfNull(run); + + var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp); + var current = run.State; + + if (!CanTransition(current, next)) + { + throw new InvalidOperationException($"Run state transition from '{current}' to '{next}' is not allowed."); + } + + var statsBuilder = new RunStatsBuilder(run.Stats); + mutateStats?.Invoke(statsBuilder); + var newStats = statsBuilder.Build(); + + var startedAt = run.StartedAt; + var finishedAt = run.FinishedAt; + + if (current != RunState.Running && next == RunState.Running && startedAt is null) + { + startedAt = normalizedTimestamp; + } + + if (IsTerminal(next)) + { + finishedAt ??= normalizedTimestamp; + } + + if (startedAt is { } start && start < run.CreatedAt) + { + throw new InvalidOperationException("Run started time cannot be earlier than created time."); + } + + if (finishedAt is { } finish) + { + if (startedAt is { } startTime && finish < startTime) + { + throw new InvalidOperationException("Run finished time cannot be earlier than start time."); + } + + if (!IsTerminal(next)) + { + throw new InvalidOperationException("Finished time present but next state is not terminal."); + } + } + + string? nextError = null; + if (next == RunState.Error) + { + var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? 
run.Error : errorMessage.Trim(); + if (string.IsNullOrWhiteSpace(effectiveError)) + { + throw new InvalidOperationException("Transitioning to Error requires a non-empty error message."); + } + + nextError = effectiveError; + } + else if (!string.IsNullOrWhiteSpace(errorMessage)) + { + throw new InvalidOperationException("Error message can only be provided when transitioning to Error state."); + } + + var updated = run with + { + State = next, + Stats = newStats, + StartedAt = startedAt, + FinishedAt = finishedAt, + Error = nextError, + }; + + Validate(updated); + return updated; + } + + public static void Validate(Run run) + { + ArgumentNullException.ThrowIfNull(run); + + if (run.StartedAt is { } started && started < run.CreatedAt) + { + throw new InvalidOperationException("Run.StartedAt cannot be earlier than CreatedAt."); + } + + if (run.FinishedAt is { } finished) + { + if (run.StartedAt is { } startedAt && finished < startedAt) + { + throw new InvalidOperationException("Run.FinishedAt cannot be earlier than StartedAt."); + } + + if (!IsTerminal(run.State)) + { + throw new InvalidOperationException("Run.FinishedAt set while state is not terminal."); + } + } + else if (IsTerminal(run.State)) + { + throw new InvalidOperationException("Terminal run states must include FinishedAt."); + } + + if (run.State == RunState.Error) + { + if (string.IsNullOrWhiteSpace(run.Error)) + { + throw new InvalidOperationException("Run.Error must be populated when state is Error."); + } + } + else if (!string.IsNullOrWhiteSpace(run.Error)) + { + throw new InvalidOperationException("Run.Error must be null for non-error states."); + } + } +} diff --git a/src/StellaOps.Scheduler.Models/RunStatsBuilder.cs b/src/StellaOps.Scheduler.Models/RunStatsBuilder.cs new file mode 100644 index 00000000..039d6742 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/RunStatsBuilder.cs @@ -0,0 +1,92 @@ +using System; + +namespace StellaOps.Scheduler.Models; + +/// +/// Helper that enforces monotonic updates. +/// +public sealed class RunStatsBuilder +{ + private int _candidates; + private int _deduped; + private int _queued; + private int _completed; + private int _deltas; + private int _newCriticals; + private int _newHigh; + private int _newMedium; + private int _newLow; + + public RunStatsBuilder(RunStats? 
baseline = null) + { + baseline ??= RunStats.Empty; + _candidates = baseline.Candidates; + _deduped = baseline.Deduped; + _queued = baseline.Queued; + _completed = baseline.Completed; + _deltas = baseline.Deltas; + _newCriticals = baseline.NewCriticals; + _newHigh = baseline.NewHigh; + _newMedium = baseline.NewMedium; + _newLow = baseline.NewLow; + } + + public void SetCandidates(int value) => _candidates = EnsureMonotonic(value, _candidates, nameof(RunStats.Candidates)); + + public void IncrementCandidates(int value = 1) => SetCandidates(_candidates + value); + + public void SetDeduped(int value) => _deduped = EnsureMonotonic(value, _deduped, nameof(RunStats.Deduped)); + + public void IncrementDeduped(int value = 1) => SetDeduped(_deduped + value); + + public void SetQueued(int value) => _queued = EnsureMonotonic(value, _queued, nameof(RunStats.Queued)); + + public void IncrementQueued(int value = 1) => SetQueued(_queued + value); + + public void SetCompleted(int value) => _completed = EnsureMonotonic(value, _completed, nameof(RunStats.Completed)); + + public void IncrementCompleted(int value = 1) => SetCompleted(_completed + value); + + public void SetDeltas(int value) => _deltas = EnsureMonotonic(value, _deltas, nameof(RunStats.Deltas)); + + public void IncrementDeltas(int value = 1) => SetDeltas(_deltas + value); + + public void SetNewCriticals(int value) => _newCriticals = EnsureMonotonic(value, _newCriticals, nameof(RunStats.NewCriticals)); + + public void IncrementNewCriticals(int value = 1) => SetNewCriticals(_newCriticals + value); + + public void SetNewHigh(int value) => _newHigh = EnsureMonotonic(value, _newHigh, nameof(RunStats.NewHigh)); + + public void IncrementNewHigh(int value = 1) => SetNewHigh(_newHigh + value); + + public void SetNewMedium(int value) => _newMedium = EnsureMonotonic(value, _newMedium, nameof(RunStats.NewMedium)); + + public void IncrementNewMedium(int value = 1) => SetNewMedium(_newMedium + value); + + public void SetNewLow(int value) => _newLow = EnsureMonotonic(value, _newLow, nameof(RunStats.NewLow)); + + public void IncrementNewLow(int value = 1) => SetNewLow(_newLow + value); + + public RunStats Build() + => new( + candidates: _candidates, + deduped: _deduped, + queued: _queued, + completed: _completed, + deltas: _deltas, + newCriticals: _newCriticals, + newHigh: _newHigh, + newMedium: _newMedium, + newLow: _newLow); + + private static int EnsureMonotonic(int value, int current, string fieldName) + { + Validation.EnsureNonNegative(value, fieldName); + if (value < current) + { + throw new InvalidOperationException($"RunStats.{fieldName} cannot decrease (current: {current}, attempted: {value})."); + } + + return value; + } +} diff --git a/src/StellaOps.Scheduler.Models/Schedule.cs b/src/StellaOps.Scheduler.Models/Schedule.cs new file mode 100644 index 00000000..470246b3 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/Schedule.cs @@ -0,0 +1,227 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Scheduler configuration entity persisted in Mongo. +/// +public sealed record Schedule +{ + public Schedule( + string id, + string tenantId, + string name, + bool enabled, + string cronExpression, + string timezone, + ScheduleMode mode, + Selector selection, + ScheduleOnlyIf? onlyIf, + ScheduleNotify? notify, + ScheduleLimits? limits, + DateTimeOffset createdAt, + string createdBy, + DateTimeOffset updatedAt, + string updatedBy, + ImmutableArray? subscribers = null, + string? 
schemaVersion = null) + : this( + id, + tenantId, + name, + enabled, + cronExpression, + timezone, + mode, + selection, + onlyIf ?? ScheduleOnlyIf.Default, + notify ?? ScheduleNotify.Default, + limits ?? ScheduleLimits.Default, + subscribers ?? ImmutableArray.Empty, + createdAt, + createdBy, + updatedAt, + updatedBy, + schemaVersion) + { + } + + [JsonConstructor] + public Schedule( + string id, + string tenantId, + string name, + bool enabled, + string cronExpression, + string timezone, + ScheduleMode mode, + Selector selection, + ScheduleOnlyIf onlyIf, + ScheduleNotify notify, + ScheduleLimits limits, + ImmutableArray subscribers, + DateTimeOffset createdAt, + string createdBy, + DateTimeOffset updatedAt, + string updatedBy, + string? schemaVersion = null) + { + Id = Validation.EnsureId(id, nameof(id)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + Name = Validation.EnsureName(name, nameof(name)); + Enabled = enabled; + CronExpression = Validation.EnsureCronExpression(cronExpression, nameof(cronExpression)); + Timezone = Validation.EnsureTimezone(timezone, nameof(timezone)); + Mode = mode; + Selection = selection ?? throw new ArgumentNullException(nameof(selection)); + OnlyIf = onlyIf ?? ScheduleOnlyIf.Default; + Notify = notify ?? ScheduleNotify.Default; + Limits = limits ?? ScheduleLimits.Default; + Subscribers = (subscribers.IsDefault ? ImmutableArray.Empty : subscribers) + .Select(static value => Validation.EnsureSimpleIdentifier(value, nameof(subscribers))) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + CreatedAt = Validation.NormalizeTimestamp(createdAt); + CreatedBy = Validation.EnsureSimpleIdentifier(createdBy, nameof(createdBy)); + UpdatedAt = Validation.NormalizeTimestamp(updatedAt); + UpdatedBy = Validation.EnsureSimpleIdentifier(updatedBy, nameof(updatedBy)); + SchemaVersion = SchedulerSchemaVersions.EnsureSchedule(schemaVersion); + + if (Selection.TenantId is not null && !string.Equals(Selection.TenantId, TenantId, StringComparison.Ordinal)) + { + throw new ArgumentException("Selection tenant must match schedule tenant.", nameof(selection)); + } + } + + public string SchemaVersion { get; } + + public string Id { get; } + + public string TenantId { get; } + + public string Name { get; } + + public bool Enabled { get; } + + public string CronExpression { get; } + + public string Timezone { get; } + + public ScheduleMode Mode { get; } + + public Selector Selection { get; } + + public ScheduleOnlyIf OnlyIf { get; } + + public ScheduleNotify Notify { get; } + + public ScheduleLimits Limits { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Subscribers { get; } = ImmutableArray.Empty; + + public DateTimeOffset CreatedAt { get; } + + public string CreatedBy { get; } + + public DateTimeOffset UpdatedAt { get; } + + public string UpdatedBy { get; } +} + +/// +/// Conditions that must hold before a schedule enqueues work. +/// +public sealed record ScheduleOnlyIf +{ + public static ScheduleOnlyIf Default { get; } = new(); + + [JsonConstructor] + public ScheduleOnlyIf(int? lastReportOlderThanDays = null, string? policyRevision = null) + { + LastReportOlderThanDays = Validation.EnsurePositiveOrNull(lastReportOlderThanDays, nameof(lastReportOlderThanDays)); + PolicyRevision = Validation.TrimToNull(policyRevision); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? 
LastReportOlderThanDays { get; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? PolicyRevision { get; } = null; +} + +/// +/// Notification preferences for schedule outcomes. +/// +public sealed record ScheduleNotify +{ + public static ScheduleNotify Default { get; } = new(onNewFindings: true, null, includeKev: true); + + public ScheduleNotify(bool onNewFindings, SeverityRank? minSeverity, bool includeKev) + { + OnNewFindings = onNewFindings; + if (minSeverity is SeverityRank.Unknown or SeverityRank.None) + { + MinSeverity = minSeverity == SeverityRank.Unknown ? SeverityRank.Unknown : SeverityRank.Low; + } + else + { + MinSeverity = minSeverity; + } + + IncludeKev = includeKev; + } + + [JsonConstructor] + public ScheduleNotify(bool onNewFindings, SeverityRank? minSeverity, bool includeKev, bool includeQuietFindings = false) + : this(onNewFindings, minSeverity, includeKev) + { + IncludeQuietFindings = includeQuietFindings; + } + + public bool OnNewFindings { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public SeverityRank? MinSeverity { get; } + + public bool IncludeKev { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool IncludeQuietFindings { get; } +} + +/// +/// Execution limits that bound scheduler throughput. +/// +public sealed record ScheduleLimits +{ + public static ScheduleLimits Default { get; } = new(); + + public ScheduleLimits(int? maxJobs = null, int? ratePerSecond = null, int? parallelism = null) + { + MaxJobs = Validation.EnsurePositiveOrNull(maxJobs, nameof(maxJobs)); + RatePerSecond = Validation.EnsurePositiveOrNull(ratePerSecond, nameof(ratePerSecond)); + Parallelism = Validation.EnsurePositiveOrNull(parallelism, nameof(parallelism)); + } + + [JsonConstructor] + public ScheduleLimits(int? maxJobs, int? ratePerSecond, int? parallelism, int? burst = null) + : this(maxJobs, ratePerSecond, parallelism) + { + Burst = Validation.EnsurePositiveOrNull(burst, nameof(burst)); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxJobs { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? RatePerSecond { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? Parallelism { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? Burst { get; } +} diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs b/src/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs new file mode 100644 index 00000000..6399e9c1 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs @@ -0,0 +1,172 @@ +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; + +namespace StellaOps.Scheduler.Models; + +/// +/// Upgrades scheduler documents emitted by earlier schema revisions to the latest DTOs. 
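+/// A minimal usage sketch (illustrative only; assumes <c>rawJson</c> holds a schedule document loaded from
+/// storage and <c>logger</c> is any available logger):
+/// <code>
+/// var node = JsonNode.Parse(rawJson)!.AsObject();
+/// var result = SchedulerSchemaMigration.UpgradeSchedule(node, strict: true);
+/// Schedule schedule = result.Value;
+/// foreach (var warning in result.Warnings)
+/// {
+///     logger.LogWarning("{Warning}", warning);
+/// }
+/// </code>
+/// Documents missing <c>schemaVersion</c> are treated as the current version; strict mode strips unknown
+/// members and surfaces them as warnings.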
+///
+public static class SchedulerSchemaMigration
+{
+    private static readonly ImmutableHashSet<string> ScheduleProperties = ImmutableHashSet.Create(
+        StringComparer.Ordinal,
+        "schemaVersion",
+        "id",
+        "tenantId",
+        "name",
+        "enabled",
+        "cronExpression",
+        "timezone",
+        "mode",
+        "selection",
+        "onlyIf",
+        "notify",
+        "limits",
+        "subscribers",
+        "createdAt",
+        "createdBy",
+        "updatedAt",
+        "updatedBy");
+
+    private static readonly ImmutableHashSet<string> RunProperties = ImmutableHashSet.Create(
+        StringComparer.Ordinal,
+        "schemaVersion",
+        "id",
+        "tenantId",
+        "scheduleId",
+        "trigger",
+        "state",
+        "stats",
+        "reason",
+        "createdAt",
+        "startedAt",
+        "finishedAt",
+        "error",
+        "deltas");
+
+    private static readonly ImmutableHashSet<string> ImpactSetProperties = ImmutableHashSet.Create(
+        StringComparer.Ordinal,
+        "schemaVersion",
+        "selector",
+        "images",
+        "usageOnly",
+        "generatedAt",
+        "total",
+        "snapshotId");
+
+    public static SchedulerSchemaMigrationResult<Schedule> UpgradeSchedule(JsonNode document, bool strict = false)
+        => Upgrade(
+            document,
+            SchedulerSchemaVersions.Schedule,
+            SchedulerSchemaVersions.EnsureSchedule,
+            ScheduleProperties,
+            static json => CanonicalJsonSerializer.Deserialize<Schedule>(json),
+            strict);
+
+    public static SchedulerSchemaMigrationResult<Run> UpgradeRun(JsonNode document, bool strict = false)
+        => Upgrade(
+            document,
+            SchedulerSchemaVersions.Run,
+            SchedulerSchemaVersions.EnsureRun,
+            RunProperties,
+            static json => CanonicalJsonSerializer.Deserialize<Run>(json),
+            strict);
+
+    public static SchedulerSchemaMigrationResult<ImpactSet> UpgradeImpactSet(JsonNode document, bool strict = false)
+        => Upgrade(
+            document,
+            SchedulerSchemaVersions.ImpactSet,
+            SchedulerSchemaVersions.EnsureImpactSet,
+            ImpactSetProperties,
+            static json => CanonicalJsonSerializer.Deserialize<ImpactSet>(json),
+            strict);
+
+    private static SchedulerSchemaMigrationResult<T> Upgrade<T>(
+        JsonNode document,
+        string latestVersion,
+        Func<string?, string> ensureVersion,
+        ImmutableHashSet<string> knownProperties,
+        Func<string, T> deserialize,
+        bool strict)
+    {
+        ArgumentNullException.ThrowIfNull(document);
+
+        var (normalized, fromVersion) = Normalize(document, ensureVersion);
+        var warnings = ImmutableArray.CreateBuilder<string>();
+
+        if (strict)
+        {
+            RemoveUnknownMembers(normalized, knownProperties, warnings, fromVersion);
+        }
+
+        if (!string.Equals(fromVersion, latestVersion, StringComparison.Ordinal))
+        {
+            // Placeholder for forward upgrades once schema@2 exists.
+            throw new NotSupportedException($"Unsupported scheduler schema version '{fromVersion}', expected '{latestVersion}'.");
+        }
+
+        var canonicalJson = normalized.ToJsonString(new JsonSerializerOptions
+        {
+            WriteIndented = false,
+        });
+
+        var value = deserialize(canonicalJson);
+        return new SchedulerSchemaMigrationResult<T>(
+            value,
+            fromVersion,
+            latestVersion,
+            warnings.ToImmutable());
+    }
+
+    private static (JsonObject Clone, string SchemaVersion) Normalize(JsonNode node, Func<string?, string> ensureVersion)
+    {
+        if (node is not JsonObject obj)
+        {
+            throw new ArgumentException("Document must be a JSON object.", nameof(node));
+        }
+
+        if (obj.DeepClone() is not JsonObject clone)
+        {
+            throw new InvalidOperationException("Unable to clone scheduler document.");
+        }
+
+        string schemaVersion;
+        if (clone.TryGetPropertyValue("schemaVersion", out var value) &&
+            value is JsonValue jsonValue &&
+            jsonValue.TryGetValue(out string?
rawVersion)) + { + schemaVersion = ensureVersion(rawVersion); + } + else + { + schemaVersion = ensureVersion(null); + clone["schemaVersion"] = schemaVersion; + } + + // Ensure schemaVersion is normalized in the clone. + clone["schemaVersion"] = schemaVersion; + + return (clone, schemaVersion); + } + + private static void RemoveUnknownMembers( + JsonObject json, + ImmutableHashSet knownProperties, + ImmutableArray.Builder warnings, + string schemaVersion) + { + var unknownKeys = json + .Where(static pair => pair.Key is not null) + .Select(pair => pair.Key!) + .Where(key => !knownProperties.Contains(key)) + .ToArray(); + + foreach (var key in unknownKeys) + { + json.Remove(key); + warnings.Add($"Removed unknown property '{key}' from scheduler document (schemaVersion={schemaVersion})."); + } + } +} diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs b/src/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs new file mode 100644 index 00000000..38d2a601 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs @@ -0,0 +1,13 @@ +using System.Collections.Immutable; + +namespace StellaOps.Scheduler.Models; + +/// +/// Result from upgrading a scheduler document to the latest schema version. +/// +/// Target DTO type. +public sealed record SchedulerSchemaMigrationResult( + T Value, + string FromVersion, + string ToVersion, + ImmutableArray Warnings); diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs b/src/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs new file mode 100644 index 00000000..59571e4b --- /dev/null +++ b/src/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs @@ -0,0 +1,26 @@ +namespace StellaOps.Scheduler.Models; + +/// +/// Canonical schema version identifiers for scheduler documents. +/// +public static class SchedulerSchemaVersions +{ + public const string Schedule = "scheduler.schedule@1"; + public const string Run = "scheduler.run@1"; + public const string ImpactSet = "scheduler.impact-set@1"; + public const string ScheduleLegacy0 = "scheduler.schedule@0"; + public const string RunLegacy0 = "scheduler.run@0"; + public const string ImpactSetLegacy0 = "scheduler.impact-set@0"; + + public static string EnsureSchedule(string? value) + => Normalize(value, Schedule); + + public static string EnsureRun(string? value) + => Normalize(value, Run); + + public static string EnsureImpactSet(string? value) + => Normalize(value, ImpactSet); + + private static string Normalize(string? value, string fallback) + => string.IsNullOrWhiteSpace(value) ? fallback : value.Trim(); +} diff --git a/src/StellaOps.Scheduler.Models/Selector.cs b/src/StellaOps.Scheduler.Models/Selector.cs new file mode 100644 index 00000000..c48eb663 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/Selector.cs @@ -0,0 +1,134 @@ +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// +/// Selector filters used to resolve impacted assets. +/// +public sealed record Selector +{ + public Selector( + SelectorScope scope, + string? tenantId = null, + IEnumerable? namespaces = null, + IEnumerable? repositories = null, + IEnumerable? digests = null, + IEnumerable? includeTags = null, + IEnumerable? 
labels = null, + bool resolvesTags = false) + : this( + scope, + tenantId, + Validation.NormalizeStringSet(namespaces, nameof(namespaces)), + Validation.NormalizeStringSet(repositories, nameof(repositories)), + Validation.NormalizeDigests(digests, nameof(digests)), + Validation.NormalizeTagPatterns(includeTags), + NormalizeLabels(labels), + resolvesTags) + { + } + + [JsonConstructor] + public Selector( + SelectorScope scope, + string? tenantId, + ImmutableArray namespaces, + ImmutableArray repositories, + ImmutableArray digests, + ImmutableArray includeTags, + ImmutableArray labels, + bool resolvesTags) + { + Scope = scope; + TenantId = tenantId is null ? null : Validation.EnsureTenantId(tenantId, nameof(tenantId)); + Namespaces = namespaces.IsDefault ? ImmutableArray.Empty : namespaces; + Repositories = repositories.IsDefault ? ImmutableArray.Empty : repositories; + Digests = digests.IsDefault ? ImmutableArray.Empty : digests; + IncludeTags = includeTags.IsDefault ? ImmutableArray.Empty : includeTags; + Labels = labels.IsDefault ? ImmutableArray.Empty : labels; + ResolvesTags = resolvesTags; + + if (Scope is SelectorScope.ByDigest && Digests.Length == 0) + { + throw new ArgumentException("At least one digest is required when scope is by-digest.", nameof(digests)); + } + + if (Scope is SelectorScope.ByNamespace && Namespaces.Length == 0) + { + throw new ArgumentException("Namespaces are required when scope is by-namespace.", nameof(namespaces)); + } + + if (Scope is SelectorScope.ByRepository && Repositories.Length == 0) + { + throw new ArgumentException("Repositories are required when scope is by-repo.", nameof(repositories)); + } + + if (Scope is SelectorScope.ByLabels && Labels.Length == 0) + { + throw new ArgumentException("Labels are required when scope is by-labels.", nameof(labels)); + } + } + + public SelectorScope Scope { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? TenantId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Namespaces { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Repositories { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Digests { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray IncludeTags { get; } = ImmutableArray.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Labels { get; } = ImmutableArray.Empty; + + public bool ResolvesTags { get; } + + private static ImmutableArray NormalizeLabels(IEnumerable? labels) + { + if (labels is null) + { + return ImmutableArray.Empty; + } + + return labels + .Where(static label => label is not null) + .Select(static label => label!) + .OrderBy(static label => label.Key, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +/// +/// Describes a label match (key and optional accepted values). +/// +public sealed record LabelSelector +{ + public LabelSelector(string key, IEnumerable? values = null) + : this(key, NormalizeValues(values)) + { + } + + [JsonConstructor] + public LabelSelector(string key, ImmutableArray values) + { + Key = Validation.EnsureSimpleIdentifier(key, nameof(key)); + Values = values.IsDefault ? 
ImmutableArray.Empty : values; + } + + public string Key { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray Values { get; } = ImmutableArray.Empty; + + private static ImmutableArray NormalizeValues(IEnumerable? values) + => Validation.NormalizeStringSet(values, nameof(values)); +} diff --git a/src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj b/src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj index 6c3a8871..514869b9 100644 --- a/src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj +++ b/src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj @@ -1,7 +1,9 @@ net10.0 + preview enable enable + true diff --git a/src/StellaOps.Scheduler.Models/TASKS.md b/src/StellaOps.Scheduler.Models/TASKS.md index 062722fb..2e0d240c 100644 --- a/src/StellaOps.Scheduler.Models/TASKS.md +++ b/src/StellaOps.Scheduler.Models/TASKS.md @@ -2,6 +2,6 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCHED-MODELS-16-101 | TODO | Scheduler Models Guild | — | Define DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary, AuditRecord) with validation + canonical JSON. | DTOs merged with tests; documentation snippet added; serialization deterministic. | -| SCHED-MODELS-16-102 | TODO | Scheduler Models Guild | SCHED-MODELS-16-101 | Publish schema docs & sample payloads for UI/Notify integration. | Samples committed; docs referenced; contract tests pass. | -| SCHED-MODELS-16-103 | TODO | Scheduler Models Guild | SCHED-MODELS-16-101 | Versioning/migration helpers (schedule evolution, run state transitions). | Migration helpers implemented; tests cover upgrade/downgrade; guidelines documented. | +| SCHED-MODELS-16-101 | DONE (2025-10-19) | Scheduler Models Guild | — | Define DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary, AuditRecord) with validation + canonical JSON. | DTOs merged with tests; documentation snippet added; serialization deterministic. | +| SCHED-MODELS-16-102 | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Publish schema docs & sample payloads for UI/Notify integration. | Samples committed; docs referenced; contract tests pass. | +| SCHED-MODELS-16-103 | DOING (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Versioning/migration helpers (schedule evolution, run state transitions). | Migration helpers implemented; tests cover upgrade/downgrade; guidelines documented. | diff --git a/src/StellaOps.Scheduler.Models/Validation.cs b/src/StellaOps.Scheduler.Models/Validation.cs new file mode 100644 index 00000000..5f59c823 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/Validation.cs @@ -0,0 +1,247 @@ +using System.Collections.Immutable; +using System.Text.RegularExpressions; + +namespace StellaOps.Scheduler.Models; + +/// +/// Lightweight validation helpers for scheduler DTO constructors. 
+/// +internal static partial class Validation +{ + private const int MaxIdentifierLength = 256; + private const int MaxNameLength = 200; + + public static string EnsureId(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName); + if (normalized.Length > MaxIdentifierLength) + { + throw new ArgumentException($"Value exceeds {MaxIdentifierLength} characters.", paramName); + } + + return normalized; + } + + public static string EnsureName(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName); + if (normalized.Length > MaxNameLength) + { + throw new ArgumentException($"Value exceeds {MaxNameLength} characters.", paramName); + } + + return normalized; + } + + public static string EnsureTenantId(string value, string paramName) + { + var normalized = EnsureId(value, paramName); + if (!TenantRegex().IsMatch(normalized)) + { + throw new ArgumentException("Tenant id must be alphanumeric with '-', '_' separators.", paramName); + } + + return normalized; + } + + public static string EnsureCronExpression(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName); + if (normalized.Length > 128 || normalized.Contains('\n', StringComparison.Ordinal) || normalized.Contains('\r', StringComparison.Ordinal)) + { + throw new ArgumentException("Cron expression too long or contains invalid characters.", paramName); + } + + if (!CronSegmentRegex().IsMatch(normalized)) + { + throw new ArgumentException("Cron expression contains unsupported characters.", paramName); + } + + return normalized; + } + + public static string EnsureTimezone(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName); + try + { + _ = TimeZoneInfo.FindSystemTimeZoneById(normalized); + } + catch (TimeZoneNotFoundException ex) + { + throw new ArgumentException($"Timezone '{normalized}' is not recognized on this host.", paramName, ex); + } + catch (InvalidTimeZoneException ex) + { + throw new ArgumentException($"Timezone '{normalized}' is invalid.", paramName, ex); + } + + return normalized; + } + + public static string? TrimToNull(string? value) + => string.IsNullOrWhiteSpace(value) + ? null + : value.Trim(); + + public static ImmutableArray NormalizeStringSet(IEnumerable? values, string paramName, bool allowWildcards = false) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + var result = values + .Select(static value => TrimToNull(value)) + .Where(static value => value is not null) + .Select(value => allowWildcards ? value! : EnsureSimpleIdentifier(value!, paramName)) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + + return result; + } + + public static ImmutableArray NormalizeTagPatterns(IEnumerable? values) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + var result = values + .Select(static value => TrimToNull(value)) + .Where(static value => value is not null) + .Select(static value => value!) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return result; + } + + public static ImmutableArray NormalizeDigests(IEnumerable? 
values, string paramName) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + var result = values + .Select(static value => TrimToNull(value)) + .Where(static value => value is not null) + .Select(value => EnsureDigestFormat(value!, paramName)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return result; + } + + public static int? EnsurePositiveOrNull(int? value, string paramName) + { + if (value is null) + { + return null; + } + + if (value <= 0) + { + throw new ArgumentOutOfRangeException(paramName, value, "Value must be greater than zero."); + } + + return value; + } + + public static int EnsureNonNegative(int value, string paramName) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(paramName, value, "Value must be zero or greater."); + } + + return value; + } + + public static ImmutableSortedDictionary NormalizeMetadata(IEnumerable>? metadata) + { + if (metadata is null) + { + return ImmutableSortedDictionary.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var pair in metadata) + { + var key = TrimToNull(pair.Key); + var value = TrimToNull(pair.Value); + if (key is null || value is null) + { + continue; + } + + var normalizedKey = key.ToLowerInvariant(); + if (!builder.ContainsKey(normalizedKey)) + { + builder[normalizedKey] = value; + } + } + + return builder.ToImmutable(); + } + + public static string EnsureSimpleIdentifier(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName); + if (!SimpleIdentifierRegex().IsMatch(normalized)) + { + throw new ArgumentException("Value must contain letters, digits, '-', '_', '.', or '/'.", paramName); + } + + return normalized; + } + + public static string EnsureDigestFormat(string value, string paramName) + { + var normalized = EnsureNotNullOrWhiteSpace(value, paramName).ToLowerInvariant(); + if (!normalized.StartsWith("sha256:", StringComparison.Ordinal) || normalized.Length <= 7) + { + throw new ArgumentException("Digest must start with 'sha256:' and contain a hex payload.", paramName); + } + + if (!HexRegex().IsMatch(normalized.AsSpan(7))) + { + throw new ArgumentException("Digest must be hexadecimal.", paramName); + } + + return normalized; + } + + public static string EnsureNotNullOrWhiteSpace(string value, string paramName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value cannot be null or whitespace.", paramName); + } + + return value.Trim(); + } + + public static DateTimeOffset NormalizeTimestamp(DateTimeOffset value) + => value.ToUniversalTime(); + + public static DateTimeOffset? NormalizeTimestamp(DateTimeOffset? 
value) + => value?.ToUniversalTime(); + + [GeneratedRegex("^[A-Za-z0-9_-]+$")] + private static partial Regex TenantRegex(); + + [GeneratedRegex("^[A-Za-z0-9_./:@+\\-]+$")] + private static partial Regex SimpleIdentifierRegex(); + + [GeneratedRegex("^[A-Za-z0-9:*?/_.,\\- ]+$")] + private static partial Regex CronSegmentRegex(); + + [GeneratedRegex("^[a-f0-9]+$", RegexOptions.IgnoreCase)] + private static partial Regex HexRegex(); +} diff --git a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md b/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md new file mode 100644 index 00000000..c4760152 --- /dev/null +++ b/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md @@ -0,0 +1,80 @@ +# SCHED-MODELS-16-103 — Scheduler Schema Versioning & Run State Helpers + +## Goals +- Track schema revisions for `Schedule` and `Run` documents so storage upgrades are deterministic across air-gapped installs. +- Provide reusable upgrade helpers that normalize Mongo snapshots (raw BSON → JSON) into the latest DTOs without mutating inputs. +- Formalize the allowed `RunState` graph and surface guard-rail helpers (timestamps, stats monotonicity) for planners/runners. + +## Non-goals +- Implementing the helpers (covered by the main task). +- Downgrading documents to legacy schema revisions (can be added if Offline Kit requires it). +- Persisted data backfills or data migration jobs; we focus on in-process upgrades during read. + +## Schema Version Strategy +- Introduce `SchedulerSchemaVersions` constants: + - `scheduler.schedule@1` (base record with subscribers, limits burst default). + - `scheduler.run@1` (run metadata + delta summaries). + - `scheduler.impact-set@1` (shared envelope used by planners). +- Expose `EnsureSchedule`, `EnsureRun`, `EnsureImpactSet` helpers mirroring the Notify model pattern to normalize missing/whitespace values. +- Extend `Schedule`, `Run`, and `ImpactSet` records with an optional `schemaVersion` constructor parameter defaulting through the `Ensure*` helpers. The canonical JSON serializer will list `schemaVersion` first so documents round-trip deterministically. +- Persisted Mongo documents will now always include `schemaVersion`; exporters/backups can rely on this when bundling Offline Kit snapshots. + +## Migration Helper Shape +- Add `SchedulerSchemaMigration` static class with: + - `Schedule UpgradeSchedule(JsonNode document)` + - `Run UpgradeRun(JsonNode document)` + - `ImpactSet UpgradeImpactSet(JsonNode document)` +- Each method clones the incoming node, normalizes `schemaVersion` (injecting default if missing), then applies an upgrade pipeline: + 1. `Normalize` — ensure object, strip unknown members when `strict` flag is set, coerce enums via converters. + 2. `ApplyLegacyFixups` — version-specific patches, e.g., backfill `subscribers`, migrate `limits.burst`, convert legacy trigger strings. + 3. `Deserialize` — use `CanonicalJsonSerializer.Deserialize` so property order/enum parsing stays centralized. +- Expose `SchedulerSchemaMigrationResult` record returning `(T Value, string FromVersion, string ToVersion, ImmutableArray Warnings)` to surface non-blocking issues to callers (web service, worker, storage). +- Helpers remain dependency-free so storage/web modules can reference them without circular dependencies. + +## Schedule Evolution Considerations +- **@1** fields: `mode`, `selection`, `onlyIf`, `notify`, `limits` (incl. `burst` default 0), `subscribers` (sorted unique), audit metadata. 
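+
+A minimal usage sketch for the @1 upgrade path described above (the Mongo read, `rawScheduleJson`, and `ILogger` plumbing are illustrative assumptions, not part of this design):
+
+```csharp
+using System.Text.Json.Nodes;
+using Microsoft.Extensions.Logging;
+using StellaOps.Scheduler.Models;
+
+// Hypothetical read-path helper: upgrade a raw schedule document and surface warnings.
+static Schedule ReadSchedule(string rawScheduleJson, ILogger logger)
+{
+    JsonNode? node = JsonNode.Parse(rawScheduleJson);
+    var result = SchedulerSchemaMigration.UpgradeSchedule(node!, strict: true);
+
+    // Strict mode drops unknown members; each removal comes back as a non-blocking warning.
+    foreach (var warning in result.Warnings)
+    {
+        logger.LogWarning(
+            "Schedule migration {FromVersion} -> {ToVersion}: {Warning}",
+            result.FromVersion,
+            result.ToVersion,
+            warning);
+    }
+
+    return result.Value;
+}
+```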
+- Future **@2** candidate changes to plan for in helpers: + - `limits`: splitting `parallelism` into planner/runner concurrency. + - `selection`: adding `impactWindow` semantics. + - `notify`: optional per-channel overrides. +- Upgrade pipeline will carry forward unknown fields in a `JsonNode` bag so future versions can opt-in to strict dropping while maintaining backwards compatibility for current release. + +## Run State Transition Helper +- Introduce `RunStateMachine` (static) encapsulating allowed transitions and invariants. + - Define adjacency map: + - `Planning → {Queued, Cancelled}` + - `Queued → {Running, Cancelled}` + - `Running → {Completed, Error, Cancelled}` + - `Completed`, `Error`, `Cancelled` are terminal. + - Provide `bool CanTransition(RunState from, RunState to)` and `Run EnsureTransition(Run run, RunState next, DateTimeOffset now, Action? mutateStats = null)`. +- `EnsureTransition` performs: + - Timestamp enforcement: `StartedAt` auto-populated on first entry into `Running`; `FinishedAt` set when entering any terminal state; ensures monotonic ordering (`CreatedAt ≤ StartedAt ≤ FinishedAt`). + - Stats guardrails: cumulative counters must not decrease; `RunStatsBuilder` wrapper ensures atomic updates. + - Error context: require `error` message when transitioning to `Error`; clear error for non-error entries. +- Provide `Validate(Run run)` to check invariants for documents loaded from storage before use (e.g., stale snapshots). +- Expose small helper to tag `RunReason.ImpactWindowFrom/To` automatically when set by planners (using normalized ISO-8601). + +## Interaction Points +- **WebService**: call `SchedulerSchemaMigration.UpgradeSchedule` when returning schedules from Mongo, so clients always see the newest DTO regardless of stored version. +- **Storage.Mongo**: wrap DTO round-trips; the migration helper acts during read, and the state machine ensures updates respect transition rules before writing. +- **Queue/Worker**: use `RunStateMachine.EnsureTransition` to guard planner/runner state updates (replace ad-hoc `with run` clones). +- **Offline Kit**: embed `schemaVersion` in exported JSON/Trivy artifacts; migrations ensure air-gapped upgrades flow without manual scripts. + +## Implementation Steps (for follow-up task) +1. Add `SchedulerSchemaVersions` + update DTO constructors/properties. +2. Implement `SchedulerSchemaMigration` helpers and shared `MigrationResult` envelope. +3. Introduce `RunStateMachine` with invariants + supporting `RunStatsBuilder`. +4. Update modules (Storage, WebService, Worker) to use new helpers; add logging around migrations/transitions. + +## Test Strategy +- **Migration happy-path**: load sample Mongo fixtures for `schedule@1` and `run@1`, assert `schemaVersion` normalization, deduplicated subscribers, limits defaults. Include snapshots without the version field to exercise defaulting logic. +- **Legacy upgrade cases**: craft synthetic `schedule@0` / `run@0` JSON fragments (missing new fields, using old enum names) and verify version-specific fixups produce the latest DTO while populating `MigrationResult.Warnings`. +- **Strict mode behavior**: attempt to upgrade documents with unexpected properties and ensure warnings/throws align with configuration. +- **Run state transitions**: unit-test `RunStateMachine` for every allowed edge, invalid transitions, and timestamp/error invariants (e.g., `FinishedAt` only set on terminal states). Provide parameterized tests to confirm stats monotonicity enforcement. 
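+
+A minimal sketch of the proposed adjacency map and `CanTransition` guard (`RunStateMachine` is not implemented in this change; the class shape and `RunState` members follow the proposal above), which those parameterized transition tests could exercise:
+
+```csharp
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using StellaOps.Scheduler.Models;
+
+// Proposed shape only; illustrates the transition graph listed in this document.
+public static class RunStateMachine
+{
+    private static readonly ImmutableDictionary<RunState, ImmutableArray<RunState>> Transitions =
+        new Dictionary<RunState, ImmutableArray<RunState>>
+        {
+            [RunState.Planning] = ImmutableArray.Create(RunState.Queued, RunState.Cancelled),
+            [RunState.Queued] = ImmutableArray.Create(RunState.Running, RunState.Cancelled),
+            [RunState.Running] = ImmutableArray.Create(RunState.Completed, RunState.Error, RunState.Cancelled),
+            [RunState.Completed] = ImmutableArray<RunState>.Empty,  // terminal
+            [RunState.Error] = ImmutableArray<RunState>.Empty,      // terminal
+            [RunState.Cancelled] = ImmutableArray<RunState>.Empty,  // terminal
+        }.ToImmutableDictionary();
+
+    public static bool CanTransition(RunState from, RunState to)
+        => Transitions.TryGetValue(from, out var allowed) && allowed.Contains(to);
+}
+```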
+- **Serialization determinism**: round-trip upgraded DTOs via `CanonicalJsonSerializer` to confirm property order includes `schemaVersion` first and produces stable hashes. +- **Documentation snippets**: extend module README or API docs with example migrations/run-state usage; verify via doc samples test (if available) or include as part of CI doc linting. + +## Open Questions +- Do we need downgrade (`ToVersion`) helpers for Offline Kit exports? (Assumed no for now. Add backlog item if required.) +- Should `ImpactSet` migrations live here or in ImpactIndex module? (Lean towards here because DTO defined in Models; coordinate with ImpactIndex guild if they need specialized upgrades.) +- How do we surface migration warnings to telemetry? Proposal: caller logs `warning` with `MigrationResult.Warnings` immediately after calling helper. diff --git a/src/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs b/src/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs new file mode 100644 index 00000000..1b8fe0ee --- /dev/null +++ b/src/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs @@ -0,0 +1,110 @@ +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using FluentAssertions; +using StellaOps.Scheduler.Models; +using Xunit; + +namespace StellaOps.Scheduler.Queue.Tests; + +public sealed class PlannerAndRunnerMessageTests +{ + [Fact] + public void PlannerMessage_CanonicalSerialization_RoundTrips() + { + var schedule = new Schedule( + id: "sch-tenant-nightly", + tenantId: "tenant-alpha", + name: "Nightly Deltas", + enabled: true, + cronExpression: "0 2 * * *", + timezone: "UTC", + mode: ScheduleMode.AnalysisOnly, + selection: new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"), + onlyIf: new ScheduleOnlyIf(lastReportOlderThanDays: 3), + notify: new ScheduleNotify(onNewFindings: true, SeverityRank.High, includeKev: true), + limits: new ScheduleLimits(maxJobs: 10, ratePerSecond: 5, parallelism: 3), + createdAt: DateTimeOffset.Parse("2025-10-01T02:00:00Z"), + createdBy: "system", + updatedAt: DateTimeOffset.Parse("2025-10-02T02:00:00Z"), + updatedBy: "system", + subscribers: ImmutableArray.Empty, + schemaVersion: "1.0.0"); + + var run = new Run( + id: "run-123", + tenantId: "tenant-alpha", + trigger: RunTrigger.Cron, + state: RunState.Planning, + stats: new RunStats(candidates: 5, deduped: 4, queued: 0, completed: 0, deltas: 0), + createdAt: DateTimeOffset.Parse("2025-10-02T02:05:00Z"), + reason: new RunReason(manualReason: null, feedserExportId: null, vexerExportId: null, cursor: null) + with { ImpactWindowFrom = "2025-10-01T00:00:00Z", ImpactWindowTo = "2025-10-02T00:00:00Z" }, + scheduleId: "sch-tenant-nightly"); + + var impactSet = new ImpactSet( + selector: new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"), + images: new[] + { + new ImpactImage( + imageDigest: "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + registry: "registry", + repository: "repo", + namespaces: new[] { "prod" }, + tags: new[] { "latest" }, + usedByEntrypoint: true, + labels: new[] { KeyValuePair.Create("team", "appsec") }) + }, + usageOnly: true, + generatedAt: DateTimeOffset.Parse("2025-10-02T02:06:00Z"), + total: 1, + snapshotId: "snap-001"); + + var message = new PlannerQueueMessage(run, impactSet, schedule, correlationId: "corr-1"); + + var json = CanonicalJsonSerializer.Serialize(message); + var roundTrip = CanonicalJsonSerializer.Deserialize(json); + + roundTrip.Should().BeEquivalentTo(message, 
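+            // WithStrictOrdering makes collection comparisons order-sensitive, so the round-trip also exercises the deterministic ordering the canonical serializer emits.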
options => options.WithStrictOrdering()); + } + + [Fact] + public void RunnerSegmentMessage_RequiresAtLeastOneDigest() + { + var act = () => new RunnerSegmentQueueMessage( + segmentId: "segment-empty", + runId: "run-123", + tenantId: "tenant-alpha", + imageDigests: Array.Empty()); + + act.Should().Throw(); + } + + [Fact] + public void RunnerSegmentMessage_CanonicalSerialization_RoundTrips() + { + var message = new RunnerSegmentQueueMessage( + segmentId: "segment-01", + runId: "run-123", + tenantId: "tenant-alpha", + imageDigests: new[] + { + "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + }, + scheduleId: "sch-tenant-nightly", + ratePerSecond: 25, + usageOnly: true, + attributes: new Dictionary + { + ["plannerShard"] = "0", + ["priority"] = "kev" + }, + correlationId: "corr-2"); + + var json = CanonicalJsonSerializer.Serialize(message); + var roundTrip = CanonicalJsonSerializer.Deserialize(json); + + roundTrip.Should().BeEquivalentTo(message, options => options.WithStrictOrdering()); + } +} diff --git a/src/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs b/src/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs new file mode 100644 index 00000000..85f1545b --- /dev/null +++ b/src/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs @@ -0,0 +1,269 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using DotNet.Testcontainers.Configurations; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StackExchange.Redis; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Queue.Redis; +using Xunit; + +namespace StellaOps.Scheduler.Queue.Tests; + +public sealed class RedisSchedulerQueueTests : IAsyncLifetime +{ + private readonly RedisTestcontainer _redis; + private string? 
_skipReason; + + public RedisSchedulerQueueTests() + { + var configuration = new RedisTestcontainerConfiguration(); + + _redis = new TestcontainersBuilder() + .WithDatabase(configuration) + .Build(); + } + + public async Task InitializeAsync() + { + try + { + await _redis.StartAsync(); + } + catch (Exception ex) when (IsDockerUnavailable(ex)) + { + _skipReason = $"Docker engine is not available for Redis-backed tests: {ex.Message}"; + } + } + + public async Task DisposeAsync() + { + if (_skipReason is not null) + { + return; + } + + await _redis.DisposeAsync().AsTask(); + } + + [Fact] + public async Task PlannerQueue_EnqueueLeaseAck_RemovesMessage() + { + SkipIfUnavailable(); + + var options = CreateOptions(); + + await using var queue = new RedisSchedulerPlannerQueue( + options, + options.Redis, + NullLogger.Instance, + TimeProvider.System, + async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); + + var message = TestData.CreatePlannerMessage(); + + var enqueue = await queue.EnqueueAsync(message); + enqueue.Deduplicated.Should().BeFalse(); + + var leases = await queue.LeaseAsync(new SchedulerQueueLeaseRequest("planner-1", batchSize: 5, options.DefaultLeaseDuration)); + leases.Should().HaveCount(1); + + var lease = leases[0]; + lease.Message.Run.Id.Should().Be(message.Run.Id); + lease.TenantId.Should().Be(message.TenantId); + lease.ScheduleId.Should().Be(message.ScheduleId); + + await lease.AcknowledgeAsync(); + + var afterAck = await queue.LeaseAsync(new SchedulerQueueLeaseRequest("planner-1", 5, options.DefaultLeaseDuration)); + afterAck.Should().BeEmpty(); + } + + [Fact] + public async Task RunnerQueue_Retry_IncrementsDeliveryAttempt() + { + SkipIfUnavailable(); + + var options = CreateOptions(); + options.RetryInitialBackoff = TimeSpan.Zero; + options.RetryMaxBackoff = TimeSpan.Zero; + + await using var queue = new RedisSchedulerRunnerQueue( + options, + options.Redis, + NullLogger.Instance, + TimeProvider.System, + async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); + + var message = TestData.CreateRunnerMessage(); + + await queue.EnqueueAsync(message); + + var firstLease = await queue.LeaseAsync(new SchedulerQueueLeaseRequest("runner-1", batchSize: 1, options.DefaultLeaseDuration)); + firstLease.Should().ContainSingle(); + + var lease = firstLease[0]; + lease.Attempt.Should().Be(1); + + await lease.ReleaseAsync(SchedulerQueueReleaseDisposition.Retry); + + var secondLease = await queue.LeaseAsync(new SchedulerQueueLeaseRequest("runner-1", batchSize: 1, options.DefaultLeaseDuration)); + secondLease.Should().ContainSingle(); + secondLease[0].Attempt.Should().Be(2); + } + + [Fact] + public async Task PlannerQueue_ClaimExpired_ReassignsLease() + { + SkipIfUnavailable(); + + var options = CreateOptions(); + + await using var queue = new RedisSchedulerPlannerQueue( + options, + options.Redis, + NullLogger.Instance, + TimeProvider.System, + async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); + + var message = TestData.CreatePlannerMessage(); + await queue.EnqueueAsync(message); + + var leases = await queue.LeaseAsync(new SchedulerQueueLeaseRequest("planner-a", 1, options.DefaultLeaseDuration)); + leases.Should().ContainSingle(); + + await Task.Delay(50); + + var reclaimed = await queue.ClaimExpiredAsync(new SchedulerQueueClaimOptions("planner-b", batchSize: 1, minIdleTime: TimeSpan.Zero)); + 
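+        // With minIdleTime set to zero, the short delay above is enough for the pending entry to become eligible for reassignment to "planner-b".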
reclaimed.Should().ContainSingle(); + reclaimed[0].Consumer.Should().Be("planner-b"); + reclaimed[0].RunId.Should().Be(message.Run.Id); + + await reclaimed[0].AcknowledgeAsync(); + } + + private SchedulerQueueOptions CreateOptions() + { + var unique = Guid.NewGuid().ToString("N"); + + return new SchedulerQueueOptions + { + Kind = SchedulerQueueTransportKind.Redis, + DefaultLeaseDuration = TimeSpan.FromSeconds(2), + MaxDeliveryAttempts = 5, + RetryInitialBackoff = TimeSpan.FromMilliseconds(10), + RetryMaxBackoff = TimeSpan.FromMilliseconds(50), + Redis = new SchedulerRedisQueueOptions + { + ConnectionString = _redis.ConnectionString, + Database = 0, + InitializationTimeout = TimeSpan.FromSeconds(10), + Planner = new RedisSchedulerStreamOptions + { + Stream = $"scheduler:test:planner:{unique}", + ConsumerGroup = $"planner-consumers-{unique}", + DeadLetterStream = $"scheduler:test:planner:{unique}:dead", + IdempotencyKeyPrefix = $"scheduler:test:planner:{unique}:idemp:", + IdempotencyWindow = TimeSpan.FromMinutes(5) + }, + Runner = new RedisSchedulerStreamOptions + { + Stream = $"scheduler:test:runner:{unique}", + ConsumerGroup = $"runner-consumers-{unique}", + DeadLetterStream = $"scheduler:test:runner:{unique}:dead", + IdempotencyKeyPrefix = $"scheduler:test:runner:{unique}:idemp:", + IdempotencyWindow = TimeSpan.FromMinutes(5) + } + } + }; + } + + private void SkipIfUnavailable() + { + if (_skipReason is not null) + { + Skip.If(true, _skipReason); + } + } + + private static bool IsDockerUnavailable(Exception exception) + { + while (exception is AggregateException aggregate && aggregate.InnerException is not null) + { + exception = aggregate.InnerException; + } + + return exception is TimeoutException + || exception.GetType().Name.Contains("Docker", StringComparison.OrdinalIgnoreCase); + } + + private static class TestData + { + public static PlannerQueueMessage CreatePlannerMessage() + { + var schedule = new Schedule( + id: "sch-test", + tenantId: "tenant-alpha", + name: "Test", + enabled: true, + cronExpression: "0 0 * * *", + timezone: "UTC", + mode: ScheduleMode.AnalysisOnly, + selection: new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"), + onlyIf: ScheduleOnlyIf.Default, + notify: ScheduleNotify.Default, + limits: ScheduleLimits.Default, + createdAt: DateTimeOffset.UtcNow, + createdBy: "tests", + updatedAt: DateTimeOffset.UtcNow, + updatedBy: "tests"); + + var run = new Run( + id: "run-test", + tenantId: "tenant-alpha", + trigger: RunTrigger.Manual, + state: RunState.Planning, + stats: RunStats.Empty, + createdAt: DateTimeOffset.UtcNow, + reason: RunReason.Empty, + scheduleId: schedule.Id); + + var impactSet = new ImpactSet( + selector: new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"), + images: new[] + { + new ImpactImage( + imageDigest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + registry: "registry", + repository: "repo", + namespaces: new[] { "prod" }, + tags: new[] { "latest" }) + }, + usageOnly: true, + generatedAt: DateTimeOffset.UtcNow, + total: 1); + + return new PlannerQueueMessage(run, impactSet, schedule, correlationId: "corr-test"); + } + + public static RunnerSegmentQueueMessage CreateRunnerMessage() + { + return new RunnerSegmentQueueMessage( + segmentId: "segment-test", + runId: "run-test", + tenantId: "tenant-alpha", + imageDigests: new[] + { + "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + }, + scheduleId: "sch-test", + ratePerSecond: 10, + usageOnly: true, + attributes: new 
Dictionary { ["priority"] = "kev" }, + correlationId: "corr-runner"); + } + } +} diff --git a/src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj b/src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj new file mode 100644 index 00000000..7b33942f --- /dev/null +++ b/src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj @@ -0,0 +1,25 @@ + + + net10.0 + enable + enable + false + false + + + + + + + + all + + + all + + + + + + + diff --git a/src/StellaOps.Scheduler.Queue/AssemblyInfo.cs b/src/StellaOps.Scheduler.Queue/AssemblyInfo.cs new file mode 100644 index 00000000..90d86929 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scheduler.Queue.Tests")] diff --git a/src/StellaOps.Scheduler.Queue/README.md b/src/StellaOps.Scheduler.Queue/README.md new file mode 100644 index 00000000..c265f40f --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/README.md @@ -0,0 +1,16 @@ +# Scheduler Queue — Sprint 16 Coordination Notes + +Queue work now has concrete contracts from `StellaOps.Scheduler.Models`: + +* Planner inputs reference `Schedule` and `ImpactSet` samples (`samples/api/scheduler/`). +* Runner segment payloads should carry `runId`, `scheduleId?`, `tenantId`, and the impacted digest list (mirrors `Run.Deltas`). +* Notify fanout relies on the `DeltaSummary` shape already emitted by the model layer. + +## Action items for SCHED-QUEUE-16-401..403 + +1. Reference `StellaOps.Scheduler.Models` so adapters can serialise `Run`/`DeltaSummary` without bespoke DTOs. +2. Use the canonical serializer for queue messages to keep ordering consistent with API payloads. +3. Coverage: add fixture-driven tests that enqueue the sample payloads, then dequeue and re-serialise to verify byte-for-byte stability. +4. Expose queue depth/lease metrics with the identifiers provided by the models (`Run.Id`, `Schedule.Id`). + +These notes unblock the queue guild now that SCHED-MODELS-16-102 is complete. diff --git a/src/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs b/src/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs new file mode 100644 index 00000000..4c940522 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; + +namespace StellaOps.Scheduler.Queue.Redis; + +internal interface IRedisSchedulerQueuePayload +{ + string QueueName { get; } + + string GetIdempotencyKey(TMessage message); + + string Serialize(TMessage message); + + TMessage Deserialize(string payload); + + string GetRunId(TMessage message); + + string GetTenantId(TMessage message); + + string? GetScheduleId(TMessage message); + + string? GetSegmentId(TMessage message); + + string? GetCorrelationId(TMessage message); + + IReadOnlyDictionary? 
GetAttributes(TMessage message); +} diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs new file mode 100644 index 00000000..910e2749 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Queue.Redis; + +internal sealed class RedisSchedulerPlannerQueue + : RedisSchedulerQueueBase, ISchedulerPlannerQueue +{ + public RedisSchedulerPlannerQueue( + SchedulerQueueOptions queueOptions, + SchedulerRedisQueueOptions redisOptions, + ILogger logger, + TimeProvider timeProvider, + Func>? connectionFactory = null) + : base( + queueOptions, + redisOptions, + redisOptions.Planner, + PlannerPayload.Instance, + logger, + timeProvider, + connectionFactory) + { + } + + private sealed class PlannerPayload : IRedisSchedulerQueuePayload + { + public static PlannerPayload Instance { get; } = new(); + + public string QueueName => "planner"; + + public string GetIdempotencyKey(PlannerQueueMessage message) + => message.IdempotencyKey; + + public string Serialize(PlannerQueueMessage message) + => CanonicalJsonSerializer.Serialize(message); + + public PlannerQueueMessage Deserialize(string payload) + => CanonicalJsonSerializer.Deserialize(payload); + + public string GetRunId(PlannerQueueMessage message) + => message.Run.Id; + + public string GetTenantId(PlannerQueueMessage message) + => message.Run.TenantId; + + public string? GetScheduleId(PlannerQueueMessage message) + => message.ScheduleId; + + public string? GetSegmentId(PlannerQueueMessage message) + => null; + + public string? GetCorrelationId(PlannerQueueMessage message) + => message.CorrelationId; + + public IReadOnlyDictionary? GetAttributes(PlannerQueueMessage message) + => null; + } +} diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs new file mode 100644 index 00000000..62e8f64c --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs @@ -0,0 +1,758 @@ +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; + +namespace StellaOps.Scheduler.Queue.Redis; + +internal abstract class RedisSchedulerQueueBase : ISchedulerQueue, IAsyncDisposable +{ + private const string TransportName = "redis"; + + private readonly SchedulerQueueOptions _queueOptions; + private readonly SchedulerRedisQueueOptions _redisOptions; + private readonly RedisSchedulerStreamOptions _streamOptions; + private readonly IRedisSchedulerQueuePayload _payload; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly Func> _connectionFactory; + private readonly SemaphoreSlim _connectionLock = new(1, 1); + private readonly SemaphoreSlim _groupInitLock = new(1, 1); + + private IConnectionMultiplexer? 
_connection; + private volatile bool _groupInitialized; + private bool _disposed; + + protected RedisSchedulerQueueBase( + SchedulerQueueOptions queueOptions, + SchedulerRedisQueueOptions redisOptions, + RedisSchedulerStreamOptions streamOptions, + IRedisSchedulerQueuePayload payload, + ILogger logger, + TimeProvider timeProvider, + Func>? connectionFactory = null) + { + _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); + _redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions)); + _streamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions)); + _payload = payload ?? throw new ArgumentNullException(nameof(payload)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _connectionFactory = connectionFactory ?? (config => Task.FromResult(ConnectionMultiplexer.Connect(config))); + + if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString)) + { + throw new InvalidOperationException("Redis connection string must be configured for the scheduler queue."); + } + + if (string.IsNullOrWhiteSpace(_streamOptions.Stream)) + { + throw new InvalidOperationException("Redis stream name must be configured for the scheduler queue."); + } + + if (string.IsNullOrWhiteSpace(_streamOptions.ConsumerGroup)) + { + throw new InvalidOperationException("Redis consumer group must be configured for the scheduler queue."); + } + } + + public async ValueTask EnqueueAsync( + TMessage message, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + cancellationToken.ThrowIfCancellationRequested(); + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false); + + var now = _timeProvider.GetUtcNow(); + var attempt = 1; + var entries = BuildEntries(message, now, attempt); + + var messageId = await AddToStreamAsync( + database, + _streamOptions.Stream, + entries, + _streamOptions.ApproximateMaxLength, + _streamOptions.ApproximateMaxLength is not null) + .ConfigureAwait(false); + + var idempotencyKey = BuildIdempotencyKey(_payload.GetIdempotencyKey(message)); + var stored = await database.StringSetAsync( + idempotencyKey, + messageId, + when: When.NotExists, + expiry: _streamOptions.IdempotencyWindow) + .ConfigureAwait(false); + + if (!stored) + { + await database.StreamDeleteAsync(_streamOptions.Stream, new RedisValue[] { messageId }).ConfigureAwait(false); + + var existing = await database.StringGetAsync(idempotencyKey).ConfigureAwait(false); + var reusable = existing.IsNullOrEmpty ? 
messageId : existing; + + SchedulerQueueMetrics.RecordDeduplicated(TransportName, _payload.QueueName); + _logger.LogDebug( + "Duplicate enqueue detected for scheduler queue {Queue} with key {Key}; returning existing stream id {StreamId}.", + _payload.QueueName, + idempotencyKey, + reusable.ToString()); + + return new SchedulerQueueEnqueueResult(reusable.ToString(), true); + } + + SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName); + _logger.LogDebug( + "Enqueued {Queue} message into {Stream} with id {StreamId}.", + _payload.QueueName, + _streamOptions.Stream, + messageId.ToString()); + + return new SchedulerQueueEnqueueResult(messageId.ToString(), false); + } + + public async ValueTask>> LeaseAsync( + SchedulerQueueLeaseRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + cancellationToken.ThrowIfCancellationRequested(); + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false); + + var entries = await database.StreamReadGroupAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + request.Consumer, + position: ">", + count: request.BatchSize, + flags: CommandFlags.None) + .ConfigureAwait(false); + + if (entries is null || entries.Length == 0) + { + return Array.Empty>(); + } + + var now = _timeProvider.GetUtcNow(); + var leases = new List>(entries.Length); + + foreach (var entry in entries) + { + var lease = TryMapLease(entry, request.Consumer, now, request.LeaseDuration, attemptOverride: null); + if (lease is null) + { + await HandlePoisonEntryAsync(database, entry.Id).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask>> ClaimExpiredAsync( + SchedulerQueueClaimOptions options, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false); + + var pending = await database.StreamPendingMessagesAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + options.BatchSize, + RedisValue.Null, + (long)options.MinIdleTime.TotalMilliseconds) + .ConfigureAwait(false); + + if (pending is null || pending.Length == 0) + { + return Array.Empty>(); + } + + var eligible = pending + .Where(info => info.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds) + .ToArray(); + + if (eligible.Length == 0) + { + return Array.Empty>(); + } + + var messageIds = eligible + .Select(info => (RedisValue)info.MessageId) + .ToArray(); + + var claimed = await database.StreamClaimAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + options.ClaimantConsumer, + 0, + messageIds, + CommandFlags.None) + .ConfigureAwait(false); + + if (claimed is null || claimed.Length == 0) + { + return Array.Empty>(); + } + + var now = _timeProvider.GetUtcNow(); + var attemptLookup = eligible.ToDictionary( + info => info.MessageId.IsNullOrEmpty ? 
string.Empty : info.MessageId.ToString(), + info => (int)Math.Max(1, info.DeliveryCount), + StringComparer.Ordinal); + + var leases = new List>(claimed.Length); + foreach (var entry in claimed) + { + var entryId = entry.Id.ToString(); + attemptLookup.TryGetValue(entryId, out var attempt); + + var lease = TryMapLease( + entry, + options.ClaimantConsumer, + now, + _queueOptions.DefaultLeaseDuration, + attemptOverride: attempt); + + if (lease is null) + { + await HandlePoisonEntryAsync(database, entry.Id).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + if (_connection is not null) + { + await _connection.CloseAsync(); + _connection.Dispose(); + } + + _connectionLock.Dispose(); + _groupInitLock.Dispose(); + GC.SuppressFinalize(this); + } + + internal async Task AcknowledgeAsync( + RedisSchedulerQueueLease lease, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await database.StreamAcknowledgeAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await database.StreamDeleteAsync( + _streamOptions.Stream, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); + } + + internal async Task RenewLeaseAsync( + RedisSchedulerQueueLease lease, + TimeSpan leaseDuration, + CancellationToken cancellationToken) + { + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await database.StreamClaimAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + lease.Consumer, + 0, + new RedisValue[] { lease.MessageId }, + CommandFlags.None) + .ConfigureAwait(false); + + var expires = _timeProvider.GetUtcNow().Add(leaseDuration); + lease.RefreshLease(expires); + } + + internal async Task ReleaseAsync( + RedisSchedulerQueueLease lease, + SchedulerQueueReleaseDisposition disposition, + CancellationToken cancellationToken) + { + if (disposition == SchedulerQueueReleaseDisposition.Retry + && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) + { + await DeadLetterAsync( + lease, + $"max-delivery-attempts:{lease.Attempt}", + cancellationToken).ConfigureAwait(false); + return; + } + + if (!lease.TryBeginCompletion()) + { + return; + } + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await database.StreamAcknowledgeAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await database.StreamDeleteAsync( + _streamOptions.Stream, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); + + if (disposition == SchedulerQueueReleaseDisposition.Retry) + { + SchedulerQueueMetrics.RecordRetry(TransportName, _payload.QueueName); + + lease.IncrementAttempt(); + + var backoff = CalculateBackoff(lease.Attempt); + if (backoff > TimeSpan.Zero) + { + try + { + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + return; + } + } + + var now = _timeProvider.GetUtcNow(); + var entries = BuildEntries(lease.Message, now, lease.Attempt); + + await AddToStreamAsync( + database, + _streamOptions.Stream, + entries, + 
_streamOptions.ApproximateMaxLength, + _streamOptions.ApproximateMaxLength is not null) + .ConfigureAwait(false); + + SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName); + } + } + + internal async Task DeadLetterAsync( + RedisSchedulerQueueLease lease, + string reason, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await database.StreamAcknowledgeAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await database.StreamDeleteAsync( + _streamOptions.Stream, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + var now = _timeProvider.GetUtcNow(); + var entries = BuildEntries(lease.Message, now, lease.Attempt); + + await AddToStreamAsync( + database, + _streamOptions.DeadLetterStream, + entries, + null, + false) + .ConfigureAwait(false); + + SchedulerQueueMetrics.RecordDeadLetter(TransportName, _payload.QueueName); + _logger.LogError( + "Dead-lettered {Queue} message {MessageId} after {Attempt} attempt(s): {Reason}", + _payload.QueueName, + lease.MessageId, + lease.Attempt, + reason); + } + + internal async ValueTask PingAsync(CancellationToken cancellationToken) + { + var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await database.ExecuteAsync("PING").ConfigureAwait(false); + } + + private string BuildIdempotencyKey(string key) + => string.Concat(_streamOptions.IdempotencyKeyPrefix, key); + + private TimeSpan CalculateBackoff(int attempt) + { + if (attempt <= 1) + { + return _queueOptions.RetryInitialBackoff > TimeSpan.Zero + ? _queueOptions.RetryInitialBackoff + : TimeSpan.Zero; + } + + var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero + ? _queueOptions.RetryInitialBackoff + : TimeSpan.Zero; + + if (initial <= TimeSpan.Zero) + { + return TimeSpan.Zero; + } + + var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero + ? _queueOptions.RetryMaxBackoff + : initial; + + var exponent = attempt - 1; + var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); + var cappedTicks = Math.Min(max.Ticks, scaledTicks); + + return TimeSpan.FromTicks((long)Math.Max(initial.Ticks, cappedTicks)); + } + + private async ValueTask GetDatabaseAsync(CancellationToken cancellationToken) + { + if (_connection is not null) + { + return _connection.GetDatabase(_redisOptions.Database ?? -1); + } + + await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is null) + { + var config = ConfigurationOptions.Parse(_redisOptions.ConnectionString!); + config.AbortOnConnectFail = false; + config.ConnectTimeout = (int)_redisOptions.InitializationTimeout.TotalMilliseconds; + config.ConnectRetry = 3; + + if (_redisOptions.Database is not null) + { + config.DefaultDatabase = _redisOptions.Database; + } + + _connection = await _connectionFactory(config).ConfigureAwait(false); + } + } + finally + { + _connectionLock.Release(); + } + + return _connection.GetDatabase(_redisOptions.Database ?? 
-1); + } + + private async Task EnsureConsumerGroupAsync( + IDatabase database, + CancellationToken cancellationToken) + { + if (_groupInitialized) + { + return; + } + + await _groupInitLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_groupInitialized) + { + return; + } + + try + { + await database.StreamCreateConsumerGroupAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + StreamPosition.Beginning, + createStream: true) + .ConfigureAwait(false); + } + catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) + { + // Group already exists. + } + + _groupInitialized = true; + } + finally + { + _groupInitLock.Release(); + } + } + + private NameValueEntry[] BuildEntries( + TMessage message, + DateTimeOffset enqueuedAt, + int attempt) + { + var attributes = _payload.GetAttributes(message); + var attributeCount = attributes?.Count ?? 0; + var entries = ArrayPool.Shared.Rent(10 + attributeCount); + var index = 0; + + entries[index++] = new NameValueEntry(SchedulerQueueFields.QueueKind, _payload.QueueName); + entries[index++] = new NameValueEntry(SchedulerQueueFields.RunId, _payload.GetRunId(message)); + entries[index++] = new NameValueEntry(SchedulerQueueFields.TenantId, _payload.GetTenantId(message)); + + var scheduleId = _payload.GetScheduleId(message); + if (!string.IsNullOrWhiteSpace(scheduleId)) + { + entries[index++] = new NameValueEntry(SchedulerQueueFields.ScheduleId, scheduleId); + } + + var segmentId = _payload.GetSegmentId(message); + if (!string.IsNullOrWhiteSpace(segmentId)) + { + entries[index++] = new NameValueEntry(SchedulerQueueFields.SegmentId, segmentId); + } + + var correlationId = _payload.GetCorrelationId(message); + if (!string.IsNullOrWhiteSpace(correlationId)) + { + entries[index++] = new NameValueEntry(SchedulerQueueFields.CorrelationId, correlationId); + } + + entries[index++] = new NameValueEntry(SchedulerQueueFields.IdempotencyKey, _payload.GetIdempotencyKey(message)); + entries[index++] = new NameValueEntry(SchedulerQueueFields.Attempt, attempt); + entries[index++] = new NameValueEntry(SchedulerQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds()); + entries[index++] = new NameValueEntry(SchedulerQueueFields.Payload, _payload.Serialize(message)); + + if (attributeCount > 0 && attributes is not null) + { + foreach (var kvp in attributes) + { + entries[index++] = new NameValueEntry( + SchedulerQueueFields.AttributePrefix + kvp.Key, + kvp.Value); + } + } + + var result = entries.AsSpan(0, index).ToArray(); + ArrayPool.Shared.Return(entries, clearArray: true); + return result; + } + + private RedisSchedulerQueueLease? TryMapLease( + StreamEntry entry, + string consumer, + DateTimeOffset now, + TimeSpan leaseDuration, + int? attemptOverride) + { + if (entry.Values is null || entry.Values.Length == 0) + { + return null; + } + + string? payload = null; + string? runId = null; + string? tenantId = null; + string? scheduleId = null; + string? segmentId = null; + string? correlationId = null; + string? idempotencyKey = null; + long? enqueuedAtUnix = null; + var attempt = attemptOverride ?? 
1; + var attributes = new Dictionary(StringComparer.Ordinal); + + foreach (var field in entry.Values) + { + var name = field.Name.ToString(); + var value = field.Value; + + if (name.Equals(SchedulerQueueFields.Payload, StringComparison.Ordinal)) + { + payload = value.ToString(); + } + else if (name.Equals(SchedulerQueueFields.RunId, StringComparison.Ordinal)) + { + runId = value.ToString(); + } + else if (name.Equals(SchedulerQueueFields.TenantId, StringComparison.Ordinal)) + { + tenantId = value.ToString(); + } + else if (name.Equals(SchedulerQueueFields.ScheduleId, StringComparison.Ordinal)) + { + scheduleId = NormalizeOptional(value.ToString()); + } + else if (name.Equals(SchedulerQueueFields.SegmentId, StringComparison.Ordinal)) + { + segmentId = NormalizeOptional(value.ToString()); + } + else if (name.Equals(SchedulerQueueFields.CorrelationId, StringComparison.Ordinal)) + { + correlationId = NormalizeOptional(value.ToString()); + } + else if (name.Equals(SchedulerQueueFields.IdempotencyKey, StringComparison.Ordinal)) + { + idempotencyKey = value.ToString(); + } + else if (name.Equals(SchedulerQueueFields.EnqueuedAt, StringComparison.Ordinal)) + { + if (long.TryParse(value.ToString(), out var unixMs)) + { + enqueuedAtUnix = unixMs; + } + } + else if (name.Equals(SchedulerQueueFields.Attempt, StringComparison.Ordinal)) + { + if (int.TryParse(value.ToString(), out var parsedAttempt)) + { + attempt = attemptOverride.HasValue + ? Math.Max(attemptOverride.Value, parsedAttempt) + : Math.Max(1, parsedAttempt); + } + } + else if (name.StartsWith(SchedulerQueueFields.AttributePrefix, StringComparison.Ordinal)) + { + var key = name[SchedulerQueueFields.AttributePrefix.Length..]; + attributes[key] = value.ToString(); + } + } + + if (payload is null || runId is null || tenantId is null || enqueuedAtUnix is null || idempotencyKey is null) + { + return null; + } + + var message = _payload.Deserialize(payload); + var enqueuedAt = DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value); + var leaseExpires = now.Add(leaseDuration); + + IReadOnlyDictionary attributeView = attributes.Count == 0 + ? EmptyReadOnlyDictionary.Instance + : new ReadOnlyDictionary(attributes); + + return new RedisSchedulerQueueLease( + this, + entry.Id.ToString(), + idempotencyKey, + runId, + tenantId, + scheduleId, + segmentId, + correlationId, + attributeView, + message, + attempt, + enqueuedAt, + leaseExpires, + consumer); + } + + private async Task HandlePoisonEntryAsync(IDatabase database, RedisValue entryId) + { + await database.StreamAcknowledgeAsync( + _streamOptions.Stream, + _streamOptions.ConsumerGroup, + new RedisValue[] { entryId }) + .ConfigureAwait(false); + + await database.StreamDeleteAsync( + _streamOptions.Stream, + new RedisValue[] { entryId }) + .ConfigureAwait(false); + } + + private async Task AddToStreamAsync( + IDatabase database, + RedisKey stream, + NameValueEntry[] entries, + int? maxLength, + bool useApproximateLength) + { + var capacity = 4 + (entries.Length * 2); + var args = new List(capacity) + { + stream + }; + + if (maxLength.HasValue) + { + args.Add("MAXLEN"); + if (useApproximateLength) + { + args.Add("~"); + } + + args.Add(maxLength.Value); + } + + args.Add("*"); + + for (var i = 0; i < entries.Length; i++) + { + args.Add(entries[i].Name); + args.Add(entries[i].Value); + } + + var result = await database.ExecuteAsync("XADD", args.ToArray()).ConfigureAwait(false); + return (RedisValue)result!; + } + + private static string? NormalizeOptional(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value; + } + + private sealed class EmptyReadOnlyDictionary + where TKey : notnull + { + public static readonly IReadOnlyDictionary Instance = + new ReadOnlyDictionary(new Dictionary(0, EqualityComparer.Default)); + } +} diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs new file mode 100644 index 00000000..67c6283c --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs @@ -0,0 +1,91 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Scheduler.Queue.Redis; + +internal sealed class RedisSchedulerQueueLease : ISchedulerQueueLease +{ + private readonly RedisSchedulerQueueBase _queue; + private int _completed; + + internal RedisSchedulerQueueLease( + RedisSchedulerQueueBase queue, + string messageId, + string idempotencyKey, + string runId, + string tenantId, + string? scheduleId, + string? segmentId, + string? correlationId, + IReadOnlyDictionary attributes, + TMessage message, + int attempt, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt, + string consumer) + { + _queue = queue; + MessageId = messageId; + IdempotencyKey = idempotencyKey; + RunId = runId; + TenantId = tenantId; + ScheduleId = scheduleId; + SegmentId = segmentId; + CorrelationId = correlationId; + Attributes = attributes; + Message = message; + Attempt = attempt; + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + Consumer = consumer; + } + + public string MessageId { get; } + + public string IdempotencyKey { get; } + + public string RunId { get; } + + public string TenantId { get; } + + public string? ScheduleId { get; } + + public string? SegmentId { get; } + + public string? 
CorrelationId { get; } + + public IReadOnlyDictionary Attributes { get; } + + public TMessage Message { get; } + + public int Attempt { get; private set; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; + + internal void IncrementAttempt() + => Attempt++; +} diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs new file mode 100644 index 00000000..d8bef315 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Queue.Redis; + +internal sealed class RedisSchedulerRunnerQueue + : RedisSchedulerQueueBase, ISchedulerRunnerQueue +{ + public RedisSchedulerRunnerQueue( + SchedulerQueueOptions queueOptions, + SchedulerRedisQueueOptions redisOptions, + ILogger logger, + TimeProvider timeProvider, + Func>? connectionFactory = null) + : base( + queueOptions, + redisOptions, + redisOptions.Runner, + RunnerPayload.Instance, + logger, + timeProvider, + connectionFactory) + { + } + + private sealed class RunnerPayload : IRedisSchedulerQueuePayload + { + public static RunnerPayload Instance { get; } = new(); + + public string QueueName => "runner"; + + public string GetIdempotencyKey(RunnerSegmentQueueMessage message) + => message.IdempotencyKey; + + public string Serialize(RunnerSegmentQueueMessage message) + => CanonicalJsonSerializer.Serialize(message); + + public RunnerSegmentQueueMessage Deserialize(string payload) + => CanonicalJsonSerializer.Deserialize(payload); + + public string GetRunId(RunnerSegmentQueueMessage message) + => message.RunId; + + public string GetTenantId(RunnerSegmentQueueMessage message) + => message.TenantId; + + public string? GetScheduleId(RunnerSegmentQueueMessage message) + => message.ScheduleId; + + public string? GetSegmentId(RunnerSegmentQueueMessage message) + => message.SegmentId; + + public string? GetCorrelationId(RunnerSegmentQueueMessage message) + => message.CorrelationId; + + public IReadOnlyDictionary? GetAttributes(RunnerSegmentQueueMessage message) + { + if (message.Attributes.Count == 0 && message.ImageDigests.Count == 0) + { + return null; + } + + // Ensure digests remain accessible without deserializing the entire payload. 
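+            // Note: BuildEntries persists these attributes on the stream entry under the "attr:" field prefix and TryMapLease surfaces them on the lease, so consumers can read the digest count and rate hints without deserializing the payload JSON.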
+ var map = new Dictionary(message.Attributes, StringComparer.Ordinal); + map["imageDigestCount"] = message.ImageDigests.Count.ToString(); + + // populate first few digests for quick inspection (bounded) + var take = Math.Min(message.ImageDigests.Count, 5); + for (var i = 0; i < take; i++) + { + map[$"digest{i}"] = message.ImageDigests[i]; + } + + if (message.RatePerSecond.HasValue) + { + map["ratePerSecond"] = message.RatePerSecond.Value.ToString(); + } + + map["usageOnly"] = message.UsageOnly ? "true" : "false"; + + return map; + } + } +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs new file mode 100644 index 00000000..dd052bae --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs @@ -0,0 +1,274 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Queue; + +public sealed class PlannerQueueMessage +{ + [JsonConstructor] + public PlannerQueueMessage( + Run run, + ImpactSet impactSet, + Schedule? schedule = null, + string? correlationId = null) + { + Run = run ?? throw new ArgumentNullException(nameof(run)); + ImpactSet = impactSet ?? throw new ArgumentNullException(nameof(impactSet)); + + if (schedule is not null && string.IsNullOrWhiteSpace(schedule.Id)) + { + throw new ArgumentException("Schedule must have a valid identifier.", nameof(schedule)); + } + + if (!string.IsNullOrWhiteSpace(correlationId)) + { + correlationId = correlationId!.Trim(); + } + + Schedule = schedule; + CorrelationId = string.IsNullOrWhiteSpace(correlationId) ? null : correlationId; + } + + public Run Run { get; } + + public ImpactSet ImpactSet { get; } + + public Schedule? Schedule { get; } + + public string? CorrelationId { get; } + + public string IdempotencyKey => Run.Id; + + public string TenantId => Run.TenantId; + + public string? ScheduleId => Run.ScheduleId; +} + +public sealed class RunnerSegmentQueueMessage +{ + private readonly ReadOnlyCollection _imageDigests; + private readonly IReadOnlyDictionary _attributes; + + [JsonConstructor] + public RunnerSegmentQueueMessage( + string segmentId, + string runId, + string tenantId, + IReadOnlyList imageDigests, + string? scheduleId = null, + int? ratePerSecond = null, + bool usageOnly = true, + IReadOnlyDictionary? attributes = null, + string? correlationId = null) + { + if (string.IsNullOrWhiteSpace(segmentId)) + { + throw new ArgumentException("Segment identifier must be provided.", nameof(segmentId)); + } + + if (string.IsNullOrWhiteSpace(runId)) + { + throw new ArgumentException("Run identifier must be provided.", nameof(runId)); + } + + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant identifier must be provided.", nameof(tenantId)); + } + + SegmentId = segmentId; + RunId = runId; + TenantId = tenantId; + ScheduleId = string.IsNullOrWhiteSpace(scheduleId) ? null : scheduleId; + RatePerSecond = ratePerSecond; + UsageOnly = usageOnly; + CorrelationId = string.IsNullOrWhiteSpace(correlationId) ? null : correlationId; + + _imageDigests = new ReadOnlyCollection(NormalizeDigests(imageDigests)); + _attributes = attributes is null + ? 
EmptyReadOnlyDictionary.Instance + : new ReadOnlyDictionary(new Dictionary(attributes, StringComparer.Ordinal)); + } + + public string SegmentId { get; } + + public string RunId { get; } + + public string TenantId { get; } + + public string? ScheduleId { get; } + + public int? RatePerSecond { get; } + + public bool UsageOnly { get; } + + public string? CorrelationId { get; } + + public IReadOnlyList ImageDigests => _imageDigests; + + public IReadOnlyDictionary Attributes => _attributes; + + public string IdempotencyKey => SegmentId; + + private static List NormalizeDigests(IReadOnlyList digests) + { + if (digests is null) + { + throw new ArgumentNullException(nameof(digests)); + } + + var list = new List(); + foreach (var digest in digests) + { + if (string.IsNullOrWhiteSpace(digest)) + { + continue; + } + + list.Add(digest.Trim()); + } + + if (list.Count == 0) + { + throw new ArgumentException("At least one image digest must be provided.", nameof(digests)); + } + + return list; + } + + private sealed class EmptyReadOnlyDictionary + where TKey : notnull + { + public static readonly IReadOnlyDictionary Instance = + new ReadOnlyDictionary(new Dictionary(0, EqualityComparer.Default)); + } +} + +public readonly record struct SchedulerQueueEnqueueResult(string MessageId, bool Deduplicated); + +public sealed class SchedulerQueueLeaseRequest +{ + public SchedulerQueueLeaseRequest(string consumer, int batchSize, TimeSpan leaseDuration) + { + if (string.IsNullOrWhiteSpace(consumer)) + { + throw new ArgumentException("Consumer identifier must be provided.", nameof(consumer)); + } + + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); + } + + if (leaseDuration <= TimeSpan.Zero) + { + throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive."); + } + + Consumer = consumer; + BatchSize = batchSize; + LeaseDuration = leaseDuration; + } + + public string Consumer { get; } + + public int BatchSize { get; } + + public TimeSpan LeaseDuration { get; } +} + +public sealed class SchedulerQueueClaimOptions +{ + public SchedulerQueueClaimOptions(string claimantConsumer, int batchSize, TimeSpan minIdleTime) + { + if (string.IsNullOrWhiteSpace(claimantConsumer)) + { + throw new ArgumentException("Consumer identifier must be provided.", nameof(claimantConsumer)); + } + + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); + } + + if (minIdleTime < TimeSpan.Zero) + { + throw new ArgumentOutOfRangeException(nameof(minIdleTime), minIdleTime, "Idle time cannot be negative."); + } + + ClaimantConsumer = claimantConsumer; + BatchSize = batchSize; + MinIdleTime = minIdleTime; + } + + public string ClaimantConsumer { get; } + + public int BatchSize { get; } + + public TimeSpan MinIdleTime { get; } +} + +public enum SchedulerQueueReleaseDisposition +{ + Retry, + Abandon +} + +public interface ISchedulerQueue +{ + ValueTask EnqueueAsync(TMessage message, CancellationToken cancellationToken = default); + + ValueTask>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default); + + ValueTask>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default); +} + +public interface ISchedulerQueueLease +{ + string MessageId { get; } + + int Attempt { get; } + + DateTimeOffset EnqueuedAt { get; } + + DateTimeOffset LeaseExpiresAt { get; } + + string Consumer { 
get; } + + string TenantId { get; } + + string RunId { get; } + + string? ScheduleId { get; } + + string? SegmentId { get; } + + string? CorrelationId { get; } + + string IdempotencyKey { get; } + + IReadOnlyDictionary Attributes { get; } + + TMessage Message { get; } + + Task AcknowledgeAsync(CancellationToken cancellationToken = default); + + Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default); + + Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default); + + Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default); +} + +public interface ISchedulerPlannerQueue : ISchedulerQueue +{ +} + +public interface ISchedulerRunnerQueue : ISchedulerQueue +{ +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs new file mode 100644 index 00000000..de0531be --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs @@ -0,0 +1,16 @@ +namespace StellaOps.Scheduler.Queue; + +internal static class SchedulerQueueFields +{ + public const string Payload = "payload"; + public const string Attempt = "attempt"; + public const string EnqueuedAt = "enqueuedAt"; + public const string IdempotencyKey = "idempotency"; + public const string RunId = "runId"; + public const string TenantId = "tenantId"; + public const string ScheduleId = "scheduleId"; + public const string SegmentId = "segmentId"; + public const string QueueKind = "queueKind"; + public const string CorrelationId = "correlationId"; + public const string AttributePrefix = "attr:"; +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs new file mode 100644 index 00000000..de2622e9 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs @@ -0,0 +1,39 @@ +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Scheduler.Queue; + +internal static class SchedulerQueueMetrics +{ + private const string TransportTagName = "transport"; + private const string QueueTagName = "queue"; + + private static readonly Meter Meter = new("StellaOps.Scheduler.Queue"); + private static readonly Counter EnqueuedCounter = Meter.CreateCounter("scheduler_queue_enqueued_total"); + private static readonly Counter DeduplicatedCounter = Meter.CreateCounter("scheduler_queue_deduplicated_total"); + private static readonly Counter AckCounter = Meter.CreateCounter("scheduler_queue_ack_total"); + private static readonly Counter RetryCounter = Meter.CreateCounter("scheduler_queue_retry_total"); + private static readonly Counter DeadLetterCounter = Meter.CreateCounter("scheduler_queue_deadletter_total"); + + public static void RecordEnqueued(string transport, string queue) + => EnqueuedCounter.Add(1, BuildTags(transport, queue)); + + public static void RecordDeduplicated(string transport, string queue) + => DeduplicatedCounter.Add(1, BuildTags(transport, queue)); + + public static void RecordAck(string transport, string queue) + => AckCounter.Add(1, BuildTags(transport, queue)); + + public static void RecordRetry(string transport, string queue) + => RetryCounter.Add(1, BuildTags(transport, queue)); + + public static void RecordDeadLetter(string transport, string queue) + => DeadLetterCounter.Add(1, BuildTags(transport, queue)); + + private static KeyValuePair[] BuildTags(string transport, string queue) + => new[] + { + new KeyValuePair(TransportTagName, transport), + 
new KeyValuePair(QueueTagName, queue) + }; +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs new file mode 100644 index 00000000..4ae76858 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs @@ -0,0 +1,76 @@ +using System; + +namespace StellaOps.Scheduler.Queue; + +public sealed class SchedulerQueueOptions +{ + public SchedulerQueueTransportKind Kind { get; set; } = SchedulerQueueTransportKind.Redis; + + public SchedulerRedisQueueOptions Redis { get; set; } = new(); + + /// + /// Default lease/visibility window applied when callers do not override the duration. + /// + public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Maximum number of deliveries before a message is shunted to the dead-letter stream. + /// + public int MaxDeliveryAttempts { get; set; } = 5; + + /// + /// Base retry delay used when a message is released for retry. + /// + public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5); + + /// + /// Cap applied to the retry delay when exponential backoff is used. + /// + public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(1); +} + +public sealed class SchedulerRedisQueueOptions +{ + public string? ConnectionString { get; set; } + + public int? Database { get; set; } + + public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30); + + public RedisSchedulerStreamOptions Planner { get; set; } = RedisSchedulerStreamOptions.ForPlanner(); + + public RedisSchedulerStreamOptions Runner { get; set; } = RedisSchedulerStreamOptions.ForRunner(); +} + +public sealed class RedisSchedulerStreamOptions +{ + public string Stream { get; set; } = string.Empty; + + public string ConsumerGroup { get; set; } = string.Empty; + + public string DeadLetterStream { get; set; } = string.Empty; + + public string IdempotencyKeyPrefix { get; set; } = string.Empty; + + public TimeSpan IdempotencyWindow { get; set; } = TimeSpan.FromHours(12); + + public int? 
ApproximateMaxLength { get; set; } + + public static RedisSchedulerStreamOptions ForPlanner() + => new() + { + Stream = "scheduler:planner", + ConsumerGroup = "scheduler-planners", + DeadLetterStream = "scheduler:planner:dead", + IdempotencyKeyPrefix = "scheduler:planner:idemp:" + }; + + public static RedisSchedulerStreamOptions ForRunner() + => new() + { + Stream = "scheduler:runner", + ConsumerGroup = "scheduler-runners", + DeadLetterStream = "scheduler:runner:dead", + IdempotencyKeyPrefix = "scheduler:runner:idemp:" + }; +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs new file mode 100644 index 00000000..533d597c --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs @@ -0,0 +1,60 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Queue.Redis; + +namespace StellaOps.Scheduler.Queue; + +public static class SchedulerQueueServiceCollectionExtensions +{ + public static IServiceCollection AddSchedulerQueues( + this IServiceCollection services, + IConfiguration configuration, + string sectionName = "scheduler:queue") + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + var options = new SchedulerQueueOptions(); + configuration.GetSection(sectionName).Bind(options); + + services.TryAddSingleton(TimeProvider.System); + services.AddSingleton(options); + + services.AddSingleton(sp => + { + var loggerFactory = sp.GetRequiredService(); + var timeProvider = sp.GetService() ?? TimeProvider.System; + + return options.Kind switch + { + SchedulerQueueTransportKind.Redis => new RedisSchedulerPlannerQueue( + options, + options.Redis, + loggerFactory.CreateLogger(), + timeProvider), + _ => throw new InvalidOperationException($"Unsupported scheduler queue transport '{options.Kind}'.") + }; + }); + + services.AddSingleton(sp => + { + var loggerFactory = sp.GetRequiredService(); + var timeProvider = sp.GetService() ?? TimeProvider.System; + + return options.Kind switch + { + SchedulerQueueTransportKind.Redis => new RedisSchedulerRunnerQueue( + options, + options.Redis, + loggerFactory.CreateLogger(), + timeProvider), + _ => throw new InvalidOperationException($"Unsupported scheduler queue transport '{options.Kind}'.") + }; + }); + + return services; + } +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs b/src/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs new file mode 100644 index 00000000..65c08276 --- /dev/null +++ b/src/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs @@ -0,0 +1,10 @@ +namespace StellaOps.Scheduler.Queue; + +/// +/// Transport backends supported by the scheduler queue abstraction. 
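+/// Redis is the only adapter wired up in this change; Nats is reserved for the JetStream adapter tracked as SCHED-QUEUE-16-402, and selecting it currently fails fast in AddSchedulerQueues.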
+/// +public enum SchedulerQueueTransportKind +{ + Redis = 0, + Nats = 1, +} diff --git a/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj b/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj index 6c3a8871..75c781fa 100644 --- a/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj +++ b/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj @@ -4,4 +4,14 @@ enable enable + + + + + + + + + + diff --git a/src/StellaOps.Scheduler.Queue/TASKS.md b/src/StellaOps.Scheduler.Queue/TASKS.md index b529a2ba..3e6f2994 100644 --- a/src/StellaOps.Scheduler.Queue/TASKS.md +++ b/src/StellaOps.Scheduler.Queue/TASKS.md @@ -1,7 +1,9 @@ # Scheduler Queue Task Board (Sprint 16) +> **Status note (2025-10-19):** Scheduler DTOs and sample payloads are now available (SCHED-MODELS-16-102). Queue tasks remain pending on this board. + | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCHED-QUEUE-16-401 | TODO | Scheduler Queue Guild | SCHED-MODELS-16-101 | Implement queue abstraction + Redis Streams adapter (planner inputs, runner segments) with ack/lease semantics. | Integration tests cover enqueue/dequeue/ack; lease renewal implemented; ordering preserved. | +| SCHED-QUEUE-16-401 | DOING (2025-10-19) | Scheduler Queue Guild | SCHED-MODELS-16-101 | Implement queue abstraction + Redis Streams adapter (planner inputs, runner segments) with ack/lease semantics. | Integration tests cover enqueue/dequeue/ack; lease renewal implemented; ordering preserved. | | SCHED-QUEUE-16-402 | TODO | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Add NATS JetStream adapter with configuration binding, health probes, failover. | Health endpoints verified; failover documented; adapter tested. | | SCHED-QUEUE-16-403 | TODO | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Dead-letter handling + metrics (queue depth, retry counts), configuration toggles. | Dead-letter policy tested; metrics exported; docs updated. 
| diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs b/src/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs new file mode 100644 index 00000000..a5ea4830 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs @@ -0,0 +1,12 @@ +global using System.Text.Json; +global using System.Text.Json.Nodes; +global using Microsoft.Extensions.Logging.Abstractions; +global using Microsoft.Extensions.Options; +global using Mongo2Go; +global using MongoDB.Bson; +global using MongoDB.Driver; +global using StellaOps.Scheduler.Models; +global using StellaOps.Scheduler.Storage.Mongo.Internal; +global using StellaOps.Scheduler.Storage.Mongo.Migrations; +global using StellaOps.Scheduler.Storage.Mongo.Options; +global using Xunit; diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs b/src/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs new file mode 100644 index 00000000..4e070c43 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs @@ -0,0 +1,126 @@ +using System.Text.Json.Nodes; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Integration; + +public sealed class SchedulerMongoRoundTripTests : IDisposable +{ + private readonly MongoDbRunner _runner; + private readonly SchedulerMongoContext _context; + + public SchedulerMongoRoundTripTests() + { + _runner = MongoDbRunner.Start(additionalMongodArguments: "--quiet"); + var options = new SchedulerMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = $"scheduler_roundtrip_{Guid.NewGuid():N}" + }; + + _context = new SchedulerMongoContext(Microsoft.Extensions.Options.Options.Create(options), NullLogger.Instance); + var migrations = new ISchedulerMongoMigration[] + { + new EnsureSchedulerCollectionsMigration(NullLogger.Instance), + new EnsureSchedulerIndexesMigration() + }; + var runner = new SchedulerMongoMigrationRunner(_context, migrations, NullLogger.Instance); + runner.RunAsync(CancellationToken.None).GetAwaiter().GetResult(); + } + + [Fact] + public async Task SamplesRoundTripThroughMongoWithoutLosingCanonicalShape() + { + var samplesRoot = LocateSamplesRoot(); + + var scheduleJson = await File.ReadAllTextAsync(Path.Combine(samplesRoot, "schedule.json"), CancellationToken.None); + await AssertRoundTripAsync( + scheduleJson, + _context.Options.SchedulesCollection, + CanonicalJsonSerializer.Deserialize, + schedule => schedule.Id); + + var runJson = await File.ReadAllTextAsync(Path.Combine(samplesRoot, "run.json"), CancellationToken.None); + await AssertRoundTripAsync( + runJson, + _context.Options.RunsCollection, + CanonicalJsonSerializer.Deserialize, + run => run.Id); + + var impactJson = await File.ReadAllTextAsync(Path.Combine(samplesRoot, "impact-set.json"), CancellationToken.None); + await AssertRoundTripAsync( + impactJson, + _context.Options.ImpactSnapshotsCollection, + CanonicalJsonSerializer.Deserialize, + _ => null); + + var auditJson = await File.ReadAllTextAsync(Path.Combine(samplesRoot, "audit.json"), CancellationToken.None); + await AssertRoundTripAsync( + auditJson, + _context.Options.AuditCollection, + CanonicalJsonSerializer.Deserialize, + audit => audit.Id); + } + + private async Task AssertRoundTripAsync( + string json, + string collectionName, + Func deserialize, + Func resolveId) + { + ArgumentNullException.ThrowIfNull(deserialize); + ArgumentNullException.ThrowIfNull(resolveId); + + var model = deserialize(json); + var canonical 
= CanonicalJsonSerializer.Serialize(model); + + var document = BsonDocument.Parse(canonical); + var identifier = resolveId(model); + if (!string.IsNullOrEmpty(identifier)) + { + document["_id"] = identifier; + } + + var collection = _context.Database.GetCollection(collectionName); + await collection.InsertOneAsync(document, cancellationToken: CancellationToken.None); + + var filter = identifier is null ? Builders.Filter.Empty : Builders.Filter.Eq("_id", identifier); + var stored = await collection.Find(filter).FirstOrDefaultAsync(); + Assert.NotNull(stored); + + var sanitized = stored!.DeepClone().AsBsonDocument; + sanitized.Remove("_id"); + + var storedJson = sanitized.ToJson(); + + var parsedExpected = JsonNode.Parse(canonical) ?? throw new InvalidOperationException("Canonical node null."); + var parsedActual = JsonNode.Parse(storedJson) ?? throw new InvalidOperationException("Stored node null."); + Assert.True(JsonNode.DeepEquals(parsedExpected, parsedActual), "Document changed shape after Mongo round-trip."); + } + + private static string LocateSamplesRoot() + { + var current = AppContext.BaseDirectory; + while (!string.IsNullOrEmpty(current)) + { + var candidate = Path.Combine(current, "samples", "api", "scheduler"); + if (Directory.Exists(candidate)) + { + return candidate; + } + + var parent = Path.GetDirectoryName(current.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)); + if (string.Equals(parent, current, StringComparison.Ordinal)) + { + break; + } + + current = parent; + } + + throw new DirectoryNotFoundException("Unable to locate samples/api/scheduler in repository tree."); + } + + public void Dispose() + { + _runner.Dispose(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs b/src/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs new file mode 100644 index 00000000..72a1d4ae --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs @@ -0,0 +1,106 @@ +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Migrations; + +public sealed class SchedulerMongoMigrationTests : IDisposable +{ + private readonly MongoDbRunner _runner; + + public SchedulerMongoMigrationTests() + { + _runner = MongoDbRunner.Start(additionalMongodArguments: "--quiet"); + } + + [Fact] + public async Task RunAsync_CreatesCollectionsAndIndexes() + { + var options = new SchedulerMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = $"scheduler_tests_{Guid.NewGuid():N}" + }; + + var context = new SchedulerMongoContext(Microsoft.Extensions.Options.Options.Create(options), NullLogger.Instance); + var migrations = new ISchedulerMongoMigration[] + { + new EnsureSchedulerCollectionsMigration(NullLogger.Instance), + new EnsureSchedulerIndexesMigration() + }; + + var runner = new SchedulerMongoMigrationRunner(context, migrations, NullLogger.Instance); + await runner.RunAsync(CancellationToken.None); + + var cursor = await context.Database.ListCollectionNamesAsync(cancellationToken: CancellationToken.None); + var collections = await cursor.ToListAsync(); + + Assert.Contains(options.SchedulesCollection, collections); + Assert.Contains(options.RunsCollection, collections); + Assert.Contains(options.ImpactSnapshotsCollection, collections); + Assert.Contains(options.AuditCollection, collections); + Assert.Contains(options.LocksCollection, collections); + Assert.Contains(options.MigrationsCollection, collections); + + await 
AssertScheduleIndexesAsync(context, options); + await AssertRunIndexesAsync(context, options); + await AssertImpactSnapshotIndexesAsync(context, options); + await AssertAuditIndexesAsync(context, options); + await AssertLockIndexesAsync(context, options); + } + + private static async Task AssertScheduleIndexesAsync(SchedulerMongoContext context, SchedulerMongoOptions options) + { + var names = await ListIndexNamesAsync(context.Database.GetCollection(options.SchedulesCollection)); + Assert.Contains("tenant_enabled", names); + Assert.Contains("cron_timezone", names); + } + + private static async Task AssertRunIndexesAsync(SchedulerMongoContext context, SchedulerMongoOptions options) + { + var collection = context.Database.GetCollection(options.RunsCollection); + var indexes = await ListIndexesAsync(collection); + + Assert.Contains(indexes, doc => string.Equals(doc["name"].AsString, "tenant_createdAt_desc", StringComparison.Ordinal)); + Assert.Contains(indexes, doc => string.Equals(doc["name"].AsString, "state_lookup", StringComparison.Ordinal)); + Assert.Contains(indexes, doc => string.Equals(doc["name"].AsString, "schedule_createdAt_desc", StringComparison.Ordinal)); + + var ttl = indexes.FirstOrDefault(doc => doc.TryGetValue("name", out var name) && name == "finishedAt_ttl"); + Assert.NotNull(ttl); + Assert.Equal(options.CompletedRunRetention.TotalSeconds, ttl!["expireAfterSeconds"].ToDouble()); + } + + private static async Task AssertImpactSnapshotIndexesAsync(SchedulerMongoContext context, SchedulerMongoOptions options) + { + var names = await ListIndexNamesAsync(context.Database.GetCollection(options.ImpactSnapshotsCollection)); + Assert.Contains("selector_tenant_scope", names); + Assert.Contains("snapshotId_unique", names); + } + + private static async Task AssertAuditIndexesAsync(SchedulerMongoContext context, SchedulerMongoOptions options) + { + var names = await ListIndexNamesAsync(context.Database.GetCollection(options.AuditCollection)); + Assert.Contains("tenant_occurredAt_desc", names); + Assert.Contains("correlation_lookup", names); + } + + private static async Task AssertLockIndexesAsync(SchedulerMongoContext context, SchedulerMongoOptions options) + { + var names = await ListIndexNamesAsync(context.Database.GetCollection(options.LocksCollection)); + Assert.Contains("tenant_resource_unique", names); + Assert.Contains("expiresAt_ttl", names); + } + + private static async Task> ListIndexNamesAsync(IMongoCollection collection) + { + var documents = await ListIndexesAsync(collection); + return documents.Select(doc => doc["name"].AsString).ToArray(); + } + + private static async Task> ListIndexesAsync(IMongoCollection collection) + { + using var cursor = await collection.Indexes.ListAsync(); + return await cursor.ToListAsync(); + } + + public void Dispose() + { + _runner.Dispose(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj b/src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj new file mode 100644 index 00000000..60e78365 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj @@ -0,0 +1,23 @@ + + + net10.0 + enable + enable + false + + + + + + + + + + + + + + Always + + + diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs new file mode 100644 index 00000000..258cddb3 --- /dev/null +++ 
b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs @@ -0,0 +1,46 @@ +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Options; + +namespace StellaOps.Scheduler.Storage.Mongo.Internal; + +internal sealed class SchedulerMongoContext +{ + public SchedulerMongoContext(IOptions options, ILogger logger) + { + ArgumentNullException.ThrowIfNull(logger); + var value = options?.Value ?? throw new ArgumentNullException(nameof(options)); + + if (string.IsNullOrWhiteSpace(value.ConnectionString)) + { + throw new InvalidOperationException("Scheduler Mongo connection string is not configured."); + } + + if (string.IsNullOrWhiteSpace(value.Database)) + { + throw new InvalidOperationException("Scheduler Mongo database name is not configured."); + } + + Client = new MongoClient(value.ConnectionString); + var settings = new MongoDatabaseSettings(); + if (value.UseMajorityReadConcern) + { + settings.ReadConcern = ReadConcern.Majority; + } + + if (value.UseMajorityWriteConcern) + { + settings.WriteConcern = WriteConcern.WMajority; + } + + Database = Client.GetDatabase(value.Database, settings); + Options = value; + } + + public MongoClient Client { get; } + + public IMongoDatabase Database { get; } + + public SchedulerMongoOptions Options { get; } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs new file mode 100644 index 00000000..d9a9d475 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs @@ -0,0 +1,32 @@ +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Storage.Mongo.Migrations; + +namespace StellaOps.Scheduler.Storage.Mongo.Internal; + +internal interface ISchedulerMongoInitializer +{ + Task EnsureMigrationsAsync(CancellationToken cancellationToken = default); +} + +internal sealed class SchedulerMongoInitializer : ISchedulerMongoInitializer +{ + private readonly SchedulerMongoContext _context; + private readonly SchedulerMongoMigrationRunner _migrationRunner; + private readonly ILogger _logger; + + public SchedulerMongoInitializer( + SchedulerMongoContext context, + SchedulerMongoMigrationRunner migrationRunner, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + _migrationRunner = migrationRunner ?? throw new ArgumentNullException(nameof(migrationRunner)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task EnsureMigrationsAsync(CancellationToken cancellationToken = default) + { + _logger.LogInformation("Ensuring Scheduler Mongo migrations are applied for database {Database}.", _context.Options.Database); + await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs new file mode 100644 index 00000000..ec65fd4b --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs @@ -0,0 +1,27 @@ +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scheduler.Storage.Mongo.Internal; + +internal sealed class SchedulerMongoInitializerHostedService : IHostedService +{ + private readonly ISchedulerMongoInitializer _initializer; + private readonly ILogger _logger; + + public SchedulerMongoInitializerHostedService( + ISchedulerMongoInitializer initializer, + ILogger logger) + { + _initializer = initializer ?? throw new ArgumentNullException(nameof(initializer)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + _logger.LogInformation("Applying Scheduler Mongo migrations."); + await _initializer.EnsureMigrationsAsync(cancellationToken).ConfigureAwait(false); + } + + public Task StopAsync(CancellationToken cancellationToken) + => Task.CompletedTask; +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs new file mode 100644 index 00000000..b511b009 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs @@ -0,0 +1,47 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Migrations; + +internal sealed class EnsureSchedulerCollectionsMigration : ISchedulerMongoMigration +{ + private readonly ILogger _logger; + + public EnsureSchedulerCollectionsMigration(ILogger logger) + => _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + public string Id => "20251019_scheduler_collections_v1"; + + public async ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var requiredCollections = new[] + { + context.Options.SchedulesCollection, + context.Options.RunsCollection, + context.Options.ImpactSnapshotsCollection, + context.Options.AuditCollection, + context.Options.LocksCollection, + context.Options.MigrationsCollection + }; + + var cursor = await context.Database + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + + foreach (var collection in requiredCollections) + { + if (existing.Contains(collection, StringComparer.Ordinal)) + { + continue; + } + + _logger.LogInformation("Creating Scheduler Mongo collection '{CollectionName}'.", collection); + await context.Database.CreateCollectionAsync(collection, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs new file mode 100644 index 00000000..a1ba97a9 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs @@ -0,0 +1,175 @@ +using System; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Migrations; + +internal sealed class EnsureSchedulerIndexesMigration : ISchedulerMongoMigration +{ + public string Id => "20251019_scheduler_indexes_v1"; + + public async ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + await EnsureSchedulesIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureRunsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureImpactSnapshotsIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureAuditIndexesAsync(context, cancellationToken).ConfigureAwait(false); + await EnsureLocksIndexesAsync(context, cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureSchedulesIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.SchedulesCollection); + + var tenantEnabled = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("enabled"), + new CreateIndexOptions + { + Name = "tenant_enabled" + }); + + var cronTimezone = new CreateIndexModel( + Builders.IndexKeys + .Ascending("cronExpression") + .Ascending("timezone"), + new CreateIndexOptions + { + Name = "cron_timezone" + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantEnabled, cronTimezone }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + private static async Task EnsureRunsIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.RunsCollection); + + var tenantCreated = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("createdAt"), + new CreateIndexOptions + { + Name = "tenant_createdAt_desc" + }); + + var stateIndex = new CreateIndexModel( + 
Builders.IndexKeys + .Ascending("state"), + new CreateIndexOptions + { + Name = "state_lookup" + }); + + var scheduleIndex = new CreateIndexModel( + Builders.IndexKeys + .Ascending("scheduleId") + .Descending("createdAt"), + new CreateIndexOptions + { + Name = "schedule_createdAt_desc" + }); + + var models = new List> { tenantCreated, stateIndex, scheduleIndex }; + + if (context.Options.CompletedRunRetention > TimeSpan.Zero) + { + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("finishedAt"), + new CreateIndexOptions + { + Name = "finishedAt_ttl", + ExpireAfter = context.Options.CompletedRunRetention + }); + + models.Add(ttlModel); + } + + await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureImpactSnapshotsIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.ImpactSnapshotsCollection); + + var tenantScope = new CreateIndexModel( + Builders.IndexKeys + .Ascending("selector.tenantId") + .Ascending("selector.scope"), + new CreateIndexOptions + { + Name = "selector_tenant_scope" + }); + + var snapshotId = new CreateIndexModel( + Builders.IndexKeys.Ascending("snapshotId"), + new CreateIndexOptions + { + Name = "snapshotId_unique", + Unique = true, + PartialFilterExpression = Builders.Filter.Exists("snapshotId", true) + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantScope, snapshotId }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + private static async Task EnsureAuditIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.AuditCollection); + + var tenantOccurred = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Descending("occurredAt"), + new CreateIndexOptions + { + Name = "tenant_occurredAt_desc" + }); + + var correlationIndex = new CreateIndexModel( + Builders.IndexKeys + .Ascending("correlationId"), + new CreateIndexOptions + { + Name = "correlation_lookup", + PartialFilterExpression = Builders.Filter.Exists("correlationId", true) + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantOccurred, correlationIndex }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + + private static async Task EnsureLocksIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken) + { + var collection = context.Database.GetCollection(context.Options.LocksCollection); + + var tenantResource = new CreateIndexModel( + Builders.IndexKeys + .Ascending("tenantId") + .Ascending("resource"), + new CreateIndexOptions + { + Name = "tenant_resource_unique", + Unique = true, + PartialFilterExpression = Builders.Filter.Exists("resource", true) + }); + + var ttlModel = new CreateIndexModel( + Builders.IndexKeys.Ascending("expiresAt"), + new CreateIndexOptions + { + Name = "expiresAt_ttl", + ExpireAfter = TimeSpan.Zero + }); + + await collection.Indexes.CreateManyAsync(new[] { tenantResource, ttlModel }, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs new file mode 100644 index 00000000..385c9ca2 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs @@ -0,0 +1,10 @@ +using 
StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Migrations; + +internal interface ISchedulerMongoMigration +{ + string Id { get; } + + ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs new file mode 100644 index 00000000..938db91a --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs @@ -0,0 +1,16 @@ +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Scheduler.Storage.Mongo.Migrations; + +internal sealed class SchedulerMongoMigrationRecord +{ + [BsonId] + public ObjectId Id { get; set; } + + [BsonElement("migrationId")] + public string MigrationId { get; set; } = string.Empty; + + [BsonElement("appliedAt")] + public DateTimeOffset AppliedAt { get; set; } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs new file mode 100644 index 00000000..80e26444 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs @@ -0,0 +1,77 @@ +using Microsoft.Extensions.Logging; +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Migrations; + +internal sealed class SchedulerMongoMigrationRunner +{ + private readonly SchedulerMongoContext _context; + private readonly IReadOnlyList _migrations; + private readonly ILogger _logger; + + public SchedulerMongoMigrationRunner( + SchedulerMongoContext context, + IEnumerable migrations, + ILogger logger) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + ArgumentNullException.ThrowIfNull(migrations); + _migrations = migrations.OrderBy(migration => migration.Id, StringComparer.Ordinal).ToArray(); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask RunAsync(CancellationToken cancellationToken) + { + if (_migrations.Count == 0) + { + return; + } + + var collection = _context.Database.GetCollection(_context.Options.MigrationsCollection); + await EnsureMigrationIndexAsync(collection, cancellationToken).ConfigureAwait(false); + + var applied = await collection + .Find(FilterDefinition.Empty) + .Project(record => record.MigrationId) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var appliedSet = applied.ToHashSet(StringComparer.Ordinal); + + foreach (var migration in _migrations) + { + if (appliedSet.Contains(migration.Id)) + { + continue; + } + + _logger.LogInformation("Applying Scheduler Mongo migration {MigrationId}.", migration.Id); + await migration.ExecuteAsync(_context, cancellationToken).ConfigureAwait(false); + + var record = new SchedulerMongoMigrationRecord + { + Id = MongoDB.Bson.ObjectId.GenerateNewId(), + MigrationId = migration.Id, + AppliedAt = DateTimeOffset.UtcNow + }; + + await collection.InsertOneAsync(record, cancellationToken: cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Completed Scheduler Mongo migration {MigrationId}.", migration.Id); + } + } + + private static async Task EnsureMigrationIndexAsync( + IMongoCollection collection, + CancellationToken cancellationToken) + { + var keys = Builders.IndexKeys.Ascending(record => record.MigrationId); + var model = new CreateIndexModel(keys, new CreateIndexOptions + { + Name = "migrationId_unique", + Unique = true + }); + + await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs b/src/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs new file mode 100644 index 00000000..db495b54 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs @@ -0,0 +1,34 @@ +using System; + +namespace StellaOps.Scheduler.Storage.Mongo.Options; + +/// +/// Configures MongoDB connectivity and collection names for Scheduler storage. +/// +public sealed class SchedulerMongoOptions +{ + public string ConnectionString { get; set; } = "mongodb://localhost:27017"; + + public string Database { get; set; } = "stellaops_scheduler"; + + public string SchedulesCollection { get; set; } = "schedules"; + + public string RunsCollection { get; set; } = "runs"; + + public string ImpactSnapshotsCollection { get; set; } = "impact_snapshots"; + + public string AuditCollection { get; set; } = "audit"; + + public string LocksCollection { get; set; } = "locks"; + + public string MigrationsCollection { get; set; } = "_scheduler_migrations"; + + /// + /// Optional TTL applied to completed runs. When zero or negative no TTL index is created. 
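+    /// The 180-day default feeds the finishedAt_ttl index created by EnsureSchedulerIndexesMigration; set it to zero to skip the TTL index entirely.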
+ /// + public TimeSpan CompletedRunRetention { get; set; } = TimeSpan.FromDays(180); + + public bool UseMajorityReadConcern { get; set; } = true; + + public bool UseMajorityWriteConcern { get; set; } = true; +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs b/src/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..f5d75c32 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scheduler.Storage.Mongo.Tests")] diff --git a/src/StellaOps.Scheduler.Storage.Mongo/README.md b/src/StellaOps.Scheduler.Storage.Mongo/README.md new file mode 100644 index 00000000..d12683b2 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/README.md @@ -0,0 +1,25 @@ +# Scheduler Storage Mongo — Sprint 16 Handoff + +This module now consumes the canonical DTOs defined in `StellaOps.Scheduler.Models`. +Samples covering REST shapes live under `samples/api/scheduler/` and are referenced from `docs/11_DATA_SCHEMAS.md#3.1`. + +## Collections & DTO mapping + +| Collection | DTO | Notes | +|-------------------|--------------------------|---------------------------------------------------------------------------------------| +| `schedules` | `Schedule` | Persist `Schedule` as-is. `_id` → `Schedule.Id`. Use compound indexes on `{tenantId, enabled}` and `{whenCron}` per doc. | +| `runs` | `Run` | Store `Run.Stats` inside the document; omit `deltas` array when empty. | +| `impact_snapshots`| `ImpactSet` | Normalise selector filter fields exactly as emitted by the canonical serializer. | +| `audit` | `AuditRecord` | Lower-case metadata keys are already enforced by the model. | + +All timestamps are persisted as UTC (`+00:00`). Empty selector filters remain empty arrays (see `impact-set.json` sample). + +## Implementation guidance + +1. Add a project reference to `StellaOps.Scheduler.Models` and reuse the records directly; avoid duplicate BSON POCOs. +2. When serialising/deserialising to MongoDB, call `CanonicalJsonSerializer` to keep ordering stable for diffable fixtures. +3. Integration tests should load the JSON samples and round-trip through the Mongo persistence layer to guarantee parity. +4. Follow `docs/11_DATA_SCHEMAS.md` for index requirements; update that doc if storage diverges. +5. Register `AddSchedulerMongoStorage` in the host and call `ISchedulerMongoInitializer.EnsureMigrationsAsync` during bootstrap so collections/indexes are created before workers/web APIs start. + +With these artefacts in place the dependency on SCHED-MODELS-16-101/102 is cleared—storage work can move to DOING. 
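A minimal host wiring sketch for the registration step described in the README above (illustrative only: the configuration section name `scheduler:storage` is an assumption; `AddSchedulerMongoStorage` and the hosted service that applies migrations at startup are the pieces introduced in this change):

```csharp
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scheduler.Storage.Mongo;

var builder = WebApplication.CreateBuilder(args);

// Binds SchedulerMongoOptions from configuration and registers
// SchedulerMongoInitializerHostedService, which applies the collection/index
// migrations before the host starts serving traffic.
builder.Services.AddSchedulerMongoStorage(
    builder.Configuration.GetSection("scheduler:storage"));

var app = builder.Build();
app.Run();
```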
diff --git a/src/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs b/src/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..07914270 --- /dev/null +++ b/src/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs @@ -0,0 +1,26 @@ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Migrations; +using StellaOps.Scheduler.Storage.Mongo.Options; + +namespace StellaOps.Scheduler.Storage.Mongo; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSchedulerMongoStorage(this IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.Configure(configuration); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddHostedService(); + + return services; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj b/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj index 6c3a8871..6897f7f2 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj +++ b/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj @@ -4,4 +4,16 @@ enable enable + + + + + + + + + + + + diff --git a/src/StellaOps.Scheduler.Storage.Mongo/TASKS.md b/src/StellaOps.Scheduler.Storage.Mongo/TASKS.md index 2173bcfe..b4ca2acf 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/TASKS.md +++ b/src/StellaOps.Scheduler.Storage.Mongo/TASKS.md @@ -1,7 +1,9 @@ # Scheduler Storage Task Board (Sprint 16) +> **Status note (2025-10-19):** Scheduler models/samples delivered in SCHED-MODELS-16-102. Tasks below remain pending for the Storage guild. + | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCHED-STORAGE-16-201 | TODO | Scheduler Storage Guild | SCHED-MODELS-16-101 | Create Mongo collections (schedules, runs, impact_cursors, locks, audit) with indexes/migrations per architecture. | Migration scripts and indexes implemented; integration tests cover CRUD paths. | +| SCHED-STORAGE-16-201 | DONE (2025-10-19) | Scheduler Storage Guild | SCHED-MODELS-16-101 | Create Mongo collections (schedules, runs, impact_cursors, locks, audit) with indexes/migrations per architecture. | Migration scripts and indexes implemented; integration tests cover CRUD paths. | | SCHED-STORAGE-16-202 | TODO | Scheduler Storage Guild | SCHED-STORAGE-16-201 | Implement repositories/services with tenant scoping, soft delete, TTL for completed runs, and causal consistency options. | Unit tests pass; TTL/soft delete validated; documentation updated. | | SCHED-STORAGE-16-203 | TODO | Scheduler Storage Guild | SCHED-STORAGE-16-201 | Audit/logging pipeline + run stats materialized views for UI. | Audit entries persisted; stats queries efficient; docs capture usage. 
| diff --git a/src/StellaOps.Scheduler.WebService/TASKS.md b/src/StellaOps.Scheduler.WebService/TASKS.md index 0b65569d..6bb66286 100644 --- a/src/StellaOps.Scheduler.WebService/TASKS.md +++ b/src/StellaOps.Scheduler.WebService/TASKS.md @@ -2,7 +2,11 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCHED-WEB-16-101 | TODO | Scheduler WebService Guild | SCHED-MODELS-16-101 | Bootstrap Minimal API host with Authority OpTok + DPoP, health endpoints, plug-in discovery per architecture §§1–2. | Service boots with config validation; `/healthz`/`/readyz` pass; restart-only plug-ins enforced. | +| SCHED-WEB-16-101 | DOING (2025-10-19) | Scheduler WebService Guild | SCHED-MODELS-16-101 | Bootstrap Minimal API host with Authority OpTok + DPoP, health endpoints, plug-in discovery per architecture §§1–2. | Service boots with config validation; `/healthz`/`/readyz` pass; restart-only plug-ins enforced. | | SCHED-WEB-16-102 | TODO | Scheduler WebService Guild | SCHED-WEB-16-101 | Implement schedules CRUD (tenant-scoped) with cron validation, pause/resume, audit logging. | CRUD operations tested; invalid cron inputs rejected; audit entries persisted. | | SCHED-WEB-16-103 | TODO | Scheduler WebService Guild | SCHED-WEB-16-102 | Runs API (list/detail/cancel), ad-hoc run POST, and impact preview endpoints. | Integration tests cover run lifecycle; preview returns counts/sample; cancellation honoured. | | SCHED-WEB-16-104 | TODO | Scheduler WebService Guild | SCHED-QUEUE-16-401, SCHED-STORAGE-16-201 | Webhook endpoints for Feedser/Vexer exports with mTLS/HMAC validation and rate limiting. | Webhooks validated via tests; invalid signatures rejected; rate limits documented. | + +## Notes +- 2025-10-19: SCHED-MODELS-16-101 (schemas/DTOs) is DONE, so API contracts for schedules/runs are ready to consume. +- Next steps for SCHED-WEB-16-101: create Minimal API host project scaffold, wire Authority OpTok + DPoP authentication via existing DI helpers, expose `/healthz` + `/readyz`, and load restart-only plugins per architecture §§1–2. Capture configuration validation and log shape aligned with Scheduler platform guidance before moving to CRUD implementation. diff --git a/src/StellaOps.Signer/AGENTS.md b/src/StellaOps.Signer/AGENTS.md new file mode 100644 index 00000000..5c12fbf4 --- /dev/null +++ b/src/StellaOps.Signer/AGENTS.md @@ -0,0 +1,21 @@ +# Signer Guild + +## Mission +Operate the Stella Ops Signer service: authenticate trusted callers, enforce proof‑of‑entitlement and release integrity policy, and mint verifiable DSSE bundles (keyless or KMS-backed) for downstream attestation. + +## Teams On Call +- Team 11 (Signer API) +- Team 12 (Signer Reliability & Quotas) + +## Operating Principles +- Accept requests only with Authority-issued OpToks plus DPoP or mTLS sender binding; reject unsigned/cross-tenant traffic. +- Treat PoE claims as hard gates for quota, version windows, and license validity; cache results deterministically with bounded TTLs. +- Verify scanner image release signatures via OCI Referrers before signing; fail closed on ambiguity. +- Keep the hot path stateless and deterministic; persist audit trails with structured logging, metrics, and correlation IDs. +- Update `TASKS.md`, architecture notes, and tests whenever behaviour or contracts evolve. + +## Key Directories +- `src/StellaOps.Signer/StellaOps.Signer.WebService/` — Minimal API host and HTTP surface (to be scaffolded). 
+- `src/StellaOps.Signer/StellaOps.Signer.Core/` — Domain contracts, signing pipeline, quota enforcement (to be scaffolded). +- `src/StellaOps.Signer/StellaOps.Signer.Infrastructure/` — External clients (Authority, Licensing, Fulcio/KMS, OCI) and persistence (to be scaffolded). +- `src/StellaOps.Signer/StellaOps.Signer.Tests/` — Unit/integration test suites (to be scaffolded). diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs new file mode 100644 index 00000000..daa5c671 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs @@ -0,0 +1,55 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Signer.Core; + +public interface IProofOfEntitlementIntrospector +{ + ValueTask<ProofOfEntitlementResult> IntrospectAsync( + ProofOfEntitlement proof, + CallerContext caller, + CancellationToken cancellationToken); +} + +public interface IReleaseIntegrityVerifier +{ + ValueTask<ReleaseVerificationResult> VerifyAsync( + string scannerImageDigest, + CancellationToken cancellationToken); +} + +public interface ISignerQuotaService +{ + ValueTask EnsureWithinLimitsAsync( + SigningRequest request, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken); +} + +public interface IDsseSigner +{ + ValueTask<SigningBundle> SignAsync( + SigningRequest request, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken); +} + +public interface ISignerAuditSink +{ + ValueTask<string> WriteAsync( + SigningRequest request, + SigningBundle bundle, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken); +} + +public interface ISignerPipeline +{ + ValueTask<SigningOutcome> SignAsync( + SigningRequest request, + CallerContext caller, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs new file mode 100644 index 00000000..ab69cf98 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; + +namespace StellaOps.Signer.Core; + +public enum SignerPoEFormat +{ + Jwt, + Mtls, +} + +public enum SigningMode +{ + Keyless, + Kms, +} + +public sealed record SigningSubject( + string Name, + IReadOnlyDictionary<string, string> Digest); + +public sealed record ProofOfEntitlement( + SignerPoEFormat Format, + string Value); + +public sealed record SigningOptions( + SigningMode Mode, + int? ExpirySeconds, + string ReturnBundle); + +public sealed record SigningRequest( + IReadOnlyList<SigningSubject> Subjects, + string PredicateType, + JsonDocument Predicate, + string ScannerImageDigest, + ProofOfEntitlement ProofOfEntitlement, + SigningOptions Options); + +public sealed record CallerContext( + string Subject, + string Tenant, + IReadOnlyList<string> Scopes, + IReadOnlyList<string> Audiences, + string? SenderBinding, + string? ClientCertificateThumbprint); + +public sealed record ProofOfEntitlementResult( + string LicenseId, + string CustomerId, + string Plan, + int MaxArtifactBytes, + int QpsLimit, + int QpsRemaining, + DateTimeOffset ExpiresAtUtc); + +public sealed record ReleaseVerificationResult( + bool Trusted, + string? ReleaseSigner); + +public sealed record SigningIdentity( + string Mode, + string Issuer, + string Subject, + DateTimeOffset?
ExpiresAtUtc); + +public sealed record SigningMetadata( + SigningIdentity Identity, + IReadOnlyList CertificateChain, + string ProviderName, + string AlgorithmId); + +public sealed record SigningBundle( + DsseEnvelope Envelope, + SigningMetadata Metadata); + +public sealed record PolicyCounters( + string Plan, + int MaxArtifactBytes, + int QpsRemaining); + +public sealed record SigningOutcome( + SigningBundle Bundle, + PolicyCounters Policy, + string AuditId); + +public sealed record SignerAuditEntry( + string AuditId, + DateTimeOffset TimestampUtc, + string Subject, + string Tenant, + string Plan, + string ScannerImageDigest, + string SigningMode, + string ProviderName, + IReadOnlyList Subjects); + +public sealed record DsseEnvelope( + string Payload, + string PayloadType, + IReadOnlyList Signatures); + +public sealed record DsseSignature( + string Signature, + string? KeyId); diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs new file mode 100644 index 00000000..3da0100f --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs @@ -0,0 +1,46 @@ +using System; + +namespace StellaOps.Signer.Core; + +public abstract class SignerException : Exception +{ + protected SignerException(string code, string message) + : base(message) + { + Code = code; + } + + public string Code { get; } +} + +public sealed class SignerValidationException : SignerException +{ + public SignerValidationException(string code, string message) + : base(code, message) + { + } +} + +public sealed class SignerAuthorizationException : SignerException +{ + public SignerAuthorizationException(string code, string message) + : base(code, message) + { + } +} + +public sealed class SignerReleaseVerificationException : SignerException +{ + public SignerReleaseVerificationException(string code, string message) + : base(code, message) + { + } +} + +public sealed class SignerQuotaException : SignerException +{ + public SignerQuotaException(string code, string message) + : base(code, message) + { + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs new file mode 100644 index 00000000..53f057e0 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs @@ -0,0 +1,147 @@ +using System; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Signer.Core; + +public sealed class SignerPipeline : ISignerPipeline +{ + private const string RequiredScope = "signer.sign"; + private const string RequiredAudience = "signer"; + + private readonly IProofOfEntitlementIntrospector _poe; + private readonly IReleaseIntegrityVerifier _releaseVerifier; + private readonly ISignerQuotaService _quotaService; + private readonly IDsseSigner _signer; + private readonly ISignerAuditSink _auditSink; + private readonly TimeProvider _timeProvider; + + public SignerPipeline( + IProofOfEntitlementIntrospector poe, + IReleaseIntegrityVerifier releaseVerifier, + ISignerQuotaService quotaService, + IDsseSigner signer, + ISignerAuditSink auditSink, + TimeProvider timeProvider) + { + _poe = poe ?? throw new ArgumentNullException(nameof(poe)); + _releaseVerifier = releaseVerifier ?? throw new ArgumentNullException(nameof(releaseVerifier)); + _quotaService = quotaService ?? throw new ArgumentNullException(nameof(quotaService)); + _signer = signer ?? 
throw new ArgumentNullException(nameof(signer)); + _auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public async ValueTask SignAsync( + SigningRequest request, + CallerContext caller, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(caller); + + ValidateCaller(caller); + ValidateRequest(request); + + var entitlement = await _poe + .IntrospectAsync(request.ProofOfEntitlement, caller, cancellationToken) + .ConfigureAwait(false); + + if (entitlement.ExpiresAtUtc <= _timeProvider.GetUtcNow()) + { + throw new SignerAuthorizationException("entitlement_denied", "Proof of entitlement is expired."); + } + + var releaseResult = await _releaseVerifier + .VerifyAsync(request.ScannerImageDigest, cancellationToken) + .ConfigureAwait(false); + if (!releaseResult.Trusted) + { + throw new SignerReleaseVerificationException("release_untrusted", "Scanner image digest failed release verification."); + } + + await _quotaService + .EnsureWithinLimitsAsync(request, entitlement, caller, cancellationToken) + .ConfigureAwait(false); + + var bundle = await _signer + .SignAsync(request, entitlement, caller, cancellationToken) + .ConfigureAwait(false); + + var auditId = await _auditSink + .WriteAsync(request, bundle, entitlement, caller, cancellationToken) + .ConfigureAwait(false); + + var outcome = new SigningOutcome( + bundle, + new PolicyCounters(entitlement.Plan, entitlement.MaxArtifactBytes, entitlement.QpsRemaining), + auditId); + return outcome; + } + + private static void ValidateCaller(CallerContext caller) + { + if (string.IsNullOrWhiteSpace(caller.Subject)) + { + throw new SignerAuthorizationException("invalid_caller", "Caller subject is required."); + } + + if (string.IsNullOrWhiteSpace(caller.Tenant)) + { + throw new SignerAuthorizationException("invalid_caller", "Caller tenant is required."); + } + + if (!caller.Scopes.Contains(RequiredScope, StringComparer.OrdinalIgnoreCase)) + { + throw new SignerAuthorizationException("insufficient_scope", $"Scope '{RequiredScope}' is required."); + } + + if (!caller.Audiences.Contains(RequiredAudience, StringComparer.OrdinalIgnoreCase)) + { + throw new SignerAuthorizationException("invalid_audience", $"Audience '{RequiredAudience}' is required."); + } + } + + private static void ValidateRequest(SigningRequest request) + { + if (request.Subjects.Count == 0) + { + throw new SignerValidationException("subject_missing", "At least one subject must be provided."); + } + + foreach (var subject in request.Subjects) + { + if (string.IsNullOrWhiteSpace(subject.Name)) + { + throw new SignerValidationException("subject_invalid", "Subject name is required."); + } + + if (subject.Digest is null || subject.Digest.Count == 0) + { + throw new SignerValidationException("subject_digest_invalid", "Subject digest is required."); + } + } + + if (string.IsNullOrWhiteSpace(request.PredicateType)) + { + throw new SignerValidationException("predicate_type_missing", "Predicate type is required."); + } + + if (request.Predicate is null || request.Predicate.RootElement.ValueKind == JsonValueKind.Undefined) + { + throw new SignerValidationException("predicate_missing", "Predicate payload is required."); + } + + if (string.IsNullOrWhiteSpace(request.ScannerImageDigest)) + { + throw new SignerValidationException("scanner_digest_missing", "Scanner image digest is required."); + } + + if (request.ProofOfEntitlement is null) + { + throw 
new SignerValidationException("poe_missing", "Proof of entitlement is required."); + } + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs new file mode 100644 index 00000000..3cefd203 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; + +namespace StellaOps.Signer.Core; + +public static class SignerStatementBuilder +{ + private const string StatementType = "https://in-toto.io/Statement/v0.1"; + + public static byte[] BuildStatementPayload(SigningRequest request) + { + ArgumentNullException.ThrowIfNull(request); + + var subjects = new List(request.Subjects.Count); + foreach (var subject in request.Subjects) + { + var digest = new SortedDictionary(StringComparer.Ordinal); + foreach (var kvp in subject.Digest) + { + digest[kvp.Key.ToLowerInvariant()] = kvp.Value; + } + + subjects.Add(new + { + name = subject.Name, + digest + }); + } + + var statement = new + { + _type = StatementType, + predicateType = request.PredicateType, + subject = subjects, + predicate = request.Predicate.RootElement.Clone() + }; + + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = null, + WriteIndented = false, + }; + options.Converters.Add(new JsonElementConverter()); + return JsonSerializer.SerializeToUtf8Bytes(statement, options); + } + + private sealed class JsonElementConverter : System.Text.Json.Serialization.JsonConverter + { + public override JsonElement Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + using var document = JsonDocument.ParseValue(ref reader); + return document.RootElement.Clone(); + } + + public override void Write(Utf8JsonWriter writer, JsonElement value, JsonSerializerOptions options) + { + value.WriteTo(writer); + } + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj b/src/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj new file mode 100644 index 00000000..ecc3af66 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj @@ -0,0 +1,9 @@ + + + net10.0 + preview + enable + enable + true + + diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs new file mode 100644 index 00000000..4015d0d0 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Concurrent; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Signer.Core; + +namespace StellaOps.Signer.Infrastructure.Auditing; + +public sealed class InMemorySignerAuditSink : ISignerAuditSink +{ + private readonly ConcurrentDictionary _entries = new(StringComparer.Ordinal); + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public InMemorySignerAuditSink(TimeProvider timeProvider, ILogger logger) + { + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public ValueTask WriteAsync( + SigningRequest request, + SigningBundle bundle, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(bundle); + ArgumentNullException.ThrowIfNull(entitlement); + ArgumentNullException.ThrowIfNull(caller); + + var auditId = Guid.NewGuid().ToString("d"); + var entry = new SignerAuditEntry( + auditId, + _timeProvider.GetUtcNow(), + caller.Subject, + caller.Tenant, + entitlement.Plan, + request.ScannerImageDigest, + bundle.Metadata.Identity.Mode, + bundle.Metadata.ProviderName, + request.Subjects); + + _entries[auditId] = entry; + _logger.LogInformation("Signer audit event {AuditId} recorded for tenant {Tenant}", auditId, caller.Tenant); + return ValueTask.FromResult(auditId); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs new file mode 100644 index 00000000..d97b7947 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs @@ -0,0 +1,16 @@ +using System; + +namespace StellaOps.Signer.Infrastructure.Options; + +public sealed class SignerCryptoOptions +{ + public string KeyId { get; set; } = "signer-kms-default"; + + public string AlgorithmId { get; set; } = "HS256"; + + public string Secret { get; set; } = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes("stellaops-signer-secret")); + + public string ProviderName { get; set; } = "InMemoryHmacProvider"; + + public string Mode { get; set; } = "kms"; +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs new file mode 100644 index 00000000..bdff2fed --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Signer.Infrastructure.Options; + +public sealed class SignerEntitlementOptions +{ + public IDictionary Tokens { get; } = + new Dictionary(StringComparer.Ordinal); +} + +public sealed record SignerEntitlementDefinition( + string LicenseId, + string CustomerId, + string Plan, + int MaxArtifactBytes, + int QpsLimit, + int QpsRemaining, + DateTimeOffset ExpiresAtUtc); diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs new file mode 100644 index 00000000..7de63ed3 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs @@ -0,0 +1,11 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Signer.Infrastructure.Options; + +public sealed class SignerReleaseVerificationOptions +{ + public ISet TrustedScannerDigests { get; } = new HashSet(StringComparer.OrdinalIgnoreCase); + + public string TrustedSigner { get; set; } = "StellaOps Release"; +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs new file mode 100644 index 00000000..a2dea4de --- /dev/null +++ 
b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs @@ -0,0 +1,54 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Options; +using StellaOps.Signer.Core; +using StellaOps.Signer.Infrastructure.Options; + +namespace StellaOps.Signer.Infrastructure.ProofOfEntitlement; + +public sealed class InMemoryProofOfEntitlementIntrospector : IProofOfEntitlementIntrospector +{ + private readonly IOptionsMonitor _options; + private readonly TimeProvider _timeProvider; + + public InMemoryProofOfEntitlementIntrospector( + IOptionsMonitor options, + TimeProvider timeProvider) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public ValueTask IntrospectAsync( + ProofOfEntitlement proof, + CallerContext caller, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(proof); + ArgumentNullException.ThrowIfNull(caller); + + var token = proof.Value ?? string.Empty; + var snapshot = _options.CurrentValue; + if (!snapshot.Tokens.TryGetValue(token, out var definition)) + { + throw new SignerAuthorizationException("entitlement_denied", "Proof of entitlement is invalid or revoked."); + } + + if (definition.ExpiresAtUtc <= _timeProvider.GetUtcNow()) + { + throw new SignerAuthorizationException("entitlement_denied", "Proof of entitlement has expired."); + } + + var result = new ProofOfEntitlementResult( + definition.LicenseId, + definition.CustomerId, + definition.Plan, + definition.MaxArtifactBytes, + definition.QpsLimit, + definition.QpsRemaining, + definition.ExpiresAtUtc); + + return ValueTask.FromResult(result); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs new file mode 100644 index 00000000..193fe509 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Concurrent; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Signer.Core; + +namespace StellaOps.Signer.Infrastructure.Quotas; + +public sealed class InMemoryQuotaService : ISignerQuotaService +{ + private readonly ConcurrentDictionary _windows = new(StringComparer.Ordinal); + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public InMemoryQuotaService(TimeProvider timeProvider, ILogger logger) + { + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public ValueTask EnsureWithinLimitsAsync( + SigningRequest request, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(entitlement); + ArgumentNullException.ThrowIfNull(caller); + + var payloadSize = EstimatePayloadSize(request); + if (payloadSize > entitlement.MaxArtifactBytes) + { + throw new SignerQuotaException("artifact_too_large", $"Artifact size {payloadSize} exceeds plan cap ({entitlement.MaxArtifactBytes})."); + } + + if (entitlement.QpsLimit <= 0) + { + return ValueTask.CompletedTask; + } + + var window = _windows.GetOrAdd(caller.Tenant, static _ => new QuotaWindow()); + lock (window) + { + var now = _timeProvider.GetUtcNow(); + if (window.ResetAt <= now) + { + window.Reset(now, entitlement.QpsLimit); + } + + if (window.Remaining <= 0) + { + _logger.LogWarning("Quota exceeded for tenant {Tenant}", caller.Tenant); + throw new SignerQuotaException("plan_throttled", "Plan QPS limit exceeded."); + } + + window.Remaining--; + window.LastUpdated = now; + } + + return ValueTask.CompletedTask; + } + + private static int EstimatePayloadSize(SigningRequest request) + { + var predicateBytes = request.Predicate is null + ? Array.Empty() + : Encoding.UTF8.GetBytes(request.Predicate.RootElement.GetRawText()); + + var subjectBytes = 0; + foreach (var subject in request.Subjects) + { + subjectBytes += subject.Name.Length; + foreach (var digest in subject.Digest) + { + subjectBytes += digest.Key.Length + digest.Value.Length; + } + } + + return predicateBytes.Length + subjectBytes; + } + + private sealed class QuotaWindow + { + public DateTimeOffset ResetAt { get; private set; } = DateTimeOffset.MinValue; + + public int Remaining { get; set; } + + public DateTimeOffset LastUpdated { get; set; } + + public void Reset(DateTimeOffset now, int limit) + { + ResetAt = now.AddSeconds(1); + Remaining = limit; + LastUpdated = now; + } + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs new file mode 100644 index 00000000..216a92ca --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs @@ -0,0 +1,38 @@ +using System; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Options; +using StellaOps.Signer.Core; +using StellaOps.Signer.Infrastructure.Options; + +namespace StellaOps.Signer.Infrastructure.ReleaseVerification; + +public sealed class DefaultReleaseIntegrityVerifier : IReleaseIntegrityVerifier +{ + private static readonly Regex DigestPattern = new("^sha256:[a-fA-F0-9]{64}$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + + private readonly IOptionsMonitor _options; + + public DefaultReleaseIntegrityVerifier(IOptionsMonitor options) + { + _options = options ?? 
throw new ArgumentNullException(nameof(options)); + } + + public ValueTask VerifyAsync(string scannerImageDigest, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(scannerImageDigest) || !DigestPattern.IsMatch(scannerImageDigest)) + { + throw new SignerReleaseVerificationException("release_digest_invalid", "Scanner image digest must be a valid sha256 string."); + } + + var options = _options.CurrentValue; + if (options.TrustedScannerDigests.Count > 0 && + !options.TrustedScannerDigests.Contains(scannerImageDigest)) + { + return ValueTask.FromResult(new ReleaseVerificationResult(false, null)); + } + + return ValueTask.FromResult(new ReleaseVerificationResult(true, options.TrustedSigner)); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..7165c8a9 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs @@ -0,0 +1,24 @@ +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Signer.Core; +using StellaOps.Signer.Infrastructure.Auditing; +using StellaOps.Signer.Infrastructure.ProofOfEntitlement; +using StellaOps.Signer.Infrastructure.Quotas; +using StellaOps.Signer.Infrastructure.ReleaseVerification; +using StellaOps.Signer.Infrastructure.Signing; + +namespace StellaOps.Signer.Infrastructure; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddSignerPipeline(this IServiceCollection services) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(TimeProvider.System); + return services; + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs new file mode 100644 index 00000000..765ff02d --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Options; +using StellaOps.Signer.Core; +using StellaOps.Signer.Infrastructure.Options; + +namespace StellaOps.Signer.Infrastructure.Signing; + +public sealed class HmacDsseSigner : IDsseSigner +{ + private readonly IOptionsMonitor _options; + private readonly TimeProvider _timeProvider; + + public HmacDsseSigner(IOptionsMonitor options, TimeProvider timeProvider) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + } + + public ValueTask SignAsync( + SigningRequest request, + ProofOfEntitlementResult entitlement, + CallerContext caller, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(entitlement); + ArgumentNullException.ThrowIfNull(caller); + + var options = _options.CurrentValue; + var payloadBytes = SignerStatementBuilder.BuildStatementPayload(request); + + var secretBytes = Convert.FromBase64String(options.Secret); + using var hmac = new HMACSHA256(secretBytes); + var signatureBytes = hmac.ComputeHash(payloadBytes); + var signature = Convert.ToBase64String(signatureBytes); + var payloadBase64 = Convert.ToBase64String(payloadBytes); + + var envelope = new DsseEnvelope( + payloadBase64, + "application/vnd.in-toto+json", + new[] + { + new DsseSignature(signature, options.KeyId), + }); + + var metadata = new SigningMetadata( + new SigningIdentity( + options.Mode, + caller.Subject, + caller.Subject, + _timeProvider.GetUtcNow().AddMinutes(10)), + Array.Empty(), + options.ProviderName, + options.AlgorithmId); + + var bundle = new SigningBundle(envelope, metadata); + return ValueTask.FromResult(bundle); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj new file mode 100644 index 00000000..577ec9ba --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + diff --git a/src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj b/src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj new file mode 100644 index 00000000..c04e55f2 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj @@ -0,0 +1,26 @@ + + + net10.0 + preview + enable + enable + true + false + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs b/src/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs new file mode 100644 index 00000000..5008f3a7 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs @@ -0,0 +1,30 @@ +using System.Collections.Generic; +using System.Text.Json; + +namespace StellaOps.Signer.WebService.Contracts; + +public sealed record SignDsseSubjectDto(string Name, Dictionary Digest); + +public sealed record SignDssePoeDto(string Format, string Value); + +public sealed record SignDsseOptionsDto(string? SigningMode, int? ExpirySeconds, string? ReturnBundle); + +public sealed record SignDsseRequestDto( + List Subject, + string PredicateType, + JsonElement Predicate, + string ScannerImageDigest, + SignDssePoeDto Poe, + SignDsseOptionsDto? Options); + +public sealed record SignDsseResponseDto(SignDsseBundleDto Bundle, SignDssePolicyDto Policy, string AuditId); + +public sealed record SignDsseBundleDto(SignDsseEnvelopeDto Dsse, IReadOnlyList CertificateChain, string Mode, SignDsseIdentityDto SigningIdentity); + +public sealed record SignDsseEnvelopeDto(string PayloadType, string Payload, IReadOnlyList Signatures); + +public sealed record SignDsseSignatureDto(string Signature, string? KeyId); + +public sealed record SignDsseIdentityDto(string Issuer, string Subject, string? 
CertExpiry); + +public sealed record SignDssePolicyDto(string Plan, int MaxArtifactBytes, int QpsRemaining); diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs b/src/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs new file mode 100644 index 00000000..d7aa704e --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs @@ -0,0 +1,245 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Claims; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; +using StellaOps.Signer.Core; +using StellaOps.Signer.WebService.Contracts; + +namespace StellaOps.Signer.WebService.Endpoints; + +public static class SignerEndpoints +{ + public static IEndpointRouteBuilder MapSignerEndpoints(this IEndpointRouteBuilder endpoints) + { + var group = endpoints.MapGroup("/api/v1/signer") + .WithTags("Signer") + .RequireAuthorization(); + + group.MapPost("/sign/dsse", SignDsseAsync); + return endpoints; + } + + private static async Task SignDsseAsync( + HttpContext httpContext, + [FromBody] SignDsseRequestDto requestDto, + ISignerPipeline pipeline, + ILoggerFactory loggerFactory, + CancellationToken cancellationToken) + { + if (requestDto is null) + { + return Results.Problem("Request body is required.", statusCode: StatusCodes.Status400BadRequest); + } + + var logger = loggerFactory.CreateLogger("SignerEndpoints.SignDsse"); + try + { + var caller = BuildCallerContext(httpContext); + ValidateSenderBinding(httpContext, requestDto.Poe, caller); + + using var predicateDocument = JsonDocument.Parse(requestDto.Predicate.GetRawText()); + var signingRequest = new SigningRequest( + ConvertSubjects(requestDto.Subject), + requestDto.PredicateType, + predicateDocument, + requestDto.ScannerImageDigest, + new ProofOfEntitlement( + ParsePoeFormat(requestDto.Poe.Format), + requestDto.Poe.Value), + ConvertOptions(requestDto.Options)); + + var outcome = await pipeline.SignAsync(signingRequest, caller, cancellationToken).ConfigureAwait(false); + var response = ConvertOutcome(outcome); + return Results.Ok(response); + } + catch (SignerValidationException ex) + { + logger.LogWarning(ex, "Validation failure while signing DSSE."); + return Results.Problem(ex.Message, statusCode: StatusCodes.Status400BadRequest, type: ex.Code); + } + catch (SignerAuthorizationException ex) + { + logger.LogWarning(ex, "Authorization failure while signing DSSE."); + return Results.Problem(ex.Message, statusCode: StatusCodes.Status403Forbidden, type: ex.Code); + } + catch (SignerReleaseVerificationException ex) + { + logger.LogWarning(ex, "Release verification failed."); + return Results.Problem(ex.Message, statusCode: StatusCodes.Status403Forbidden, type: ex.Code); + } + catch (SignerQuotaException ex) + { + logger.LogWarning(ex, "Quota enforcement rejected request."); + return Results.Problem(ex.Message, statusCode: StatusCodes.Status429TooManyRequests, type: ex.Code); + } + catch (Exception ex) + { + logger.LogError(ex, "Unexpected error while signing DSSE."); + return Results.Problem("Internal server error.", statusCode: StatusCodes.Status500InternalServerError, type: "signing_unavailable"); + } + } + + private static CallerContext BuildCallerContext(HttpContext context) + { + var user = context.User ?? 
throw new SignerAuthorizationException("invalid_caller", "Caller is not authenticated."); + + string subject = user.FindFirstValue(StellaOpsClaimTypes.Subject) ?? + throw new SignerAuthorizationException("invalid_caller", "Subject claim is required."); + string tenant = user.FindFirstValue(StellaOpsClaimTypes.Tenant) ?? subject; + + var scopes = new HashSet(StringComparer.OrdinalIgnoreCase); + if (user.HasClaim(c => c.Type == StellaOpsClaimTypes.Scope)) + { + foreach (var value in user.FindAll(StellaOpsClaimTypes.Scope)) + { + foreach (var scope in value.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries)) + { + scopes.Add(scope); + } + } + } + + foreach (var scopeClaim in user.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + scopes.Add(scopeClaim.Value); + } + + var audiences = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var audClaim in user.FindAll(StellaOpsClaimTypes.Audience)) + { + if (audClaim.Value.Contains(' ')) + { + foreach (var aud in audClaim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries)) + { + audiences.Add(aud); + } + } + else + { + audiences.Add(audClaim.Value); + } + } + + if (audiences.Count == 0) + { + throw new SignerAuthorizationException("invalid_audience", "Audience claim is required."); + } + + var sender = context.Request.Headers.TryGetValue("DPoP", out var dpop) + ? dpop.ToString() + : null; + + var clientCert = context.Connection.ClientCertificate?.Thumbprint; + + return new CallerContext( + subject, + tenant, + scopes.ToArray(), + audiences.ToArray(), + sender, + clientCert); + } + + private static void ValidateSenderBinding(HttpContext context, SignDssePoeDto poe, CallerContext caller) + { + if (poe is null) + { + throw new SignerValidationException("poe_missing", "Proof of entitlement is required."); + } + + var format = ParsePoeFormat(poe.Format); + if (format == SignerPoEFormat.Jwt) + { + if (string.IsNullOrWhiteSpace(caller.SenderBinding)) + { + throw new SignerAuthorizationException("invalid_token", "DPoP proof is required for JWT PoE."); + } + } + else if (format == SignerPoEFormat.Mtls) + { + if (string.IsNullOrWhiteSpace(caller.ClientCertificateThumbprint)) + { + throw new SignerAuthorizationException("invalid_token", "Client certificate is required for mTLS PoE."); + } + } + } + + private static IReadOnlyList ConvertSubjects(List subjects) + { + if (subjects is null || subjects.Count == 0) + { + throw new SignerValidationException("subject_missing", "At least one subject is required."); + } + + return subjects.Select(subject => + { + if (subject.Digest is null || subject.Digest.Count == 0) + { + throw new SignerValidationException("subject_digest_invalid", $"Digest for subject '{subject.Name}' is required."); + } + + return new SigningSubject(subject.Name, subject.Digest); + }).ToArray(); + } + + private static SigningOptions ConvertOptions(SignDsseOptionsDto? optionsDto) + { + if (optionsDto is null) + { + return new SigningOptions(SigningMode.Kms, null, "dsse+cert"); + } + + var mode = optionsDto.SigningMode switch + { + null or "" => SigningMode.Kms, + "kms" or "KMS" => SigningMode.Kms, + "keyless" or "KEYLESS" => SigningMode.Keyless, + _ => throw new SignerValidationException("signing_mode_invalid", $"Unsupported signing mode '{optionsDto.SigningMode}'."), + }; + + return new SigningOptions(mode, optionsDto.ExpirySeconds, optionsDto.ReturnBundle ?? "dsse+cert"); + } + + private static SignerPoEFormat ParsePoeFormat(string? 
format) + { + return format?.ToLowerInvariant() switch + { + "jwt" => SignerPoEFormat.Jwt, + "mtls" => SignerPoEFormat.Mtls, + _ => throw new SignerValidationException("poe_invalid", $"Unsupported PoE format '{format}'."), + }; + } + + private static SignDsseResponseDto ConvertOutcome(SigningOutcome outcome) + { + var signatures = outcome.Bundle.Envelope.Signatures + .Select(signature => new SignDsseSignatureDto(signature.Signature, signature.KeyId)) + .ToArray(); + + var bundle = new SignDsseBundleDto( + new SignDsseEnvelopeDto( + outcome.Bundle.Envelope.PayloadType, + outcome.Bundle.Envelope.Payload, + signatures), + outcome.Bundle.Metadata.CertificateChain, + outcome.Bundle.Metadata.Identity.Mode, + new SignDsseIdentityDto( + outcome.Bundle.Metadata.Identity.Issuer, + outcome.Bundle.Metadata.Identity.Subject, + outcome.Bundle.Metadata.Identity.ExpiresAtUtc?.ToString("O"))); + + var policy = new SignDssePolicyDto( + outcome.Policy.Plan, + outcome.Policy.MaxArtifactBytes, + outcome.Policy.QpsRemaining); + + return new SignDsseResponseDto(bundle, policy, outcome.AuditId); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs b/src/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs new file mode 100644 index 00000000..fc789d49 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs @@ -0,0 +1,43 @@ +using Microsoft.AspNetCore.Authentication; +using StellaOps.Signer.Infrastructure; +using StellaOps.Signer.Infrastructure.Options; +using StellaOps.Signer.WebService.Endpoints; +using StellaOps.Signer.WebService.Security; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddLogging(); +builder.Services.AddAuthentication(StubBearerAuthenticationDefaults.AuthenticationScheme) + .AddScheme( + StubBearerAuthenticationDefaults.AuthenticationScheme, + _ => { }); + +builder.Services.AddAuthorization(); + +builder.Services.AddSignerPipeline(); +builder.Services.Configure(options => +{ + options.Tokens["valid-poe"] = new SignerEntitlementDefinition( + LicenseId: "LIC-TEST", + CustomerId: "CUST-TEST", + Plan: "pro", + MaxArtifactBytes: 128 * 1024, + QpsLimit: 5, + QpsRemaining: 5, + ExpiresAtUtc: DateTimeOffset.UtcNow.AddHours(1)); +}); +builder.Services.Configure(options => +{ + options.TrustedScannerDigests.Add("sha256:trusted-scanner-digest"); +}); +builder.Services.Configure(_ => { }); + +var app = builder.Build(); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapGet("/", () => Results.Ok("StellaOps Signer service ready.")); +app.MapSignerEndpoints(); + +app.Run(); diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj b/src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj new file mode 100644 index 00000000..52060ff0 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj @@ -0,0 +1,29 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/StellaOps.Signer/StellaOps.Signer.sln b/src/StellaOps.Signer/StellaOps.Signer.sln new file mode 100644 index 00000000..b09bf0a4 --- /dev/null +++ b/src/StellaOps.Signer/StellaOps.Signer.sln @@ -0,0 +1,174 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Core", 
"StellaOps.Signer.Core\StellaOps.Signer.Core.csproj", "{81EB20CC-54DE-4450-9370-92B489B64F19}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Infrastructure", "StellaOps.Signer.Infrastructure\StellaOps.Signer.Infrastructure.csproj", "{AD28F5E8-CF69-4587-B3D2-C2B42935993D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.WebService", "StellaOps.Signer.WebService\StellaOps.Signer.WebService.csproj", "{104C429B-2122-43B5-BE2A-5FC846FEBDC4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\StellaOps.Configuration\StellaOps.Configuration.csproj", "{7A261EB8-60DF-4DD7-83E0-43811B0433B3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{B0E46302-AAC2-409C-AA2F-526F8328C696}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{726F764A-EEE9-4910-8149-42F326E37AF0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{D17E135F-57B9-476A-8ECE-BE081F25E917}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{526A921C-E020-4B7E-A195-29CC6FD1C634}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{BA683E2B-350F-4719-ACF7-1C5C35F5B72F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{EA1037DD-3213-4360-87B8-1129936D89CE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Tests", "StellaOps.Signer.Tests\StellaOps.Signer.Tests.csproj", "{B09322C0-6827-46D6-91AD-D2380BD36F21}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|Any CPU.Build.0 = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|x64.ActiveCfg = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|x64.Build.0 = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|x86.ActiveCfg = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Debug|x86.Build.0 = Debug|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|Any CPU.ActiveCfg = Release|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|Any CPU.Build.0 = Release|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|x64.ActiveCfg = Release|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|x64.Build.0 = Release|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|x86.ActiveCfg = Release|Any CPU + {81EB20CC-54DE-4450-9370-92B489B64F19}.Release|x86.Build.0 = Release|Any CPU + 
{AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|x64.ActiveCfg = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|x64.Build.0 = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|x86.ActiveCfg = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Debug|x86.Build.0 = Debug|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|Any CPU.Build.0 = Release|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|x64.ActiveCfg = Release|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|x64.Build.0 = Release|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|x86.ActiveCfg = Release|Any CPU + {AD28F5E8-CF69-4587-B3D2-C2B42935993D}.Release|x86.Build.0 = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|x64.ActiveCfg = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|x64.Build.0 = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|x86.ActiveCfg = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Debug|x86.Build.0 = Debug|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|Any CPU.Build.0 = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|x64.ActiveCfg = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|x64.Build.0 = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|x86.ActiveCfg = Release|Any CPU + {104C429B-2122-43B5-BE2A-5FC846FEBDC4}.Release|x86.Build.0 = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|x64.ActiveCfg = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|x64.Build.0 = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|x86.ActiveCfg = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Debug|x86.Build.0 = Debug|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|Any CPU.Build.0 = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|x64.ActiveCfg = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|x64.Build.0 = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|x86.ActiveCfg = Release|Any CPU + {7A261EB8-60DF-4DD7-83E0-43811B0433B3}.Release|x86.Build.0 = Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|x64.ActiveCfg = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|x64.Build.0 = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|x86.ActiveCfg = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Debug|x86.Build.0 = Debug|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|Any CPU.Build.0 = Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|x64.ActiveCfg = 
Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|x64.Build.0 = Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|x86.ActiveCfg = Release|Any CPU + {B0E46302-AAC2-409C-AA2F-526F8328C696}.Release|x86.Build.0 = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|x64.ActiveCfg = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|x64.Build.0 = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|x86.ActiveCfg = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Debug|x86.Build.0 = Debug|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|Any CPU.Build.0 = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|x64.ActiveCfg = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|x64.Build.0 = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|x86.ActiveCfg = Release|Any CPU + {726F764A-EEE9-4910-8149-42F326E37AF0}.Release|x86.Build.0 = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|x64.ActiveCfg = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|x64.Build.0 = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|x86.ActiveCfg = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Debug|x86.Build.0 = Debug|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|Any CPU.Build.0 = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|x64.ActiveCfg = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|x64.Build.0 = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|x86.ActiveCfg = Release|Any CPU + {D17E135F-57B9-476A-8ECE-BE081F25E917}.Release|x86.Build.0 = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|Any CPU.Build.0 = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|x64.ActiveCfg = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|x64.Build.0 = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|x86.ActiveCfg = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Debug|x86.Build.0 = Debug|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|Any CPU.ActiveCfg = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|Any CPU.Build.0 = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|x64.ActiveCfg = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|x64.Build.0 = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|x86.ActiveCfg = Release|Any CPU + {526A921C-E020-4B7E-A195-29CC6FD1C634}.Release|x86.Build.0 = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|x64.ActiveCfg = Debug|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|x64.Build.0 = Debug|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|x86.ActiveCfg = Debug|Any CPU + 
{BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Debug|x86.Build.0 = Debug|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|Any CPU.Build.0 = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|x64.ActiveCfg = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|x64.Build.0 = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|x86.ActiveCfg = Release|Any CPU + {BA683E2B-350F-4719-ACF7-1C5C35F5B72F}.Release|x86.Build.0 = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|x64.ActiveCfg = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|x64.Build.0 = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|x86.ActiveCfg = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Debug|x86.Build.0 = Debug|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|Any CPU.Build.0 = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|x64.ActiveCfg = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|x64.Build.0 = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|x86.ActiveCfg = Release|Any CPU + {EA1037DD-3213-4360-87B8-1129936D89CE}.Release|x86.Build.0 = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|x64.ActiveCfg = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|x64.Build.0 = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|x86.ActiveCfg = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Debug|x86.Build.0 = Debug|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|Any CPU.Build.0 = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|x64.ActiveCfg = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|x64.Build.0 = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|x86.ActiveCfg = Release|Any CPU + {B09322C0-6827-46D6-91AD-D2380BD36F21}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Signer/TASKS.md b/src/StellaOps.Signer/TASKS.md new file mode 100644 index 00000000..7bb0fdcb --- /dev/null +++ b/src/StellaOps.Signer/TASKS.md @@ -0,0 +1,11 @@ +# Signer Guild Task Board (UTC 2025-10-19) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SIGNER-API-11-101 | DOING (2025-10-19) | Signer Guild | — | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | ✅ `POST /api/v1/signer/sign/dsse` enforces OpTok audience/scope, DPoP/mTLS binding, PoE introspection, and rejects untrusted scanner digests.
✅ Signing pipeline supports keyless (Fulcio) plus optional KMS modes, returning DSSE bundles + cert metadata; deterministic audits persisted.
✅ Unit/integration tests cover happy path, invalid PoE, untrusted release, Fulcio/KMS failure, and documentation updated in `docs/ARCHITECTURE_SIGNER.md`/API reference. | +| SIGNER-REF-11-102 | DOING (2025-10-19) | Signer Guild | — | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. | ✅ `GET /api/v1/signer/verify/referrers` hits OCI Referrers API, validates cosign signatures against Stella release keys, and hard-fails on ambiguity.
✅ Deterministic cache with policy-aware TTLs and invalidation guards repeated registry load; metrics/logs expose hit/miss/error counters.
✅ Tests simulate trusted/untrusted digests, cache expiry, and registry failures; docs capture usage and quota interplay. | +| SIGNER-QUOTA-11-103 | DOING (2025-10-19) | Signer Guild | — | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. | ✅ Quota middleware derives plan limits from PoE claims, applies per-tenant concurrency/QPS/size caps, and surfaces remaining capacity in responses.
✅ Rate limiter + token bucket state stored in Redis (or equivalent) with deterministic keying and backpressure semantics; overruns emit structured audits.
✅ Observability dashboards/counters added; failure modes (throttle, oversize, burst) covered by tests and documented operator runbook. | + +> Remark (2025-10-19): Wave 0 prerequisites reviewed—none outstanding. SIGNER-API-11-101, SIGNER-REF-11-102, and SIGNER-QUOTA-11-103 moved to DOING for kickoff per EXECPLAN.md. + +> Update status columns (TODO / DOING / DONE / BLOCKED) in tandem with code changes and associated tests. diff --git a/src/StellaOps.UI/TASKS.md b/src/StellaOps.UI/TASKS.md index 96f7f3fd..1f2cfbd6 100644 --- a/src/StellaOps.UI/TASKS.md +++ b/src/StellaOps.UI/TASKS.md @@ -9,3 +9,4 @@ | UI-ATTEST-11-005 | TODO | UI Guild | SIGNER-API-11-101, ATTESTOR-API-11-201 | Attestation visibility (Rekor id, status) on Scan Detail. | UI shows Rekor UUID/status; mock attestation fixtures displayed; tests cover success/failure. | | UI-SCHED-13-005 | TODO | UI Guild | SCHED-WEB-16-101 | Scheduler panel: schedules CRUD, run history, dry-run preview using API/mocks. | Panel functional with mocked endpoints; UX signoff; integration tests added. | | UI-NOTIFY-13-006 | TODO | UI Guild | NOTIFY-WEB-15-101 | Notify panel: channels/rules CRUD, deliveries view, test send integration. | Panel interacts with mocked Notify API; tests cover rule lifecycle; docs updated. | +| UI-POLICY-13-007 | TODO | UI Guild | POLICY-CORE-09-006, SCANNER-WEB-09-103 | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | UI renders new columns/tooltips, accessibility and responsive checks pass, Cypress regression updated with confidence fixtures. | diff --git a/src/StellaOps.Web/.editorconfig b/src/StellaOps.Web/.editorconfig new file mode 100644 index 00000000..59d9a3a3 --- /dev/null +++ b/src/StellaOps.Web/.editorconfig @@ -0,0 +1,16 @@ +# Editor configuration, see https://editorconfig.org +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +insert_final_newline = true +trim_trailing_whitespace = true + +[*.ts] +quote_type = single + +[*.md] +max_line_length = off +trim_trailing_whitespace = false diff --git a/src/StellaOps.Web/.gitignore b/src/StellaOps.Web/.gitignore new file mode 100644 index 00000000..cc7b1413 --- /dev/null +++ b/src/StellaOps.Web/.gitignore @@ -0,0 +1,42 @@ +# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files. + +# Compiled output +/dist +/tmp +/out-tsc +/bazel-out + +# Node +/node_modules +npm-debug.log +yarn-error.log + +# IDEs and editors +.idea/ +.project +.classpath +.c9/ +*.launch +.settings/ +*.sublime-workspace + +# Visual Studio Code +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +.history/* + +# Miscellaneous +/.angular/cache +.sass-cache/ +/connect.lock +/coverage +/libpeerconnection.log +testem.log +/typings + +# System files +.DS_Store +Thumbs.db diff --git a/src/StellaOps.Web/README.md b/src/StellaOps.Web/README.md new file mode 100644 index 00000000..5a869273 --- /dev/null +++ b/src/StellaOps.Web/README.md @@ -0,0 +1,27 @@ +# StellaopsWeb + +This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 17.3.17. + +## Development server + +Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The application will automatically reload if you change any of the source files. + +## Code scaffolding + +Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive|pipe|service|class|guard|interface|enum|module`. 
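To make the scaffolding step concrete, here is a minimal sketch of the component class that `ng generate component trivy-db-settings` would emit under this workspace's defaults (SCSS styles, Angular 17 standalone components). The component name is hypothetical, chosen only to mirror the Trivy DB settings work tracked in TASKS.md; the CLI also creates the matching `.html`, `.scss`, and `.spec.ts` files alongside it.

```typescript
import { Component } from '@angular/core';

// Sketch of typical CLI output, assuming the workspace keeps the Angular 17 standalone defaults.
@Component({
  selector: 'app-trivy-db-settings',                    // "app" prefix per the workspace angular.json
  standalone: true,
  imports: [],
  templateUrl: './trivy-db-settings.component.html',
  styleUrl: './trivy-db-settings.component.scss',       // SCSS per the component schematics in angular.json
})
export class TrivyDbSettingsComponent {}
```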
+ +## Build + +Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. + +## Running unit tests + +Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io). + +## Running end-to-end tests + +Run `ng e2e` to execute the end-to-end tests via a platform of your choice. To use this command, you need to first add a package that implements end-to-end testing capabilities. + +## Further help + +To get more help on the Angular CLI use `ng help` or go check out the [Angular CLI Overview and Command Reference](https://angular.io/cli) page. diff --git a/src/StellaOps.Web/TASKS.md b/src/StellaOps.Web/TASKS.md index 65306a0d..d27c773e 100644 --- a/src/StellaOps.Web/TASKS.md +++ b/src/StellaOps.Web/TASKS.md @@ -2,4 +2,4 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| WEB1.TRIVY-SETTINGS | TODO | UX Specialist, Angular Eng | Backend `/exporters/trivy-db` contract | Implement Trivy DB exporter settings panel with `publishFull`, `publishDelta`, `includeFull`, `includeDelta` toggles and “Run export now” action using future `/exporters/trivy-db/settings` API. | ✅ Panel wired to mocked API; ✅ Overrides persisted via settings endpoint; ✅ Manual run button reuses overrides. | +| WEB1.TRIVY-SETTINGS | DOING (2025-10-19) | UX Specialist, Angular Eng | Backend `/exporters/trivy-db` contract | Implement Trivy DB exporter settings panel with `publishFull`, `publishDelta`, `includeFull`, `includeDelta` toggles and “Run export now” action using future `/exporters/trivy-db/settings` API. | ✅ Panel wired to mocked API; ✅ Overrides persisted via settings endpoint; ✅ Manual run button reuses overrides. | diff --git a/src/StellaOps.Web/angular.json b/src/StellaOps.Web/angular.json new file mode 100644 index 00000000..3b1163bb --- /dev/null +++ b/src/StellaOps.Web/angular.json @@ -0,0 +1,101 @@ +{ + "$schema": "./node_modules/@angular/cli/lib/config/schema.json", + "version": 1, + "newProjectRoot": "projects", + "projects": { + "stellaops-web": { + "projectType": "application", + "schematics": { + "@schematics/angular:component": { + "style": "scss" + } + }, + "root": "", + "sourceRoot": "src", + "prefix": "app", + "architect": { + "build": { + "builder": "@angular-devkit/build-angular:application", + "options": { + "outputPath": "dist/stellaops-web", + "index": "src/index.html", + "browser": "src/main.ts", + "polyfills": [ + "zone.js" + ], + "tsConfig": "tsconfig.app.json", + "inlineStyleLanguage": "scss", + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "styles": [ + "src/styles.scss" + ], + "scripts": [] + }, + "configurations": { + "production": { + "budgets": [ + { + "type": "initial", + "maximumWarning": "500kb", + "maximumError": "1mb" + }, + { + "type": "anyComponentStyle", + "maximumWarning": "2kb", + "maximumError": "4kb" + } + ], + "outputHashing": "all" + }, + "development": { + "optimization": false, + "extractLicenses": false, + "sourceMap": true + } + }, + "defaultConfiguration": "production" + }, + "serve": { + "builder": "@angular-devkit/build-angular:dev-server", + "configurations": { + "production": { + "buildTarget": "stellaops-web:build:production" + }, + "development": { + "buildTarget": "stellaops-web:build:development" + } + }, + "defaultConfiguration": "development" + }, + "extract-i18n": { + "builder": "@angular-devkit/build-angular:extract-i18n", + "options": { + "buildTarget": "stellaops-web:build" + } + }, + "test": { + 
"builder": "@angular-devkit/build-angular:karma", + "options": { + "polyfills": [ + "zone.js", + "zone.js/testing" + ], + "tsConfig": "tsconfig.spec.json", + "inlineStyleLanguage": "scss", + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "styles": [ + "src/styles.scss" + ], + "scripts": [] + } + } + } + } + } +} diff --git a/src/StellaOps.Web/docs/TrivyDbSettings.md b/src/StellaOps.Web/docs/TrivyDbSettings.md new file mode 100644 index 00000000..1c8eef31 --- /dev/null +++ b/src/StellaOps.Web/docs/TrivyDbSettings.md @@ -0,0 +1,37 @@ +# WEB1.TRIVY-SETTINGS – Backend Contract & UI Wiring Notes + +## 1. Known backend surfaces + +- `POST /jobs/export:trivy-db` + Payload is wrapped as `{ "trigger": "", "parameters": { ... } }` and accepts the overrides shown in `TrivyDbExportJob` (`publishFull`, `publishDelta`, `includeFull`, `includeDelta`). + Evidence: `src/StellaOps.Cli/Commands/CommandHandlers.cs:263`, `src/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs:5`, `src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs:27`. +- Export configuration defaults sit under `TrivyDbExportOptions.Oras` and `.OfflineBundle`. Both booleans default to `true`, so overriding to `false` must be explicit. + Evidence: `src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs:8`. + +## 2. Clarifications needed from Concelier backend + +| Topic | Questions to resolve | Suggested owner | +| --- | --- | --- | +| Settings endpoint surface | `Program.cs` only exposes `/jobs/*` and health endpoints—there is currently **no** `/exporters/trivy-db/settings` route. Confirm the intended path (`/api/v1/concelier/exporters/trivy-db/settings`?), verbs (`GET`/`PUT` or `PATCH`), and DTO schema (flat booleans vs nested `oras`/`offlineBundle`). | Concelier WebService | +| Auth scopes | Verify required roles (likely `concelier.export` or `concelier.admin`) and whether UI needs to request additional scopes beyond existing dashboard access. | Authority & Concelier teams | +| Concurrency control | Determine if settings payload includes an ETag or timestamp we must echo (`If-Match`) to avoid stomping concurrent edits. | Concelier WebService | +| Validation & defaults | Clarify server-side validation rules (e.g., must `publishDelta` be `false` when `publishFull` is `false`?) and shape of Problem+JSON responses. | Concelier WebService | +| Manual run trigger | Confirm whether settings update should immediately kick an export or if UI should call `POST /jobs/export:trivy-db` separately (current CLI behaviour suggests a separate call). | Concelier WebService | + +## 3. Proposed Angular implementation (pending contract lock) + +- **Feature module**: `app/concelier/trivy-db-settings/` with a standalone routed page (`TrivyDbSettingsPage`) and a reusable form component (`TrivyDbSettingsForm`). +- **State & transport**: + - Client wrapper under `core/api/concelier-exporter.client.ts` exposing `getTrivyDbSettings`, `updateTrivyDbSettings`, and `runTrivyDbExport`. + - Store built with `@ngrx/signals` keeping `settings`, `isDirty`, `lastFetchedAt`, and error state; optimistic updates gated on ETag confirmation once the backend specifies the shape. + - Shared DTOs generated from the confirmed schema to keep Concelier/CLI alignment. +- **UX flow**: + - Load settings on navigation; show inline info about current publish/bundle defaults. + - “Run export now” button opens confirmation modal summarising overrides, then calls `runTrivyDbExport` (separate API call) while reusing local state. 
+ - Surface Problem+JSON errors via existing toast/notification pattern and capture correlation IDs for ops visibility. +- **Offline posture**: cache latest successful settings payload in IndexedDB (read-only when offline) and disable the run button when token/scopes are missing. + +## 4. Next steps + +1. Share section 2 with Concelier WebService owners to confirm the REST contract (blocking before scaffolding DTOs). +2. Once confirmed, scaffold the Angular workspace and feature shell, keeping deterministic build outputs per `src/StellaOps.Web/AGENTS.md`. diff --git a/src/StellaOps.Web/package.json b/src/StellaOps.Web/package.json new file mode 100644 index 00000000..c7b0338f --- /dev/null +++ b/src/StellaOps.Web/package.json @@ -0,0 +1,38 @@ +{ + "name": "stellaops-web", + "version": "0.0.0", + "scripts": { + "ng": "ng", + "start": "ng serve", + "build": "ng build", + "watch": "ng build --watch --configuration development", + "test": "ng test" + }, + "private": true, + "dependencies": { + "@angular/animations": "^17.3.0", + "@angular/common": "^17.3.0", + "@angular/compiler": "^17.3.0", + "@angular/core": "^17.3.0", + "@angular/forms": "^17.3.0", + "@angular/platform-browser": "^17.3.0", + "@angular/platform-browser-dynamic": "^17.3.0", + "@angular/router": "^17.3.0", + "rxjs": "~7.8.0", + "tslib": "^2.3.0", + "zone.js": "~0.14.3" + }, + "devDependencies": { + "@angular-devkit/build-angular": "^17.3.17", + "@angular/cli": "^17.3.17", + "@angular/compiler-cli": "^17.3.0", + "@types/jasmine": "~5.1.0", + "jasmine-core": "~5.1.0", + "karma": "~6.4.0", + "karma-chrome-launcher": "~3.2.0", + "karma-coverage": "~2.2.0", + "karma-jasmine": "~5.1.0", + "karma-jasmine-html-reporter": "~2.1.0", + "typescript": "~5.4.2" + } +} diff --git a/src/StellaOps.Web/src/app/app.component.html b/src/StellaOps.Web/src/app/app.component.html new file mode 100644 index 00000000..36093e18 --- /dev/null +++ b/src/StellaOps.Web/src/app/app.component.html @@ -0,0 +1,336 @@ + + + + + + + + + + + +
+
+
+ +

+    <h1>Hello, {{ title }}</h1>

+

+    <p>Congratulations! Your app is running. 🎉</p>

+
+ +
+
+ @for (item of [ + { title: 'Explore the Docs', link: 'https://angular.dev' }, + { title: 'Learn with Tutorials', link: 'https://angular.dev/tutorials' }, + { title: 'CLI Docs', link: 'https://angular.dev/tools/cli' }, + { title: 'Angular Language Service', link: 'https://angular.dev/tools/language-service' }, + { title: 'Angular DevTools', link: 'https://angular.dev/tools/devtools' }, + ]; track item.title) { + + {{ item.title }} + + + + + } +
+ +
+
+
+ + + + + + + + + + + diff --git a/src/StellaOps.Web/src/app/app.component.scss b/src/StellaOps.Web/src/app/app.component.scss new file mode 100644 index 00000000..e69de29b diff --git a/src/StellaOps.Web/src/app/app.component.spec.ts b/src/StellaOps.Web/src/app/app.component.spec.ts new file mode 100644 index 00000000..eb19a66d --- /dev/null +++ b/src/StellaOps.Web/src/app/app.component.spec.ts @@ -0,0 +1,29 @@ +import { TestBed } from '@angular/core/testing'; +import { AppComponent } from './app.component'; + +describe('AppComponent', () => { + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [AppComponent], + }).compileComponents(); + }); + + it('should create the app', () => { + const fixture = TestBed.createComponent(AppComponent); + const app = fixture.componentInstance; + expect(app).toBeTruthy(); + }); + + it(`should have the 'stellaops-web' title`, () => { + const fixture = TestBed.createComponent(AppComponent); + const app = fixture.componentInstance; + expect(app.title).toEqual('stellaops-web'); + }); + + it('should render title', () => { + const fixture = TestBed.createComponent(AppComponent); + fixture.detectChanges(); + const compiled = fixture.nativeElement as HTMLElement; + expect(compiled.querySelector('h1')?.textContent).toContain('Hello, stellaops-web'); + }); +}); diff --git a/src/StellaOps.Web/src/app/app.component.ts b/src/StellaOps.Web/src/app/app.component.ts new file mode 100644 index 00000000..038cbddc --- /dev/null +++ b/src/StellaOps.Web/src/app/app.component.ts @@ -0,0 +1,13 @@ +import { Component } from '@angular/core'; +import { RouterOutlet } from '@angular/router'; + +@Component({ + selector: 'app-root', + standalone: true, + imports: [RouterOutlet], + templateUrl: './app.component.html', + styleUrl: './app.component.scss' +}) +export class AppComponent { + title = 'stellaops-web'; +} diff --git a/src/StellaOps.Web/src/app/app.config.ts b/src/StellaOps.Web/src/app/app.config.ts new file mode 100644 index 00000000..6c6ef603 --- /dev/null +++ b/src/StellaOps.Web/src/app/app.config.ts @@ -0,0 +1,8 @@ +import { ApplicationConfig } from '@angular/core'; +import { provideRouter } from '@angular/router'; + +import { routes } from './app.routes'; + +export const appConfig: ApplicationConfig = { + providers: [provideRouter(routes)] +}; diff --git a/src/StellaOps.Web/src/app/app.routes.ts b/src/StellaOps.Web/src/app/app.routes.ts new file mode 100644 index 00000000..dc39edb5 --- /dev/null +++ b/src/StellaOps.Web/src/app/app.routes.ts @@ -0,0 +1,3 @@ +import { Routes } from '@angular/router'; + +export const routes: Routes = []; diff --git a/src/StellaOps.Web/src/assets/.gitkeep b/src/StellaOps.Web/src/assets/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/src/StellaOps.Web/src/favicon.ico b/src/StellaOps.Web/src/favicon.ico new file mode 100644 index 00000000..57614f9c Binary files /dev/null and b/src/StellaOps.Web/src/favicon.ico differ diff --git a/src/StellaOps.Web/src/index.html b/src/StellaOps.Web/src/index.html new file mode 100644 index 00000000..77a633f0 --- /dev/null +++ b/src/StellaOps.Web/src/index.html @@ -0,0 +1,13 @@ + + + + + StellaopsWeb + + + + + + + + diff --git a/src/StellaOps.Web/src/main.ts b/src/StellaOps.Web/src/main.ts new file mode 100644 index 00000000..35b00f34 --- /dev/null +++ b/src/StellaOps.Web/src/main.ts @@ -0,0 +1,6 @@ +import { bootstrapApplication } from '@angular/platform-browser'; +import { appConfig } from './app/app.config'; +import { AppComponent } from './app/app.component'; + 
+bootstrapApplication(AppComponent, appConfig) + .catch((err) => console.error(err)); diff --git a/src/StellaOps.Web/src/styles.scss b/src/StellaOps.Web/src/styles.scss new file mode 100644 index 00000000..90d4ee00 --- /dev/null +++ b/src/StellaOps.Web/src/styles.scss @@ -0,0 +1 @@ +/* You can add global styles to this file, and also import other style files */ diff --git a/src/StellaOps.Web/tsconfig.app.json b/src/StellaOps.Web/tsconfig.app.json new file mode 100644 index 00000000..374cc9d2 --- /dev/null +++ b/src/StellaOps.Web/tsconfig.app.json @@ -0,0 +1,14 @@ +/* To learn more about this file see: https://angular.io/config/tsconfig. */ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./out-tsc/app", + "types": [] + }, + "files": [ + "src/main.ts" + ], + "include": [ + "src/**/*.d.ts" + ] +} diff --git a/src/StellaOps.Web/tsconfig.json b/src/StellaOps.Web/tsconfig.json new file mode 100644 index 00000000..eb49734a --- /dev/null +++ b/src/StellaOps.Web/tsconfig.json @@ -0,0 +1,32 @@ +/* To learn more about this file see: https://angular.io/config/tsconfig. */ +{ + "compileOnSave": false, + "compilerOptions": { + "outDir": "./dist/out-tsc", + "strict": true, + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "esModuleInterop": true, + "sourceMap": true, + "declaration": false, + "experimentalDecorators": true, + "moduleResolution": "node", + "importHelpers": true, + "target": "ES2022", + "module": "ES2022", + "useDefineForClassFields": false, + "lib": [ + "ES2022", + "dom" + ] + }, + "angularCompilerOptions": { + "enableI18nLegacyMessageIdFormat": false, + "strictInjectionParameters": true, + "strictInputAccessModifiers": true, + "strictTemplates": true + } +} diff --git a/src/StellaOps.Web/tsconfig.spec.json b/src/StellaOps.Web/tsconfig.spec.json new file mode 100644 index 00000000..be7e9da7 --- /dev/null +++ b/src/StellaOps.Web/tsconfig.spec.json @@ -0,0 +1,14 @@ +/* To learn more about this file see: https://angular.io/config/tsconfig. 
*/ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./out-tsc/spec", + "types": [ + "jasmine" + ] + }, + "include": [ + "src/**/*.spec.ts", + "src/**/*.d.ts" + ] +} diff --git a/src/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs b/src/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs new file mode 100644 index 00000000..4e5e5dc0 --- /dev/null +++ b/src/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs @@ -0,0 +1,66 @@ +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Zastava.Core.Tests.Contracts; + +public sealed class ZastavaContractVersionsTests +{ + [Theory] + [InlineData("zastava.runtime.event@v1", "zastava.runtime.event", 1, 0)] + [InlineData("zastava.runtime.event@v1.0", "zastava.runtime.event", 1, 0)] + [InlineData("zastava.admission.decision@v1.2", "zastava.admission.decision", 1, 2)] + public void TryParse_ParsesCanonicalForms(string input, string schema, int major, int minor) + { + var success = ZastavaContractVersions.ContractVersion.TryParse(input, out var contract); + + Assert.True(success); + Assert.Equal(schema, contract.Schema); + Assert.Equal(new Version(major, minor), contract.Version); + Assert.Equal($"{schema}@v{major}.{minor}", contract.ToString()); + } + + [Theory] + [InlineData("")] + [InlineData("zastava.runtime.event")] + [InlineData("runtime@1.0")] + [InlineData("zastava.runtime.event@vinvalid")] + public void TryParse_InvalidInputs_ReturnsFalse(string input) + { + var success = ZastavaContractVersions.ContractVersion.TryParse(input, out _); + + Assert.False(success); + } + + [Fact] + public void IsRuntimeEventSupported_RespectsMajorCompatibility() + { + Assert.True(ZastavaContractVersions.IsRuntimeEventSupported("zastava.runtime.event@v1")); + Assert.True(ZastavaContractVersions.IsRuntimeEventSupported("zastava.runtime.event@v1.0")); + Assert.False(ZastavaContractVersions.IsRuntimeEventSupported("zastava.runtime.event@v2.0")); + Assert.False(ZastavaContractVersions.IsRuntimeEventSupported("zastava.admission.decision@v1")); + } + + [Fact] + public void NegotiateRuntimeEvent_PicksHighestCommonVersion() + { + var negotiated = ZastavaContractVersions.NegotiateRuntimeEvent(new[] + { + "zastava.runtime.event@v1.0", + "zastava.runtime.event@v0.9", + "zastava.admission.decision@v1" + }); + + Assert.Equal("zastava.runtime.event@v1.0", negotiated.ToString()); + } + + [Fact] + public void NegotiateRuntimeEvent_FallsBackToLocalWhenNoMatch() + { + var negotiated = ZastavaContractVersions.NegotiateRuntimeEvent(new[] + { + "zastava.runtime.event@v2.0", + "zastava.admission.decision@v2.0" + }); + + Assert.Equal(ZastavaContractVersions.RuntimeEvent.ToString(), negotiated.ToString()); + } +} diff --git a/src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs b/src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs new file mode 100644 index 00000000..6044a357 --- /dev/null +++ b/src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs @@ -0,0 +1,204 @@ +using System.Text; +using StellaOps.Zastava.Core.Contracts; +using StellaOps.Zastava.Core.Hashing; +using StellaOps.Zastava.Core.Serialization; + +namespace StellaOps.Zastava.Core.Tests.Serialization; + +public sealed class ZastavaCanonicalJsonSerializerTests +{ + [Fact] + public void Serialize_RuntimeEventEnvelope_ProducesDeterministicOrdering() + { + var runtimeEvent = new RuntimeEvent + { + EventId = "evt-123", + When = 
DateTimeOffset.Parse("2025-10-19T12:34:56Z"), + Kind = RuntimeEventKind.ContainerStart, + Tenant = "tenant-01", + Node = "node-a", + Runtime = new RuntimeEngine + { + Engine = "containerd", + Version = "1.7.19" + }, + Workload = new RuntimeWorkload + { + Platform = "kubernetes", + Namespace = "payments", + Pod = "api-7c9fbbd8b7-ktd84", + Container = "api", + ContainerId = "containerd://abc", + ImageRef = "ghcr.io/acme/api@sha256:abcd", + Owner = new RuntimeWorkloadOwner + { + Kind = "Deployment", + Name = "api" + } + }, + Process = new RuntimeProcess + { + Pid = 12345, + Entrypoint = new[] { "/entrypoint.sh", "--serve" }, + EntryTrace = new[] + { + new RuntimeEntryTrace + { + File = "/entrypoint.sh", + Line = 3, + Op = "exec", + Target = "/usr/bin/python3" + } + } + }, + LoadedLibraries = new[] + { + new RuntimeLoadedLibrary + { + Path = "/lib/x86_64-linux-gnu/libssl.so.3", + Inode = 123456, + Sha256 = "abc123" + } + }, + Posture = new RuntimePosture + { + ImageSigned = true, + SbomReferrer = "present", + Attestation = new RuntimeAttestation + { + Uuid = "rekor-uuid", + Verified = true + } + }, + Delta = new RuntimeDelta + { + BaselineImageDigest = "sha256:abcd", + ChangedFiles = new[] { "/opt/app/server.py" }, + NewBinaries = new[] + { + new RuntimeNewBinary + { + Path = "/usr/local/bin/helper", + Sha256 = "def456" + } + } + }, + Evidence = new[] + { + new RuntimeEvidence + { + Signal = "procfs.maps", + Value = "/lib/.../libssl.so.3@0x7f..." + } + }, + Annotations = new Dictionary + { + ["source"] = "unit-test" + } + }; + + var envelope = RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent); + var json = ZastavaCanonicalJsonSerializer.Serialize(envelope); + + var expectedOrder = new[] + { + "\"schemaVersion\"", + "\"event\"", + "\"eventId\"", + "\"when\"", + "\"kind\"", + "\"tenant\"", + "\"node\"", + "\"runtime\"", + "\"engine\"", + "\"version\"", + "\"workload\"", + "\"platform\"", + "\"namespace\"", + "\"pod\"", + "\"container\"", + "\"containerId\"", + "\"imageRef\"", + "\"owner\"", + "\"kind\"", + "\"name\"", + "\"process\"", + "\"pid\"", + "\"entrypoint\"", + "\"entryTrace\"", + "\"loadedLibs\"", + "\"posture\"", + "\"imageSigned\"", + "\"sbomReferrer\"", + "\"attestation\"", + "\"uuid\"", + "\"verified\"", + "\"delta\"", + "\"baselineImageDigest\"", + "\"changedFiles\"", + "\"newBinaries\"", + "\"path\"", + "\"sha256\"", + "\"evidence\"", + "\"signal\"", + "\"value\"", + "\"annotations\"", + "\"source\"" + }; + + var cursor = -1; + foreach (var token in expectedOrder) + { + var position = json.IndexOf(token, cursor + 1, StringComparison.Ordinal); + Assert.True(position > cursor, $"Property token {token} not found in the expected order."); + cursor = position; + } + + Assert.DoesNotContain(" ", json, StringComparison.Ordinal); + Assert.StartsWith("{\"schemaVersion\"", json, StringComparison.Ordinal); + Assert.EndsWith("}}", json, StringComparison.Ordinal); + } + + [Fact] + public void ComputeMultihash_ProducesStableBase64UrlDigest() + { + var decision = AdmissionDecisionEnvelope.Create( + new AdmissionDecision + { + AdmissionId = "admission-123", + Namespace = "payments", + PodSpecDigest = "sha256:deadbeef", + Images = new[] + { + new AdmissionImageVerdict + { + Name = "ghcr.io/acme/api:1.2.3", + Resolved = "ghcr.io/acme/api@sha256:abcd", + Signed = true, + HasSbomReferrers = true, + PolicyVerdict = PolicyVerdict.Pass, + Reasons = Array.Empty(), + Rekor = new AdmissionRekorEvidence + { + Uuid = "xyz", + Verified = true + } + } + }, + Decision = 
AdmissionDecisionOutcome.Allow, + TtlSeconds = 300 + }, + ZastavaContractVersions.AdmissionDecision); + + var canonicalJson = ZastavaCanonicalJsonSerializer.Serialize(decision); + var expectedDigestBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson)); + var expected = $"sha256-{Convert.ToBase64String(expectedDigestBytes).TrimEnd('=').Replace('+', '-').Replace('/', '_')}"; + + var hash = ZastavaHashing.ComputeMultihash(decision); + + Assert.Equal(expected, hash); + + var sha512 = ZastavaHashing.ComputeMultihash(Encoding.UTF8.GetBytes(canonicalJson), "sha512"); + Assert.StartsWith("sha512-", sha512, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj b/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj new file mode 100644 index 00000000..6c61ec9f --- /dev/null +++ b/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj @@ -0,0 +1,14 @@ + + + net10.0 + preview + enable + enable + + + + + + + + diff --git a/src/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs b/src/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs new file mode 100644 index 00000000..a95764fb --- /dev/null +++ b/src/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs @@ -0,0 +1,86 @@ +namespace StellaOps.Zastava.Core.Contracts; + +/// +/// Envelope returned by the admission webhook to the Kubernetes API server. +/// +public sealed record class AdmissionDecisionEnvelope +{ + public required string SchemaVersion { get; init; } + + public required AdmissionDecision Decision { get; init; } + + public static AdmissionDecisionEnvelope Create(AdmissionDecision decision, ZastavaContractVersions.ContractVersion contract) + { + ArgumentNullException.ThrowIfNull(decision); + return new AdmissionDecisionEnvelope + { + SchemaVersion = contract.ToString(), + Decision = decision + }; + } + + public bool IsSupported() + => ZastavaContractVersions.IsAdmissionDecisionSupported(SchemaVersion); +} + +/// +/// Canonical admission decision payload. +/// +public sealed record class AdmissionDecision +{ + public required string AdmissionId { get; init; } + + [JsonPropertyName("namespace")] + public required string Namespace { get; init; } + + public required string PodSpecDigest { get; init; } + + public IReadOnlyList Images { get; init; } = Array.Empty(); + + public required AdmissionDecisionOutcome Decision { get; init; } + + public int TtlSeconds { get; init; } + + public IReadOnlyDictionary? Annotations { get; init; } +} + +public enum AdmissionDecisionOutcome +{ + Allow, + Deny +} + +public sealed record class AdmissionImageVerdict +{ + public required string Name { get; init; } + + public required string Resolved { get; init; } + + public bool Signed { get; init; } + + [JsonPropertyName("hasSbomReferrers")] + public bool HasSbomReferrers { get; init; } + + public PolicyVerdict PolicyVerdict { get; init; } + + public IReadOnlyList Reasons { get; init; } = Array.Empty(); + + public AdmissionRekorEvidence? Rekor { get; init; } + + public IReadOnlyDictionary? Metadata { get; init; } +} + +public enum PolicyVerdict +{ + Pass, + Warn, + Fail, + Error +} + +public sealed record class AdmissionRekorEvidence +{ + public string? Uuid { get; init; } + + public bool? 
Verified { get; init; } +} diff --git a/src/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs b/src/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs new file mode 100644 index 00000000..7d9402a6 --- /dev/null +++ b/src/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs @@ -0,0 +1,179 @@ +namespace StellaOps.Zastava.Core.Contracts; + +/// +/// Envelope published by the observer towards Scanner runtime ingestion. +/// +public sealed record class RuntimeEventEnvelope +{ + /// + /// Contract identifier consumed by negotiation logic (zastava.runtime.event@v1). + /// + public required string SchemaVersion { get; init; } + + /// + /// Runtime event payload. + /// + public required RuntimeEvent Event { get; init; } + + /// + /// Creates an envelope using the provided runtime contract version. + /// + public static RuntimeEventEnvelope Create(RuntimeEvent runtimeEvent, ZastavaContractVersions.ContractVersion contract) + { + ArgumentNullException.ThrowIfNull(runtimeEvent); + return new RuntimeEventEnvelope + { + SchemaVersion = contract.ToString(), + Event = runtimeEvent + }; + } + + /// + /// Checks whether the envelope schema is supported by the current runtime. + /// + public bool IsSupported() + => ZastavaContractVersions.IsRuntimeEventSupported(SchemaVersion); +} + +/// +/// Canonical runtime event emitted by the observer. +/// +public sealed record class RuntimeEvent +{ + public required string EventId { get; init; } + + public required DateTimeOffset When { get; init; } + + public required RuntimeEventKind Kind { get; init; } + + public required string Tenant { get; init; } + + public required string Node { get; init; } + + public required RuntimeEngine Runtime { get; init; } + + public required RuntimeWorkload Workload { get; init; } + + public RuntimeProcess? Process { get; init; } + + [JsonPropertyName("loadedLibs")] + public IReadOnlyList LoadedLibraries { get; init; } = Array.Empty(); + + public RuntimePosture? Posture { get; init; } + + public RuntimeDelta? Delta { get; init; } + + public IReadOnlyList Evidence { get; init; } = Array.Empty(); + + public IReadOnlyDictionary? Annotations { get; init; } +} + +public enum RuntimeEventKind +{ + ContainerStart, + ContainerStop, + Drift, + PolicyViolation, + AttestationStatus +} + +public sealed record class RuntimeEngine +{ + public required string Engine { get; init; } + + public string? Version { get; init; } +} + +public sealed record class RuntimeWorkload +{ + public required string Platform { get; init; } + + [JsonPropertyName("namespace")] + public string? Namespace { get; init; } + + public string? Pod { get; init; } + + public string? Container { get; init; } + + public string? ContainerId { get; init; } + + public string? ImageRef { get; init; } + + public RuntimeWorkloadOwner? Owner { get; init; } +} + +public sealed record class RuntimeWorkloadOwner +{ + public string? Kind { get; init; } + + public string? Name { get; init; } +} + +public sealed record class RuntimeProcess +{ + public int Pid { get; init; } + + public IReadOnlyList Entrypoint { get; init; } = Array.Empty(); + + [JsonPropertyName("entryTrace")] + public IReadOnlyList EntryTrace { get; init; } = Array.Empty(); +} + +public sealed record class RuntimeEntryTrace +{ + public string? File { get; init; } + + public int? Line { get; init; } + + public string? Op { get; init; } + + public string? Target { get; init; } +} + +public sealed record class RuntimeLoadedLibrary +{ + public required string Path { get; init; } + + public long? Inode { get; init; } + + public string? 
Sha256 { get; init; } +} + +public sealed record class RuntimePosture +{ + public bool? ImageSigned { get; init; } + + public string? SbomReferrer { get; init; } + + public RuntimeAttestation? Attestation { get; init; } +} + +public sealed record class RuntimeAttestation +{ + public string? Uuid { get; init; } + + public bool? Verified { get; init; } +} + +public sealed record class RuntimeDelta +{ + public string? BaselineImageDigest { get; init; } + + public IReadOnlyList ChangedFiles { get; init; } = Array.Empty(); + + public IReadOnlyList NewBinaries { get; init; } = Array.Empty(); +} + +public sealed record class RuntimeNewBinary +{ + public required string Path { get; init; } + + public string? Sha256 { get; init; } +} + +public sealed record class RuntimeEvidence +{ + public required string Signal { get; init; } + + public string? Value { get; init; } +} diff --git a/src/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs b/src/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs new file mode 100644 index 00000000..cb234d62 --- /dev/null +++ b/src/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs @@ -0,0 +1,173 @@ +namespace StellaOps.Zastava.Core.Contracts; + +/// +/// Centralises schema identifiers and version negotiation rules for Zastava contracts. +/// +public static class ZastavaContractVersions +{ + /// + /// Current local runtime event contract (major version 1). + /// + public static ContractVersion RuntimeEvent { get; } = new("zastava.runtime.event", new Version(1, 0)); + + /// + /// Current local admission decision contract (major version 1). + /// + public static ContractVersion AdmissionDecision { get; } = new("zastava.admission.decision", new Version(1, 0)); + + /// + /// Determines whether the provided schema string is supported for runtime events. + /// + public static bool IsRuntimeEventSupported(string schemaVersion) + => ContractVersion.TryParse(schemaVersion, out var candidate) && candidate.IsCompatibleWith(RuntimeEvent); + + /// + /// Determines whether the provided schema string is supported for admission decisions. + /// + public static bool IsAdmissionDecisionSupported(string schemaVersion) + => ContractVersion.TryParse(schemaVersion, out var candidate) && candidate.IsCompatibleWith(AdmissionDecision); + + /// + /// Selects the newest runtime event contract shared between the local implementation and a remote peer. + /// + public static ContractVersion NegotiateRuntimeEvent(IEnumerable offeredSchemaVersions) + => Negotiate(RuntimeEvent, offeredSchemaVersions); + + /// + /// Selects the newest admission decision contract shared between the local implementation and a remote peer. + /// + public static ContractVersion NegotiateAdmissionDecision(IEnumerable offeredSchemaVersions) + => Negotiate(AdmissionDecision, offeredSchemaVersions); + + private static ContractVersion Negotiate(ContractVersion local, IEnumerable offered) + { + ArgumentNullException.ThrowIfNull(offered); + + ContractVersion? best = null; + foreach (var entry in offered) + { + if (!ContractVersion.TryParse(entry, out var candidate)) + { + continue; + } + + if (!candidate.Schema.Equals(local.Schema, StringComparison.Ordinal)) + { + continue; + } + + if (candidate.Version.Major != local.Version.Major) + { + continue; + } + + if (candidate.Version > local.Version) + { + continue; + } + + if (best is null || candidate.Version > best.Value.Version) + { + best = candidate; + } + } + + return best ?? 
local; + } + + /// + /// Represents a schema + semantic version pairing in canonical form. + /// + public readonly record struct ContractVersion + { + public ContractVersion(string schema, Version version) + { + if (string.IsNullOrWhiteSpace(schema)) + { + throw new ArgumentException("Schema cannot be null or whitespace.", nameof(schema)); + } + + Schema = schema.Trim(); + Version = new Version(Math.Max(version.Major, 0), Math.Max(version.Minor, 0)); + } + + /// + /// Schema identifier (e.g. zastava.runtime.event). + /// + public string Schema { get; } + + /// + /// Major/minor version recognised by the implementation. + /// + public Version Version { get; } + + /// + /// Canonical string representation (schema@vMajor.Minor). + /// + public override string ToString() + => $"{Schema}@v{Version.ToString(2, CultureInfo.InvariantCulture)}"; + + /// + /// Determines whether a remote contract is compatible with the local definition. + /// + public bool IsCompatibleWith(ContractVersion local) + { + if (!Schema.Equals(local.Schema, StringComparison.Ordinal)) + { + return false; + } + + if (Version.Major != local.Version.Major) + { + return false; + } + + return Version <= local.Version; + } + + /// + /// Attempts to parse a schema string in canonical format. + /// + public static bool TryParse(string? value, out ContractVersion contract) + { + contract = default; + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var trimmed = value.Trim(); + var separator = trimmed.IndexOf('@'); + if (separator < 0) + { + return false; + } + + var schema = trimmed[..separator]; + if (!schema.Contains('.', StringComparison.Ordinal)) + { + return false; + } + + var versionToken = trimmed[(separator + 1)..]; + if (versionToken.Length == 0) + { + return false; + } + + if (versionToken[0] is 'v' or 'V') + { + versionToken = versionToken[1..]; + } + + if (!Version.TryParse(versionToken, out var parsed)) + { + return false; + } + + var canonical = new Version(Math.Max(parsed.Major, 0), Math.Max(parsed.Minor, 0)); + contract = new ContractVersion(schema, canonical); + return true; + } + } +} diff --git a/src/StellaOps.Zastava.Core/GlobalUsings.cs b/src/StellaOps.Zastava.Core/GlobalUsings.cs new file mode 100644 index 00000000..02573c41 --- /dev/null +++ b/src/StellaOps.Zastava.Core/GlobalUsings.cs @@ -0,0 +1,10 @@ +global using System.Collections.Generic; +global using System.Collections.Immutable; +global using System.Diagnostics; +global using System.Diagnostics.Metrics; +global using System.Security.Cryptography; +global using System.Text; +global using System.Text.Json; +global using System.Text.Json.Serialization; +global using System.Text.Json.Serialization.Metadata; +global using System.Globalization; diff --git a/src/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs b/src/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs new file mode 100644 index 00000000..ea3b4c95 --- /dev/null +++ b/src/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs @@ -0,0 +1,59 @@ +using StellaOps.Zastava.Core.Serialization; + +namespace StellaOps.Zastava.Core.Hashing; + +/// +/// Produces deterministic multihashes for runtime and admission payloads. +/// +public static class ZastavaHashing +{ + public const string DefaultAlgorithm = "sha256"; + + /// + /// Serialises the payload using canonical options and computes a multihash string. + /// + public static string ComputeMultihash(T value, string? 
algorithm = null) + { + ArgumentNullException.ThrowIfNull(value); + var bytes = ZastavaCanonicalJsonSerializer.SerializeToUtf8Bytes(value); + return ComputeMultihash(bytes, algorithm); + } + + /// + /// Computes a multihash string from the provided payload. + /// + public static string ComputeMultihash(ReadOnlySpan payload, string? algorithm = null) + { + var normalized = NormalizeAlgorithm(algorithm); + var digest = normalized switch + { + "sha256" => SHA256.HashData(payload), + "sha512" => SHA512.HashData(payload), + _ => throw new NotSupportedException($"Hash algorithm '{normalized}' is not supported.") + }; + + return $"{normalized}-{ToBase64Url(digest)}"; + } + + private static string NormalizeAlgorithm(string? algorithm) + { + if (string.IsNullOrWhiteSpace(algorithm)) + { + return DefaultAlgorithm; + } + + var normalized = algorithm.Trim().ToLowerInvariant(); + return normalized switch + { + "sha-256" or "sha256" => "sha256", + "sha-512" or "sha512" => "sha512", + _ => normalized + }; + } + + private static string ToBase64Url(ReadOnlySpan bytes) + { + var base64 = Convert.ToBase64String(bytes); + return base64.TrimEnd('=').Replace('+', '-').Replace('/', '_'); + } +} diff --git a/src/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs b/src/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs new file mode 100644 index 00000000..613715c9 --- /dev/null +++ b/src/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs @@ -0,0 +1,110 @@ +namespace StellaOps.Zastava.Core.Serialization; + +/// +/// Deterministic serializer used for runtime/admission contracts. +/// +public static class ZastavaCanonicalJsonSerializer +{ + private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false); + private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true); + + private static readonly IReadOnlyDictionary PropertyOrderOverrides = new Dictionary + { + { typeof(RuntimeEventEnvelope), new[] { "schemaVersion", "event" } }, + { typeof(RuntimeEvent), new[] { "eventId", "when", "kind", "tenant", "node", "runtime", "workload", "process", "loadedLibs", "posture", "delta", "evidence", "annotations" } }, + { typeof(RuntimeEngine), new[] { "engine", "version" } }, + { typeof(RuntimeWorkload), new[] { "platform", "namespace", "pod", "container", "containerId", "imageRef", "owner" } }, + { typeof(RuntimeWorkloadOwner), new[] { "kind", "name" } }, + { typeof(RuntimeProcess), new[] { "pid", "entrypoint", "entryTrace" } }, + { typeof(RuntimeEntryTrace), new[] { "file", "line", "op", "target" } }, + { typeof(RuntimeLoadedLibrary), new[] { "path", "inode", "sha256" } }, + { typeof(RuntimePosture), new[] { "imageSigned", "sbomReferrer", "attestation" } }, + { typeof(RuntimeAttestation), new[] { "uuid", "verified" } }, + { typeof(RuntimeDelta), new[] { "baselineImageDigest", "changedFiles", "newBinaries" } }, + { typeof(RuntimeNewBinary), new[] { "path", "sha256" } }, + { typeof(RuntimeEvidence), new[] { "signal", "value" } }, + { typeof(AdmissionDecisionEnvelope), new[] { "schemaVersion", "decision" } }, + { typeof(AdmissionDecision), new[] { "admissionId", "namespace", "podSpecDigest", "images", "decision", "ttlSeconds", "annotations" } }, + { typeof(AdmissionImageVerdict), new[] { "name", "resolved", "signed", "hasSbomReferrers", "policyVerdict", "reasons", "rekor", "metadata" } }, + { typeof(AdmissionRekorEvidence), new[] { "uuid", "verified" } }, + { 
typeof(ZastavaContractVersions.ContractVersion), new[] { "schema", "version" } } + }; + + public static string Serialize(T value) + => JsonSerializer.Serialize(value, CompactOptions); + + public static string SerializeIndented(T value) + => JsonSerializer.Serialize(value, PrettyOptions); + + public static byte[] SerializeToUtf8Bytes(T value) + => JsonSerializer.SerializeToUtf8Bytes(value, CompactOptions); + + public static T Deserialize(string json) + => JsonSerializer.Deserialize(json, CompactOptions)!; + + private static JsonSerializerOptions CreateOptions(bool writeIndented) + { + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DictionaryKeyPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = writeIndented, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping + }; + + var baselineResolver = options.TypeInfoResolver ?? new DefaultJsonTypeInfoResolver(); + options.TypeInfoResolver = new DeterministicTypeInfoResolver(baselineResolver); + options.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase, allowIntegerValues: false)); + return options; + } + + private sealed class DeterministicTypeInfoResolver : IJsonTypeInfoResolver + { + private readonly IJsonTypeInfoResolver inner; + + public DeterministicTypeInfoResolver(IJsonTypeInfoResolver inner) + { + this.inner = inner ?? throw new ArgumentNullException(nameof(inner)); + } + + public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options) + { + var info = inner.GetTypeInfo(type, options); + if (info is null) + { + throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'."); + } + + if (info.Kind is JsonTypeInfoKind.Object && info.Properties is { Count: > 1 }) + { + var ordered = info.Properties + .OrderBy(property => GetPropertyOrder(type, property.Name)) + .ThenBy(property => property.Name, StringComparer.Ordinal) + .ToArray(); + + info.Properties.Clear(); + foreach (var property in ordered) + { + info.Properties.Add(property); + } + } + + return info; + } + + private static int GetPropertyOrder(Type type, string propertyName) + { + if (PropertyOrderOverrides.TryGetValue(type, out var order)) + { + var index = Array.IndexOf(order, propertyName); + if (index >= 0) + { + return index; + } + } + + return int.MaxValue; + } + } +} diff --git a/src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj b/src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj new file mode 100644 index 00000000..ee4d4368 --- /dev/null +++ b/src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj @@ -0,0 +1,20 @@ + + + net10.0 + preview + enable + enable + true + + + + + + + + + + + + + diff --git a/src/StellaOps.Zastava.Core/TASKS.md b/src/StellaOps.Zastava.Core/TASKS.md new file mode 100644 index 00000000..ee153082 --- /dev/null +++ b/src/StellaOps.Zastava.Core/TASKS.md @@ -0,0 +1,10 @@ +# Zastava Core Task Board + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ZASTAVA-CORE-12-201 | DOING (2025-10-19) | Zastava Core Guild | — | Define runtime event/admission DTOs, hashing helpers, and versioning strategy. | DTOs cover runtime events and admission verdict envelopes with canonical JSON schema; hashing helpers accept payloads and yield deterministic multihash outputs; version negotiation rules documented and exercised by serialization tests. 
| +| ZASTAVA-CORE-12-202 | DOING (2025-10-19) | Zastava Core Guild | — | Provide configuration/logging/metrics utilities shared by Observer/Webhook. | Shared options bind from configuration with validation; logging scopes/metrics exporters registered via reusable DI extension; integration test host demonstrates Observer/Webhook consumption with deterministic instrumentation. | +| ZASTAVA-CORE-12-203 | DOING (2025-10-19) | Zastava Core Guild | — | Authority client helpers, OpTok caching, and security guardrails for runtime services. | Typed Authority client surfaces OpTok retrieval + renewal with configurable cache; guardrails enforce DPoP/mTLS expectations and emit structured audit logs; negative-path tests cover expired/invalid tokens and configuration toggles. | +| ZASTAVA-OPS-12-204 | DOING (2025-10-19) | Zastava Core Guild | — | Operational runbooks, alert rules, and dashboard exports for runtime plane. | Runbooks capture install/upgrade/rollback + incident handling; alert rules and dashboard JSON exported for Prometheus/Grafana bundle; docs reference Offline Kit packaging and verification checklist. | + +> Remark (2025-10-19): Prerequisites reviewed—none outstanding. ZASTAVA-CORE-12-201, ZASTAVA-CORE-12-202, ZASTAVA-CORE-12-203, and ZASTAVA-OPS-12-204 moved to DOING for Wave 0 kickoff. diff --git a/src/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs b/src/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs new file mode 100644 index 00000000..7142aa51 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs @@ -0,0 +1,80 @@ +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Zastava.Webhook.Certificates; +using StellaOps.Zastava.Webhook.Configuration; +using Xunit; + +namespace StellaOps.Zastava.Webhook.Tests.Certificates; + +public sealed class SecretFileCertificateSourceTests +{ + [Fact] + public void LoadCertificate_FromPemPair_Succeeds() + { + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=zastava-webhook", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + using var certificateWithKey = certificate.CopyWithPrivateKey(rsa); + + var certificatePath = Path.GetTempFileName(); + var privateKeyPath = Path.GetTempFileName(); + + try + { + File.WriteAllText(certificatePath, certificateWithKey.ExportCertificatePem()); + using var exportRsa = certificateWithKey.GetRSAPrivateKey() ?? 
throw new InvalidOperationException("Missing RSA private key"); + var privateKeyPem = PemEncoding.Write("PRIVATE KEY", exportRsa.ExportPkcs8PrivateKey()); + File.WriteAllText(privateKeyPath, privateKeyPem); + + var source = new SecretFileCertificateSource(NullLogger.Instance); + var options = new ZastavaWebhookTlsOptions + { + Mode = ZastavaWebhookTlsMode.Secret, + CertificatePath = certificatePath, + PrivateKeyPath = privateKeyPath + }; + + using var loaded = source.LoadCertificate(options); + + Assert.Equal(certificateWithKey.Thumbprint, loaded.Thumbprint); + Assert.NotNull(loaded.GetRSAPrivateKey()); + } + finally + { + File.Delete(certificatePath); + File.Delete(privateKeyPath); + } + } + + [Fact] + public void LoadCertificate_FromPfx_Succeeds() + { + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=zastava-webhook", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + using var certificateWithKey = certificate.CopyWithPrivateKey(rsa); + + var pfxPath = Path.GetTempFileName(); + try + { + var pfxBytes = certificateWithKey.Export(X509ContentType.Pfx, "test"); + File.WriteAllBytes(pfxPath, pfxBytes); + + var source = new SecretFileCertificateSource(NullLogger.Instance); + var options = new ZastavaWebhookTlsOptions + { + Mode = ZastavaWebhookTlsMode.Secret, + PfxPath = pfxPath, + PfxPassword = "test" + }; + + using var loaded = source.LoadCertificate(options); + Assert.Equal(certificateWithKey.Thumbprint, loaded.Thumbprint); + } + finally + { + File.Delete(pfxPath); + } + } +} diff --git a/src/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs b/src/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs new file mode 100644 index 00000000..38d0ab7e --- /dev/null +++ b/src/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs @@ -0,0 +1,43 @@ +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Webhook.Certificates; +using StellaOps.Zastava.Webhook.Configuration; +using Xunit; + +namespace StellaOps.Zastava.Webhook.Tests.Certificates; + +public sealed class WebhookCertificateProviderTests +{ + [Fact] + public void Provider_UsesMatchingSource() + { + var options = Options.Create(new ZastavaWebhookOptions + { + Tls = new ZastavaWebhookTlsOptions + { + Mode = ZastavaWebhookTlsMode.Secret, + CertificatePath = "/tmp/cert.pem", + PrivateKeyPath = "/tmp/key.pem" + } + }); + + var source = new ThrowingCertificateSource(); + var provider = new WebhookCertificateProvider(options, new[] { source }, NullLogger.Instance); + + Assert.Throws(() => provider.GetCertificate()); + Assert.True(source.Requested); + } + + private sealed class ThrowingCertificateSource : IWebhookCertificateSource + { + public bool Requested { get; private set; } + + public bool CanHandle(ZastavaWebhookTlsMode mode) => true; + + public System.Security.Cryptography.X509Certificates.X509Certificate2 LoadCertificate(ZastavaWebhookTlsOptions options) + { + Requested = true; + throw new InvalidOperationException("test"); + } + } +} diff --git a/src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj b/src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj new file mode 100644 index 00000000..cbd64140 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj @@ -0,0 
+1,19 @@ + + + net10.0 + preview + enable + enable + false + true + false + + + + + + + + + + diff --git a/src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs b/src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs new file mode 100644 index 00000000..5838480a --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs @@ -0,0 +1,93 @@ +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Authority; + +public interface IAuthorityTokenProvider +{ + ValueTask GetTokenAsync(CancellationToken cancellationToken = default); +} + +public sealed record AuthorityToken(string Value, DateTimeOffset? ExpiresAtUtc); + +public sealed class StaticAuthorityTokenProvider : IAuthorityTokenProvider +{ + private readonly ZastavaWebhookAuthorityOptions _options; + private readonly ILogger _logger; + private AuthorityToken? _cachedToken; + + public StaticAuthorityTokenProvider( + IOptionsMonitor options, + ILogger logger) + { + _options = options.CurrentValue.Authority; + _logger = logger; + } + + public ValueTask GetTokenAsync(CancellationToken cancellationToken = default) + { + if (_cachedToken is { } token) + { + return ValueTask.FromResult(token); + } + + var value = !string.IsNullOrWhiteSpace(_options.StaticTokenValue) + ? _options.StaticTokenValue + : LoadTokenFromFile(_options.StaticTokenPath); + + if (string.IsNullOrWhiteSpace(value)) + { + throw new InvalidOperationException("No Authority token configured. Provide either 'StaticTokenValue' or 'StaticTokenPath'."); + } + + token = new AuthorityToken(value.Trim(), ExpiresAtUtc: null); + _cachedToken = token; + _logger.LogInformation("Loaded static Authority token (length {Length}).", token.Value.Length); + return ValueTask.FromResult(token); + } + + private string LoadTokenFromFile(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + throw new InvalidOperationException("Authority static token path not set."); + } + + if (!File.Exists(path)) + { + throw new FileNotFoundException("Authority static token file not found.", path); + } + + return File.ReadAllText(path); + } +} + +public sealed class AuthorityTokenHealthCheck : IHealthCheck +{ + private readonly IAuthorityTokenProvider _tokenProvider; + private readonly ILogger _logger; + + public AuthorityTokenHealthCheck(IAuthorityTokenProvider tokenProvider, ILogger logger) + { + _tokenProvider = tokenProvider; + _logger = logger; + } + + public async Task CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default) + { + try + { + var token = await _tokenProvider.GetTokenAsync(cancellationToken); + return HealthCheckResult.Healthy("Authority token acquired.", data: new Dictionary + { + ["expiresAtUtc"] = token.ExpiresAtUtc?.ToString("O") ?? 
"static" + }); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to obtain Authority token."); + return HealthCheckResult.Unhealthy("Failed to obtain Authority token.", ex); + } + } +} diff --git a/src/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs b/src/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs new file mode 100644 index 00000000..a18c9339 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs @@ -0,0 +1,25 @@ +using System.Security.Cryptography.X509Certificates; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Certificates; + +/// +/// Placeholder implementation for CSR-based certificate provisioning. +/// +public sealed class CsrCertificateSource : IWebhookCertificateSource +{ + private readonly ILogger _logger; + + public CsrCertificateSource(ILogger logger) + { + _logger = logger; + } + + public bool CanHandle(ZastavaWebhookTlsMode mode) => mode == ZastavaWebhookTlsMode.CertificateSigningRequest; + + public X509Certificate2 LoadCertificate(ZastavaWebhookTlsOptions options) + { + _logger.LogError("CSR certificate mode is not implemented yet. Configuration requested CSR mode."); + throw new NotSupportedException("CSR certificate provisioning is not implemented (tracked by ZASTAVA-WEBHOOK-12-101)."); + } +} diff --git a/src/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs b/src/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs new file mode 100644 index 00000000..d1d24210 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs @@ -0,0 +1,49 @@ +using System.Security.Cryptography.X509Certificates; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Certificates; + +public interface IWebhookCertificateProvider +{ + X509Certificate2 GetCertificate(); +} + +public sealed class WebhookCertificateProvider : IWebhookCertificateProvider +{ + private readonly ILogger _logger; + private readonly ZastavaWebhookTlsOptions _options; + private readonly Lazy _certificate; + private readonly IWebhookCertificateSource _certificateSource; + + public WebhookCertificateProvider( + IOptions options, + IEnumerable certificateSources, + ILogger logger) + { + _logger = logger; + _options = options.Value.Tls; + _certificateSource = certificateSources.FirstOrDefault(source => source.CanHandle(_options.Mode)) + ?? 
throw new InvalidOperationException($"No certificate source registered for mode {_options.Mode}."); + + _certificate = new Lazy(LoadCertificate, LazyThreadSafetyMode.ExecutionAndPublication); + } + + public X509Certificate2 GetCertificate() => _certificate.Value; + + private X509Certificate2 LoadCertificate() + { + _logger.LogInformation("Loading webhook TLS certificate using {Mode} mode.", _options.Mode); + var certificate = _certificateSource.LoadCertificate(_options); + _logger.LogInformation("Loaded webhook TLS certificate with subject {Subject} and thumbprint {Thumbprint}.", + certificate.Subject, certificate.Thumbprint); + return certificate; + } +} + +public interface IWebhookCertificateSource +{ + bool CanHandle(ZastavaWebhookTlsMode mode); + + X509Certificate2 LoadCertificate(ZastavaWebhookTlsOptions options); +} diff --git a/src/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs b/src/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs new file mode 100644 index 00000000..2873b284 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs @@ -0,0 +1,98 @@ +using System.Security.Cryptography.X509Certificates; +using Microsoft.Extensions.Logging; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Certificates; + +public sealed class SecretFileCertificateSource : IWebhookCertificateSource +{ + private readonly ILogger _logger; + + public SecretFileCertificateSource(ILogger logger) + { + _logger = logger; + } + + public bool CanHandle(ZastavaWebhookTlsMode mode) => mode == ZastavaWebhookTlsMode.Secret; + + public X509Certificate2 LoadCertificate(ZastavaWebhookTlsOptions options) + { + if (options is null) + { + throw new ArgumentNullException(nameof(options)); + } + + if (!string.IsNullOrWhiteSpace(options.PfxPath)) + { + return LoadFromPfx(options.PfxPath, options.PfxPassword); + } + + if (string.IsNullOrWhiteSpace(options.CertificatePath) || string.IsNullOrWhiteSpace(options.PrivateKeyPath)) + { + throw new InvalidOperationException("TLS mode 'Secret' requires either a PFX bundle or both PEM certificate and private key paths."); + } + + if (!File.Exists(options.CertificatePath)) + { + throw new FileNotFoundException("Webhook certificate file not found.", options.CertificatePath); + } + + if (!File.Exists(options.PrivateKeyPath)) + { + throw new FileNotFoundException("Webhook certificate private key file not found.", options.PrivateKeyPath); + } + + try + { + var certificate = X509Certificate2.CreateFromPemFile(options.CertificatePath, options.PrivateKeyPath) + .WithExportablePrivateKey(); + + _logger.LogDebug("Loaded certificate {Subject} from PEM secret files.", certificate.Subject); + return certificate; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to load webhook certificate from PEM files {CertPath} / {KeyPath}.", + options.CertificatePath, options.PrivateKeyPath); + throw; + } + } + + private X509Certificate2 LoadFromPfx(string pfxPath, string? 
password) + { + if (!File.Exists(pfxPath)) + { + throw new FileNotFoundException("Webhook certificate PFX bundle not found.", pfxPath); + } + + try + { + var storageFlags = X509KeyStorageFlags.MachineKeySet | X509KeyStorageFlags.EphemeralKeySet; + var certificate = X509CertificateLoader.LoadPkcs12FromFile(pfxPath, password, storageFlags); + _logger.LogDebug("Loaded certificate {Subject} from PFX bundle.", certificate.Subject); + return certificate; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to load webhook certificate from PFX bundle {PfxPath}.", pfxPath); + throw; + } + } +} + +internal static class X509Certificate2Extensions +{ + public static X509Certificate2 WithExportablePrivateKey(this X509Certificate2 certificate) + { + // Ensure the private key is exportable for Kestrel; CreateFromPemFile otherwise returns ephemeral key material. + using var rsa = certificate.GetRSAPrivateKey(); + if (rsa is null) + { + return certificate; + } + + var certificateWithKey = certificate.CopyWithPrivateKey(rsa); + certificate.Dispose(); + return certificateWithKey; + } +} diff --git a/src/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs b/src/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs new file mode 100644 index 00000000..74dc92f8 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs @@ -0,0 +1,56 @@ +using Microsoft.Extensions.Diagnostics.HealthChecks; + +namespace StellaOps.Zastava.Webhook.Certificates; + +public sealed class WebhookCertificateHealthCheck : IHealthCheck +{ + private readonly IWebhookCertificateProvider _certificateProvider; + private readonly ILogger<WebhookCertificateHealthCheck> _logger; + private readonly TimeSpan _expiryThreshold = TimeSpan.FromDays(7); + + public WebhookCertificateHealthCheck( + IWebhookCertificateProvider certificateProvider, + ILogger<WebhookCertificateHealthCheck> logger) + { + _certificateProvider = certificateProvider; + _logger = logger; + } + + public Task<HealthCheckResult> CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default) + { + try + { + var certificate = _certificateProvider.GetCertificate(); + var expires = certificate.NotAfter.ToUniversalTime(); + var remaining = expires - DateTimeOffset.UtcNow; + + if (remaining <= TimeSpan.Zero) + { + return Task.FromResult(HealthCheckResult.Unhealthy("Webhook certificate expired.", data: new Dictionary<string, object> + { + ["expiresAtUtc"] = expires.ToString("O") + })); + } + + if (remaining <= _expiryThreshold) + { + return Task.FromResult(HealthCheckResult.Degraded("Webhook certificate nearing expiry.", data: new Dictionary<string, object> + { + ["expiresAtUtc"] = expires.ToString("O"), + ["daysRemaining"] = remaining.TotalDays + })); + } + + return Task.FromResult(HealthCheckResult.Healthy("Webhook certificate valid.", data: new Dictionary<string, object> + { + ["expiresAtUtc"] = expires.ToString("O"), + ["daysRemaining"] = remaining.TotalDays + })); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to load webhook certificate."); + return Task.FromResult(HealthCheckResult.Unhealthy("Failed to load webhook certificate.", ex)); + } + } +} diff --git a/src/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs b/src/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs new file mode 100644 index 00000000..15b8dd5b --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs @@ -0,0 +1,146 @@ +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Zastava.Webhook.Configuration; + +public sealed class
ZastavaWebhookOptions +{ + public const string SectionName = "zastava:webhook"; + + [Required] + public ZastavaWebhookTlsOptions Tls { get; init; } = new(); + + [Required] + public ZastavaWebhookAuthorityOptions Authority { get; init; } = new(); + + [Required] + public ZastavaWebhookAdmissionOptions Admission { get; init; } = new(); +} + +public sealed class ZastavaWebhookAdmissionOptions +{ + /// <summary> + /// Namespaces that default to fail-open when backend calls fail. + /// </summary> + public HashSet<string> FailOpenNamespaces { get; init; } = new(StringComparer.Ordinal); + + /// <summary> + /// Namespaces that must fail-closed even if the global default is fail-open. + /// </summary> + public HashSet<string> FailClosedNamespaces { get; init; } = new(StringComparer.Ordinal); + + /// <summary> + /// Global fail-open toggle. When true, namespaces not in <see cref="FailClosedNamespaces"/> will allow requests on backend failures. + /// </summary> + public bool FailOpenByDefault { get; init; } + + /// <summary> + /// Enables tag resolution to immutable digests when set. + /// </summary> + public bool ResolveTags { get; init; } = true; + + /// <summary> + /// Optional cache seed path for pre-computed runtime verdicts. + /// </summary> + public string? CacheSeedPath { get; init; } +} + +public enum ZastavaWebhookTlsMode +{ + Secret = 0, + CertificateSigningRequest = 1 +} + +public sealed class ZastavaWebhookTlsOptions +{ + [Required] + public ZastavaWebhookTlsMode Mode { get; init; } = ZastavaWebhookTlsMode.Secret; + + /// <summary> + /// PEM certificate path when using <see cref="ZastavaWebhookTlsMode.Secret"/>. + /// </summary> + public string? CertificatePath { get; init; } + + /// <summary> + /// PEM private key path when using <see cref="ZastavaWebhookTlsMode.Secret"/>. + /// </summary> + public string? PrivateKeyPath { get; init; } + + /// <summary> + /// Optional PFX bundle path; takes precedence over PEM values when provided. + /// </summary> + public string? PfxPath { get; init; } + + /// <summary> + /// Optional password for the PFX bundle. + /// </summary> + public string? PfxPassword { get; init; } + + /// <summary> + /// Optional CA bundle path to present to Kubernetes when configuring webhook registration. + /// </summary> + public string? CaBundlePath { get; init; } + + /// <summary> + /// CSR related settings when <see cref="Mode"/> equals <see cref="ZastavaWebhookTlsMode.CertificateSigningRequest"/>. + /// </summary> + public ZastavaWebhookTlsCsrOptions Csr { get; init; } = new(); +} + +public sealed class ZastavaWebhookTlsCsrOptions +{ + /// <summary> + /// Kubernetes namespace that owns the CertificateSigningRequest object. + /// </summary> + [Required(AllowEmptyStrings = false)] + public string Namespace { get; init; } = "stellaops"; + + /// <summary> + /// CSR object name; defaults to zastava-webhook. + /// </summary> + [Required(AllowEmptyStrings = false)] + [MaxLength(253)] + public string Name { get; init; } = "zastava-webhook"; + + /// <summary> + /// DNS names placed in the CSR subjectAltName. + /// </summary> + [MinLength(1)] + public string[] DnsNames { get; init; } = Array.Empty<string>(); + + /// <summary> + /// Where the signed certificate is persisted after approval (mounted emptyDir). + /// </summary> + [Required(AllowEmptyStrings = false)] + public string PersistPath { get; init; } = "/var/run/zastava-webhook/certs"; +} + +public sealed class ZastavaWebhookAuthorityOptions +{ + /// <summary> + /// Authority issuer URL for token acquisition. + /// </summary> + [Required(AllowEmptyStrings = false)] + public Uri Issuer { get; init; } = new("https://authority.internal"); + + /// <summary> + /// Audience that tokens must target. + /// </summary> + [MinLength(1)] + public string[] Audience { get; init; } = new[] { "scanner", "zastava" }; + + /// <summary> + /// Optional path to static OpTok for bootstrap environments. + /// </summary> + public string? StaticTokenPath { get; init; } + + /// <summary> + /// Optional literal token value (test only). Takes precedence over <see cref="StaticTokenPath"/>. + /// </summary> + public string?
StaticTokenValue { get; init; } + + /// <summary> + /// Interval for refreshing cached tokens before expiry. + /// </summary> + [Range(typeof(double), "1", "3600")] + public double RefreshSkewSeconds { get; init; } = TimeSpan.FromMinutes(5).TotalSeconds; +} diff --git a/src/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs b/src/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..cad030a5 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs @@ -0,0 +1,33 @@ +using Microsoft.Extensions.DependencyInjection.Extensions; +using StellaOps.Zastava.Webhook.Authority; +using StellaOps.Zastava.Webhook.Certificates; +using StellaOps.Zastava.Webhook.Configuration; +using StellaOps.Zastava.Webhook.Hosting; + +namespace Microsoft.Extensions.DependencyInjection; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddZastavaWebhook(this IServiceCollection services, IConfiguration configuration) + { + services.AddOptions<ZastavaWebhookOptions>() + .Bind(configuration.GetSection(ZastavaWebhookOptions.SectionName)) + .ValidateDataAnnotations() + .ValidateOnStart(); + + services.TryAddEnumerable(ServiceDescriptor.Singleton<IWebhookCertificateSource, SecretFileCertificateSource>()); + services.TryAddEnumerable(ServiceDescriptor.Singleton<IWebhookCertificateSource, CsrCertificateSource>()); + services.TryAddSingleton(); + services.TryAddSingleton(); + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.AddHostedService<StartupValidationHostedService>(); + + services.AddHealthChecks() + .AddCheck<WebhookCertificateHealthCheck>("webhook_tls") + .AddCheck("authority_token"); + + return services; + } +} diff --git a/src/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs b/src/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs new file mode 100644 index 00000000..a6e3ee47 --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs @@ -0,0 +1,31 @@ +using StellaOps.Zastava.Webhook.Authority; +using StellaOps.Zastava.Webhook.Certificates; + +namespace StellaOps.Zastava.Webhook.Hosting; + +public sealed class StartupValidationHostedService : IHostedService +{ + private readonly IWebhookCertificateProvider _certificateProvider; + private readonly IAuthorityTokenProvider _authorityTokenProvider; + private readonly ILogger<StartupValidationHostedService> _logger; + + public StartupValidationHostedService( + IWebhookCertificateProvider certificateProvider, + IAuthorityTokenProvider authorityTokenProvider, + ILogger<StartupValidationHostedService> logger) + { + _certificateProvider = certificateProvider; + _authorityTokenProvider = authorityTokenProvider; + _logger = logger; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + _logger.LogInformation("Running webhook startup validation."); + _certificateProvider.GetCertificate(); + await _authorityTokenProvider.GetTokenAsync(cancellationToken); + _logger.LogInformation("Webhook startup validation complete."); + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; +} diff --git a/src/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md b/src/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md new file mode 100644 index 00000000..cef2460e --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md @@ -0,0 +1,105 @@ +# Zastava Webhook · Wave 0 Implementation Notes + +> Authored 2025-10-19 by Zastava Webhook Guild.
+ +## ZASTAVA-WEBHOOK-12-101 — Admission Controller Host (TLS bootstrap + Authority auth) + +**Objectives** +- Provide a deterministic, restart-safe .NET 10 host that exposes a Kubernetes ValidatingAdmissionWebhook endpoint. +- Load serving certificates at start-up only (per restart-time plug-in rule) and surface reload guidance via documentation rather than hot-reload. +- Authenticate outbound calls to Authority/Scanner using OpTok + DPoP as defined in `docs/ARCHITECTURE_ZASTAVA.md`. + +**Plan** +1. **Project scaffolding** + - Create `StellaOps.Zastava.Webhook` project with minimal API pipeline (`Program.cs`, `Startup` equivalent via extension methods). + - Reference shared helpers once `ZASTAVA-CORE-12-201/202` land; temporarily stub interfaces behind `IZastavaAdmissionRequest`/`IZastavaAdmissionResult`. +2. **TLS bootstrap** + - Support two certificate sources: + 1. Mounted secret path (`/var/run/secrets/zastava-webhook/tls.{crt,key}`) with optional CA bundle. + 2. CSR workflow: generate CSR + private key, submit to Kubernetes Certificates API when `admission.tls.autoApprove` is enabled; persist signed cert/key to mounted emptyDir for reuse across replicas. + - Validate cert/key pair on boot; abort start-up if invalid to preserve deterministic behaviour. + - Configure Kestrel with mutual TLS disabled (the API server already provides client auth) but enforce TLS 1.3 as the minimum, a strong cipher suite list, and HTTP/2 disabled (K8s admission calls use HTTP/1.1). +3. **Authority auth** + - Bootstrap Authority client via shared DI extension (`AuthorityClientBuilder` once exposed); until then, a placeholder `IAuthorityTokenSource` reads a static OpTok from a secret for smoke testing. + - Implement DPoP proof generator bound to webhook host keypair (prefer Ed25519) with configurable rotation period (default 24h, triggered at restart). + - Add background health check verifying token freshness and surfacing metrics (`zastava.authority_token_renew_failures_total`). +4. **Hosting concerns** + - Configure structured logging with correlation id from AdmissionReview UID. + - Expose `/healthz` (reads cert expiry, Authority token status) and `/metrics` (Prometheus). + - Add readiness gate that requires initial TLS and Authority bootstrap to succeed. + +**Deliverables** +- Compilable host project with integration tests covering TLS load (mounted files + CSR mock) and Authority token acquisition. +- Documentation snippet for deploy charts describing secret/CSR wiring. + +**Open Questions** +- Need confirmation from Core guild on DTO naming (`AdmissionReviewEnvelope`, `AdmissionDecision`) to avoid rework. +- Determine whether CSR auto-approval is acceptable for air-gapped clusters without Kubernetes cert-manager; may require a fallback manual cert import path. + +## ZASTAVA-WEBHOOK-12-102 — Backend policy query & digest resolution + +**Objectives** +- Resolve all images within AdmissionReview to immutable digests before policy evaluation. + +- Call Scanner WebService `/api/v1/scanner/policy/runtime` with namespace/labels/images payload, enforce verdicts with deterministic error messaging. + +**Plan** +1. **Image resolution** + - Implement resolver service with pluggable strategies: + - Use existing digest if present. + - Resolve tags via registry HEAD (respecting `admission.resolveTags` flag); fall back to the Observer-provided digest once core DTOs are available. + - Cache per-registry auth to minimise latency; adhere to allow/deny lists from configuration. +2.
**Scanner client** + - Define typed request/response models mirroring `docs/ARCHITECTURE_ZASTAVA.md` structure (`ttlSeconds`, `results[digest] -> { signed, hasSbom, policyVerdict, reasons, rekor }`). + - Implement retry policy (3 attempts, exponential backoff) and map HTTP errors to webhook fail-open/closed depending on namespace configuration. + - Instrument latency (`zastava.backend_latency_seconds`) and failure counts. +3. **Verdict enforcement** + - Evaluate per-image results: deny with aggregated reasons if any `policyVerdict` is not `pass` (treating `warn` as acceptable when `enforceWarnings=false`). + - Attach `ttlSeconds` to admission response annotations for auditing. + - Record structured logs with namespace, pod, image digest, decision, reasons, backend latency. +4. **Contract coordination** + - Schedule joint review with Scanner WebService guild once SCANNER-RUNTIME-12-302 schema stabilises; track in TASKS sub-items. + - Provide sample payload fixtures for CLI team (`CLI-RUNTIME-13-005`) to validate table output; ensure field names stay aligned. + +**Deliverables** +- Registry resolver unit tests (tag->digest) with deterministic fixtures. +- HTTP client integration tests using Scanner stub returning varied verdict combinations. +- Documentation update summarising contract and failure handling. + +**Open Questions** +- Confirm expected policy verdict enumeration (`pass|warn|fail|error`?) and textual reason codes. +- Clarify TTL behaviour: should the webhook clamp the TTL when the backend returns more than the configured max? + +## ZASTAVA-WEBHOOK-12-103 — Caching, fail-open/closed toggles, metrics/logging + +**Objectives** +- Provide deterministic caching layer respecting backend TTL while ensuring eviction on policy mutation. +- Allow namespace-scoped fail-open behaviour with explicit metrics and alerts. +- Surface actionable metrics/logging aligned with Architecture doc. + +**Plan** +1. **Cache design** + - In-memory LRU keyed by image digest; value carries verdict payload + expiry timestamp. + - Support optional persistent seed (read-only) to prime hot digests for offline clusters (config: `admission.cache.seedPath`). + - On startup, load seed file and emit metric `zastava.cache_seed_entries_total`. + - Evict entries on TTL or when `policyRevision` annotation in AdmissionReview changes (requires hook from Core DTO). +2. **Fail-open/closed toggles** + - Configuration: global default + namespace overrides through `admission.failOpenNamespaces`, `admission.failClosedNamespaces`. + - Decision matrix: + - Backend success + verdict PASS → allow. + - Backend success + non-pass → deny unless namespace override says warn allowed. + - Backend failure → allow if namespace fail-open, deny otherwise; annotate response with `zastava.ops/fail-open=true`. + - Implement policy change event hook (future) to clear cache if observer signals revocation. +3. **Metrics & logging** + - Counters: `zastava.admission_requests_total{decision}`, `zastava.cache_hits_total{result=hit|miss}`, `zastava.fail_open_total`, `zastava.backend_failures_total{stage}`. + - Histograms: `zastava.admission_latency_seconds` (overall), `zastava.resolve_latency_seconds`. + - Logs: structured JSON with `decision`, `namespace`, `pod`, `imageDigest`, `reasons`, `cacheStatus`, `failMode`. + - Optionally emit OpenTelemetry span for admission path with attributes capturing backend latency + cache path. +4. **Testing & ops hooks** + - Unit tests for cache TTL, namespace override logic, fail-open metric increments.
+ - Integration test simulating backend outage, ensuring fail-open/closed behaviour matches config. + - Document runbook snippet describing how to interpret metrics and toggle namespaces. + +**Open Questions** +- Confirm whether cache entries should include `policyRevision` to detect backend policy updates; requires coordination with Policy guild. +- Need guidance on maximum cache size (default suggestion: 5k entries per replica?) to avoid memory blow-up. + diff --git a/src/StellaOps.Zastava.Webhook/Program.cs b/src/StellaOps.Zastava.Webhook/Program.cs new file mode 100644 index 00000000..04342f3d --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/Program.cs @@ -0,0 +1,68 @@ +using System.Security.Authentication; +using Microsoft.AspNetCore.Diagnostics.HealthChecks; +using Serilog; +using Serilog.Events; +using StellaOps.Zastava.Webhook.Authority; +using StellaOps.Zastava.Webhook.Certificates; +using StellaOps.Zastava.Webhook.Configuration; + +var builder = WebApplication.CreateBuilder(args); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + loggerConfiguration + .MinimumLevel.Information() + .MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information) + .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +builder.Services.AddRouting(); +builder.Services.AddProblemDetails(); +builder.Services.AddEndpointsApiExplorer(); +builder.Services.AddHttpClient(); +builder.Services.AddZastavaWebhook(builder.Configuration); + +builder.WebHost.ConfigureKestrel((context, options) => +{ + options.AddServerHeader = false; + options.Limits.MinRequestBodyDataRate = null; // Admission payloads are small; relax defaults for determinism. + + options.ConfigureHttpsDefaults(httpsOptions => + { + var certificateProvider = options.ApplicationServices?.GetRequiredService<IWebhookCertificateProvider>() + ?? throw new InvalidOperationException("Webhook certificate provider unavailable."); + + httpsOptions.SslProtocols = SslProtocols.Tls13; + httpsOptions.ClientCertificateMode = Microsoft.AspNetCore.Server.Kestrel.Https.ClientCertificateMode.NoCertificate; + httpsOptions.CheckCertificateRevocation = false; // Kubernetes API server terminates client auth; revocation handled upstream. + httpsOptions.ServerCertificate = certificateProvider.GetCertificate(); + }); +}); + +var app = builder.Build(); + +app.UseSerilogRequestLogging(); +app.UseRouting(); + +app.UseStatusCodePages(); + +// Health endpoints. +app.MapHealthChecks("/healthz/ready", new HealthCheckOptions +{ + AllowCachingResponses = false +}); +app.MapHealthChecks("/healthz/live", new HealthCheckOptions +{ + AllowCachingResponses = false, + Predicate = _ => false +}); + +// Placeholder admission endpoint; will be replaced as tasks 12-102/12-103 land.
+app.MapPost("/admission", () => Results.StatusCode(StatusCodes.Status501NotImplemented)) + .WithName("AdmissionReview"); + +app.MapGet("/", () => Results.Ok(new { status = "ok", service = "zastava-webhook" })); + +app.Run(); diff --git a/src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj b/src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj new file mode 100644 index 00000000..bf6e1e9b --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj @@ -0,0 +1,16 @@ + + + net10.0 + preview + enable + enable + true + StellaOps.Zastava.Webhook + $(NoWarn);CA2254 + + + + + + + diff --git a/src/StellaOps.Zastava.Webhook/TASKS.md b/src/StellaOps.Zastava.Webhook/TASKS.md new file mode 100644 index 00000000..1a40e32d --- /dev/null +++ b/src/StellaOps.Zastava.Webhook/TASKS.md @@ -0,0 +1,9 @@ +# Zastava Webhook Task Board + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ZASTAVA-WEBHOOK-12-101 | DOING | Zastava Webhook Guild | — | Admission controller host with TLS bootstrap and Authority auth. | Webhook host boots with deterministic TLS bootstrap, enforces Authority-issued credentials, e2e smoke proves admission callback lifecycle, structured logs + metrics emit on each decision. | +| ZASTAVA-WEBHOOK-12-102 | DOING | Zastava Webhook Guild | — | Query Scanner `/policy/runtime`, resolve digests, enforce verdicts. | Scanner client resolves image digests + policy verdicts, unit tests cover allow/deny, integration harness rejects/admits workloads per policy with deterministic payloads. | +| ZASTAVA-WEBHOOK-12-103 | DOING | Zastava Webhook Guild | — | Caching, fail-open/closed toggles, metrics/logging for admission decisions. | Configurable cache TTL + seeds survive restart, fail-open/closed toggles verified via tests, metrics/logging exported per decision path, docs note operational knobs. | + +> Status update · 2025-10-19: Confirmed no prerequisites for ZASTAVA-WEBHOOK-12-101/102/103; tasks moved to DOING for kickoff. Implementation plan covering TLS bootstrap, backend contract, caching/metrics recorded in `IMPLEMENTATION_PLAN.md`. 
diff --git a/src/StellaOps.sln b/src/StellaOps.sln index 007759e7..dea89c57 100644 --- a/src/StellaOps.sln +++ b/src/StellaOps.sln @@ -259,6 +259,70 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Tests", "StellaOps.Scanner.Worker.Tests\StellaOps.Scanner.Worker.Tests.csproj", "{8342286A-BE36-4ACA-87FF-EBEB4E268498}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace", "StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj", "{05D844B6-51C1-4926-919C-D99E24FB3BC9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace.Tests", "StellaOps.Scanner.EntryTrace.Tests\StellaOps.Scanner.EntryTrace.Tests.csproj", "{03E15545-D6A0-4287-A88C-6EDE77C0DCBE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang", "StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj", "{A072C46F-BA45-419E-B1B6-416919F78440}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Tests", "StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj", "{6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff", "StellaOps.Scanner.Diff\StellaOps.Scanner.Diff.csproj", "{10088067-7B8F-4D2E-A8E1-ED546DC17369}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff.Tests", "StellaOps.Scanner.Diff.Tests\StellaOps.Scanner.Diff.Tests.csproj", "{E014565C-2456-4BD0-9481-557F939C1E36}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{44825FDA-68D2-4675-8B1D-6D5303DC38CF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit.Tests", "StellaOps.Scanner.Emit.Tests\StellaOps.Scanner.Emit.Tests.csproj", "{6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache", "StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj", "{5E5EB0A7-7A19-4144-81FE-13C31DB678B2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache.Tests", "StellaOps.Scanner.Cache.Tests\StellaOps.Scanner.Cache.Tests.csproj", "{7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java", "StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj", "{B86C287A-734E-4527-A03E-6B970F22E27E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS", "StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj", "{E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Apk", "StellaOps.Scanner.Analyzers.OS.Apk\StellaOps.Scanner.Analyzers.OS.Apk.csproj", "{50D014B5-99A6-46FC-B745-26687595B293}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Dpkg", "StellaOps.Scanner.Analyzers.OS.Dpkg\StellaOps.Scanner.Analyzers.OS.Dpkg.csproj", "{D99C1F78-67EA-40E7-BD4C-985592F5265A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Rpm", 
"StellaOps.Scanner.Analyzers.OS.Rpm\StellaOps.Scanner.Analyzers.OS.Rpm.csproj", "{1CBC0B9C-A96B-4143-B70F-37C69229FFF2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Tests", "StellaOps.Scanner.Analyzers.OS.Tests\StellaOps.Scanner.Analyzers.OS.Tests.csproj", "{760E2855-31B3-4CCB-BACB-34B7196A59B8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node", "StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj", "{3F688F21-7E31-4781-8995-9DD34276773F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python", "StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj", "{80AD7C4D-E4C6-4700-87AD-77B5698B338F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go", "StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj", "{60ABAB54-2EE9-4A16-A109-67F7B6F29184}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.DotNet", "StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj", "{D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Rust", "StellaOps.Scanner.Analyzers.Lang.Rust\StellaOps.Scanner.Analyzers.Lang.Rust.csproj", "{5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{05475C0A-C225-4F07-A3C7-9E17E660042E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Attestor", "StellaOps.Attestor", "{78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Core", "StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj", "{BA47D456-4657-4C86-A665-21293E3AC47F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastructure", "StellaOps.Attestor\StellaOps.Attestor.Infrastructure\StellaOps.Attestor.Infrastructure.csproj", "{49EF86AC-1CC2-4A24-8637-C5151E23DF9D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor\StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{C22333B3-D132-4960-A490-6BEF1EB1C917}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor\StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{B8B15A8D-F647-41AE-A55F-A283A47E97C4}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Zastava", "StellaOps.Zastava", "{F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", "StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{CBE6E3D8-230C-4513-B98F-99D82B83B9F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core.Tests", "StellaOps.Zastava.Core.Tests\StellaOps.Zastava.Core.Tests.csproj", "{821C7F88-B775-4D3C-8D89-850B6C34E818}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook", "StellaOps.Zastava.Webhook\StellaOps.Zastava.Webhook.csproj", "{3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook.Tests", "StellaOps.Zastava.Webhook.Tests\StellaOps.Zastava.Webhook.Tests.csproj", "{3C500ECB-5422-4FFB-BD3D-48A850763D31}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1769,6 +1833,366 @@ Global {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x64.Build.0 = Release|Any CPU {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.ActiveCfg = Release|Any CPU {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.Build.0 = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.ActiveCfg = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.ActiveCfg = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.Build.0 = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.Build.0 = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.Build.0 = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.Build.0 = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.Build.0 = 
Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any CPU.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.ActiveCfg = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.ActiveCfg = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any CPU.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.ActiveCfg = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.Build.0 = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.Build.0 = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.Build.0 = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.Build.0 = 
Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.Build.0 = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.Build.0 = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.Build.0 = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any CPU.Build.0 = Release|Any CPU + 
{05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.Build.0 = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.ActiveCfg = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.Build.0 = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.Build.0 = Debug|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.Build.0 = Debug|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.Build.0 = Release|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.ActiveCfg = Release|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.Build.0 = Release|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.ActiveCfg = Release|Any CPU
+		{3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
@@ -1866,5 +2290,14 @@ Global
 	{F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
 	{31277AFF-9BFF-4C17-8593-B562A385058E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
 	{3A8F090F-678D-46E2-8899-67402129749C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
+	{05D844B6-51C1-4926-919C-D99E24FB3BC9} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
+	{03E15545-D6A0-4287-A88C-6EDE77C0DCBE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
+	{BA47D456-4657-4C86-A665-21293E3AC47F} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
+	{49EF86AC-1CC2-4A24-8637-C5151E23DF9D} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
+	{C22333B3-D132-4960-A490-6BEF1EB1C917} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
+	{B8B15A8D-F647-41AE-A55F-A283A47E97C4} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
+	{F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
+	{CBE6E3D8-230C-4513-B98F-99D82B83B9F7} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}
+	{821C7F88-B775-4D3C-8D89-850B6C34E818} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}
 	EndGlobalSection
 EndGlobal
diff --git a/tools/SourceStateSeeder/Program.cs b/tools/SourceStateSeeder/Program.cs
index 6607f13c..863655fd 100644
--- a/tools/SourceStateSeeder/Program.cs
+++ b/tools/SourceStateSeeder/Program.cs
@@ -1,12 +1,11 @@
 using System.Globalization;
-using System.Security.Cryptography;
 using System.Text.Json;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
-using MongoDB.Bson;
 using MongoDB.Driver;
 using StellaOps.Concelier.Connector.Common;
 using StellaOps.Concelier.Connector.Common.Fetch;
+using StellaOps.Concelier.Connector.Common.State;
 using StellaOps.Concelier.Storage.Mongo;
 using StellaOps.Concelier.Storage.Mongo.Documents;
@@ -40,58 +39,28 @@ internal static class Program
             return 1;
         }
 
+        var specification = await BuildSpecificationAsync(seed, sourceName, options.InputPath, CancellationToken.None).ConfigureAwait(false);
+
         var client = new MongoClient(options.ConnectionString);
         var database = client.GetDatabase(options.DatabaseName);
-
         var loggerFactory = NullLoggerFactory.Instance;
+
         var documentStore = new DocumentStore(database, loggerFactory.CreateLogger<DocumentStore>());
         var rawStorage = new RawDocumentStorage(database);
         var stateRepository = new MongoSourceStateRepository(database, loggerFactory.CreateLogger<MongoSourceStateRepository>());
 
-        var pendingDocumentIds = new List<Guid>();
-        var pendingMappingIds = new List<Guid>();
-        var knownAdvisories = new List<string>();
-
-        var now = DateTimeOffset.UtcNow;
-        var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(options.InputPath)) ?? Directory.GetCurrentDirectory();
-
-        foreach (var document in seed.Documents)
-        {
-            var (record, addedToPendingDocs, addedToPendingMaps, known) = await UpsertDocumentAsync(
-                documentStore,
-                rawStorage,
-                sourceName,
-                baseDirectory,
-                now,
-                document,
-                cancellationToken: default).ConfigureAwait(false);
-
-            if (addedToPendingDocs)
-            {
-                pendingDocumentIds.Add(record.Id);
-            }
-
-            if (addedToPendingMaps)
-            {
-                pendingMappingIds.Add(record.Id);
-            }
-
-            if (known is not null)
-            {
-                knownAdvisories.AddRange(known);
-            }
-        }
-
-        await UpdateCursorAsync(
+        var processor = new SourceStateSeedProcessor(
+            documentStore,
+            rawStorage,
             stateRepository,
-            sourceName,
-            seed.Cursor,
-            pendingDocumentIds,
-            pendingMappingIds,
-            knownAdvisories,
-            now).ConfigureAwait(false);
+            TimeProvider.System,
+            loggerFactory.CreateLogger<SourceStateSeedProcessor>());
 
-        Console.WriteLine($"Seeded {pendingDocumentIds.Count + pendingMappingIds.Count} documents for {sourceName}.");
+        var result = await processor.ProcessAsync(specification, CancellationToken.None).ConfigureAwait(false);
+
+        Console.WriteLine(
+            $"Seeded {result.DocumentsProcessed} document(s) for {sourceName} " +
+            $"(pendingDocuments+= {result.PendingDocumentsAdded}, pendingMappings+= {result.PendingMappingsAdded}, knownAdvisories+= {result.KnownAdvisoriesAdded.Count}).");
 
         return 0;
     }
     catch (Exception ex)
@@ -109,13 +78,33 @@ internal static class Program
         return seed;
     }
 
-    private static async Task<(DocumentRecord Record, bool PendingDoc, bool PendingMap, IReadOnlyCollection<string>? Known)> UpsertDocumentAsync(
-        DocumentStore documentStore,
-        RawDocumentStorage rawStorage,
+    private static async Task<SourceStateSeedSpecification> BuildSpecificationAsync(
+        StateSeed seed,
         string sourceName,
-        string baseDirectory,
-        DateTimeOffset fetchedAt,
+        string inputPath,
+        CancellationToken cancellationToken)
+    {
+        var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(inputPath)) ?? Directory.GetCurrentDirectory();
+        var documents = new List<SourceStateSeedDocument>(seed.Documents.Count);
+
+        foreach (var documentSeed in seed.Documents)
+        {
+            documents.Add(await BuildDocumentAsync(documentSeed, baseDirectory, cancellationToken).ConfigureAwait(false));
+        }
+
+        return new SourceStateSeedSpecification
+        {
+            Source = sourceName,
+            Documents = documents.AsReadOnly(),
+            Cursor = BuildCursor(seed.Cursor),
+            KnownAdvisories = NormalizeStrings(seed.KnownAdvisories),
+            CompletedAt = seed.CompletedAt,
+        };
+    }
+
+    private static async Task<SourceStateSeedDocument> BuildDocumentAsync(
         DocumentSeed seed,
+        string baseDirectory,
         CancellationToken cancellationToken)
     {
         if (string.IsNullOrWhiteSpace(seed.Uri))
@@ -128,152 +117,120 @@ internal static class Program
             throw new InvalidOperationException($"Seed entry for '{seed.Uri}' missing 'contentFile'.");
         }
 
-        var contentPath = Path.IsPathRooted(seed.ContentFile)
-            ? seed.ContentFile
-            : Path.GetFullPath(Path.Combine(baseDirectory, seed.ContentFile));
-
+        var contentPath = ResolvePath(seed.ContentFile, baseDirectory);
         if (!File.Exists(contentPath))
        {
            throw new FileNotFoundException($"Content file not found for '{seed.Uri}'.", contentPath);
        }

         var contentBytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false);
-        var sha256 = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant();
-        var gridId = await rawStorage.UploadAsync(
-            sourceName,
-            seed.Uri,
-            contentBytes,
-            seed.ContentType,
-            seed.ExpiresAt,
-            cancellationToken).ConfigureAwait(false);

         var metadata = seed.Metadata is null
-            ? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
+            ? null
            : new Dictionary<string, string>(seed.Metadata, StringComparer.OrdinalIgnoreCase);

         var headers = seed.Headers is null
-            ? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
+            ? null
            : new Dictionary<string, string>(seed.Headers, StringComparer.OrdinalIgnoreCase);

-        if (!headers.ContainsKey("content-type") && !string.IsNullOrWhiteSpace(seed.ContentType))
+        if (!string.IsNullOrWhiteSpace(seed.ContentType))
         {
-            headers["content-type"] = seed.ContentType!;
+            headers ??= new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
+            if (!headers.ContainsKey("content-type"))
+            {
+                headers["content-type"] = seed.ContentType!;
+            }
         }

-        var lastModified = seed.LastModified is null
-            ? (DateTimeOffset?)null
-            : DateTimeOffset.Parse(seed.LastModified, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
-
-        var record = new DocumentRecord(
-            Guid.NewGuid(),
-            sourceName,
-            seed.Uri,
-            fetchedAt,
-            sha256,
-            string.IsNullOrWhiteSpace(seed.Status) ? DocumentStatuses.PendingParse : seed.Status,
-            seed.ContentType,
-            headers,
-            metadata,
-            seed.Etag,
-            lastModified,
-            gridId,
-            seed.ExpiresAt);
-
-        var upserted = await documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
-
-        return (upserted, seed.AddToPendingDocuments, seed.AddToPendingMappings, seed.KnownIdentifiers);
+        return new SourceStateSeedDocument
+        {
+            Uri = seed.Uri,
+            DocumentId = seed.DocumentId,
+            Content = contentBytes,
+            ContentType = seed.ContentType,
+            Status = string.IsNullOrWhiteSpace(seed.Status) ? DocumentStatuses.PendingParse : seed.Status,
+            Headers = headers,
+            Metadata = metadata,
+            Etag = seed.Etag,
+            LastModified = ParseOptionalDate(seed.LastModified),
+            ExpiresAt = seed.ExpiresAt,
+            FetchedAt = ParseOptionalDate(seed.FetchedAt),
+            AddToPendingDocuments = seed.AddToPendingDocuments,
+            AddToPendingMappings = seed.AddToPendingMappings,
+            KnownIdentifiers = NormalizeStrings(seed.KnownIdentifiers),
+        };
     }

-    private static async Task UpdateCursorAsync(
-        ISourceStateRepository repository,
-        string sourceName,
-        CursorSeed? cursorSeed,
-        IReadOnlyCollection<Guid> pendingDocuments,
-        IReadOnlyCollection<Guid> pendingMappings,
-        IReadOnlyCollection<string> knownAdvisories,
-        DateTimeOffset completedAt)
+    private static SourceStateSeedCursor? BuildCursor(CursorSeed? cursorSeed)
     {
-        var state = await repository.TryGetAsync(sourceName, CancellationToken.None).ConfigureAwait(false);
-        var cursor = state?.Cursor ?? new BsonDocument();
-
-        MergeGuidArray(cursor, "pendingDocuments", pendingDocuments);
-        MergeGuidArray(cursor, "pendingMappings", pendingMappings);
-
-        if (knownAdvisories.Count > 0)
+        if (cursorSeed is null)
         {
-            MergeStringArray(cursor, "knownAdvisories", knownAdvisories);
+            return null;
         }

-        if (cursorSeed is not null)
+        return new SourceStateSeedCursor
         {
-            if (cursorSeed.LastModifiedCursor.HasValue)
-            {
-                cursor["lastModifiedCursor"] = cursorSeed.LastModifiedCursor.Value.UtcDateTime;
-            }
-
-            if (cursorSeed.LastFetchAt.HasValue)
-            {
-                cursor["lastFetchAt"] = cursorSeed.LastFetchAt.Value.UtcDateTime;
-            }
-
-            if (cursorSeed.Additional is not null)
-            {
-                foreach (var kvp in cursorSeed.Additional)
-                {
-                    cursor[kvp.Key] = kvp.Value;
-                }
-            }
-        }
-
-        cursor["lastSeededAt"] = completedAt.UtcDateTime;
-
-        await repository.UpdateCursorAsync(sourceName, cursor, completedAt, CancellationToken.None).ConfigureAwait(false);
+            PendingDocuments = NormalizeGuids(cursorSeed.PendingDocuments),
+            PendingMappings = NormalizeGuids(cursorSeed.PendingMappings),
+            KnownAdvisories = NormalizeStrings(cursorSeed.KnownAdvisories),
+            LastModifiedCursor = cursorSeed.LastModifiedCursor,
+            LastFetchAt = cursorSeed.LastFetchAt,
+            Additional = cursorSeed.Additional is null
+                ? null
+                : new Dictionary<string, string>(cursorSeed.Additional, StringComparer.OrdinalIgnoreCase),
+        };
     }

-    private static void MergeGuidArray(BsonDocument cursor, string field, IReadOnlyCollection<Guid> values)
+    private static IReadOnlyCollection<Guid>? NormalizeGuids(IEnumerable<Guid>? values)
     {
-        if (values.Count == 0)
+        if (values is null)
         {
-            return;
+            return null;
         }

-        var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array
-            ? array.Select(v => Guid.TryParse(v?.AsString, out var parsed) ? parsed : Guid.Empty)
-                .Where(g => g != Guid.Empty)
-                .ToHashSet()
-            : new HashSet<Guid>();
-
+        var set = new HashSet<Guid>();
         foreach (var guid in values)
         {
-            existing.Add(guid);
-        }
-
-        cursor[field] = new BsonArray(existing.Select(g => g.ToString()));
-    }
-
-    private static void MergeStringArray(BsonDocument cursor, string field, IReadOnlyCollection<string> values)
-    {
-        if (values.Count == 0)
-        {
-            return;
-        }
-
-        var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array
-            ? array.Select(v => v?.AsString ?? string.Empty)
-                .Where(s => !string.IsNullOrWhiteSpace(s))
-                .ToHashSet(StringComparer.OrdinalIgnoreCase)
-            : new HashSet<string>(StringComparer.OrdinalIgnoreCase);
-
-        foreach (var entry in values)
-        {
-            if (!string.IsNullOrWhiteSpace(entry))
+            if (guid != Guid.Empty)
             {
-                existing.Add(entry.Trim());
+                set.Add(guid);
             }
         }

-        cursor[field] = new BsonArray(existing.OrderBy(s => s, StringComparer.OrdinalIgnoreCase));
+        return set.Count == 0 ? null : set.ToList();
     }
+
+    private static IReadOnlyCollection<string>? NormalizeStrings(IEnumerable<string>? values)
+    {
+        if (values is null)
+        {
+            return null;
+        }
+
+        var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+        foreach (var value in values)
+        {
+            if (!string.IsNullOrWhiteSpace(value))
+            {
+                set.Add(value.Trim());
+            }
+        }
+
+        return set.Count == 0 ? null : set.ToList();
+    }
+
+    private static DateTimeOffset? ParseOptionalDate(string? value)
+    {
+        if (string.IsNullOrWhiteSpace(value))
+        {
+            return null;
+        }
+
+        return DateTimeOffset.Parse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
+    }
+
+    private static string ResolvePath(string path, string baseDirectory)
+        => Path.IsPathRooted(path) ? path : Path.GetFullPath(Path.Combine(baseDirectory, path));
 }

 internal sealed record SeedOptions
@@ -356,12 +313,15 @@ internal sealed record StateSeed
     public string? Source { get; init; }
     public List<DocumentSeed> Documents { get; init; } = new();
     public CursorSeed? Cursor { get; init; }
+    public List<string>? KnownAdvisories { get; init; }
+    public DateTimeOffset? CompletedAt { get; init; }
 }

 internal sealed record DocumentSeed
 {
     public string Uri { get; init; } = string.Empty;
     public string ContentFile { get; init; } = string.Empty;
+    public Guid? DocumentId { get; init; }
     public string? ContentType { get; init; }
     public Dictionary<string, string>? Metadata { get; init; }
     public Dictionary<string, string>? Headers { get; init; }
@@ -369,13 +329,17 @@ internal sealed record DocumentSeed
     public bool AddToPendingDocuments { get; init; } = true;
     public bool AddToPendingMappings { get; init; }
     public string? LastModified { get; init; }
+    public string? FetchedAt { get; init; }
     public string? Etag { get; init; }
     public DateTimeOffset? ExpiresAt { get; init; }
-    public IReadOnlyCollection<string>? KnownIdentifiers { get; init; }
+    public List<string>? KnownIdentifiers { get; init; }
 }

 internal sealed record CursorSeed
 {
+    public List<Guid>? PendingDocuments { get; init; }
+    public List<Guid>? PendingMappings { get; init; }
+    public List<string>? KnownAdvisories { get; init; }
     public DateTimeOffset? LastModifiedCursor { get; init; }
     public DateTimeOffset? LastFetchAt { get; init; }
     public Dictionary<string, string>? Additional { get; init; }