Some checks failed
Build Test Deploy / docs (push) Has been cancelled
Build Test Deploy / deploy (push) Has been cancelled
Build Test Deploy / build-test (push) Has been cancelled
Build Test Deploy / authority-container (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
.gitea/workflows/authority-key-rotation.yml (new file, 163 lines)
@@ -0,0 +1,163 @@
# .gitea/workflows/authority-key-rotation.yml
# Manual workflow to push a new Authority signing key using OPS3 tooling

name: Authority Key Rotation

on:
  workflow_dispatch:
    inputs:
      environment:
        description: 'Target environment name (used to select secrets/vars)'
        required: true
        default: 'staging'
        type: choice
        options:
          - staging
          - production
      authority_url:
        description: 'Override Authority URL (leave blank to use env-specific secret)'
        required: false
        default: ''
        type: string
      key_id:
        description: 'New signing key identifier (kid)'
        required: true
        type: string
      key_path:
        description: 'Path (as Authority sees it) to the PEM key'
        required: true
        type: string
      source:
        description: 'Signing key source loader (default: file)'
        required: false
        default: 'file'
        type: string
      algorithm:
        description: 'Signing algorithm (default: ES256)'
        required: false
        default: 'ES256'
        type: string
      provider:
        description: 'Preferred crypto provider hint'
        required: false
        default: ''
        type: string
      metadata:
        description: 'Optional key=value metadata entries (comma-separated)'
        required: false
        default: ''
        type: string

jobs:
  rotate:
    runs-on: ubuntu-22.04
    environment: ${{ inputs.environment }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Resolve Authority configuration
        id: config
        run: |
          set -euo pipefail

          env_name=${{ inputs.environment }}
          echo "Environment: $env_name"

          bootstrap_key=""
          authority_url="${{ inputs.authority_url }}"

          # Helper to prefer secrets over variables and fall back to shared defaults
          resolve_var() {
            local name="$1"
            local default="$2"
            local value="${{ secrets[name] }}"
            if [ -z "$value" ]; then value="${{ vars[name] }}"; fi
            if [ -z "$value" ]; then value="$default"; fi
            printf '%s' "$value"
          }

          key_name="${env_name^^}_AUTHORITY_BOOTSTRAP_KEY"
          bootstrap_key="$(resolve_var "$key_name" "")"
          if [ -z "$bootstrap_key" ]; then
            bootstrap_key="$(resolve_var "AUTHORITY_BOOTSTRAP_KEY" "")"
          fi

          if [ -z "$bootstrap_key" ]; then
            echo "::error::Missing bootstrap key secret (expected $key_name or AUTHORITY_BOOTSTRAP_KEY)"
            exit 1
          fi

          if [ -z "$authority_url" ]; then
            url_name="${env_name^^}_AUTHORITY_URL"
            authority_url="$(resolve_var "$url_name" "")"
            if [ -z "$authority_url" ]; then
              authority_url="$(resolve_var "AUTHORITY_URL" "")"
            fi
          fi

          if [ -z "$authority_url" ]; then
            echo "::error::Authority URL not provided and no secret/var found"
            exit 1
          fi

          key_file="${RUNNER_TEMP}/authority-bootstrap-key"
          printf '%s\n' "$bootstrap_key" > "$key_file"
          chmod 600 "$key_file"

          echo "bootstrap-key-file=$key_file" >> "$GITHUB_OUTPUT"
          echo "authority-url=$authority_url" >> "$GITHUB_OUTPUT"

      - name: Execute key rotation
        id: rotate
        shell: bash
        env:
          AUTHORITY_BOOTSTRAP_KEY_FILE: ${{ steps.config.outputs['bootstrap-key-file'] }}
          AUTHORITY_URL: ${{ steps.config.outputs['authority-url'] }}
          KEY_ID: ${{ inputs.key_id }}
          KEY_PATH: ${{ inputs.key_path }}
          KEY_SOURCE: ${{ inputs.source }}
          KEY_ALGORITHM: ${{ inputs.algorithm }}
          KEY_PROVIDER: ${{ inputs.provider }}
          KEY_METADATA: ${{ inputs.metadata }}
        run: |
          set -euo pipefail

          bootstrap_key=$(cat "$AUTHORITY_BOOTSTRAP_KEY_FILE")

          metadata_args=()
          if [ -n "$KEY_METADATA" ]; then
            IFS=',' read -ra META <<< "$KEY_METADATA"
            for entry in "${META[@]}"; do
              trimmed="$(echo "$entry" | xargs)"
              [ -z "$trimmed" ] && continue
              metadata_args+=(-m "$trimmed")
            done
          fi

          provider_args=()
          if [ -n "$KEY_PROVIDER" ]; then
            provider_args+=(--provider "$KEY_PROVIDER")
          fi

          ./ops/authority/key-rotation.sh \
            --authority-url "$AUTHORITY_URL" \
            --api-key "$bootstrap_key" \
            --key-id "$KEY_ID" \
            --key-path "$KEY_PATH" \
            --source "$KEY_SOURCE" \
            --algorithm "$KEY_ALGORITHM" \
            "${provider_args[@]}" \
            "${metadata_args[@]}"

      - name: JWKS summary
        run: |
          echo "✅ Rotation complete"
          echo "Environment: ${{ inputs.environment }}"
          echo "Authority: ${{ steps.config.outputs['authority-url'] }}"
          echo "Key ID: ${{ inputs.key_id }}"
          echo "Key Path: ${{ inputs.key_path }}"
          echo "Source: ${{ inputs.source }}"
          echo "Algorithm: ${{ inputs.algorithm }}"
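As a quick aside, the loop below is the workflow's own metadata parsing extracted into a standalone sketch, so the comma-separated `metadata` input can be exercised locally before dispatching a rotation; the `rotatedBy=ops, ticket=OPS-1234` sample value is purely hypothetical.

    #!/usr/bin/env bash
    # Sketch: expand a comma-separated metadata string into repeated -m flags,
    # mirroring the "Execute key rotation" step above. Sample value is made up.
    set -euo pipefail

    KEY_METADATA="rotatedBy=ops, ticket=OPS-1234"

    metadata_args=()
    if [ -n "$KEY_METADATA" ]; then
      IFS=',' read -ra META <<< "$KEY_METADATA"
      for entry in "${META[@]}"; do
        trimmed="$(echo "$entry" | xargs)"   # xargs trims surrounding whitespace
        [ -z "$trimmed" ] && continue
        metadata_args+=(-m "$trimmed")
      done
    fi

    # Prints the collected arguments, one element per line:
    # -m / rotatedBy=ops / -m / ticket=OPS-1234
    printf '%s\n' "${metadata_args[@]}"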
@@ -119,7 +119,10 @@ You main characteristics:
- **Scoping**: Use each module’s `AGENTS.md` and `TASKS.md` to plan; autonomous agents must read `src/AGENTS.md` and the module docs before acting.
- **Determinism**: Sort keys, normalize timestamps to UTC ISO‑8601, avoid non‑deterministic data in exports and tests.
- **Status tracking**: Update your module’s `TASKS.md` as you progress (TODO → DOING → DONE/BLOCKED). Before starting of actual work - ensure you have set the task to DOING. When complete or stop update the status in corresponding TASKS.md or in ./SPRINTS.md file.
- **Coordination**: If a task turns out to be blocked on another team or task, add new tasks describing the requirement to the TASKS.md of the dependency, and update the current task as completed. If a task’s scope, requirements, or rules change, update the other documentation accordingly.
- **Sprint synchronization**: When given a task, find the relevant directory to work in via SPRINTS.md, confirm its state in both SPRINTS.md and the relevant TODOS.md file, and always check the AGENTS.md in that TODOS.md directory.
- **Tests**: Add/extend fixtures and unit tests per change; never regress determinism or precedence.
- **Test layout**: Use module-specific projects in `StellaOps.Feedser.<Component>.Tests`; shared fixtures/harnesses live in `StellaOps.Feedser.Testing`.
- **Execution autonomous**: When more than one option can be pursued, continue through them sequentially unless continuing requires a design decision.

---
SPRINTS.md (60 changed lines)
@@ -1,18 +1,18 @@
| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description |
| --- | --- | --- | --- | --- | --- | --- |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | — | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-001 | SemVer primitive range-style metadata<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.Models/AGENTS.md. This task lays the groundwork—complete the SemVer helper updates before teammates pick up FEEDMODELS-SCHEMA-01-002/003 and FEEDMODELS-SCHEMA-02-900. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md for the target rule structure. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-001 | SemVer primitive range-style metadata<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.Models/AGENTS.md. This task lays the groundwork—complete the SemVer helper updates before teammates pick up FEEDMODELS-SCHEMA-01-002/003 and FEEDMODELS-SCHEMA-02-900. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md for the target rule structure. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-002 | Provenance decision rationale field<br>Instructions to work:<br>AdvisoryProvenance now carries `decisionReason` and docs/tests were updated. Connectors and merge tasks should populate the field when applying precedence/freshness/tie-breaker logic; see src/StellaOps.Feedser.Models/PROVENANCE_GUIDELINES.md for usage guidance. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-003 | Normalized version rules collection<br>Instructions to work:<br>`AffectedPackage.NormalizedVersions` and supporting comparer/docs/tests shipped. Connector owners must emit rule arrays per ./src/FASTER_MODELING_AND_NORMALIZATION.md and report progress via FEEDMERGE-COORD-02-900 so merge/storage backfills can proceed. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | — | Team Models & Merge Leads | FEEDMODELS-SCHEMA-02-900 | Range primitives for SemVer/EVR/NEVRA metadata<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.Models/AGENTS.md before resuming this stalled effort. Confirm helpers align with the new `NormalizedVersions` representation so connectors finishing in Sprint 2 can emit consistent metadata. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-02-900 | Range primitives for SemVer/EVR/NEVRA metadata<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.Models/AGENTS.md before resuming this stalled effort. Confirm helpers align with the new `NormalizedVersions` representation so connectors finishing in Sprint 2 can emit consistent metadata. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Normalization/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter<br>Shared `SemVerRangeRuleBuilder` now outputs primitives + normalized rules per `FASTER_MODELING_AND_NORMALIZATION.md`; CVE/GHSA connectors consuming the API have verified fixtures. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill<br>AdvisoryStore dual-writes flattened `normalizedVersions` when `feedser.storage.enableSemVerStyle` is set; migration `20251011-semver-style-backfill` updates historical records and docs outline the rollout. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence<br>Storage now persists `provenance.decisionReason` for advisories and merge events; tests cover round-trips. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing<br>Bootstrapper seeds compound/sparse indexes for flattened normalized rules and `docs/dev/mongo_indices.md` documents query guidance. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor<br>Updated constructors/tests keep storage suites passing with the new feature flag defaults. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options<br>WebService now binds `authority.resilience.*` into `AddStellaOpsAuthClient`; integration tests verify retry/offline tolerance wiring. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options<br>WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning<br>Quickstart/install docs ship `authority.resilience.*` defaults, online vs air-gapped tuning, and monitoring references aligned with WebService coverage. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning<br>Install/runbooks document connected vs air-gapped resilience profiles and monitoring hooks. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns<br>Guides now include the structured audit log fields (`route/status/subject/clientId/scopes/bypass/remote`) and SIEM alert guidance. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns<br>Operator guides now call out `route/status/subject/clientId/scopes/bypass/remote` audit fields and SIEM triggers. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff<br>Operator guide details the 2025-12-31 cutoff checklist and env var rollout to disable `allowAnonymousFallback`. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff<br>Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding<br>Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through<br>`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish<br>Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. |
@@ -20,30 +20,30 @@
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher<br>Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-003 | DTO & parser implementation<br>Added VINCE DTO aggregate, Markdown→text sanitizer, vendor/status/vulnerability parsers, and parser regression fixture. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-004 | Canonical mapping & range primitives<br>VINCE DTO aggregate flows through `CertCcMapper`, emitting vendor range primitives + normalized version rules that persist via `_advisoryStore`. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DOING (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests<br>Fetch→parse→map integration suite in place; snapshot harness reactivation pending fixture regeneration & documentation of the `UPDATE_CERTCC_FIXTURES` flow. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests<br>Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation<br>WebService now exports the `StellaOps.Feedser.Source.CertCc` meter and fetch integration tests assert `certcc.*` counters; README updated with observability guidance. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation<br>`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Feedser.Source.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DOING (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation<br>Canned-response harness restored; snapshot fixtures still being realigned and regeneration steps documented. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation<br>Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | BLOCKED (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff<br>Upstream repo version lacks SemVer primitives + provenance decision reason fields, so snapshot regeneration fails; resume once Models/Storage sprint lands those changes. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | TODO | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up<br>Re-run fixture updates and deliver Merge handoff after SemVer style + decision reason updates merge into main. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up<br>Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan<br>Staged reintegration plan published in `src/StellaOps.Feedser.Source.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | TODO (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation<br>Ensure missing VINCE endpoints downgrade gracefully without fatal errors; add coverage for partial note ingestion paths. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation<br>Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Distro.RedHat/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-REDHAT-02-001 | Fixture validation sweep<br>Instructions to work:<br>Fixtures regenerated post-model-helper rollout; provenance ordering and normalizedVersions scaffolding verified via tests. Conflict resolver deltas logged in src/StellaOps.Feedser.Source.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | — | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-001 | Canonical mapping & range primitives |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-001 | Canonical mapping & range primitives<br>Mapper emits SemVer rules (`scheme=apple:*`); fixtures regenerated with trimmed references + new RSR coverage, update tooling finalized. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-002 | Deterministic fixtures/tests<br>Sanitized live fixtures + regression snapshots wired into tests; normalized rule coverage asserted. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-003 | Telemetry & documentation<br>Apple meter metrics wired into Feedser WebService OpenTelemetry configuration; README and fixtures document normalizedVersions coverage. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-004 | Live HTML regression sweep<br>Live support.apple.com sweep captured iOS/macOS/RSR cases with deterministic sanitizers. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-004 | Live HTML regression sweep<br>Sanitised HT125326/HT125328/HT106355/HT214108/HT215500 fixtures recorded and regression tests green on 2025-10-12. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-005 | Fixture regeneration tooling<br>`UPDATE_APPLE_FIXTURES=1` flow fetches & rewrites fixtures; README documents usage.<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.Source.Vndr.Apple/AGENTS.md. Resume stalled tasks, ensuring normalizedVersions output and fixtures align with ./src/FASTER_MODELING_AND_NORMALIZATION.md before handing data to the conflict sprint. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance<br>Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/StellaOps.Feedser.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance<br>Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/StellaOps.Feedser.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Osv/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Nvd/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Cve/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-CVE-02-003 | CVE normalized versions uplift |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Cve/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-CVE-02-003 | CVE normalized versions uplift |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Kev/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-KEV-02-003 | KEV normalized versions propagation |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-KEV-02-003 | KEV normalized versions propagation |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Osv/TASKS.md | — | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-04-003 | OSV parity fixture refresh |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-04-003 | OSV parity fixture refresh |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-10) | Team WebService & Authority | FEEDWEB-DOCS-01-001 | Document authority toggle & scope requirements<br>Quickstart carries toggle/scope guidance pending docs guild review (no change this sprint). |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options<br>WebService binds `authority.resilience.*` into `AddStellaOpsAuthClient`; integration tests validate retry/offline wiring. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options<br>WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning<br>Operator docs cover resilience defaults, environment-specific tuning, and monitoring cues. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning<br>Operator docs now outline connected vs air-gapped resilience profiles and monitoring cues. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns<br>Audit logging examples now document bypass fields and recommended alerts. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns<br>Audit logging guidance highlights `route/status/subject/clientId/scopes/bypass/remote` fields and SIEM alerts. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff<br>Enforcement checklist and env overrides documented ahead of the 2025-12-31 cutoff. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff<br>Install guide reiterates the 2025-12-31 cutoff and ties audit signals to rollout checks. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path<br>Build outputs, tests, and docs now target `StellaOps.Feedser.PluginBinaries`/`StellaOps.Authority.PluginBinaries`. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption<br>Deployment docs and CLI notes explain the LIB5 resilience knobs for rollout.<br>Instructions to work:<br>DONE Read ./AGENTS.md and src/StellaOps.Feedser.WebService/AGENTS.md. These items were mid-flight; resume implementation ensuring docs/operators receive timely updates. |
| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). |
@@ -58,14 +58,14 @@
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-005 | NVD merge/export parity regression |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-005 | NVD merge/export parity regression |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow<br>Instructions to work:<br>Read ./AGENTS.md and respective module AGENTS. Implement builder integration, provenance, and supporting docs using ./src/FASTER_MODELING_AND_NORMALIZATION.md and ensure outputs satisfy the precedence matrix in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | RSS endpoints catalogued 2025-10-11, HTTP/2 error + pagination validation and client compatibility task opened. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Implementation DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch pipeline, DTO parser, canonical mapper, fixtures, and README shipped 2025-10-12; downstream export integration still pending future tasks. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cccs/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-007 | Atom feed verified 2025-10-11, history/caching review and FR locale enumeration pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.CertBund/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-007 | BSI RSS directory confirmed CERT-Bund feed 2025-10-11, history assessment pending. |
| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kisa/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | KNVD RSS endpoint identified 2025-10-11, access headers/session strategy outstanding. |
@@ -88,8 +88,8 @@
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-002 | Override metrics instrumentation<br>Merge events capture per-field decisions; counters/logs align with conflict rules. |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-003 | Reference & credit union pipeline<br>Canonical merge preserves unions with updated tests. |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-QA-04-001 | End-to-end conflict regression suite<br>Added regression tests (`AdvisoryMergeServiceTests`) covering canonical + precedence flow.<br>Instructions to work:<br>Read ./AGENTS.md and merge AGENTS. Integrate the canonical merger, instrument metrics, and deliver comprehensive regression tests following ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md. |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | — | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-002 | GHSA conflict regression fixtures |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-002 | GHSA conflict regression fixtures |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Nvd/TASKS.md | — | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Osv/TASKS.md | — | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures<br>Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. |
| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures<br>Instructions to work:<br>Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Feedser Conflict Rules<br>Runbook published at `docs/ops/feedser-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | TODO | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout<br>Instructions to work:<br>Read ./AGENTS.md and docs/AGENTS.md. Socialise the conflict runbook with Feedser Ops, tune alert thresholds, and record change-log linkage once sign-off is captured. Use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md for ongoing rule references. |
| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | TODO | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout<br>Instructions to work:<br>Read ./AGENTS.md and docs/AGENTS.md. Once GHSA/NVD/OSV regression fixtures (FEEDCONN-GHSA-04-002, FEEDCONN-NVD-04-002, FEEDCONN-OSV-04-002) are delivered, schedule the Ops review, apply the alert thresholds captured in `docs/ops/feedser-authority-audit-runbook.md`, and record change-log linkage after sign-off. Use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md for ongoing rule references. |
|
||||||
|

4 TODOS.md
@@ -2,8 +2,8 @@
| Task | Status | Notes |
|---|---|---|
|FEEDCONN-CERTCC-02-005 Deterministic fixtures/tests|DONE (2025-10-11)|Snapshot regression for summary/detail fetch landed; fixtures regenerate via `UPDATE_CERTCC_FIXTURES`.|
|FEEDCONN-CERTCC-02-008 Snapshot coverage handoff|DONE (2025-10-11)|`CertCcConnectorSnapshotTests` produce documents/state/request snapshots and document the refresh workflow.|
|FEEDCONN-CERTCC-02-008 Snapshot coverage handoff|DONE (2025-10-11)|Fixtures + README guidance shipped; QA can rerun with `UPDATE_CERTCC_FIXTURES=1` and share recorded-request diff with Merge.|
|FEEDCONN-CERTCC-02-007 Connector test harness remediation|TODO|Need to restore Source.CertCc harness (`AddSourceCommon`, canned responses) so parser regression can run in CI.|
|FEEDCONN-CERTCC-02-007 Connector test harness remediation|DONE (2025-10-11)|Harness now resets time provider, wires Source.Common, and verifies VINCE canned responses across fetch→parse→map.|
|FEEDCONN-CERTCC-02-009 Detail/map reintegration plan|DONE (2025-10-11)|Plan published in `src/StellaOps.Feedser.Source.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; outlines staged enablement + rollback.|

# Connector Apple Status
5 certificates/authority-signing-2025-dev.pem Normal file
@@ -0,0 +1,5 @@
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIIX2ZUujxnKwidwmPeUlhYKafkxno39luXI6700/hv0roAoGCCqGSM49
AwEHoUQDQgAEvliBfYvF+aKLX25ZClPwqYt6xdTQ9aP9fbEVTW8xQb61alaa8Tae
bjIvg4IFlD+0zzv7ciLVFuYhNkY+UkVnZg==
-----END EC PRIVATE KEY-----
0 dep_tmp.txt Normal file
@@ -228,9 +228,11 @@ See `docs/dev/32_AUTH_CLIENT_GUIDE.md` for recommended profiles (online vs. air-
| `stellaops-cli db merge` | Run canonical merge reconcile | — | Calls `POST /jobs/merge:reconcile`; exit code `0` on acceptance, `1` on failures/conflicts |
| `stellaops-cli db export` | Kick JSON / Trivy exports | `--format <json\|trivy-db>` (default `json`)<br>`--delta`<br>`--publish-full/--publish-delta`<br>`--bundle-full/--bundle-delta` | Sets `{ delta = true }` parameter when requested and can override ORAS/bundle toggles per run |
| `stellaops-cli auth <login\|logout\|status\|whoami>` | Manage cached tokens for StellaOps Authority | `auth login --force` (ignore cache)<br>`auth status`<br>`auth whoami` | Uses `StellaOps.Auth.Client`; honours `StellaOps:Authority:*` configuration, stores tokens under `~/.stellaops/tokens` by default, and `whoami` prints subject/scope/expiry |
| `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output <directory>` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. |
| `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle <path>` `--signature <path>` `--key <path>`<br>`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. |
| `stellaops-cli config show` | Display resolved configuration | — | Masks secret values; helpful for air‑gapped installs |
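The revocation commands compose into a quick offline spot check. A minimal sketch, assuming the CLI binary is on `PATH` and the bundle is exported to `./out`:

```bash
# Export the current revocation bundle (writes revocation-bundle.json, .json.jws, .json.sha256).
stellaops-cli auth revoke export --output ./out

# Compare the recorded digest with a locally computed one (digest file layout may vary).
sha256sum ./out/revocation-bundle.json
cat ./out/revocation-bundle.json.sha256

# Re-verify the detached JWS; omitting --key falls back to cached JWKS.
stellaops-cli auth revoke verify \
  --bundle ./out/revocation-bundle.json \
  --signature ./out/revocation-bundle.json.jws \
  --verbose
```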

When running on an interactive terminal without explicit override flags, the CLI uses Spectre.Console prompts to let you choose per-run ORAS/offline bundle behaviour.

**Logging & exit codes**

@@ -340,6 +342,30 @@ Drop `appsettings.local.json` or `.yaml` beside the binary to override per environ

---

### 2.6 Authority Admin APIs

Administrative endpoints live under `/internal/*` on the Authority host and require the bootstrap API key (`x-stellaops-bootstrap-key`). Responses are deterministic and audited via `AuthEventRecord`.

| Path | Method | Description |
| ---- | ------ | ----------- |
| `/internal/revocations/export` | GET | Returns the revocation bundle (JSON + detached JWS + digest). Mirrors the output of `stellaops-cli auth revoke export`. |
| `/internal/signing/rotate` | POST | Promotes a new signing key and marks the previous key as retired without restarting the service. |

**Rotate request body**

```json
{
  "keyId": "authority-signing-2025",
  "location": "../certificates/authority-signing-2025.pem",
  "source": "file",
  "provider": "default"
}
```

The API responds with the active `kid`, the previous key (if any), and the set of retired key identifiers. Always export a fresh revocation bundle after rotation so downstream mirrors receive signatures from the new key.

---

## 3 First‑Party CLI Tools

### 3.1 `stella`
161 docs/11_AUTHORITY.md Normal file
@@ -0,0 +1,161 @@
# StellaOps Authority Service

> **Status:** Drafted 2025-10-12 (CORE5B.DOC / DOC1.AUTH) – aligns with Authority revocation store, JWKS rotation, and bootstrap endpoints delivered in Sprint 1.

## 1. Purpose
The **StellaOps Authority** service issues OAuth2/OIDC tokens for every StellaOps module (Feedser, Backend, Agent, Zastava) and exposes the policy controls required in sovereign/offline environments. Authority is built as a minimal ASP.NET host that:

- brokers password, client-credentials, and device-code flows through pluggable identity providers;
- persists access/refresh/device tokens in MongoDB with deterministic schemas for replay analysis and air-gapped audit copies;
- distributes revocation bundles and JWKS material so downstream services can enforce lockouts without direct database access;
- offers bootstrap APIs for first-run provisioning and key rotation without redeploying binaries.

Authority is deployed alongside Feedser in air-gapped environments and never requires outbound internet access. All trusted metadata (OpenIddict discovery, JWKS, revocation bundles) is cacheable, signed, and reproducible.

## 2. Component Architecture
Authority is composed of five cooperating subsystems:

1. **Minimal API host** – configures OpenIddict endpoints (`/token`, `/authorize`, `/revoke`, `/jwks`) and structured logging/telemetry. Rate limiting hooks (`AuthorityRateLimiter`) wrap every request.
2. **Plugin host** – loads `StellaOps.Authority.Plugin.*.dll` assemblies, applies capability metadata, and exposes password/client provisioning surfaces through dependency injection.
3. **Mongo storage** – persists tokens, revocations, bootstrap invites, and plugin state in deterministic collections indexed for offline sync (`authority_tokens`, `authority_revocations`, etc.).
4. **Cryptography layer** – `StellaOps.Cryptography` abstractions manage password hashing, signing keys, JWKS export, and detached JWS generation.
5. **Offline ops APIs** – internal endpoints under `/internal/*` provide administrative flows (bootstrap users/clients, revocation export) guarded by API keys and deterministic audit events.

A high-level sequence for password logins:

```
Client -> /token (password grant)
  -> Rate limiter & audit hooks
  -> Plugin credential store (Argon2id verification)
  -> Token persistence (Mongo authority_tokens)
  -> Response (access/refresh tokens + deterministic claims)
```
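
For a concrete view of that flow, the sketch below exercises the password grant with `curl`. It is illustrative only: the client identifier, username, and scope are assumptions, and your deployment may require client authentication or different scope values.

```bash
# Request an access token via the password grant (illustrative values).
curl -sS -X POST https://authority.example.com/token \
  -H "Content-Type: application/x-www-form-urlencoded" \
  --data-urlencode "grant_type=password" \
  --data-urlencode "client_id=feedser-cli" \
  --data-urlencode "username=ops-admin" \
  --data-urlencode "password=${AUTHORITY_PASSWORD}" \
  --data-urlencode "scope=feedser.jobs.trigger"

# A successful response returns access/refresh tokens plus deterministic claims;
# the issued tokens are persisted in the authority_tokens collection.
```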

## 3. Token Lifecycle & Persistence
Authority persists every issued token in MongoDB so operators can audit or revoke without scanning distributed caches.

- **Collection:** `authority_tokens`
- **Key fields:**
  - `tokenId`, `type` (`access_token`, `refresh_token`, `device_code`, `authorization_code`)
  - `subjectId`, `clientId`, ordered `scope` array
  - `status` (`valid`, `revoked`, `expired`), `createdAt`, optional `expiresAt`
  - `revokedAt`, machine-readable `revokedReason`, optional `revokedReasonDescription`
  - `revokedMetadata` (string dictionary for plugin-specific context)
- **Persistence flow:** `PersistTokensHandler` stamps missing JWT IDs, normalises scopes, and stores every principal emitted by OpenIddict.
- **Revocation flow:** `AuthorityTokenStore.UpdateStatusAsync` flips status, records the reason metadata, and is invoked by token revocation handlers and plugin provisioning events (e.g., disabling a user).
- **Expiry maintenance:** `AuthorityTokenStore.DeleteExpiredAsync` prunes non-revoked tokens past their `expiresAt` timestamp. Operators should schedule this in maintenance windows if large volumes of tokens are issued. A query sketch for spot checks follows this list.
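
A minimal sketch for auditing the collection from a shell, assuming `mongosh` is available and that the Authority database name (`stellaops-authority` here) matches your `storage.connectionString`:

```bash
# List the five most recent revoked tokens with their machine-readable reasons.
mongosh "mongodb://localhost:27017/stellaops-authority" --quiet --eval '
  db.authority_tokens
    .find({ status: "revoked" },
          { tokenId: 1, clientId: 1, subjectId: 1, revokedReason: 1, revokedAt: 1 })
    .sort({ revokedAt: -1 })
    .limit(5)
    .forEach(doc => printjson(doc))
'
```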

### Expectations for resource servers
Resource servers (Feedser WebService, Backend, Agent) **must not** assume in-memory caches are authoritative. They should:

- cache `/jwks` and `/revocations/export` responses within configured lifetimes;
- honour `revokedReason` metadata when shaping audit trails;
- treat `status != "valid"` or missing tokens as immediate denial conditions.

## 4. Revocation Pipeline
Authority centralises revocation in `authority_revocations` with deterministic categories:

| Category | Meaning | Required fields |
| --- | --- | --- |
| `token` | Specific OAuth token revoked early. | `revocationId` (token id), `tokenType`, optional `clientId`, `subjectId` |
| `subject` | All tokens for a subject disabled. | `revocationId` (= subject id) |
| `client` | OAuth client registration revoked. | `revocationId` (= client id) |
| `key` | Signing/JWE key withdrawn. | `revocationId` (= key id) |

`RevocationBundleBuilder` flattens Mongo documents into canonical JSON, sorts entries by (`category`, `revocationId`, `revokedAt`), and signs exports using detached JWS (RFC 7797) with cosign-compatible headers.

**Export surfaces** (deterministic output, suitable for Offline Kit):

- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`.
- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload.
- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys.

**Consumer guidance:**

1. Mirror `revocation-bundle.json*` alongside Feedser exports. Offline agents fetch both over the existing update channel.
2. Use bundle `sequence` and `bundleId` to detect replay or monotonicity regressions. Ignore bundles with older sequence numbers unless `bundleId` changes and `issuedAt` advances (see the sketch after this list).
3. Treat `revokedReason` taxonomy as machine-friendly codes (`compromised`, `rotation`, `policy`, `lifecycle`). Translating to human-readable logs is the consumer’s responsibility.
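
A minimal sketch of that monotonicity check, assuming `jq` is installed and that `sequence`, `bundleId`, and `issuedAt` are top-level fields of the bundle (field placement is an assumption):

```bash
#!/usr/bin/env bash
# Compare a freshly mirrored bundle against the previously accepted one.
set -euo pipefail

old="revocation-bundle.previous.json"
new="revocation-bundle.json"

old_seq=$(jq -r '.sequence' "$old")
new_seq=$(jq -r '.sequence' "$new")

if [ "$new_seq" -gt "$old_seq" ]; then
  echo "OK: sequence advanced ($old_seq -> $new_seq)"
elif [ "$(jq -r '.bundleId' "$new")" != "$(jq -r '.bundleId' "$old")" ] && \
     [[ "$(jq -r '.issuedAt' "$new")" > "$(jq -r '.issuedAt' "$old")" ]]; then
  echo "OK: new bundleId with a later issuedAt"
else
  echo "REJECT: stale or replayed bundle" >&2
  exit 1
fi
```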

## 5. Signing Keys & JWKS Rotation
Authority signs revocation bundles and publishes JWKS entries via the new signing manager:

- **Configuration (`authority.yaml`):**
  ```yaml
  signing:
    enabled: true
    algorithm: ES256              # Defaults to ES256
    keySource: file               # Loader identifier (file, vault, etc.)
    provider: default             # Optional preferred crypto provider
    activeKeyId: authority-signing-dev
    keyPath: "../certificates/authority-signing-dev.pem"
    additionalKeys:
      - keyId: authority-signing-dev-2024
        path: "../certificates/authority-signing-dev-2024.pem"
        source: "file"
  ```
- **Sources:** The default loader supports PEM files relative to the content root; additional loaders can be registered via `IAuthoritySigningKeySource`.
- **Providers:** Keys are registered against the `ICryptoProviderRegistry`, so alternative implementations (HSM, libsodium) can be plugged in without changing host code.
- **JWKS output:** `GET /jwks` lists every signing key with `status` metadata (`active`, `retired`). Old keys remain until operators remove them from configuration, allowing verification of historical bundles/tokens.
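
A quick way to confirm which keys are currently advertised, assuming `curl` and `jq` are available and that each JWKS entry carries the `kid` and `status` fields described above:

```bash
# List key identifiers and their status from the Authority JWKS endpoint.
curl -sS https://authority.example.com/jwks \
  | jq -r '.keys[] | "\(.kid)\t\(.status)"'
```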

### Rotation SOP (no downtime)
1. Generate a new P-256 private key (PEM) on an offline workstation and place it where the Authority host can read it (e.g., `../certificates/authority-signing-2025.pem`); a key-generation sketch follows this list.
2. Call the authenticated admin API:
   ```bash
   curl -sS -X POST https://authority.example.com/internal/signing/rotate \
     -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \
     -H "Content-Type: application/json" \
     -d '{
           "keyId": "authority-signing-2025",
           "location": "../certificates/authority-signing-2025.pem",
           "source": "file"
         }'
   ```
3. Verify the response reports the previous key as retired and fetch `/jwks` to confirm the new `kid` appears with `status: "active"`.
4. Persist the old key path in `signing.additionalKeys` (the rotation API updates in-memory options; rewrite the YAML to match so restarts remain consistent).
5. If you prefer automation, trigger the `.gitea/workflows/authority-key-rotation.yml` workflow with the new `keyId`/`keyPath`; it wraps `ops/authority/key-rotation.sh` and reads environment-specific secrets. The older key will be marked `retired` and appended to `signing.additionalKeys`.
6. Re-run `stella auth revoke export` so revocation bundles are signed with the new key. Downstream caches should refresh JWKS within their configured lifetime (`StellaOpsAuthorityOptions.Signing` + client cache tolerance).
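
For step 1, a minimal key-generation sketch using OpenSSL (the output path mirrors the example above; adjust ownership and permissions to your deployment):

```bash
# Generate a P-256 (prime256v1) private key in PEM form for ES256 signing.
openssl ecparam -name prime256v1 -genkey -noout \
  -out ../certificates/authority-signing-2025.pem

# Restrict access before handing the file to the Authority host.
chmod 600 ../certificates/authority-signing-2025.pem
```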

The rotation API leverages the same cryptography abstractions as revocation signing; no restart is required and the previous key is marked `retired` but kept available for verification.

## 6. Bootstrap & Administrative Endpoints
Administrative APIs live under `/internal/*` and require the bootstrap API key plus rate-limiter compliance.

| Endpoint | Method | Description |
| --- | --- | --- |
| `/internal/users` | `POST` | Provision initial administrative accounts through the registered password-capable plug-in. Emits structured audit events. |
| `/internal/clients` | `POST` | Provision OAuth clients (client credentials / device code). |
| `/internal/revocations/export` | `GET` | Export revocation bundle + detached JWS + digest. |
| `/internal/signing/rotate` | `POST` | Promote a new signing key (see SOP above). Request body accepts `keyId`, `location`, optional `source`, `algorithm`, `provider`, and metadata. |

All administrative calls emit `AuthEventRecord` entries enriched with correlation IDs, PII tags, and network metadata for offline SOC ingestion.
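
As a concrete example, the export endpoint can be pulled with the bootstrap API key. A minimal sketch, assuming the key is exported as `BOOTSTRAP_KEY`; exactly how the bundle JSON, detached JWS, and digest are packaged in the response may differ from this single-file assumption:

```bash
# Fetch the revocation bundle via the admin API and store it for mirroring.
curl -sS https://authority.example.com/internal/revocations/export \
  -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \
  -o revocation-bundle.json

# Record the digest so offline consumers can compare against the published .sha256.
sha256sum revocation-bundle.json
```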

## 7. Configuration Reference

| Section | Key | Description | Notes |
| --- | --- | --- | --- |
| Root | `issuer` | Absolute HTTPS issuer advertised to clients. | Required. Loopback HTTP allowed only for development. |
| Tokens | `accessTokenLifetime`, `refreshTokenLifetime`, etc. | Lifetimes for each grant (access, refresh, device, authorization code, identity). | Enforced during issuance; persisted on each token document. |
| Storage | `storage.connectionString` | MongoDB connection string. | Required even for tests; offline kits ship snapshots for seeding. |
| Signing | `signing.enabled` | Enable JWKS/revocation signing. | Disable only for development. |
| Signing | `signing.algorithm` | Signing algorithm identifier. | Currently ES256; additional curves can be wired through crypto providers. |
| Signing | `signing.keySource` | Loader identifier (`file`, `vault`, custom). | Determines which `IAuthoritySigningKeySource` resolves keys. |
| Signing | `signing.keyPath` | Relative/absolute path understood by the loader. | Stored as-is; rotation request should keep it in sync with filesystem layout. |
| Signing | `signing.activeKeyId` | Active JWKS / revocation signing key id. | Exposed as `kid` in JWKS and bundles. |
| Signing | `signing.additionalKeys[].keyId` | Retired key identifier retained for verification. | Manager updates this automatically after rotation; keep YAML aligned. |
| Signing | `signing.additionalKeys[].source` | Loader identifier per retired key. | Defaults to `signing.keySource` if omitted. |
| Security | `security.rateLimiting` | Fixed-window limits for `/token`, `/authorize`, `/internal/*`. | See `docs/security/rate-limits.md` for tuning. |
| Bootstrap | `bootstrap.apiKey` | Shared secret required for `/internal/*`. | Only required when `bootstrap.enabled` is true. |

## 8. Offline & Sovereign Operation
- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use (a mirroring sketch follows this list).
- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. Mirror logs to your SIEM to retain audit trails without central connectivity.
- **Determinism:** Sorting rules in token and revocation exports guarantee byte-for-byte identical artefacts given the same datastore state. Hashes and signatures remain stable across machines.
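
A minimal mirroring sketch, assuming the standard OIDC discovery path (`/.well-known/openid-configuration`) is exposed by the OpenIddict host and that mirrored files ship with the Offline Kit:

```bash
# Snapshot the metadata that offline clients need to validate tokens and bundles.
mkdir -p offline-kit/authority
curl -sS https://authority.example.com/.well-known/openid-configuration \
  -o offline-kit/authority/openid-configuration.json
curl -sS https://authority.example.com/jwks \
  -o offline-kit/authority/jwks.json

# Produce digests so mirrors can be verified after transfer.
(cd offline-kit/authority && sha256sum *.json > SHA256SUMS)
```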

## 9. Operational Checklist
- [ ] Protect the bootstrap API key and disable bootstrap endpoints (`bootstrap.enabled: false`) once initial setup is complete.
- [ ] Schedule `stella auth revoke export` (or `/internal/revocations/export`) at the same cadence as Feedser exports so bundles remain in lockstep.
- [ ] Rotate signing keys before expiration; keep at least one retired key until all cached bundles/tokens signed with it have expired.
- [ ] Monitor `/health` and `/ready` plus rate-limiter metrics to detect plugin outages early.
- [ ] Ensure downstream services cache JWKS and revocation bundles within tolerances; stale caches risk accepting revoked tokens.

For plug-in specific requirements, refer to **[Authority Plug-in Developer Guide](dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md)**. For revocation bundle validation workflow, see **[Authority Revocation Bundle](security/revocation-bundle.md)**.
@@ -159,6 +159,7 @@ cosign verify ghcr.io/stellaops/backend@sha256:<DIGEST> \
showing whether a network bypass CIDR allowed the request. Configure your SIEM
to alert when unauthenticated requests (`status=401`) appear with
`bypass=true`, or when unexpected scopes invoke job triggers.
Detailed monitoring and response guidance lives in `docs/ops/feedser-authority-audit-runbook.md`.

## 8 Update & patch strategy
@@ -93,12 +93,20 @@ The Feedser container reads configuration from `etc/feedser.yaml` plus
FEEDSER_AUTHORITY__CLIENTSECRETFILE="/run/secrets/feedser_authority_client"
FEEDSER_AUTHORITY__BYPASSNETWORKS__0="127.0.0.1/32"
FEEDSER_AUTHORITY__BYPASSNETWORKS__1="::1/128"
FEEDSER_AUTHORITY__RESILIENCE__ENABLERETRIES=true
FEEDSER_AUTHORITY__RESILIENCE__RETRYDELAYS__0="00:00:01"
FEEDSER_AUTHORITY__RESILIENCE__RETRYDELAYS__1="00:00:02"
FEEDSER_AUTHORITY__RESILIENCE__RETRYDELAYS__2="00:00:05"
FEEDSER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK=true
FEEDSER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE="00:10:00"
```

Store the client secret outside source control (Docker secrets, mounted file,
or Kubernetes Secret). Feedser loads the secret during post-configuration, so
the value never needs to appear in the YAML template.

Connected sites can keep the retry ladder short (1 s, 2 s, 5 s) so job triggers fail fast when Authority is down. For air‑gapped or intermittently connected deployments, extend `RESILIENCE__OFFLINECACHETOLERANCE` (e.g. `00:30:00`) so cached discovery/JWKS data remains valid while the Offline Kit synchronises upstream changes.

2. Redeploy Feedser:

   ```bash
@@ -106,9 +114,10 @@ The Feedser container reads configuration from `etc/feedser.yaml` plus
   ```

3. Tail the logs: `docker compose logs -f feedser`. Successful `/jobs*` calls now
   emit `Feedser.Authorization.Audit` entries listing subject, client ID, scopes,
   remote IP, and whether the bypass CIDR allowed the call. 401 denials always log
   `bypassAllowed=false` so unauthenticated cron jobs are easy to catch.
   emit `Feedser.Authorization.Audit` entries with `route`, `status`, `subject`,
   `clientId`, `scopes`, `bypass`, and `remote` fields. 401 denials keep the same
   shape—watch for `bypass=True`, which indicates a bypass CIDR accepted an anonymous
   call. See `docs/ops/feedser-authority-audit-runbook.md` for a full audit/alerting checklist.
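
To turn that guidance into a quick check, a minimal sketch that scans Feedser container logs for bypassed or denied calls; it assumes the audit fields appear verbatim in the log text (adjust the patterns if your logging pipeline reshapes them):

```bash
# Surface audit entries where a bypass CIDR admitted a call or a 401 was returned.
docker compose logs feedser \
  | grep "Feedser.Authorization.Audit" \
  | grep -E "bypass=True|status=401"
```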

> **Enforcement deadline** – keep `FEEDSER_AUTHORITY__ALLOWANONYMOUSFALLBACK=true`
> only while validating the rollout. Set it to `false` (and restart Feedser)
@@ -42,9 +42,11 @@ Everything here is open‑source and versioned — when you check out a git ta
  - [`nightly_scheduler.md`](08_MODULE_SPECIFICATIONS/nightly_scheduler.md)
- **09 – [API & CLI Reference](09_API_CLI_REFERENCE.md)**
- **10 – [Plug‑in SDK Guide](10_PLUGIN_SDK_GUIDE.md)**
- **11 – [Authority Service](11_AUTHORITY.md)**
- **11 – [Data Schemas](11_DATA_SCHEMAS.md)**
- **12 – [Performance Workbook](12_PERFORMANCE_WORKBOOK.md)**
- **13 – [Release‑Engineering Playbook](13_RELEASE_ENGINEERING_PLAYBOOK.md)**
- **30 – [Fixture Maintenance](dev/fixtures.md)**

### User & operator guides
- **14 – [Glossary](14_GLOSSARY_OF_TERMS.md)**
@@ -56,6 +58,8 @@ Everything here is open‑source and versioned — when you check out a git ta
- **22 – [CI/CD Recipes Library](ci/20_CI_RECIPES.md)**
- **23 – [FAQ](23_FAQ_MATRIX.md)**
- **24 – [Offline Update Kit Admin Guide](24_OUK_ADMIN_GUIDE.md)**
- **26 – [Authority Key Rotation Playbook](ops/authority-key-rotation.md)**
- **25 – [Feedser Apple Connector Operations](ops/feedser-apple-operations.md)**

### Legal & licence
- **29 – [Legal & Quota FAQ](29_LEGAL_FAQ_QUOTA.md)**
@@ -3,10 +3,10 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| DOC4.AUTH-PDG | REVIEW | Docs Guild, Plugin Team | PLG6.DOC | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | ✅ PR merged with polish; ✅ Diagram committed; ✅ Slack handoff posted. |
| DOC1.AUTH | TODO | Docs Guild, Authority Core | CORE5B.DOC | Draft `docs/11_AUTHORITY.md` covering architecture, configuration, bootstrap flows. | ✅ Architecture + config sections approved by Core; ✅ Samples reference latest options; ✅ Offline note added. |
| DOC1.AUTH | DONE (2025-10-12) | Docs Guild, Authority Core | CORE5B.DOC | Draft `docs/11_AUTHORITY.md` covering architecture, configuration, bootstrap flows. | ✅ Architecture + config sections approved by Core; ✅ Samples reference latest options; ✅ Offline note added. |
| DOC3.Feedser-Authority | DOING (2025-10-10) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Feedser authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. |
| DOC3.Feedser-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Feedser authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. |
| DOC5.Feedser-Runbook | TODO | Docs Guild | DOC3.Feedser-Authority | Produce dedicated Feedser authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. |
| DOC5.Feedser-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Feedser-Authority | Produce dedicated Feedser authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. |
| FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Feedser conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/feedser-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Feedser team. |
| FEEDDOCS-DOCS-05-002 | TODO | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Capture ops sign-off: circulate conflict runbook, tune alert thresholds, and document rollout decisions in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised; ✅ change-log entry linked from runbook. |
| FEEDDOCS-DOCS-05-002 | TODO | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Capture ops sign-off: circulate conflict runbook, tune alert thresholds, and document rollout decisions in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/feedser-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. |

> Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`.
17 docs/assets/authority/authority-plugin-lifecycle.mmd Normal file
@@ -0,0 +1,17 @@
%% Authority plug-in lifecycle sequence diagram (Mermaid)
flowchart LR
    manifest[[Plugin Manifest<br/>etc/authority.plugins/*.yaml]]
    loader[AuthorityPluginConfigurationLoader<br/>binds and validates options]
    scanner[PluginHost Assembly Scan<br/>StellaOps.Authority.Plugin.*]
    registrar[IAuthorityPluginRegistrar<br/>registers services & health checks]
    runtime[Identity Provider Plugin<br/>IIdentityProviderPlugin surface]
    capabilities{Capability Metadata<br/>password/mfa/bootstrap/clientProvisioning}
    storage[(Credential Store<br/>Mongo collections or custom backend)]
    telemetry[[Structured Logs & Metrics<br/>authority.*]]

    manifest --> loader --> scanner --> registrar --> runtime --> storage
    scanner --> capabilities
    capabilities --> runtime
    runtime --> telemetry
    loader -. emits deterministic config hashes .-> telemetry
    storage -. readiness probes .-> runtime
91 docs/assets/authority/authority-plugin-lifecycle.svg Normal file
@@ -0,0 +1,91 @@
[SVG asset: "Authority plug-in lifecycle: manifest to runtime" — rendered version of authority-plugin-lifecycle.mmd. Nodes: Manifest YAML → AuthorityPluginConfigurationLoader → PluginHost assembly scan → IAuthorityPluginRegistrar → Identity Provider plug-in, with supporting nodes for capability metadata broadcast, credential & audit storage, and telemetry output. Annotations: 1. Configuration, 2. Assembly discovery, 3. Registrar execution, 4. Runtime surface. Full markup omitted here.]
After Width: | Height: | Size: 4.0 KiB
27 docs/assets/authority/authority-rate-limit-flow.mmd Normal file
@@ -0,0 +1,27 @@
%% Rate limit and lockout interplay for Standard plug-in (Mermaid)
sequenceDiagram
    autonumber
    participant Client as Client/App
    participant Host as Authority Host
    participant Limiter as Rate Limiter Middleware
    participant Plugin as Standard Plugin
    participant Store as Credential Store / Lockout State

    Client->>Host: POST /token (client_id, credentials)
    Host->>Limiter: Check quota (client_id + remote_ip)
    alt quota exceeded
        Limiter-->>Host: Reject (429, retryAfter)
        Host-->>Client: 429 Too Many Requests\nRetry-After header with limiter tags
    else quota ok
        Limiter-->>Host: Allow (remaining tokens)
        Host->>Plugin: VerifyCredentials(subject)
        Plugin->>Store: Load hashed password + lockout counters
        Store-->>Plugin: Credential result + deterministic counter
        alt lockout threshold reached
            Plugin-->>Host: Locked (retryAfter=lockoutWindow)
            Host-->>Client: 423 Locked\nRetry-After header + `authority.lockout` tag
        else valid credentials
            Plugin-->>Host: Success (issue tokens)
            Host-->>Client: 200 OK + tokens + limiter metadata
        end
    end
105 docs/assets/authority/authority-rate-limit-flow.svg Normal file
@@ -0,0 +1,105 @@
[SVG asset: "Authority rate limit and lockout flow" — rendered version of authority-rate-limit-flow.mmd. Lanes: Client / App, Authority Host, Rate Limiter, Standard Plug-in, Credential Store. Steps: POST /token request → Authority middleware enriches context tags → rate limiter window (client_id + IP keyed) → quota decision (Retry-After and tags on rejection) → verify credentials → load lockout state → lockout decision → issue tokens or errors with limiter metadata. Notes: the 429 path adds `authority.client_id` / `authority.remote_ip` tags for dashboards; the lockout path reuses the precedence strategy from Feedser dedup (see DEDUP_CONFLICTS_RESOLUTION_ALGO.md). Full markup omitted here.]
After Width: | Height: | Size: 4.8 KiB
@@ -1,6 +1,6 @@
# Authority Plug-in Developer Guide

> **Status:** Ready for Docs/DOC4 editorial review as of 2025-10-10. Content aligns with PLG6 acceptance criteria and references stable Authority primitives.
> **Status:** Updated 2025-10-11 (AUTHPLUG-DOCS-01-001) with lifecycle + limiter diagrams and refreshed rate-limit guidance aligned to PLG6 acceptance criteria.

## 1. Overview
Authority plug-ins extend the **StellaOps Authority** service with custom identity providers, credential stores, and client-management logic. Unlike Feedser plug-ins (which ingest or export advisories), Authority plug-ins participate directly in authentication flows:
@@ -17,6 +17,10 @@ Authority hosts follow a deterministic plug-in lifecycle. The flow below can be
3. **Registrar execution** – each assembly is searched for `IAuthorityPluginRegistrar` implementations. Registrars bind options, register services, and optionally queue bootstrap tasks.
4. **Runtime** – the host resolves `IIdentityProviderPlugin` instances, uses capability metadata to decide which OAuth grants to expose, and invokes health checks for readiness endpoints.



_Source:_ `docs/assets/authority/authority-plugin-lifecycle.mmd`

**Data persistence primer:** the standard Mongo-backed plugin stores users in collections named `authority_users_<pluginName>` and lockout metadata in embedded documents. Additional plugins must document their storage layout and provide deterministic collection naming to honour the Offline Kit replication process.

## 3. Capability Metadata
@@ -100,7 +104,7 @@ Capability flags let the host reason about what your plug-in supports:
- Optional `IClientProvisioningStore` for machine-to-machine clients.
- `AuthorityIdentityProviderCapabilities` to advertise supported flows.
- Password guidance:
  - Prefer Argon2 (Security Guild upcoming recommendation); Standard plug-in currently ships PBKDF2 with easy swap via `IPasswordHasher`.
  - Standard plug-in hashes via `ICryptoProvider` using Argon2id by default and emits PHC-compliant strings. Successful PBKDF2 logins trigger automatic rehashes so migrations complete gradually. See `docs/security/password-hashing.md` for tuning advice.
  - Enforce password policies before hashing to avoid storing weak credentials.
- Health checks should probe backing stores (e.g., Mongo `ping`) and return `AuthorityPluginHealthResult` so `/ready` can surface issues.
- When supporting additional factors (e.g., TOTP), implement `SupportsMfa` and document the enrolment flow for resource servers.
@@ -111,14 +115,64 @@ Capability flags let the host reason about what your plug-in supports:
- Never store raw secrets in git: allow operators to supply them via `.local.yaml`, environment variables, or injected secret files. Document which keys are mandatory.
- Validate configuration as soon as the registrar runs; use explicit error messages to guide operators. The Standard plug-in now enforces complete bootstrap credentials (username + password) and positive lockout windows via `StandardPluginOptions.Validate`.
- Cross-reference bootstrap workflows with `docs/ops/authority_bootstrap.md` (to be published alongside CORE6) so operators can reuse the same payload formats for manual provisioning.
- `passwordHashing` inherits defaults from `authority.security.passwordHashing`. Override only when hardware constraints differ per plug-in:
  ```yaml
  passwordHashing:
    algorithm: Argon2id
    memorySizeInKib: 19456
    iterations: 2
    parallelism: 1
  ```
  Invalid values (≤0) fail fast during startup, and legacy PBKDF2 hashes rehash automatically once the new algorithm succeeds.

## 8. Logging, Metrics, and Diagnostics
### 7.1 Token Persistence Contract
- The host automatically persists every issued principal (access, refresh, device, authorization code) in `authority_tokens`. Plug-in code **must not** bypass this store; use the provided `IAuthorityTokenStore` helpers when implementing custom flows.
- When a plug-in disables a subject or client outside the standard handlers, call `IAuthorityTokenStore.UpdateStatusAsync(...)` for each affected token so revocation bundles stay consistent.
- Supply machine-friendly `revokedReason` codes (`compromised`, `rotation`, `policy`, `lifecycle`, etc.) and optional `revokedMetadata` entries when invalidating credentials. These flow straight into `revocation-bundle.json` and should remain deterministic.
- Token scopes should be normalised (trimmed, unique, ordinal sort) before returning from plug-in verification paths. `TokenPersistenceHandlers` will keep that ordering for downstream consumers.

### 7.2 Claims & Enrichment Checklist
- Authority always sets the OpenID Connect basics: `sub`, `client_id`, `preferred_username`, optional `name`, and `role` (for password flows). Plug-ins must use `IClaimsEnricher` to append additional claims in a **deterministic** order (sort arrays, normalise casing) so resource servers can rely on stable shapes.
- Recommended enrichment keys:
  - `stellaops.realm` – plug-in/tenant identifier so services can scope policies.
  - `stellaops.subject.type` – values such as `human`, `service`, `bootstrap`.
  - `groups` / `projects` – sorted arrays describing operator entitlements.
- Claims visible in tokens should mirror what `/token` and `/userinfo` emit. Avoid injecting sensitive PII directly; mark values with `ClassifiedString.Personal` inside the plug-in so audit sinks can tag them appropriately.
- For client-credential flows, remember to enrich both the client principal and the validation path (`TokenValidationHandlers`) so refresh flows keep the same metadata.

### 7.3 Revocation Bundles & Reasons
- Use `IAuthorityRevocationStore` to record subject/client/token revocations when credentials are deleted or rotated. Stick to the standard categories (`token`, `subject`, `client`, `key`).
- Include a deterministic `reason` string and optional `reasonDescription` so operators understand *why* a subject was revoked when inspecting bundles offline.
- Plug-ins should populate `metadata` with stable keys (e.g., `revokedBy`, `sourcePlugin`, `ticketId`) to simplify SOC correlation. The keys must be lowercase, ASCII, and free of secrets—bundles are mirrored to air-gapped agents.

## 8. Rate Limiting & Lockout Interplay
Rate limiting and account lockouts are complementary controls. Plug-ins must surface both deterministically so operators can correlate limiter hits with credential rejections.

**Baseline quotas** (from `docs/dev/authority-rate-limit-tuning-outline.md`):

| Endpoint | Default policy | Notes |
|----------|----------------|-------|
| `/token` | 30 requests / 60s, queue 0 | Drop to 10/60s for untrusted ranges; raise only with WAF + monitoring. |
| `/authorize` | 60 requests / 60s, queue 10 | Reduce carefully; interactive UX depends on headroom. |
| `/internal/*` | Disabled by default; recommended 5/60s when enabled | Keep queue 0 for bootstrap APIs. |

**Retry metadata:** The middleware stamps `Retry-After` plus tags `authority.client_id`, `authority.remote_ip`, and `authority.endpoint`. Plug-ins should keep these tags intact when crafting responses or telemetry so dashboards remain consistent.

**Lockout counters:** Treat lockouts as **subject-scoped** decisions. When multiple instances update counters, reuse the deterministic tie-breakers documented in `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md` (freshness overrides, precedence, and stable hashes) to avoid divergent lockout states across replicas.

**Alerting hooks:** Emit structured logs/metrics when either the limiter or credential store rejects access. Suggested gauges include `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}` and any custom `auth.plugins.<pluginName>.lockouts_total` counter.
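
To wire those hooks into a dashboard, a minimal sketch that queries a Prometheus instance for limiter rejections; the Prometheus URL, the exported metric spellings, and the concrete lockout counter name are assumptions derived from the conventions above:

```bash
# Count /token rate-limit rejections over the last hour (assumed Prometheus at localhost:9090).
curl -sS 'http://localhost:9090/api/v1/query' \
  --data-urlencode 'query=sum(increase(aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}[1h]))'

# Hypothetical custom counter following the auth.plugins.<pluginName>.* convention.
curl -sS 'http://localhost:9090/api/v1/query' \
  --data-urlencode 'query=sum(increase(auth_plugins_standard_lockouts_total[1h]))'
```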
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
_Source:_ `docs/assets/authority/authority-rate-limit-flow.mmd`
|
||||||
|
|
||||||
|
## 9. Logging, Metrics, and Diagnostics

- Always log via the injected `ILogger<T>`; include `pluginName` and correlation IDs where available.
- Activity/metric names should align with `AuthorityTelemetry` constants (`service.name=stellaops-authority`).
- Expose additional diagnostics via structured logging rather than writing custom HTTP endpoints; the host will integrate these into `/health` and `/ready`.
- Emit metrics with stable names (`auth.plugins.<pluginName>.*`) when introducing custom instrumentation; coordinate with the Observability guild to reserve prefixes.

## 10. Testing & Tooling

- Unit tests: use Mongo2Go (or similar) to exercise credential stores without hitting production infrastructure (`StandardUserCredentialStoreTests` is a template; a minimal sketch follows this list).
- Determinism: fix timestamps to UTC and sort outputs consistently; avoid random GUIDs unless stable.
- Smoke tests: launch `dotnet run --project src/StellaOps.Authority/StellaOps.Authority` with your plug-in under `PluginBinaries/Authority` and verify `/ready`.
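
For the unit-test bullet above, a minimal Mongo2Go-backed sketch (the store type, its constructor, and the `UpsertAsync`/`FindBySubjectAsync` calls are assumptions for illustration; mirror `StandardUserCredentialStoreTests` for the real shapes):

```csharp
using Mongo2Go;
using MongoDB.Driver;
using Xunit;

public sealed class CredentialStoreTests : IDisposable
{
    // Spins up a throwaway local mongod; no production infrastructure involved.
    private readonly MongoDbRunner _runner = MongoDbRunner.Start();

    [Fact]
    public async Task UpsertThenLookupRoundTrips()
    {
        var database = new MongoClient(_runner.ConnectionString).GetDatabase("authority-plugin-tests");
        var store = new StandardUserCredentialStore(database);   // assumed constructor

        await store.UpsertAsync("subject-1", "pbkdf2-hash", CancellationToken.None);      // assumed API
        var credential = await store.FindBySubjectAsync("subject-1", CancellationToken.None);

        Assert.NotNull(credential);
    }

    public void Dispose() => _runner.Dispose();
}
```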
@@ -137,13 +191,13 @@ Capability flags let the host reason about what your plug-in supports:
}
```

## 11. Packaging & Delivery

- Output assembly should follow `StellaOps.Authority.Plugin.<Name>.dll` so the host’s search pattern picks it up.
- Place the compiled DLL plus dependencies under `PluginBinaries/Authority` for offline deployments; include hashes/signatures in release notes (Security Guild guidance forthcoming).
- Document any external prerequisites (e.g., CA cert bundle) in your plug-in README.
- Update `etc/authority.plugins/<plugin>.yaml` samples and include deterministic SHA256 hashes for optional bootstrap payloads when distributing Offline Kit artefacts.

## 12. Checklist & Handoff

- ✅ Capabilities declared and validated in automated tests.
- ✅ Bootstrap workflows documented (if `bootstrap` capability used) and repeatable.
- ✅ Local smoke test + unit/integration suites green (`dotnet test`).
@@ -151,7 +205,4 @@ Capability flags let the host reason about what your plug-in supports:
- Submit the developer guide update referencing PLG6/DOC4 and tag DevEx + Docs reviewers for sign-off.

---

Mermaid sources for the embedded diagrams live under `docs/assets/authority/`. Regenerate the SVG assets with your preferred renderer before committing future updates so the visuals stay in sync with the `.mmd` definitions.

34 docs/dev/fixtures.md Normal file
@@ -0,0 +1,34 @@
# Feedser Fixture Maintenance

Feedser uses a handful of deterministic fixtures to keep connector regressions in check. This guide lists the fixture sets, where they live, and how to regenerate them safely.

---

## GHSA ↔ OSV parity fixtures

- **Location:** `src/StellaOps.Feedser.Source.Osv.Tests/Fixtures/osv-ghsa.*.json`
- **Purpose:** Exercised by `OsvGhsaParityRegressionTests` to ensure OSV + GHSA outputs stay aligned on aliases, ranges, references, and credits.
- **Regeneration:** Either run the test harness with online regeneration (`UPDATE_PARITY_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj`) or execute the fixture updater (`dotnet run --project tools/FixtureUpdater/FixtureUpdater.csproj`). Both paths normalise timestamps and canonical ordering.
- **Verification:** Inspect the diff, then re-run `dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj` to confirm parity.

## GHSA credit parity fixtures

- **Location:** `src/StellaOps.Feedser.Source.Ghsa.Tests/Fixtures/credit-parity.{ghsa,osv,nvd}.json`
- **Purpose:** Exercised by `GhsaCreditParityRegressionTests` to guarantee GHSA/NVD/OSV acknowledgements remain in lockstep.
- **Regeneration:** `dotnet run --project tools/FixtureUpdater/FixtureUpdater.csproj` rewrites all three canonical snapshots.
- **Verification:** `dotnet test src/StellaOps.Feedser.Source.Ghsa.Tests/StellaOps.Feedser.Source.Ghsa.Tests.csproj`.

> Always commit fixture changes together with the code that motivated them and reference the regression test that guards the behaviour.

## Apple security update fixtures

- **Location:** `src/StellaOps.Feedser.Source.Vndr.Apple.Tests/Apple/Fixtures/*.html` and `.expected.json`.
- **Purpose:** Exercised by `AppleLiveRegressionTests` to guarantee the Apple HTML parser and mapper stay deterministic while covering Rapid Security Responses and multi-device advisories.
- **Regeneration:** Use the helper scripts (`scripts/update-apple-fixtures.sh` or `scripts/update-apple-fixtures.ps1`). They export `UPDATE_APPLE_FIXTURES=1`, propagate the flag through `WSLENV`, touch `.update-apple-fixtures`, and then run the Apple test project. This keeps WSL/VSCode test invocations in sync while the refresh workflow fetches live Apple support pages, sanitises them, and rewrites both the HTML and expected DTO snapshots with normalised ordering.
- **Verification:** Inspect the generated diffs and re-run `dotnet test src/StellaOps.Feedser.Source.Vndr.Apple.Tests/StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj` without the env var to confirm determinism.

> **Tip for other connector owners:** mirror the sentinel + `WSLENV` pattern (`touch .update-<connector>-fixtures`, append the env var via `WSLENV`) when you add fixture refresh scripts so contributors running under WSL inherit the regeneration flag automatically.

@@ -29,6 +29,8 @@ var rule = primitive.ToNormalizedVersionRule(notes: "nvd:CVE-2025-1234");
// rule => scheme=semver, type=range, min=1.2.3, minInclusive=true, max=2.0.0, maxInclusive=false
```

If you omit the optional `notes` argument, `ToNormalizedVersionRule` now falls back to the primitive’s `ConstraintExpression`, ensuring the original comparator expression is preserved for provenance/audit queries.
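
For instance, a call without `notes` (sketch; `primitive` is the same range primitive as in the snippet above, and the `Notes` property name is inferred from the stored `notes` field):

```csharp
var fallbackRule = primitive.ToNormalizedVersionRule();
// fallbackRule.Notes now carries the primitive's ConstraintExpression
// (e.g. ">= 1.2.3 < 2.0.0"), so provenance/audit queries keep the original comparator.
```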

Emit the resulting rule inside `AffectedPackage.NormalizedVersions` while continuing to populate `AffectedVersionRange.RangeExpression` for backward compatibility.

## 3. Merge dedupe flow
@@ -97,3 +99,56 @@ Follow the operational checklist in `docs/ops/migrations/SEMVER_STYLE.md`. The s
- [ ] Confirm integration tests include fixtures with normalized rules and SemVer styles.

For deeper query examples and maintenance tasks, continue with [Normalized Versions Query Guide](mongo_indices.md).

## 8. Storage projection reference

`NormalizedVersionDocumentFactory` copies each normalized rule into MongoDB using the shape below. Use this as a contract when reviewing connector fixtures or diagnosing merge/storage diffs:

```json
{
  "packageId": "pkg:npm/example",
  "packageType": "npm",
  "scheme": "semver",
  "type": "range",
  "style": "range",
  "min": "1.2.3",
  "minInclusive": true,
  "max": "2.0.0",
  "maxInclusive": false,
  "value": null,
  "notes": "ghsa:GHSA-xxxx-yyyy",
  "decisionReason": "ghsa-precedence-over-nvd",
  "constraint": ">= 1.2.3 < 2.0.0",
  "source": "ghsa",
  "recordedAt": "2025-10-11T00:00:00Z"
}
```

For distro-specific ranges (`nevra`, `evr`) the same envelope applies with `scheme` switched accordingly. Example:

```json
{
  "packageId": "bash",
  "packageType": "rpm",
  "scheme": "nevra",
  "type": "range",
  "style": "range",
  "min": "0:4.4.18-2.el7",
  "minInclusive": true,
  "max": "0:4.4.20-1.el7",
  "maxInclusive": false,
  "value": null,
  "notes": "redhat:RHSA-2025:1234",
  "decisionReason": "rhel-priority-over-nvd",
  "constraint": "<= 0:4.4.20-1.el7",
  "source": "redhat",
  "recordedAt": "2025-10-11T00:00:00Z"
}
```

If a new scheme is required (for example, `apple.build` or `ios.semver`), raise it with the Models team before emitting documents so merge comparers and hashing logic can incorporate the change deterministically.

## 9. Observability signals

- `feedser.merge.normalized_rules` (counter, tags: `package_type`, `scheme`) – increments once per normalized rule retained after precedence merge.
- `feedser.merge.normalized_rules_missing` (counter, tags: `package_type`) – increments when a merged package still carries version ranges but no normalized rules; watch for spikes to catch connectors that have not emitted normalized arrays yet.
@@ -2,6 +2,8 @@

This guide complements the Sprint 1–2 normalized versions rollout. It documents recommended indexes and aggregation patterns for querying `AffectedPackage.normalizedVersions`.

For a field-by-field look at how normalized rules persist in MongoDB (including provenance metadata), see Section 8 of the [Feedser SemVer Merge Playbook](merge_semver_playbook.md).

## 1. Recommended indexes

When `feedser.storage.enableSemVerStyle` is enabled, advisories expose a flattened
49 docs/dev/normalized_versions_rollout.md Normal file
@@ -0,0 +1,49 @@
# Normalized Versions Rollout Dashboard (Sprint 2 – Feedser)

_Status date: 2025-10-12 17:05 UTC_

This dashboard tracks connector readiness for emitting `AffectedPackage.NormalizedVersions` arrays and highlights upcoming coordination checkpoints. Use it alongside:

- [`src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`](../../src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md) for detailed guidance and timelines.
- [Feedser SemVer Merge Playbook](merge_semver_playbook.md) §8 for persisted Mongo document shapes.
- [Normalized Versions Query Guide](mongo_indices.md) for index/query validation steps.

## Key milestones

- **2025-10-13** – Normalization to finalize `SemVerRangeRuleBuilder` API contract for review.
- **2025-10-17** – Connector owners to post fixture PRs showing `NormalizedVersions` arrays (even if feature-flagged).
- **2025-10-18** – Merge cross-connector review to validate consistent field usage before enabling union logic.

## Connector readiness matrix

| Connector | Owner team | Normalized versions status | Last update | Next action / link |
|-----------|------------|---------------------------|-------------|--------------------|
| Acsc | BE-Conn-ACSC | ❌ Not started – mapper pending | 2025-10-11 | Design DTOs + mapper with normalized rule array; see `src/StellaOps.Feedser.Source.Acsc/TASKS.md`. |
| Cccs | BE-Conn-CCCS | ❌ Not started – mapper pending | 2025-10-11 | Add normalized SemVer array in canonical mapper; coordinate fixtures per `TASKS.md`. |
| CertBund | BE-Conn-CERTBUND | ❌ Not started – mapper pending | 2025-10-11 | Capture firmware-style ranges; emit normalized payload; `src/StellaOps.Feedser.Source.CertBund/TASKS.md`. |
| CertCc | BE-Conn-CERTCC | ⚠️ In progress – fetch pipeline DOING | 2025-10-11 | Implement VINCE mapper with SemVer/NEVRA rules; unblock snapshot regeneration; `src/StellaOps.Feedser.Source.CertCc/TASKS.md`. |
| Kev | BE-Conn-KEV | ✅ Normalized catalog/due-date rules verified | 2025-10-12 | Fixtures reconfirmed via `dotnet test src/StellaOps.Feedser.Source.Kev.Tests`; `src/StellaOps.Feedser.Source.Kev/TASKS.md`. |
| Cve | BE-Conn-CVE | ✅ Normalized SemVer rules verified | 2025-10-12 | Snapshot parity green (`dotnet test src/StellaOps.Feedser.Source.Cve.Tests`); `src/StellaOps.Feedser.Source.Cve/TASKS.md`. |
| Ghsa | BE-Conn-GHSA | ⚠️ DOING – normalized rollout task active | 2025-10-11 18:45 UTC | Wire `SemVerRangeRuleBuilder` + refresh fixtures; `src/StellaOps.Feedser.Source.Ghsa/TASKS.md`. |
| Osv | BE-Conn-OSV | ✅ SemVer mapper & parity fixtures verified | 2025-10-12 | GHSA parity regression passing (`dotnet test src/StellaOps.Feedser.Source.Osv.Tests`); `src/StellaOps.Feedser.Source.Osv/TASKS.md`. |
| Ics.Cisa | BE-Conn-ICS-CISA | ❌ Not started – mapper TODO | 2025-10-11 | Plan SemVer/firmware scheme selection; `src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md`. |
| Kisa | BE-Conn-KISA | ❌ Not started – mapper TODO | 2025-10-11 | Localisation-aware mapper with normalized rules; `src/StellaOps.Feedser.Source.Kisa/TASKS.md`. |
| Ru.Bdu | BE-Conn-BDU | ❌ Not started – mapper TODO | 2025-10-11 | Emit normalized ranges, capture provenance; `src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md`. |
| Ru.Nkcki | BE-Conn-Nkcki | ❌ Not started – mapper TODO | 2025-10-11 | Similar to BDU; ensure Cyrillic provenance preserved; `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md`. |
| Vndr.Apple | BE-Conn-Apple | ✅ Shipped – emitting normalized arrays | 2025-10-11 | Continue fixture/tooling work; `src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md`. |
| Vndr.Cisco | BE-Conn-Cisco | ❌ Not started – mapper TODO | 2025-10-11 | Decide on scheme (`semver` vs custom) before emitting rules; `src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md`. |
| Vndr.Msrc | BE-Conn-MSRC | ❌ Not started – mapper TODO | 2025-10-11 | Gather samples, define scheme, emit normalized rules; `src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md`. |
| Nvd | BE-Conn-NVD | ⚠️ Needs follow-up – mapper complete but normalized array MR pending | 2025-10-11 | Align CVE notes + normalized payload flag; `src/StellaOps.Feedser.Source.Nvd/TASKS.md`. |

Legend: ✅ complete, ⚠️ in progress/partial, ❌ not started.

## Monitoring

- Merge now emits `feedser.merge.normalized_rules` (tags: `package_type`, `scheme`) and `feedser.merge.normalized_rules_missing` (tags: `package_type`). Track these counters to confirm normalized arrays land as connectors roll out.
- Expect `normalized_rules_missing` to trend toward zero as each connector flips on normalized output. Investigate any sustained counts by checking the corresponding module `TASKS.md`.

## How to use this dashboard

1. Before opening a connector PR, update the module `TASKS.md` entry and drop a short bullet here (status + timestamp).
2. When a connector lands normalized outputs, flip the status to ✅ and note any rollout toggles (feature flags, fixture regenerations).
3. If a dependency or blocker emerges, add it both in the module `TASKS.md` and in this matrix so merge/storage can escalate quickly.

39 docs/feedser-connector-research-20251011.md Normal file
@@ -0,0 +1,39 @@
# Feedser Connector Research – 2025-10-11

Snapshot of direct network checks performed on 2025-10-11 (UTC) for the national/vendor connectors in scope. Use alongside each module’s `TASKS.md` notes.

## ACSC (Australia)
- Enumerated feed slugs `/acsc/view-all-content/{alerts,advisories,news,publications,threats}/rss`; every endpoint negotiates HTTP/2 then aborts with `INTERNAL_ERROR` (curl exit 92). Forcing HTTP/1.1 hangs >600 s and sitemap/HTML fetches fail the same way.
- Next actions: prototype `SocketsHttpHandler` settings (`RequestVersionOrLower`, allow fallback to relay), capture successful headers from partner vantage (need retention + cache semantics), and keep `FEEDCONN-SHARED-HTTP2-001` open for downgrade work.

## CCCS (Canada)
- RSS endpoint (`https://cyber.gc.ca/api/cccs/rss/v1/get?...`) 301s to Atom feed (`/api/cccs/atom/v1/get?...`) with 50-entry window, HTML-heavy `<content>` fields, and no cache headers.
- Next actions: enumerate additional `feed` query values, sanitise inline HTML for DTO storage, and track retention depth via HTML pagination (`?page=`).

## CERT-Bund (Germany)
- `https://wid.cert-bund.de/content/public/securityAdvisory/rss` responds 200 without cookies (250-item window, German taxonomy). Detail links load an Angular SPA that fetches JSON behind session cookies.
- Next actions: script SPA cookie/bootstrap, discover JSON detail endpoint, and capture advisory schema for parser planning.

## KISA / KNVD (Korea)
- `https://knvd.krcert.or.kr/rss/securityInfo.do` and `/rss/securityNotice.do` return UTF-8 RSS (10-item window) with `detailDos.do?IDX=` links. No cookies required for feed fetch.
- Next actions: trace SPA detail requests to identify JSON endpoints, normalise Hangul content, and finalise localisation plan.

## BDU (Russia / FSTEC)
- Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml/json`) return 403/404; TLS chain requires Russian Trusted Sub CA and WAF expects additional headers.
- Next actions: acquire official PEM chain, point `feedser:httpClients:source.bdu:trustedRootPaths` (or `feedser:sources:bdu:http:trustedRootPaths`) at the Offline Kit PEM, keep `allowInvalidCertificates=false`, script session bootstrap, then capture RSS/HTML schema for parser work.

## NKTsKI / cert.gov.ru (Russia)
- `https://cert.gov.ru/rss/advisories.xml` served via Bitrix returns 403/404 even with `Accept-Language: ru-RU`; TLS chain also requires Russian trust anchors.
- Next actions: source trust store, configure `feedser:httpClients:source.nkcki:trustedRootPaths` (Offline Kit root via `feedser:offline:root`), prepare proxy fallback, and once accessible document taxonomy/retention plus attachment handling.

## CISA ICS (United States)
- `curl -I https://www.cisa.gov/cybersecurity-advisories/ics-advisories.xml` returns HTTP 403 + `x-reference-error` (Akamai). Same for legacy feed paths.
- Next actions: secure GovDelivery access, document token rotation, and build HTML/email fallback with throttling.

## Cisco PSIRT
- `https://api.cisco.com/security/advisories/latest` returns `ERR_596_SERVICE_NOT_FOUND` when unauthenticated. openVuln REST requires Mashery OAuth (client credentials) with quotas ~5 req/s, 30/min, 5 000/day; supports `pageIndex/pageSize` pagination.
- Next actions: register OAuth app, capture pagination/delta parameters, and compare API vs RSS coverage.

## Microsoft MSRC
- REST endpoint (`https://api.msrc.microsoft.com/sug/v2.0/en-US/vulnerabilities`) requires Azure AD token + `api-version` (current `2024-08-01`) and supports delta filters (`lastModifiedStartDateTime`). CVRF ZIP remains available for offline use.
- Next actions: finalise AAD app registration, implement token cache, and design combined REST+CVRF ingestion path for determinism.

@@ -80,12 +80,12 @@
docker compose up -d
curl -fsS http://localhost:8080/health
```

6. **Validate JWKS and tokens:** call `/jwks` and issue a short-lived token via the CLI to confirm key material matches expectations. If the restored environment requires a fresh signing key, follow the rotation SOP in [`docs/11_AUTHORITY.md`](../11_AUTHORITY.md) using `ops/authority/key-rotation.sh` to invoke `/internal/signing/rotate`.

## Disaster Recovery Notes

- **Air-gapped replication:** replicate archives via the Offline Update Kit transport channels; never attach USB devices without scanning.
- **Retention:** maintain 30 daily snapshots + 12 monthly archival copies. Rotate encryption keys annually.
- **Key compromise:** if signing keys are suspected compromised, restore from the latest clean backup, rotate via OPS3 (see `ops/authority/key-rotation.sh` and `docs/11_AUTHORITY.md`), and publish a revocation notice.
- **Mongo version:** keep dump/restore images pinned to the deployment version (compose uses `mongo:7`). Restoring across major versions requires a compatibility review.

## Verification Checklist

83 docs/ops/authority-key-rotation.md Normal file
@@ -0,0 +1,83 @@
# Authority Signing Key Rotation Playbook

> **Status:** Authored 2025-10-12 as part of OPS3.KEY-ROTATION rollout.
> Use together with `docs/11_AUTHORITY.md` (Authority service guide) and the automation shipped under `ops/authority/`.

## 1. Overview

Authority publishes JWKS and revocation bundles signed with ES256 keys. To rotate those keys without downtime we now provide:

- **Automation script:** `ops/authority/key-rotation.sh`
  Shell helper that POSTs to `/internal/signing/rotate`, supports metadata, dry-run, and confirms JWKS afterwards.
- **CI workflow:** `.gitea/workflows/authority-key-rotation.yml`
  Manual dispatch workflow that pulls environment-specific secrets, runs the script, and records the result. Works across staging/production by passing the `environment` input.

This playbook documents the repeatable sequence for all environments.

## 2. Pre-requisites

1. **Generate a new PEM key (per environment)**

   ```bash
   openssl ecparam -name prime256v1 -genkey -noout \
     -out certificates/authority-signing-<env>-<year>.pem
   chmod 600 certificates/authority-signing-<env>-<year>.pem
   ```

2. **Stash the previous key** under the same volume so it can be referenced in `signing.additionalKeys` after rotation.
3. **Ensure secrets/vars exist in Gitea**
   - `<ENV>_AUTHORITY_BOOTSTRAP_KEY`
   - `<ENV>_AUTHORITY_URL`
   - Optional shared defaults `AUTHORITY_BOOTSTRAP_KEY`, `AUTHORITY_URL`.

## 3. Executing the rotation

### Option A – via CI workflow (recommended)

1. Navigate to **Actions → Authority Key Rotation**.
2. Provide inputs:
   - `environment`: `staging`, `production`, etc.
   - `key_id`: new `kid` (e.g. `authority-signing-2025-dev`).
   - `key_path`: path as seen by the Authority service (e.g. `../certificates/authority-signing-2025-dev.pem`).
   - Optional `metadata`: comma-separated `key=value` pairs (for audit trails).
3. Trigger. The workflow:
   - Reads the bootstrap key/URL from secrets.
   - Runs `ops/authority/key-rotation.sh`.
   - Prints the JWKS response for verification.

### Option B – manual shell invocation

```bash
AUTHORITY_BOOTSTRAP_KEY=$(cat /secure/authority-bootstrap.key) \
./ops/authority/key-rotation.sh \
  --authority-url https://authority.example.com \
  --key-id authority-signing-2025-dev \
  --key-path ../certificates/authority-signing-2025-dev.pem \
  --meta rotatedBy=ops --meta changeTicket=OPS-1234
```

Use `--dry-run` to inspect the payload before execution.

## 4. Post-rotation checklist

1. Update `authority.yaml` (or environment-specific overrides):
   - Set `signing.activeKeyId` to the new key.
   - Set `signing.keyPath` to the new PEM.
   - Append the previous key into `signing.additionalKeys`.
   - Ensure `keySource`/`provider` match the values passed to the script.
2. Run `stellaops-cli auth revoke export` so revocation bundles are re-signed with the new key.
3. Confirm `/jwks` lists the new `kid` with `status: "active"` and the previous one as `retired`.
4. Archive the old key securely; keep it available until all tokens/bundles signed with it have expired.

## 5. Development key state

For the sample configuration (`etc/authority.yaml.sample`) we minted a placeholder dev key:

- Active: `authority-signing-2025-dev` (`certificates/authority-signing-2025-dev.pem`)
- Retired: `authority-signing-dev`

Treat these as examples; real environments must maintain their own PEM material.

## 6. References

- `docs/11_AUTHORITY.md` – Architecture and rotation SOP (Section 5).
- `docs/ops/authority-backup-restore.md` – Recovery flow referencing this playbook.
- `ops/authority/README.md` – CLI usage and examples.

77 docs/ops/feedser-apple-operations.md Normal file
@@ -0,0 +1,77 @@
# Feedser Apple Security Update Connector Operations

This runbook covers staging and production rollout for the Apple security updates connector (`source:vndr-apple:*`), including observability checks and fixture maintenance.

## 1. Prerequisites

- Network egress (or mirrored cache) for `https://gdmf.apple.com/v2/pmv` and the Apple Support domain (`https://support.apple.com/`).
- Optional: corporate proxy exclusions for the Apple hosts if outbound traffic is normally filtered.
- Updated configuration (environment variables or `feedser.yaml`) with an `apple` section. Example baseline:

```yaml
feedser:
  sources:
    apple:
      softwareLookupUri: "https://gdmf.apple.com/v2/pmv"
      advisoryBaseUri: "https://support.apple.com/"
      localeSegment: "en-us"
      maxAdvisoriesPerFetch: 25
      initialBackfill: "120.00:00:00"
      modifiedTolerance: "02:00:00"
      failureBackoff: "00:05:00"
```

> ℹ️ `softwareLookupUri` and `advisoryBaseUri` must stay absolute and aligned with the HTTP allow-list; Feedser automatically adds both hosts to the connector HttpClient.

## 2. Staging Smoke Test

1. Deploy the configuration and restart the Feedser workers to ensure the Apple connector options are bound.
2. Trigger a full connector cycle:
   - CLI: `stella db jobs run source:vndr-apple:fetch --and-then source:vndr-apple:parse --and-then source:vndr-apple:map`
   - REST: `POST /jobs/run { "kind": "source:vndr-apple:fetch", "chain": ["source:vndr-apple:parse", "source:vndr-apple:map"] }`
3. Validate metrics exported under meter `StellaOps.Feedser.Source.Vndr.Apple`:
   - `apple.fetch.items` (documents fetched)
   - `apple.fetch.failures`
   - `apple.fetch.unchanged`
   - `apple.parse.failures`
   - `apple.map.affected.count` (histogram of affected package counts)
4. Cross-check the shared HTTP counters:
   - `feedser.source.http.requests_total{feedser_source="vndr-apple"}` should increase for both index and detail phases.
   - `feedser.source.http.failures_total{feedser_source="vndr-apple"}` should remain flat (0) during a healthy run.
5. Inspect the info logs:
   - `Apple software index fetch … processed=X newDocuments=Y`
   - `Apple advisory parse complete … aliases=… affected=…`
   - `Mapped Apple advisory … pendingMappings=0`
6. Confirm MongoDB state:
   - `raw_documents` store contains the HT article HTML with metadata (`apple.articleId`, `apple.postingDate`).
   - `dtos` store has `schemaVersion="apple.security.update.v1"`.
   - `advisories` collection includes keys `HTxxxxxx` with normalized SemVer rules.
   - `source_states` entry for `apple` shows a recent `cursor.lastPosted`.

## 3. Production Monitoring

- **Dashboards** – Add the following expressions to your Feedser Grafana board (OTLP/Prometheus naming assumed):
  - `rate(apple_fetch_items_total[15m])` vs `rate(feedser_source_http_requests_total{feedser_source="vndr-apple"}[15m])`
  - `rate(apple_fetch_failures_total[5m])` for error spikes (`severity=warning` at `>0`)
  - `histogram_quantile(0.95, rate(apple_map_affected_count_bucket[1h]))` to watch affected-package fan-out
  - `increase(apple_parse_failures_total[6h])` to catch parser drift (alerts at `>0`)
- **Alerts** – Page if `rate(apple_fetch_items_total[2h]) == 0` during business hours while other connectors are active. This often indicates lookup feed failures or misconfigured allow-lists.
- **Logs** – Surface warnings `Apple document {DocumentId} missing GridFS payload` or `Apple parse failed`—repeated hits imply storage issues or HTML regressions.
- **Telemetry pipeline** – `StellaOps.Feedser.WebService` now exports `StellaOps.Feedser.Source.Vndr.Apple` alongside existing Feedser meters; ensure your OTEL collector or Prometheus scraper includes it.

## 4. Fixture Maintenance

Regression fixtures live under `src/StellaOps.Feedser.Source.Vndr.Apple.Tests/Apple/Fixtures`. Refresh them whenever Apple reshapes the HT layout or when new platforms appear.

1. Run the helper script matching your platform:
   - Bash: `./scripts/update-apple-fixtures.sh`
   - PowerShell: `./scripts/update-apple-fixtures.ps1`
2. Each script exports `UPDATE_APPLE_FIXTURES=1`, updates the `WSLENV` passthrough, and touches `.update-apple-fixtures` so WSL+VS Code test runs observe the flag. The subsequent test execution fetches the live HT articles listed in `AppleFixtureManager`, sanitises the HTML, and rewrites the `.expected.json` DTO snapshots.
3. Review the diff for localisation or nav noise. Once satisfied, re-run the tests without the env var (`dotnet test src/StellaOps.Feedser.Source.Vndr.Apple.Tests/StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj`) to verify determinism.
4. Commit fixture updates together with any parser/mapping changes that motivated them.

## 5. Known Issues & Follow-up Tasks

- Apple occasionally throttles anonymous requests after bursts. The connector backs off automatically, but persistent `apple.fetch.failures` spikes might require mirroring the HT content or scheduling wider fetch windows.
- Rapid Security Responses may appear before the general patch notes surface in the lookup JSON. When that happens, the fetch run will log `detailFailures>0`. Collect sample HTML and refresh fixtures to confirm parser coverage.
- Multi-locale content is still under regression sweep (`src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md`). Capture non-`en-us` snapshots once the fixture tooling stabilises.

150 docs/ops/feedser-authority-audit-runbook.md Normal file
@@ -0,0 +1,150 @@
# Feedser Authority Audit Runbook

_Last updated: 2025-10-12_

This runbook helps operators verify and monitor the StellaOps Feedser ⇆ Authority integration. It focuses on the `/jobs*` surface, which now requires StellaOps Authority tokens, and the corresponding audit/metric signals that expose authentication and bypass activity.

## 1. Prerequisites

- Authority integration is enabled in `feedser.yaml` (or via `FEEDSER_AUTHORITY__*` environment variables) with a valid `clientId`, secret, audience, and required scopes.
- OTLP metrics/log exporters are configured (`feedser.telemetry.*`) or container stdout is shipped to your SIEM.
- Operators have access to the Feedser job trigger endpoints via CLI or REST for smoke tests.

### Configuration snippet

```yaml
feedser:
  authority:
    enabled: true
    allowAnonymousFallback: false # keep true only during initial rollout
    issuer: "https://authority.internal"
    audiences:
      - "api://feedser"
    requiredScopes:
      - "feedser.jobs.trigger"
    bypassNetworks:
      - "127.0.0.1/32"
      - "::1/128"
    clientId: "feedser-jobs"
    clientSecretFile: "/run/secrets/feedser_authority_client"
    tokenClockSkewSeconds: 60
    resilience:
      enableRetries: true
      retryDelays:
        - "00:00:01"
        - "00:00:02"
        - "00:00:05"
      allowOfflineCacheFallback: true
      offlineCacheTolerance: "00:10:00"
```

> Store secrets outside source control. Feedser reads `clientSecretFile` on startup; rotate by updating the mounted file and restarting the service.

### Resilience tuning

- **Connected sites:** keep the default 1 s / 2 s / 5 s retry ladder so Feedser retries transient Authority hiccups but still surfaces outages quickly. Leave `allowOfflineCacheFallback=true` so cached discovery/JWKS data can bridge short Pathfinder restarts.
- **Air-gapped/Offline Kit installs:** extend `offlineCacheTolerance` (15–30 minutes) to keep the cached metadata valid between manual synchronisations. You can also disable retries (`enableRetries=false`) if infrastructure teams prefer to handle exponential backoff at the network layer; Feedser will fail fast but keep deterministic logs.
- Feedser resolves these knobs through `IOptionsMonitor<StellaOpsAuthClientOptions>`. Edits to `feedser.yaml` are applied on configuration reload; restart the container if you change environment variables or do not have file-watch reloads enabled.

## 2. Key Signals

### 2.1 Audit log channel

Feedser emits structured audit entries via the `Feedser.Authorization.Audit` logger for every `/jobs*` request once Authority enforcement is active.

```
Feedser authorization audit route=/jobs/definitions status=200 subject=ops@example.com clientId=feedser-cli scopes=feedser.jobs.trigger bypass=False remote=10.1.4.7
```

| Field | Sample value | Meaning |
|--------------|-------------------------|------------------------------------------------------------------------------------------|
| `route` | `/jobs/definitions` | Endpoint that processed the request. |
| `status` | `200` / `401` / `409` | Final HTTP status code returned to the caller. |
| `subject` | `ops@example.com` | User or service principal subject (falls back to `(anonymous)` when unauthenticated). |
| `clientId` | `feedser-cli` | OAuth client ID provided by Authority (`(none)` if the token lacked the claim). |
| `scopes` | `feedser.jobs.trigger` | Normalised scope list extracted from token claims; `(none)` if the token carried none. |
| `bypass` | `True` / `False` | Indicates whether the request succeeded because its source IP matched a bypass CIDR. |
| `remote` | `10.1.4.7` | Remote IP recorded from the connection / forwarded header test hooks. |

Use your logging backend (e.g., Loki) to index the logger name and filter for suspicious combinations:

- `status=401 AND bypass=True` – bypass network accepted an unauthenticated call (should be temporary during rollout).
- `status=202 AND scopes="(none)"` – a token without scopes triggered a job; tighten client configuration.
- Spike in `clientId="(none)"` – indicates upstream Authority is not issuing `client_id` claims or the CLI is outdated.

### 2.2 Metrics

Feedser publishes counters under the OTEL meter `StellaOps.Feedser.WebService.Jobs`. Tags: `job.kind`, `job.trigger`, `job.outcome`.

| Metric name | Description | PromQL example |
|-------------------------------|----------------------------------------------------|----------------|
| `web.jobs.triggered` | Accepted job trigger requests. | `sum by (job_kind) (rate(web_jobs_triggered_total[5m]))` |
| `web.jobs.trigger.conflict` | Rejected triggers (already running, disabled…). | `sum(rate(web_jobs_trigger_conflict_total[5m]))` |
| `web.jobs.trigger.failed` | Server-side job failures. | `sum(rate(web_jobs_trigger_failed_total[5m]))` |

> Prometheus/OTEL collectors typically surface counters with `_total` suffix. Adjust queries to match your pipeline’s generated metric names.

Correlate audit logs with the following global meter exported via `Feedser.SourceDiagnostics`:

- `feedser.source.http.requests_total{feedser_source="jobs-run"}` – ensures REST/manual triggers route through Authority.
- If Grafana dashboards are deployed, extend the “Feedser Jobs” board with the above counters plus a table of recent audit log entries.

## 3. Alerting Guidance

1. **Unauthorized bypass attempt**
   - Query: `sum(rate(log_messages_total{logger="Feedser.Authorization.Audit", status="401", bypass="True"}[5m])) > 0`
   - Action: verify `bypassNetworks` list; confirm expected maintenance windows; rotate credentials if suspicious.

2. **Missing scopes**
   - Query: `sum(rate(log_messages_total{logger="Feedser.Authorization.Audit", scopes="(none)", status="200"}[5m])) > 0`
   - Action: audit Authority client registration; ensure `requiredScopes` includes `feedser.jobs.trigger`.

3. **Trigger failure surge**
   - Query: `sum(rate(web_jobs_trigger_failed_total[10m])) > 0` with severity `warning` if sustained for 10 minutes.
   - Action: inspect correlated audit entries and `Feedser.Telemetry` traces for job execution errors.

4. **Conflict spike**
   - Query: `sum(rate(web_jobs_trigger_conflict_total[10m])) > 5` (tune threshold).
   - Action: downstream scheduling may be firing repetitive triggers; ensure precedence is configured properly.

5. **Authority offline**
   - Watch `Feedser.Authorization.Audit` logs for `status=503` or `status=500` along with `clientId="(none)"`. Investigate Authority availability before re-enabling anonymous fallback.

## 4. Rollout & Verification Procedure

1. **Pre-checks**
   - Confirm `allowAnonymousFallback` is `false` in production; keep `true` only during staged validation.
   - Validate Authority issuer metadata is reachable from Feedser (`curl https://authority.internal/.well-known/openid-configuration` from the host).

2. **Smoke test with valid token**
   - Obtain a token via CLI: `stella auth login --scope feedser.jobs.trigger`.
   - Trigger a read-only endpoint: `curl -H "Authorization: Bearer $TOKEN" https://feedser.internal/jobs/definitions`.
   - Expect HTTP 200/202 and an audit log with `bypass=False`, `scopes=feedser.jobs.trigger`.

3. **Negative test without token**
   - Call the same endpoint without a token. Expect HTTP 401, `bypass=False`.
   - If the request succeeds, double-check `bypassNetworks` and ensure fallback is disabled.

4. **Bypass check (if applicable)**
   - From an allowed maintenance IP, call `/jobs/definitions` without a token. Confirm the audit log shows `bypass=True`. Review business justification and expiry date for such entries.

5. **Metrics validation**
   - Ensure `web.jobs.triggered` counter increments during accepted runs.
   - Exporters should show corresponding spans (`feedser.job.trigger`) if tracing is enabled.

## 5. Troubleshooting

| Symptom | Probable cause | Remediation |
|---------|----------------|-------------|
| Audit log shows `clientId=(none)` for all requests | Authority not issuing `client_id` claim or CLI outdated | Update StellaOps Authority configuration (`StellaOpsAuthorityOptions.Token.Claims.ClientId`), or upgrade the CLI token acquisition flow. |
| Requests succeed with `bypass=True` unexpectedly | Local network added to `bypassNetworks` or fallback still enabled | Remove/adjust the CIDR list, disable anonymous fallback, restart Feedser. |
| HTTP 401 with valid token | `requiredScopes` missing from client registration or token audience mismatch | Verify Authority client scopes (`feedser.jobs.trigger`) and ensure the token audience matches `audiences` config. |
| Metrics missing from Prometheus | Telemetry exporters disabled or filter missing OTEL meter | Set `feedser.telemetry.enableMetrics=true`, ensure collector includes `StellaOps.Feedser.WebService.Jobs` meter. |
| Sudden spike in `web.jobs.trigger.failed` | Downstream job failure or Authority timeout mid-request | Inspect Feedser job logs, re-run with tracing enabled, validate Authority latency. |

## 6. References

- `docs/21_INSTALL_GUIDE.md` – Authority configuration quick start.
- `docs/17_SECURITY_HARDENING_GUIDE.md` – Security guardrails and enforcement deadlines.
- `docs/ops/authority-monitoring.md` – Authority-side monitoring and alerting playbook.
- `StellaOps.Feedser.WebService/Filters/JobAuthorizationAuditFilter.cs` – source of audit log fields.

@@ -47,6 +47,12 @@ Expect all logs at `Information`. Ensure OTEL exporters include the scope `Stell
3. **Job health**
   - `stellaops-cli db merge` exit code `1` signifies unresolved conflicts. Pipe to automation that captures logs and notifies #feedser-ops.

### Threshold updates (2025-10-12)

- `feedser.merge.conflicts` – Page only when ≥ 2 events fire within 30 minutes; the synthetic conflict fixture run produces 0 conflicts, so the first event now routes to Slack for manual review instead of paging.
- `feedser.merge.overrides` – Raise a warning when the 30-minute sum exceeds 10 (canonical triple yields exactly 1 summary override with `primary_source=osv`, `suppressed_source=ghsa`).
- `feedser.merge.range_overrides` – Maintain the 15-minute alert at ≥ 3 but annotate dashboards that the regression triple emits a single `package_type=semver` override so ops can spot unexpected spikes.

---

## 4. Triage Workflow
@@ -128,3 +134,19 @@ Expect all logs at `Information`. Ensure OTEL exporters include the scope `Stell
- Storage audit trail: `src/StellaOps.Feedser.Merge/Services/MergeEventWriter.cs`, `src/StellaOps.Feedser.Storage.Mongo/MergeEvents`.

Keep this runbook synchronized with future sprint notes and update alert thresholds as baseline volumes change.

---

## 9. Synthetic Regression Fixtures

- **Locations** – Canonical conflict snapshots now live at `src/StellaOps.Feedser.Source.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json`, `src/StellaOps.Feedser.Source.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json`, and `src/StellaOps.Feedser.Source.Osv.Tests/Fixtures/conflict-osv.canonical.json`.
- **Validation commands** – To regenerate and verify the fixtures offline, run:

  ```bash
  dotnet test src/StellaOps.Feedser.Source.Ghsa.Tests/StellaOps.Feedser.Source.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests
  dotnet test src/StellaOps.Feedser.Source.Nvd.Tests/StellaOps.Feedser.Source.Nvd.Tests.csproj --filter NvdConflictFixtureTests
  dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj --filter OsvConflictFixtureTests
  dotnet test src/StellaOps.Feedser.Merge.Tests/StellaOps.Feedser.Merge.Tests.csproj --filter MergeAsync_AppliesCanonicalRulesAndPersistsDecisions
  ```

- **Expected signals** – The triple produces one freshness-driven summary override (`primary_source=osv`, `suppressed_source=ghsa`) and one range override for the npm SemVer package while leaving `feedser.merge.conflicts` at zero. Use these values as the baseline when tuning dashboards or load-testing alert pipelines.
151 docs/ops/feedser-cve-kev-grafana-dashboard.json Normal file
@@ -0,0 +1,151 @@
{
  "title": "Feedser CVE & KEV Observability",
  "uid": "feedser-cve-kev",
  "schemaVersion": 38,
  "version": 1,
  "editable": true,
  "timezone": "",
  "time": { "from": "now-24h", "to": "now" },
  "refresh": "5m",
  "templating": {
    "list": [
      { "name": "datasource", "type": "datasource", "query": "prometheus", "refresh": 1, "hide": 0 }
    ]
  },
  "panels": [
    {
      "type": "timeseries",
      "title": "CVE fetch success vs failure",
      "gridPos": { "h": 9, "w": 12, "x": 0, "y": 0 },
      "fieldConfig": {
        "defaults": { "unit": "ops", "custom": { "drawStyle": "line", "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": []
      },
      "targets": [
        { "refId": "A", "expr": "rate(cve_fetch_success_total[5m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "success" },
        { "refId": "B", "expr": "rate(cve_fetch_failures_total[5m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "failure" }
      ]
    },
    {
      "type": "timeseries",
      "title": "KEV fetch cadence",
      "gridPos": { "h": 9, "w": 12, "x": 12, "y": 0 },
      "fieldConfig": {
        "defaults": { "unit": "ops", "custom": { "drawStyle": "line", "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": []
      },
      "targets": [
        { "refId": "A", "expr": "rate(kev_fetch_success_total[30m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "success" },
        { "refId": "B", "expr": "rate(kev_fetch_failures_total[30m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "failure" },
        { "refId": "C", "expr": "rate(kev_fetch_unchanged_total[30m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "unchanged" }
      ]
    },
    {
      "type": "table",
      "title": "KEV parse anomalies (24h)",
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 9 },
      "fieldConfig": { "defaults": { "unit": "short" }, "overrides": [] },
      "targets": [
        { "refId": "A", "expr": "sum by (reason) (increase(kev_parse_anomalies_total[24h]))", "format": "table", "datasource": { "type": "prometheus", "uid": "${datasource}" } }
      ],
      "transformations": [
        { "id": "organize", "options": { "renameByName": { "Value": "count" } } }
      ]
    },
    {
      "type": "timeseries",
      "title": "Advisories emitted",
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 9 },
      "fieldConfig": {
        "defaults": { "unit": "ops", "custom": { "drawStyle": "line", "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": []
      },
      "targets": [
        { "refId": "A", "expr": "rate(cve_map_success_total[15m])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "CVE" },
        { "refId": "B", "expr": "rate(kev_map_advisories_total[24h])", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "legendFormat": "KEV" }
      ]
    }
  ]
}
@@ -34,19 +34,22 @@ feedser:
- Feedser CLI: `stella db jobs run source:cve:fetch --and-then source:cve:parse --and-then source:cve:map`
- REST fallback: `POST /jobs/run { "kind": "source:cve:fetch", "chain": ["source:cve:parse", "source:cve:map"] }`
3. Observe the following metrics (exported via OTEL meter `StellaOps.Feedser.Source.Cve`):
- `cve.fetch.attempts`, `cve.fetch.success`, `cve.fetch.documents`, `cve.fetch.failures`, `cve.fetch.unchanged`
- `cve.parse.success`, `cve.parse.failures`, `cve.parse.quarantine`
- `cve.map.success`
4. Verify Prometheus shows matching `feedser.source.http.requests_total{feedser_source="cve"}` deltas (list vs detail phases) while `feedser.source.http.failures_total{feedser_source="cve"}` stays flat.
5. Confirm the info-level summary log `CVEs fetch window … pages=X detailDocuments=Y detailFailures=Z` appears once per fetch run and shows `detailFailures=0`.
6. Verify the MongoDB advisory store contains fresh CVE advisories (`advisoryKey` prefix `cve/`) and that the source cursor (`source_states` collection) advanced.

### 1.3 Production Monitoring

- **Dashboards** – Plot `rate(cve_fetch_success_total[5m])`, `rate(cve_fetch_failures_total[5m])`, and `rate(cve_fetch_documents_total[5m])` alongside `feedser_source_http_requests_total{feedser_source="cve"}` to confirm HTTP and connector counters stay aligned. Keep `feedser.range.primitives{scheme=~"semver|vendor"}` on the same board for range coverage. Example alerts (codified in the rule sketch at the end of this section):
  - `rate(cve_fetch_failures_total[5m]) > 0` for 10 minutes (`severity=warning`)
  - `rate(cve_map_success_total[15m]) == 0` while `rate(cve_fetch_success_total[15m]) > 0` (`severity=critical`)
  - `sum_over_time(cve_parse_quarantine_total[1h]) > 0` to catch schema anomalies
- **Logs** – Monitor warnings such as `Failed fetching CVE record {CveId}` and `Malformed CVE JSON`, and surface the summary info log `CVEs fetch window … detailFailures=0 detailUnchanged=0` on dashboards. A non-zero `detailFailures` usually indicates rate-limit or auth issues on detail requests.
- **Grafana pack** – Import `docs/ops/feedser-cve-kev-grafana-dashboard.json` and filter by panel legend (`CVE`, `KEV`) to reuse the canned layout.
- **Backfill window** – Operators can tighten or widen `initialBackfill` / `maxPagesPerFetch` after validating throughput. Update the config and restart Feedser to apply changes.
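For teams that codify these thresholds, a minimal Prometheus alerting-rules sketch is shown below. The group name, `for:` windows, and severity labels are illustrative assumptions, and the metric names assume the OTEL counters surface with the `_total` suffix used above.

```bash
# Sketch only: writes the alert rules named above and validates them with promtool.
cat > feedser-cve-alerts.yml <<'EOF'
groups:
  - name: feedser-cve-connector          # hypothetical group name
    rules:
      - alert: CveFetchFailures
        expr: rate(cve_fetch_failures_total[5m]) > 0
        for: 10m
        labels:
          severity: warning
      - alert: CveMapStalledWhileFetching
        expr: rate(cve_map_success_total[15m]) == 0 and rate(cve_fetch_success_total[15m]) > 0
        for: 15m
        labels:
          severity: critical
      - alert: CveParseQuarantine
        expr: sum_over_time(cve_parse_quarantine_total[1h]) > 0
        labels:
          severity: warning
EOF
promtool check rules feedser-cve-alerts.yml
```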
## 2. CISA KEV Connector (`source:kev:*`)

@@ -67,7 +70,15 @@ feedser:

### 2.2 Schema validation & anomaly handling

The connector validates each catalog against `Schemas/kev-catalog.schema.json`. Failures increment `kev.parse.failures_total{reason="schema"}` and the document is quarantined (status `Failed`). Additional failure reasons include `download`, `invalidJson`, `deserialize`, `missingPayload`, and `emptyCatalog`. Entry-level anomalies are surfaced through `kev.parse.anomalies_total` with reasons:

| Reason | Meaning |
| --- | --- |
| `missingCveId` | Catalog entry omitted `cveID`; the entry is skipped. |
| `countMismatch` | Catalog `count` field disagreed with the actual entry total. |
| `nullEntry` | Upstream emitted a `null` entry object (rare upstream defect). |

Treat repeated schema failures or growing anomaly counts as an upstream regression and coordinate with CISA or mirror maintainers.

### 2.3 Smoke Test (staging)

@@ -79,13 +90,16 @@ From this sprint the connector validates the KEV JSON payload against `Schemas/kev-catalog.schema.json`.
- `kev.fetch.attempts`, `kev.fetch.success`, `kev.fetch.unchanged`, `kev.fetch.failures`
- `kev.parse.entries` (tag `catalogVersion`), `kev.parse.failures`, `kev.parse.anomalies` (tag `reason`)
- `kev.map.advisories` (tag `catalogVersion`)
4. Confirm `feedser.source.http.requests_total{feedser_source="kev"}` increments once per fetch and that the paired `feedser.source.http.failures_total` stays flat (zero increase).
5. Inspect the info logs `Fetched KEV catalog document … pendingDocuments=…` and `Parsed KEV catalog document … entries=…`—they should appear exactly once per run and `Mapped X/Y… skipped=0` should match the `kev.map.advisories` delta.
6. Confirm MongoDB documents exist for the catalog JSON (`raw_documents` & `dtos`) and that advisories with prefix `kev/` are written.
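If you need to re-run the staging chain before checking the metrics above, the trigger mirrors the CVE connector's pattern. A minimal sketch, assuming the same `/jobs/run` endpoint and bearer-token auth apply to the KEV job kinds (`FEEDSER_URL` and `FEEDSER_TOKEN` are placeholders):

```bash
#!/usr/bin/env bash
# Hypothetical staging trigger for the KEV chain, mirroring the CVE REST example above.
set -euo pipefail

FEEDSER_URL="${FEEDSER_URL:-https://feedser.staging.example.internal}"

curl -sS -X POST "${FEEDSER_URL}/jobs/run" \
  -H "Authorization: Bearer ${FEEDSER_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{ "kind": "source:kev:fetch", "chain": ["source:kev:parse", "source:kev:map"] }'
```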
### 2.4 Production Monitoring

- Alert when `rate(kev_fetch_success_total[8h]) == 0` during working hours (daily cadence breach) and when `increase(kev_fetch_failures_total[1h]) > 0`.
- Page the on-call if `increase(kev_parse_failures_total{reason="schema"}[6h]) > 0`—this usually signals an upstream payload change. Treat repeated `reason="download"` spikes as networking issues to the mirror.
- Track anomaly spikes through `sum_over_time(kev_parse_anomalies_total{reason="missingCveId"}[24h])`. Rising `countMismatch` trends point to catalog publishing bugs.
- Surface the fetch/mapping info logs (`Fetched KEV catalog document …` and `Mapped X/Y KEV advisories … skipped=S`) on dashboards; absence of those logs while metrics show success typically means schema validation short-circuited the run.
### 2.5 Known good dashboard tiles

@@ -93,12 +107,14 @@ Add the following panels to the Feedser observability board:

| Metric | Recommended visualisation |
|--------|---------------------------|
| `rate(kev_fetch_success_total[30m])` | Single-stat (last 24 h) with warning threshold `>0` |
| `rate(kev_parse_entries_total[1h])` by `catalogVersion` | Stacked area – highlights daily release size |
| `sum_over_time(kev_parse_anomalies_total[1d])` by `reason` | Table – anomaly breakdown (matches dashboard panel) |
| `rate(cve_map_success_total[15m])` vs `rate(kev_map_advisories_total[24h])` | Comparative timeseries for advisories emitted |

## 3. Runbook updates

- Record staging/production smoke test results (date, catalog version, advisory counts) in your team’s change log.
- Add the CVE/KEV job kinds to the standard maintenance checklist so operators can manually trigger them after planned downtime.
- Keep this document in sync with future connector changes (for example, new anomaly reasons or additional metrics).
- Version-control dashboard tweaks alongside `docs/ops/feedser-cve-kev-grafana-dashboard.json` so operations can re-import the observability pack during restores.
111
docs/ops/feedser-ghsa-operations.md
Normal file
@@ -0,0 +1,111 @@
# Feedser GHSA Connector – Operations Runbook

_Last updated: 2025-10-12_

## 1. Overview

The GitHub Security Advisories (GHSA) connector pulls advisory metadata from the GitHub REST API `/security/advisories` endpoint. GitHub enforces both primary and secondary rate limits, so operators must monitor usage and configure retries to avoid throttling incidents.

## 2. Rate-limit telemetry

The connector now surfaces rate-limit headers on every fetch and exposes the following metrics via OpenTelemetry:

| Metric | Description | Tags |
|--------|-------------|------|
| `ghsa.ratelimit.limit` (histogram) | Samples the reported request quota at fetch time. | `phase` = `list` or `detail`, `resource` (e.g., `core`). |
| `ghsa.ratelimit.remaining` (histogram) | Remaining requests returned by `X-RateLimit-Remaining`. | `phase`, `resource`. |
| `ghsa.ratelimit.reset_seconds` (histogram) | Seconds until `X-RateLimit-Reset`. | `phase`, `resource`. |
| `ghsa.ratelimit.exhausted` (counter) | Incremented whenever GitHub returns a zero remaining quota and the connector delays before retrying. | `phase`. |

### Dashboards & alerts

- Plot `ghsa.ratelimit.remaining` as the latest value to watch the runway. Alert when the value stays below **`RateLimitWarningThreshold`** (default `500`) for more than 5 minutes.
- Raise a separate alert on `increase(ghsa.ratelimit.exhausted[15m]) > 0` to catch hard throttles (see the rule sketch below).
- Overlay `ghsa.fetch.attempts` vs `ghsa.fetch.failures` to confirm retries are effective.
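A matching Prometheus rule sketch for the hard-throttle alert; the exported counter name (`ghsa_ratelimit_exhausted_total`) is an assumption about how the OTEL meter surfaces in Prometheus, so adjust it to whatever your exporter actually emits.

```bash
# Hypothetical rule file for the hard-throttle alert; validate before loading into Prometheus.
cat > ghsa-ratelimit-alerts.yml <<'EOF'
groups:
  - name: feedser-ghsa-ratelimits
    rules:
      - alert: GhsaRateLimitExhausted
        expr: increase(ghsa_ratelimit_exhausted_total[15m]) > 0
        labels:
          severity: warning
        annotations:
          summary: "GHSA connector hit a GitHub rate limit in the last 15 minutes"
EOF
promtool check rules ghsa-ratelimit-alerts.yml
```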
## 3. Logging signals

When `X-RateLimit-Remaining` falls below `RateLimitWarningThreshold`, the connector emits:

```
GHSA rate limit warning: remaining {Remaining}/{Limit} for {Phase} {Resource}
```

When GitHub reports zero remaining calls, the connector logs and sleeps for the reported `Retry-After`/`X-RateLimit-Reset` interval (falling back to `SecondaryRateLimitBackoff`).

## 4. Configuration knobs (`feedser.yaml`)

```yaml
feedser:
  sources:
    ghsa:
      apiToken: "${GITHUB_PAT}"
      pageSize: 50
      requestDelay: "00:00:00.200"
      failureBackoff: "00:05:00"
      rateLimitWarningThreshold: 500        # warn below this many remaining calls
      secondaryRateLimitBackoff: "00:02:00" # fallback delay when GitHub omits Retry-After
```

### Recommendations

- Increase `requestDelay` in air-gapped or burst-heavy deployments to smooth token consumption.
- Lower `rateLimitWarningThreshold` only if your dashboards already page on the new histogram; never set it negative.
- For bots using a low-privilege PAT, keep `secondaryRateLimitBackoff` at ≥60 seconds to respect GitHub’s secondary-limit guidance.

#### Default job schedule

| Job kind | Cron | Timeout | Lease |
|----------|------|---------|-------|
| `source:ghsa:fetch` | `1,11,21,31,41,51 * * * *` | 6 minutes | 4 minutes |
| `source:ghsa:parse` | `3,13,23,33,43,53 * * * *` | 5 minutes | 4 minutes |
| `source:ghsa:map` | `5,15,25,35,45,55 * * * *` | 5 minutes | 4 minutes |

These defaults spread GHSA stages across the hour so fetch completes before parse/map fire. Override them via `feedser.jobs.definitions[...]` when coordinating multiple connectors on the same runner.

## 5. Provisioning credentials

Feedser requires a GitHub personal access token (classic) with the **`read:org`** and **`security_events`** scopes to pull GHSA data. Store it as a secret and reference it via `feedser.sources.ghsa.apiToken`.

### Docker Compose (stack operators)

```yaml
services:
  feedser:
    environment:
      FEEDSER__SOURCES__GHSA__APITOKEN: /run/secrets/ghsa_pat
    secrets:
      - ghsa_pat

secrets:
  ghsa_pat:
    file: ./secrets/ghsa_pat.txt   # contains only the PAT value
```

### Helm values (cluster operators)

```yaml
feedser:
  extraEnv:
    - name: FEEDSER__SOURCES__GHSA__APITOKEN
      valueFrom:
        secretKeyRef:
          name: feedser-ghsa
          key: apiToken

  extraSecrets:
    feedser-ghsa:
      apiToken: "<paste PAT here or source from external secret store>"
```

After rotating the PAT, restart the Feedser workers (or run `kubectl rollout restart deployment/feedser`) to ensure the configuration reloads.

When enabling GHSA for the first time, run a staged backfill:

1. Trigger `source:ghsa:fetch` manually (CLI or API) outside of peak hours (see the sketch after this list).
2. Watch `feedser.jobs.health` for the GHSA jobs until they report `healthy`.
3. Allow the scheduled cron cadence to resume once the initial backlog drains (typically < 30 minutes).
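A sketch of the manual trigger in step 1, assuming the GHSA job kinds are wired into the same CLI verb and `/jobs/run` endpoint as the CVE connector; adjust if your deployment exposes them differently (`FEEDSER_URL` and `FEEDSER_TOKEN` are placeholders).

```bash
# CLI route (verb pattern mirrors the CVE example in the CVE/KEV runbook):
stella db jobs run source:ghsa:fetch --and-then source:ghsa:parse --and-then source:ghsa:map

# REST fallback:
curl -sS -X POST "${FEEDSER_URL}/jobs/run" \
  -H "Authorization: Bearer ${FEEDSER_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{ "kind": "source:ghsa:fetch", "chain": ["source:ghsa:parse", "source:ghsa:map"] }'
```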
## 6. Runbook steps when throttled
1. Check `ghsa.ratelimit.exhausted` for the affected phase (`list` vs `detail`); the quota can also be read directly from GitHub, as shown below.
2. Confirm the connector is delaying—logs will show `GHSA rate limit exhausted...` with the chosen backoff.
3. If rate limits stay exhausted:
   - Verify no other jobs are sharing the PAT.
   - Temporarily reduce `MaxPagesPerFetch` or `PageSize` to shrink burst size.
   - Consider provisioning a dedicated PAT (GHSA permissions only) for Feedser.
4. After the quota resets, return `rateLimitWarningThreshold`/`requestDelay` to their normal values and monitor the histograms for at least one hour.
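While triaging, the PAT's live quota can also be read straight from GitHub's `/rate_limit` endpoint, which does not count against the core limit; `jq` is optional and only used to trim the output.

```bash
# Inspect the remaining quota for the token Feedser uses.
curl -sS \
  -H "Authorization: Bearer ${GITHUB_PAT}" \
  -H "Accept: application/vnd.github+json" \
  https://api.github.com/rate_limit | jq '.resources.core'
```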
## 7. Alert integration quick reference
- Prometheus: `ghsa_ratelimit_remaining_bucket` (from histogram) – use `histogram_quantile(0.99, ...)` to trend capacity.
- VictoriaMetrics: `LAST_over_time(ghsa_ratelimit_remaining_sum[5m])` for simple last-value graphs.
- Grafana: stack remaining + used to visualise total limit per resource.
47
docs/security/audit-events.md
Normal file
@@ -0,0 +1,47 @@
# StellaOps Authority Audit Events

StellaOps Authority emits structured audit records for every credential flow and bootstrap operation. The goal is to provide deterministic, privacy-aware telemetry that can be persisted offline and replayed for incident response without leaking credentials.

## Contract

Audit events share the `StellaOps.Cryptography.Audit.AuthEventRecord` contract. Key fields:

- `EventType` — canonical identifier such as `authority.password.grant`, `authority.client_credentials.grant`, or `authority.bootstrap.user`.
- `OccurredAt` — UTC timestamp captured at emission time.
- `CorrelationId` — stable identifier propagated across logs and persistence.
- `Outcome` — `Success`, `Failure`, `LockedOut`, `RateLimited`, or `Error`.
- `Reason` — optional failure or policy message.
- `Subject` — `AuthEventSubject` carrying subject identifier, username, display name, and optional realm metadata. All subject fields are tagged as PII.
- `Client` — `AuthEventClient` with client identifier, display name, and originating provider/plugin.
- `Scopes` — granted or requested OAuth scopes (sorted before emission).
- `Network` — `AuthEventNetwork` with remote address, forwarded headers, and user agent string (all treated as PII).
- `Properties` — additional `AuthEventProperty` entries for context-specific details (lockout durations, policy decisions, retries, etc.).

## Data Classifications

Every string value uses `ClassifiedString` to assign a data classification:

- `None` — public or operational metadata (event type, outcome).
- `Personal` — personally identifiable information (PII) such as subject identifiers, usernames, remote IP addresses, and user agents.
- `Sensitive` — secrets or derived credentials (client secrets, retry tokens). Avoid storing raw credentials; emit only hashed or summarised data when the classification is `Sensitive`.

Downstream log sinks and persistence layers can inspect classifications to redact or separate PII before export.

## Event Naming

Event names follow dotted notation:

- `authority.password.grant` — password grant handled by OpenIddict.
- `authority.client_credentials.grant` — client credential grant handling.
- `authority.bootstrap.user` and `authority.bootstrap.client` — bootstrap API operations.
- Future additions should preserve the `authority.<surface>.<action>` pattern to keep filtering deterministic.

## Persistence

The Authority host converts audit records into `AuthorityLoginAttemptDocument` rows for MongoDB persistence. Documents must:

- Preserve `CorrelationId`, `SubjectId`, `ClientId`, `Plugin`, `Outcome`, `Reason`, and `OccurredAt`.
- Store remote address in `remoteAddress` only after classification as PII.
- Include summary booleans such as `Successful` to accelerate lockout policy checks.

When exporting to external SIEMs, honour the `ClassifiedString.Classification` tag to avoid shipping PII into restricted environments.
106
docs/security/authority-threat-model.md
Normal file
@@ -0,0 +1,106 @@
# Authority Threat Model (STRIDE)

> Prepared by Security Guild — 2025-10-12. Scope covers Authority host, Standard plug-in, CLI, bootstrap workflow, and offline revocation distribution.

## 1. Scope & Method

- Methodology: STRIDE applied to primary Authority surfaces (token issuance, bootstrap, revocation, operator tooling, plug-in extensibility).
- Assets in scope: identity credentials, OAuth tokens (access/refresh), bootstrap invites, revocation manifests, signing keys, audit telemetry.
- Out of scope: Third-party IdPs federated via OpenIddict (tracked separately in SEC6 backlog).

## 2. Assets & Entry Points

| Asset / Surface | Description | Primary Actors |
|-----------------|-------------|----------------|
| Token issuance APIs (`/token`, `/authorize`) | OAuth/OIDC endpoints mediated by OpenIddict | CLI, UI, automation agents |
| Bootstrap channel | Initial admin invite + bootstrap CLI workflow | Platform operators |
| Revocation bundle | Offline JSON + detached JWS consumed by agents | Feedser, Agents, Zastava |
| Plug-in manifests | Standard plug-in configuration and password policy overrides | Operators, DevOps |
| Signing keys | ES256 signing keys backing tokens and revocation manifests | Security Guild, HSM/KeyOps |
| Audit telemetry | Structured login/audit stream persisted to Mongo/observability stack | SOC, SecOps |

## 3. Trust Boundaries

| Boundary | Rationale | Controls |
|----------|-----------|----------|
| TB1 — Public network ↔️ Authority ingress | Internet/extranet exposure for `/token`, `/authorize`, `/bootstrap` | TLS 1.3, reverse proxy ACLs, rate limiting (SEC3.A / CORE8.RL) |
| TB2 — Authority host ↔️ Mongo storage | Credential store, revocation state, audit log persistence | Authenticated Mongo, network segmentation, deterministic serializers |
| TB3 — Authority host ↔️ Plug-in sandbox | Plug-ins may override password policy and bootstrap flows | Code signing, manifest validation, restart-time loading only |
| TB4 — Operator workstation ↔️ CLI | CLI holds bootstrap secrets and revocation bundles | OS keychain storage, MFA on workstations, offline kit checksum |
| TB5 — Authority ↔️ Downstream agents | Revocation bundle consumption, token validation | Mutual TLS (planned), detached JWS signatures, bundle freshness checks |

## 4. Data Flow Diagrams

### 4.1 Runtime token issuance

```mermaid
flowchart LR
    subgraph Client Tier
        CLI[StellaOps CLI]
        UI[UI / Automation]
    end
    subgraph Perimeter
        RP[Reverse Proxy / WAF]
    end
    subgraph Authority
        AUTH[Authority Host]
        PLGIN[Standard Plug-in]
        STORE[(Mongo Credential Store)]
    end
    CLI -->|OAuth password / client creds| RP --> AUTH
    UI -->|OAuth flows| RP
    AUTH -->|PasswordHashOptions + Secrets| PLGIN
    AUTH -->|Verify / Persist hashes| STORE
    STORE -->|Rehash needed| AUTH
    AUTH -->|Access / refresh token| RP --> Client Tier
```

### 4.2 Bootstrap & revocation

```mermaid
flowchart LR
    subgraph Operator
        OPS[Operator Workstation]
    end
    subgraph Authority
        AUTH[Authority Host]
        STORE[(Mongo)]
    end
    subgraph Distribution
        OFFKIT[Offline Kit Bundle]
        AGENT[Authorized Agent / Feedser]
    end
    OPS -->|Bootstrap CLI (`stellaops auth bootstrap`)| AUTH
    AUTH -->|One-time invite + Argon2 hash| STORE
    AUTH -->|Revocation export (`stellaops auth revoke export`)| OFFKIT
    OFFKIT -->|Signed JSON + .jws| AGENT
    AGENT -->|Revocation ACK / telemetry| AUTH
```

## 5. STRIDE Analysis

| Threat | STRIDE Vector | Surface | Risk (L×I) | Existing Controls | Gaps / Actions | Owner |
|--------|---------------|---------|------------|-------------------|----------------|-------|
| Spoofed revocation bundle | Spoofing | TB5 — Authority ↔️ Agents | Med×High | Detached JWS signature (planned), offline kit checksums | Finalise signing key registry & verification script (SEC4.B/SEC4.HOST); add bundle freshness requirement | Security Guild (follow-up: **SEC5.B**) |
| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Add audit coverage for tampered inputs, align correlation IDs with SOC (SEC2.A/SEC2.B) | Security Guild + Authority Core (follow-up: **SEC5.C**) |
| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Enforce invite expiration + audit trail for unused invites | Security Guild (follow-up: **SEC5.D**) |
| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Planned revocation bundles, optional mTLS | Require agent binding (device fingerprint) and enforce revocation grace window alerts | Security Guild + Zastava (follow-up: **SEC5.E**) |
| Privilege escalation via plug-in override | Elevation of Privilege | TB3 — Plug-in sandbox | Med×High | Signed plug-ins, restart-only loading, configuration validation | Add static analysis on manifest overrides + runtime warning when policy weaker than host | Security Guild + DevOps (follow-up: **SEC5.F**) |
| Offline bundle tampering | Tampering | Distribution | Low×High | SHA256 manifest, signed bundles (planned) | Add supply-chain attestation for Offline Kit, publish verification CLI in docs | Security Guild + Ops (follow-up: **SEC5.G**) |
| Failure to log denied tokens | Repudiation | TB2 — Authority ↔️ Mongo | Med×Med | Serilog structured events (partial), Mongo persistence path (planned) | Finalise audit schema (SEC2.A) and ensure `/token` denies include subject/client/IP fields | Security Guild + Authority Core (follow-up: **SEC5.H**) |

Risk scoring uses a qualitative scale (Low/Med/High) for likelihood × impact; mitigation priority follows High > Med > Low.

## 6. Follow-up Backlog Hooks

| Backlog ID | Linked Threat | Summary | Target Owners |
|------------|---------------|---------|---------------|
| SEC5.B | Spoofed revocation bundle | Complete libsodium/Core signing integration and ship revocation verification script. | Security Guild + Authority Core |
| SEC5.C | Parameter tampering on `/token` | Finalise audit contract (`SEC2.A`) and add request tamper logging. | Security Guild + Authority Core |
| SEC5.D | Bootstrap invite replay | Implement expiry enforcement + audit coverage for unused bootstrap invites. | Security Guild |
| SEC5.E | Token replay by stolen agent | Document device binding requirements and create detector for stale revocation acknowledgements. | Security Guild + Zastava |
| SEC5.F | Plug-in override escalation | Static analysis of plug-in manifests; warn on weaker password policy overrides. | Security Guild + DevOps |
| SEC5.G | Offline bundle tampering | Extend Offline Kit build to include attested manifest + verification CLI sample. | Security Guild + Ops |
| SEC5.H | Failure to log denied tokens | Ensure audit persistence for all `/token` denials with correlation IDs. | Security Guild + Authority Core |

Update `src/StellaOps.Cryptography/TASKS.md` (Security Guild board) with the above backlog entries to satisfy SEC5.A exit criteria.
76
docs/security/password-hashing.md
Normal file
@@ -0,0 +1,76 @@
# Authority Password Hashing Guidance

> **Status:** Drafted 2025-10-11 alongside SEC1.A / SEC1.PLG rollout. Argon2id is now the default hashing algorithm for the Standard plug-in and recommended for all Authority identity providers.

## 1. Overview

StellaOps Authority issues and verifies credentials through the shared `StellaOps.Cryptography` provider abstraction. As of October 2025:

- **Default algorithm:** Argon2id (PHC format `$argon2id$v=19$m=<mem>,t=<time>,p=<parallelism>$<salt>$<hash>`).
- **Legacy support:** PBKDF2-SHA256 hashes (`PBKDF2.<iterations>.<payload>`) continue to verify, but successful logins are transparently rehashed to Argon2id.
- **Configuration path:** `authority.security.passwordHashing` in the primary Authority configuration controls system-wide defaults. Individual plug-ins may override via `passwordHashing` in their manifests.

## 2. Recommended Parameters

| Environment | memorySizeInKib | iterations | parallelism | Notes |
|-------------|-----------------|------------|-------------|-------|
| Production (default) | 19456 | 2 | 1 | Balances CPU with 19 MiB memory cost; ~175 ms on a 4 vCPU host. |
| High-security enclave | 32768 | 3 | 1 | Increases memory pressure; confirm capacity on shared hosts. |
| Resource-constrained lab | 8192 | 2 | 1 | Use only for bootstrap/testing; increase once hardware is upgraded. |
| PBKDF2 fallback | — | ≥210000 | — | Set `algorithm: Pbkdf2` only when Argon2 hardware support is unavailable. |

> ⚠️ Lowering parameters below these baselines should be a temporary measure. Document any deviations in runbooks and schedule follow-up work to restore defaults.

## 3. Configuring Authority Defaults

`authority.yaml` (or equivalent) accepts the following block:

```yaml
security:
  passwordHashing:
    algorithm: Argon2id       # or Pbkdf2
    memorySizeInKib: 19456    # ~19 MiB
    iterations: 2
    parallelism: 1
```

These values propagate to plug-ins that do not provide explicit overrides. Runtime validation ensures all numbers are > 0 and the algorithm is recognised.

## 4. Plug-in Overrides

The Standard plug-in inherits the host defaults but can fine-tune parameters per installation:

```yaml
passwordHashing:
  algorithm: Argon2id
  memorySizeInKib: 8192
  iterations: 2
  parallelism: 1
```

- When the plug-in configuration omits `passwordHashing`, the Authority defaults apply.
- Setting `algorithm: Pbkdf2` keeps PBKDF2 active but still upgrades credentials when the host default switches back to Argon2id.
- Invalid overrides (e.g., `memorySizeInKib: 0`) cause startup to fail with a descriptive validation error.

## 5. Observability & Migration

- Successful PBKDF2 verification logs a **rehash-needed** event and immediately persists an Argon2id hash.
- Metrics emitted: `auth.plugins.standard.password_rehash_total{algorithm="pbkdf2"}` (add dashboards to monitor upgrade progress).
- During migration, expect a gradual decline in PBKDF2 hashes as users authenticate. Use operator scripts to query `authority_users_*` collections for lingering `PBKDF2.` prefixes if you need to track completion (a query sketch follows this list).
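A minimal `mongosh` sketch for that completion check; the connection string and the `passwordHash` field name are placeholders, so substitute whatever field the Standard plug-in actually stores the encoded hash in.

```bash
# Count credentials still carrying a legacy PBKDF2 hash, per authority_users_* collection.
mongosh "mongodb://localhost:27017/stellaops_authority" --quiet --eval '
  db.getCollectionNames()
    .filter(name => name.startsWith("authority_users"))
    .forEach(name => {
      // passwordHash is a hypothetical field name; adjust to your document shape.
      const pending = db.getCollection(name).countDocuments({ passwordHash: { $regex: /^PBKDF2\./ } });
      print(`${name}: ${pending} credential(s) still on PBKDF2`);
    });
'
```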
## 6. Operational Checklist

1. Update the Authority configuration with the desired defaults; restart the host.
2. Regenerate plug-in manifests (if overrides are required) and redeploy.
3. Monitor `password_rehash_total` and login success rates; investigate any spike in failures (likely due to mis-sized limits).
4. Review hardware utilisation; Argon2id increases memory pressure compared to PBKDF2.
5. Archive this document with the change request and notify SOC of the new baseline.

For additional context on tuning trade-offs, consult the OWASP Password Storage Cheat Sheet and the StellaOps Security Guild guidance (to be published in `docs/security/rate-limits.md`).

## 7. Native Argon2 Preview Build Flag

- Set `dotnet build -p:StellaOpsCryptoSodium=true` (or define the MSBuild property in your CI) to enable the `STELLAOPS_CRYPTO_SODIUM` compilation symbol.
- The symbol switches `StellaOps.Cryptography` to use the native-oriented build pipeline so we can wire libsodium/Core bindings without affecting the managed default.
- Until the native implementation lands (SEC1.B follow-up), the flag falls back to the managed Konscious implementation while still validating the alternate compilation path.
- Document any production usage of the flag in your change log so future upgrades can align with the Security Guild rollout plan.
56
docs/security/revocation-bundle-example.json
Normal file
@@ -0,0 +1,56 @@
{
  "$schema": "../../etc/authority/revocation_bundle.schema.json",
  "schemaVersion": "1.0.0",
  "issuer": "https://auth.stella-ops.example",
  "bundleId": "6f9d08bfa0c24a0a9f7f59e6c17d2f8e8bca2ef34215c3d3ba5a9a1f0fbe2d10",
  "issuedAt": "2025-10-12T15:00:00Z",
  "validFrom": "2025-10-12T15:00:00Z",
  "sequence": 42,
  "signingKeyId": "authority-signing-20251012",
  "revocations": [
    {
      "id": "7ad4f3d2c21b461d9b3420e1151be9c4",
      "category": "token",
      "tokenType": "access_token",
      "clientId": "feedser-cli",
      "subjectId": "user:ops-admin",
      "reason": "compromised",
      "reasonDescription": "Access token reported by SOC automation run R-2045.",
      "revokedAt": "2025-10-12T14:32:05Z",
      "scopes": ["feedser:export", "feedser:jobs"],
      "fingerprint": "AD35E719C12204D7E7C92ED3F6DEBF0A44642D41AAF94233F9A47E183F4C5F18",
      "metadata": { "reportId": "R-2045", "source": "soc-automation" }
    },
    {
      "id": "user:departed-vendor",
      "category": "subject",
      "subjectId": "user:departed-vendor",
      "reason": "lifecycle",
      "revokedAt": "2025-10-10T18:15:00Z",
      "metadata": { "ticket": "HR-8821" }
    },
    {
      "id": "ci-runner-legacy",
      "category": "client",
      "clientId": "ci-runner-legacy",
      "reason": "rotation",
      "revokedAt": "2025-10-09T11:00:00Z",
      "expiresAt": "2025-11-01T00:00:00Z",
      "metadata": { "replacement": "ci-runner-2025" }
    }
  ],
  "metadata": {
    "generator": "stellaops-authority@1.4.0",
    "jobId": "revocation-export-20251012T1500Z"
  }
}
70
docs/security/revocation-bundle.md
Normal file
@@ -0,0 +1,70 @@
# Authority Revocation Bundle

The Authority service exports revocation information as an offline-friendly JSON document plus a detached JWS signature. Operators can mirror the bundle alongside Feedser exports to ensure air-gapped scanners receive the latest token, subject, and client revocations.

## File layout

| Artefact | Description |
| --- | --- |
| `revocation-bundle.json` | Canonical JSON document describing revoked entities. Validates against [`etc/authority/revocation_bundle.schema.json`](../../etc/authority/revocation_bundle.schema.json). |
| `revocation-bundle.json.jws` | Detached JWS signature covering the exact UTF-8 bytes of `revocation-bundle.json`. |
| `revocation-bundle.json.sha256` | Hex-encoded SHA-256 digest used by mirror automation (optional but recommended). |

All hashes and signatures are generated after applying the deterministic formatting rules below.

## Deterministic formatting rules

- JSON is serialised with UTF-8 encoding, 2-space indentation, and lexicographically sorted object keys.
- Arrays are sorted by deterministic keys:
  - Top-level `revocations` sorted by (`category`, `id`, `revokedAt`).
  - Nested arrays (`scopes`) sorted ascending, uniqueness enforced.
- Numeric values (`sequence`) are emitted without leading zeros.
- Timestamps use UTC ISO-8601 format with `Z` suffix.

Consumers MUST treat the combination of `schemaVersion` and `sequence` as a monotonic feed. Bundles with older `sequence` values are ignored unless `bundleId` differs and `issuedAt` is newer (supporting replay detection).

## Revocation entry categories

| Category | Description | Required fields |
| --- | --- | --- |
| `token` | A single OAuth token (access, refresh, device, authorization code). | `tokenType`, `clientId`, `revokedAt`, optional `subjectId` |
| `subject` | All credentials issued to a subject (user/service account). | `subjectId`, `revokedAt` |
| `client` | Entire OAuth client registration is revoked. | `clientId`, `revokedAt` |
| `key` | Signing/encryption key material revoked. | `id`, `revokedAt` |

`reason` is a machine-friendly code (`compromised`, `rotation`, `policy`, `lifecycle`, etc.). `reasonDescription` may include a short operator note.

## Detached JWS workflow

1. Serialise `revocation-bundle.json` using the deterministic rules.
2. Compute the SHA-256 digest; write it to `revocation-bundle.json.sha256`.
3. Sign using ES256 (default) with the configured Authority signing key. The JWS header uses:
   ```json
   {
     "alg": "ES256",
     "kid": "{signingKeyId}",
     "typ": "application/vnd.stellaops.revocation-bundle+jws",
     "b64": false,
     "crit": ["b64"]
   }
   ```
4. Persist the detached signature payload to `revocation-bundle.json.jws` (per RFC 7797).

Verification steps (a shell sketch follows this list):

1. Validate `revocation-bundle.json` against the schema.
2. Re-compute the SHA-256 and compare with `.sha256` (if present).
3. Resolve the signing key from JWKS (`/.well-known/jwks.json`) or the offline key bundle.
4. Verify the detached JWS using the stored signing key (example tooling coming with `stella auth revoke verify`).
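A shell sketch of steps 2 and 4 for mirror operators. The digest check follows directly from the file layout above; the `stella auth revoke verify` flags are assumptions until the tooling ships, so check its `--help` output before scripting it.

```bash
#!/usr/bin/env bash
set -euo pipefail

bundle=revocation-bundle.json

# Step 2: the .sha256 file holds only the hex digest, so build a sha256sum-compatible line.
echo "$(cat "${bundle}.sha256")  ${bundle}" | sha256sum --check -

# Steps 3-4 via the Authority CLI mentioned in step 4 (flag names are hypothetical).
stella auth revoke verify --bundle "${bundle}" --signature "${bundle}.jws"
```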
## Example

The repository contains an [example bundle](revocation-bundle-example.json) demonstrating a mixed export of token, subject, and client revocations. Use it as a reference for integration tests and tooling.

## Operations Quick Reference

- `stella auth revoke export` emits a canonical JSON bundle, `.sha256` digest, and detached JWS signature in one command. Use `--output` to write into your mirror staging directory.
- `stella auth revoke verify` validates a bundle using cached JWKS or an offline PEM key and reports digest mismatches before distribution.
- `POST /internal/revocations/export` provides the same payload for orchestrators that already talk to the bootstrap API.
- `POST /internal/signing/rotate` rotates JWKS material without downtime; always export a fresh bundle afterward so downstream mirrors receive signatures from the new `kid`.
- Offline Kit automation should mirror `revocation-bundle.json*` alongside Feedser exports so agents ingest revocations during the same sync pass.
@@ -10,6 +10,12 @@ passwordPolicy:
  requireDigit: true
  requireSymbol: true

passwordHashing:
  algorithm: "Argon2id"
  memorySizeInKib: 19456
  iterations: 2
  parallelism: 1

lockout:
  enabled: true
  maxAttempts: 5
@@ -23,6 +23,25 @@ storage:
  # databaseName: "stellaops_authority"
  commandTimeout: "00:00:30"

# Signing configuration for revocation bundles and JWKS.
signing:
  enabled: true
  activeKeyId: "authority-signing-2025-dev"
  keyPath: "../certificates/authority-signing-2025-dev.pem"
  algorithm: "ES256"
  keySource: "file"
  # provider: "default"
  additionalKeys:
    - keyId: "authority-signing-dev"
      path: "../certificates/authority-signing-dev.pem"
      source: "file"
  # Rotation flow:
  #   1. Generate a new PEM under ./certificates (e.g. authority-signing-2026-dev.pem).
  #   2. Trigger the .gitea/workflows/authority-key-rotation.yml workflow (or run
  #      ops/authority/key-rotation.sh) with the new keyId/keyPath.
  #   3. Update activeKeyId/keyPath above and move the previous key into additionalKeys
  #      so restarts retain retired material for JWKS consumers.

# Bootstrap administrative endpoints (initial provisioning).
bootstrap:
  enabled: false
165
etc/authority/revocation_bundle.schema.json
Normal file
@@ -0,0 +1,165 @@
{
  "$schema": "https://json-schema.org/draft/2020-12/schema",
  "$id": "https://stella-ops.org/schemas/revocation-bundle.json",
  "title": "StellaOps Authority Revocation Bundle",
  "description": "Canonical representation of revoked tokens, clients, and principals distributed to offline mirrors.",
  "type": "object",
  "additionalProperties": false,
  "required": ["schemaVersion", "issuer", "issuedAt", "sequence", "revocations"],
  "properties": {
    "schemaVersion": { "type": "string", "pattern": "^1\\.0\\.[0-9]+$", "description": "SemVer of the bundle schema. Major version bumps indicate breaking changes." },
    "issuer": { "type": "string", "format": "uri", "description": "Canonical issuer URL of the Authority instance producing the bundle." },
    "bundleId": { "type": "string", "pattern": "^[a-f0-9]{16,64}$", "description": "Deterministic identifier for this bundle revision (e.g. SHA-256 hex)." },
    "issuedAt": { "type": "string", "format": "date-time", "description": "UTC timestamp when the bundle was emitted." },
    "validFrom": { "type": "string", "format": "date-time", "description": "UTC timestamp when consumers should begin enforcing entries." },
    "expiresAt": { "type": "string", "format": "date-time", "description": "Optional expiry after which consumers must fetch a newer bundle." },
    "sequence": { "type": "integer", "minimum": 0, "description": "Monotonic sequence number. Consumers MUST ignore bundles with lower sequence values." },
    "signingKeyId": { "type": "string", "description": "Key identifier (kid) used for the detached JWS signature." },
    "revocations": { "type": "array", "description": "Deterministically sorted revocation entries.", "items": { "$ref": "#/$defs/revocationEntry" } },
    "metadata": {
      "type": "object",
      "description": "Additional producer metadata (operator, environment, export job id).",
      "additionalProperties": { "type": ["string", "number", "boolean", "null"] }
    }
  },
  "$defs": {
    "revocationEntry": {
      "type": "object",
      "additionalProperties": false,
      "required": ["id", "category", "revokedAt"],
      "properties": {
        "id": { "type": "string", "minLength": 4, "description": "Primary identifier for the revoked entity (token id, subject id, client id, or key id)." },
        "category": { "type": "string", "enum": ["token", "subject", "client", "key"], "description": "Scope of the revocation entry." },
        "tokenType": { "type": "string", "enum": ["access_token", "refresh_token", "authorization_code", "device_code"], "description": "Token type impacted by the revocation (required when category == 'token')." },
        "subjectId": { "type": "string", "description": "Subject identifier impacted (user, service account)." },
        "clientId": { "type": "string", "description": "OAuth client identifier impacted." },
        "reason": { "type": "string", "pattern": "^[a-z0-9_.-]{1,64}$", "description": "Reason code (e.g. compromised, rotation, policy)." },
        "reasonDescription": { "type": "string", "maxLength": 256, "description": "Human-readable description for operator tooling." },
        "revokedAt": { "type": "string", "format": "date-time", "description": "UTC timestamp when the entity was revoked." },
        "effectiveAt": { "type": "string", "format": "date-time", "description": "UTC timestamp when revocation becomes effective (defaults to revokedAt)." },
        "expiresAt": { "type": "string", "format": "date-time", "description": "Optional expiry after which the revocation no longer applies." },
        "scopes": { "type": "array", "items": { "type": "string" }, "uniqueItems": true, "description": "Scoped permissions affected (for token revocations)." },
        "fingerprint": { "type": "string", "pattern": "^[A-Fa-f0-9]{64}$", "description": "SHA-256 hash of the revoked credential (optional)." },
        "metadata": {
          "type": "object",
          "description": "Additional structured metadata to assist consumers (e.g. audit id).",
          "patternProperties": { "^[a-zA-Z0-9_.-]{1,64}$": { "type": ["string", "number", "boolean", "null"] } },
          "additionalProperties": false
        }
      },
      "allOf": [
        { "if": { "properties": { "category": { "const": "token" } } }, "then": { "required": ["tokenType", "clientId"] } },
        { "if": { "properties": { "category": { "const": "subject" } } }, "then": { "required": ["subjectId"] } },
        { "if": { "properties": { "category": { "const": "client" } } }, "then": { "required": ["clientId"] } }
      ]
    }
  }
}
@@ -59,7 +59,27 @@ authority:
    clientSecretFile: ""
    clientScopes:
      - "feedser.jobs.trigger"
    resilience:
      # Enable deterministic retry/backoff when Authority is briefly unavailable.
      enableRetries: true
      retryDelays:
        - "00:00:01"
        - "00:00:02"
        - "00:00:05"
      # Allow stale discovery/JWKS responses when Authority is offline (extend tolerance as needed for air-gapped mirrors).
      allowOfflineCacheFallback: true
      offlineCacheTolerance: "00:10:00"
    # Networks allowed to bypass authentication (loopback by default for on-host cron jobs).
    bypassNetworks:
      - "127.0.0.1/32"
      - "::1/128"

  sources:
    ghsa:
      apiToken: "${GITHUB_PAT}"
      pageSize: 50
      maxPagesPerFetch: 5
      requestDelay: "00:00:00.200"
      failureBackoff: "00:05:00"
      rateLimitWarningThreshold: 500
      secondaryRateLimitBackoff: "00:02:00"
@@ -37,3 +37,22 @@ Key environment variables (mirroring `StellaOpsAuthorityOptions`):
| `STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY` | Path to plugin manifest directory |

For additional options, see `etc/authority.yaml.sample`.

## Key rotation automation (OPS3)

The `key-rotation.sh` helper wraps the `/internal/signing/rotate` endpoint delivered with CORE10. It can run in CI/CD once the new PEM key is staged on the Authority host volume.

```bash
AUTHORITY_BOOTSTRAP_KEY=$(cat ~/.secrets/authority-bootstrap.key) \
./key-rotation.sh \
  --authority-url https://authority.stella-ops.local \
  --key-id authority-signing-2025 \
  --key-path ../certificates/authority-signing-2025.pem \
  --meta rotatedBy=pipeline --meta changeTicket=OPS-1234
```

- `--key-path` should resolve from the Authority content root (same as the `docs/11_AUTHORITY.md` SOP).
- Provide `--source`/`--provider` if the key loader differs from the default file-based provider.
- Pass `--dry-run` during rehearsals to inspect the JSON payload without invoking the API (see the example below).
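A rehearsal example for that `--dry-run` flag; the key id and path below are placeholders used for illustration only.

```bash
# Rehearsal run: prints the rotation payload instead of calling /internal/signing/rotate.
AUTHORITY_BOOTSTRAP_KEY=$(cat ~/.secrets/authority-bootstrap.key) \
./key-rotation.sh \
  --authority-url https://authority.stella-ops.local \
  --key-id authority-signing-2026-dryrun \
  --key-path ../certificates/authority-signing-2026-dryrun.pem \
  --dry-run
```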
After rotation, export a fresh revocation bundle (`stellaops-cli auth revoke export`) so downstream mirrors consume signatures from the new `kid`. The canonical operational steps live in `docs/11_AUTHORITY.md` – make sure any local automation keeps that guide as the source of truth.
@@ -3,4 +3,4 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| OPS3.KEY-ROTATION | DONE (2025-10-12) | DevOps Crew, Authority Core | CORE10.JWKS | Implement key rotation tooling + pipeline hook once rotating JWKS lands. Document SOP and secret handling. | ✅ CLI/script rotates keys + updates JWKS; ✅ Pipeline job documented; ✅ docs/ops runbook updated. |
189
ops/authority/key-rotation.sh
Normal file
@@ -0,0 +1,189 @@
#!/usr/bin/env bash

set -euo pipefail

usage() {
  cat <<'USAGE'
Usage: key-rotation.sh --authority-url URL --api-key TOKEN --key-id ID --key-path PATH [options]

Required flags:
  -u, --authority-url   Base Authority URL (e.g. https://authority.example.com)
  -k, --api-key         Bootstrap API key (x-stellaops-bootstrap-key header)
  -i, --key-id          Identifier (kid) for the new signing key
  -p, --key-path        Path (relative to Authority content root or absolute) where the PEM key lives

Optional flags:
  -s, --source          Key source loader identifier (default: file)
  -a, --algorithm       Signing algorithm (default: ES256)
      --provider        Preferred crypto provider name
  -m, --meta key=value  Additional metadata entries for the rotation record (repeatable)
      --dry-run         Print the JSON payload instead of invoking the API
  -h, --help            Show this help

Environment fallbacks:
  AUTHORITY_URL, AUTHORITY_BOOTSTRAP_KEY, AUTHORITY_KEY_SOURCE, AUTHORITY_KEY_PROVIDER

Example:
  AUTHORITY_BOOTSTRAP_KEY=$(cat key.txt) \
  ./key-rotation.sh -u https://authority.local \
    -i authority-signing-2025 \
    -p ../certificates/authority-signing-2025.pem \
    -m rotatedBy=pipeline -m ticket=OPS-1234
USAGE
}

require_python() {
  if command -v python3 >/dev/null 2>&1; then
    PYTHON_BIN=python3
  elif command -v python >/dev/null 2>&1; then
    PYTHON_BIN=python
  else
    echo "error: python3 (or python) is required for JSON encoding" >&2
    exit 1
  fi
}

json_quote() {
  "$PYTHON_BIN" - "$1" <<'PY'
import json, sys
print(json.dumps(sys.argv[1]))
PY
}

AUTHORITY_URL="${AUTHORITY_URL:-}"
API_KEY="${AUTHORITY_BOOTSTRAP_KEY:-}"
KEY_ID=""
KEY_PATH=""
SOURCE="${AUTHORITY_KEY_SOURCE:-file}"
ALGORITHM="ES256"
PROVIDER="${AUTHORITY_KEY_PROVIDER:-}"
DRY_RUN=false
declare -a METADATA=()

while [[ $# -gt 0 ]]; do
  case "$1" in
    -u|--authority-url)
      AUTHORITY_URL="$2"
      shift 2
      ;;
    -k|--api-key)
      API_KEY="$2"
      shift 2
      ;;
    -i|--key-id)
      KEY_ID="$2"
      shift 2
      ;;
    -p|--key-path)
      KEY_PATH="$2"
      shift 2
      ;;
    -s|--source)
      SOURCE="$2"
      shift 2
      ;;
    -a|--algorithm)
      ALGORITHM="$2"
      shift 2
      ;;
    --provider)
      PROVIDER="$2"
      shift 2
      ;;
    -m|--meta)
      METADATA+=("$2")
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      exit 1
      ;;
  esac
done

if [[ -z "$AUTHORITY_URL" || -z "$API_KEY" || -z "$KEY_ID" || -z "$KEY_PATH" ]]; then
  echo "error: missing required arguments" >&2
  usage
  exit 1
fi

case "$AUTHORITY_URL" in
  http://*|https://*) ;;
  *)
    echo "error: --authority-url must include scheme (http/https)" >&2
    exit 1
    ;;
esac

require_python

payload="{"
payload+="\"keyId\":$(json_quote "$KEY_ID"),"
payload+="\"location\":$(json_quote "$KEY_PATH"),"
payload+="\"source\":$(json_quote "$SOURCE"),"
payload+="\"algorithm\":$(json_quote "$ALGORITHM"),"
if [[ -n "$PROVIDER" ]]; then
  payload+="\"provider\":$(json_quote "$PROVIDER"),"
fi

if [[ ${#METADATA[@]} -gt 0 ]]; then
  payload+="\"metadata\":{"
  for entry in "${METADATA[@]}"; do
    if [[ "$entry" != *=* ]]; then
      echo "warning: ignoring metadata entry '$entry' (expected key=value)" >&2
      continue
    fi
    key="${entry%%=*}"
    value="${entry#*=}"
    payload+="$(json_quote "$key"):$(json_quote "$value"),"
  done
  if [[ "${payload: -1}" == "," ]]; then
    payload="${payload::-1}"
  fi
  payload+="},"
fi

if [[ "${payload: -1}" == "," ]]; then
  payload="${payload::-1}"
fi
payload+="}"

if [[ "$DRY_RUN" == true ]]; then
  echo "# Dry run payload:"
  echo "$payload"
  exit 0
fi

tmp_response="$(mktemp)"
cleanup() { rm -f "$tmp_response"; }
trap cleanup EXIT

http_code=$(curl -sS -o "$tmp_response" -w "%{http_code}" \
  -X POST "${AUTHORITY_URL%/}/internal/signing/rotate" \
  -H "Content-Type: application/json" \
  -H "x-stellaops-bootstrap-key: $API_KEY" \
  --data "$payload")

if [[ "$http_code" != "200" && "$http_code" != "201" ]]; then
  echo "error: rotation API returned HTTP $http_code" >&2
  cat "$tmp_response" >&2 || true
  exit 1
fi

echo "Rotation request accepted (HTTP $http_code). Response:"
cat "$tmp_response"

echo
echo "Fetching JWKS to confirm active key..."
curl -sS "${AUTHORITY_URL%/}/jwks" || true
echo
echo "Done. Remember to update authority.yaml with the new key metadata to keep restarts consistent."
19
scripts/update-apple-fixtures.ps1
Normal file
19
scripts/update-apple-fixtures.ps1
Normal file
@@ -0,0 +1,19 @@
#!/usr/bin/env pwsh
Set-StrictMode -Version Latest
$ErrorActionPreference = "Stop"

$rootDir = Split-Path -Parent $PSCommandPath
$rootDir = Join-Path $rootDir ".."
$rootDir = Resolve-Path $rootDir

$env:UPDATE_APPLE_FIXTURES = "1"

Push-Location $rootDir
try {
    $sentinel = Join-Path $rootDir "src/StellaOps.Feedser.Source.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures"
    New-Item -ItemType File -Path $sentinel -Force | Out-Null
    dotnet test "src\StellaOps.Feedser.Source.Vndr.Apple.Tests\StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj" @Args
}
finally {
    Pop-Location
}
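As a usage note, the PowerShell wrapper forwards any extra arguments to `dotnet test` through `@Args`, so a test filter can narrow the fixture refresh; the filter expression below is illustrative only.

```bash
# Example: refresh fixtures while scoping the run with a dotnet test filter.
pwsh ./scripts/update-apple-fixtures.ps1 --filter "FullyQualifiedName~Apple"
```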
14
scripts/update-apple-fixtures.sh
Normal file
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"

export UPDATE_APPLE_FIXTURES=1
if [ -n "${WSLENV-}" ]; then
    export WSLENV="${WSLENV}:UPDATE_APPLE_FIXTURES/up"
else
    export WSLENV="UPDATE_APPLE_FIXTURES/up"
fi

touch "$ROOT_DIR/src/StellaOps.Feedser.Source.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures"
( cd "$ROOT_DIR" && dotnet test "src/StellaOps.Feedser.Source.Vndr.Apple.Tests/StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj" "$@" )
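The bash wrapper behaves the same way: `"$@"` passes everything after the script name straight to `dotnet test`, so the same filter trick works; again, the filter value is just an example.

```bash
# Example: refresh Apple fixtures, optionally filtered to a subset of tests.
./scripts/update-apple-fixtures.sh --filter "FullyQualifiedName~Apple"
```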
@@ -0,0 +1,125 @@
using System;
using StellaOps.Authority.Plugin.Standard.Security;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Plugin.Standard.Tests.Security;

public class CryptoPasswordHasherTests
{
    [Fact]
    public void Hash_EmitsArgon2idByDefault()
    {
        var options = CreateOptions();
        var hasher = new CryptoPasswordHasher(options, new DefaultCryptoProvider());

        var encoded = hasher.Hash("Secr3t!");

        Assert.StartsWith("$argon2id$", encoded, StringComparison.Ordinal);
    }

    [Fact]
    public void Verify_ReturnsSuccess_ForCurrentAlgorithm()
    {
        var options = CreateOptions();
        var provider = new DefaultCryptoProvider();
        var hasher = new CryptoPasswordHasher(options, provider);
        var encoded = hasher.Hash("Passw0rd!");

        var result = hasher.Verify("Passw0rd!", encoded);

        Assert.Equal(PasswordVerificationResult.Success, result);
    }

    [Fact]
    public void Verify_FlagsLegacyPbkdf2_ForRehash()
    {
        var options = CreateOptions();
        var provider = new DefaultCryptoProvider();
        var hasher = new CryptoPasswordHasher(options, provider);

        var legacy = new Pbkdf2PasswordHasher().Hash(
            "Passw0rd!",
            new PasswordHashOptions
            {
                Algorithm = PasswordHashAlgorithm.Pbkdf2,
                Iterations = 150_000
            });

        var result = hasher.Verify("Passw0rd!", legacy);

        Assert.Equal(PasswordVerificationResult.SuccessRehashNeeded, result);
    }

    [Fact]
    public void Verify_RejectsTamperedPayload()
    {
        var options = CreateOptions();
        var provider = new DefaultCryptoProvider();
        var hasher = new CryptoPasswordHasher(options, provider);

        var legacy = new Pbkdf2PasswordHasher().Hash(
            "Passw0rd!",
            new PasswordHashOptions
            {
                Algorithm = PasswordHashAlgorithm.Pbkdf2,
                Iterations = 160_000
            });

        var tampered = legacy + "corrupted";

        var result = hasher.Verify("Passw0rd!", tampered);

        Assert.Equal(PasswordVerificationResult.Failed, result);
    }

    [Fact]
    public void Verify_AllowsLegacyAlgorithmWhenConfigured()
    {
        var options = CreateOptions();
        options.PasswordHashing = options.PasswordHashing with
        {
            Algorithm = PasswordHashAlgorithm.Pbkdf2,
            Iterations = 200_000
        };

        var provider = new DefaultCryptoProvider();
        var hasher = new CryptoPasswordHasher(options, provider);

        var legacy = new Pbkdf2PasswordHasher().Hash(
            "Passw0rd!",
            new PasswordHashOptions
            {
                Algorithm = PasswordHashAlgorithm.Pbkdf2,
                Iterations = 200_000
            });

        var result = hasher.Verify("Passw0rd!", legacy);

        Assert.Equal(PasswordVerificationResult.Success, result);
    }

    private static StandardPluginOptions CreateOptions() => new()
    {
        PasswordPolicy = new PasswordPolicyOptions
        {
            MinimumLength = 8,
            RequireDigit = true,
            RequireLowercase = true,
            RequireUppercase = true,
            RequireSymbol = false
        },
        Lockout = new LockoutOptions
        {
            Enabled = true,
            MaxAttempts = 5,
            WindowMinutes = 15
        },
        PasswordHashing = new PasswordHashOptions
        {
            Algorithm = PasswordHashAlgorithm.Argon2id,
            MemorySizeInKib = 8 * 1024,
            Iterations = 2,
            Parallelism = 1
        }
    };
}
@@ -1,6 +1,7 @@
 using System;
 using System.IO;
 using StellaOps.Authority.Plugin.Standard;
+using StellaOps.Cryptography;

 namespace StellaOps.Authority.Plugin.Standard.Tests;

@@ -96,4 +97,49 @@ public class StandardPluginOptionsTests

         Assert.Equal(Path.GetFullPath(absolute), options.TokenSigning.KeyDirectory);
     }
+
+    [Fact]
+    public void Validate_Throws_WhenPasswordHashingMemoryInvalid()
+    {
+        var options = new StandardPluginOptions
+        {
+            PasswordHashing = new PasswordHashOptions
+            {
+                MemorySizeInKib = 0
+            }
+        };
+
+        var ex = Assert.Throws<InvalidOperationException>(() => options.Validate("standard"));
+        Assert.Contains("memory", ex.Message, StringComparison.OrdinalIgnoreCase);
+    }
+
+    [Fact]
+    public void Validate_Throws_WhenPasswordHashingIterationsInvalid()
+    {
+        var options = new StandardPluginOptions
+        {
+            PasswordHashing = new PasswordHashOptions
+            {
+                Iterations = 0
+            }
+        };
+
+        var ex = Assert.Throws<InvalidOperationException>(() => options.Validate("standard"));
+        Assert.Contains("iteration", ex.Message, StringComparison.OrdinalIgnoreCase);
+    }
+
+    [Fact]
+    public void Validate_Throws_WhenPasswordHashingParallelismInvalid()
+    {
+        var options = new StandardPluginOptions
+        {
+            PasswordHashing = new PasswordHashOptions
+            {
+                Parallelism = 0
+            }
+        };
+
+        var ex = Assert.Throws<InvalidOperationException>(() => options.Validate("standard"));
+        Assert.Contains("parallelism", ex.Message, StringComparison.OrdinalIgnoreCase);
+    }
 }
@@ -34,6 +34,9 @@ public class StandardPluginRegistrarTests
             ["passwordPolicy:requireDigit"] = "false",
             ["passwordPolicy:requireSymbol"] = "false",
             ["lockout:enabled"] = "false",
+            ["passwordHashing:memorySizeInKib"] = "8192",
+            ["passwordHashing:iterations"] = "2",
+            ["passwordHashing:parallelism"] = "1",
             ["bootstrapUser:username"] = "bootstrap",
             ["bootstrapUser:password"] = "Bootstrap1!",
             ["bootstrapUser:requirePasswordReset"] = "true"
@@ -1,4 +1,6 @@
+using System;
 using System.Collections.Generic;
+using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Extensions.Logging.Abstractions;
@@ -7,6 +9,7 @@ using MongoDB.Driver;
 using StellaOps.Authority.Plugins.Abstractions;
 using StellaOps.Authority.Plugin.Standard.Security;
 using StellaOps.Authority.Plugin.Standard.Storage;
+using StellaOps.Cryptography;

 namespace StellaOps.Authority.Plugin.Standard.Tests;

@@ -37,13 +40,21 @@ public class StandardUserCredentialStoreTests : IAsyncLifetime
                 Enabled = true,
                 MaxAttempts = 2,
                 WindowMinutes = 1
+            },
+            PasswordHashing = new PasswordHashOptions
+            {
+                Algorithm = PasswordHashAlgorithm.Argon2id,
+                MemorySizeInKib = 8 * 1024,
+                Iterations = 2,
+                Parallelism = 1
             }
         };
+        var cryptoProvider = new DefaultCryptoProvider();
         store = new StandardUserCredentialStore(
             "standard",
             database,
             options,
-            new Pbkdf2PasswordHasher(),
+            new CryptoPasswordHasher(options, cryptoProvider),
             NullLogger<StandardUserCredentialStore>.Instance);
     }

@@ -65,6 +76,7 @@ public class StandardUserCredentialStoreTests : IAsyncLifetime
         var result = await store.VerifyPasswordAsync("alice", "Password1!", CancellationToken.None);
         Assert.True(result.Succeeded);
         Assert.Equal("alice", result.User?.Username);
+        Assert.Empty(result.AuditProperties);
     }

     [Fact]
@@ -90,6 +102,46 @@ public class StandardUserCredentialStoreTests : IAsyncLifetime
         Assert.Equal(AuthorityCredentialFailureCode.LockedOut, second.FailureCode);
         Assert.NotNull(second.RetryAfter);
         Assert.True(second.RetryAfter.Value > System.TimeSpan.Zero);
+        Assert.Contains(second.AuditProperties, property => property.Name == "plugin.lockout_until");
+    }
+
+    [Fact]
+    public async Task VerifyPasswordAsync_RehashesLegacyHashesToArgon2()
+    {
+        var legacyHash = new Pbkdf2PasswordHasher().Hash(
+            "Legacy1!",
+            new PasswordHashOptions
+            {
+                Algorithm = PasswordHashAlgorithm.Pbkdf2,
+                Iterations = 160_000
+            });
+
+        var document = new StandardUserDocument
+        {
+            Username = "legacy",
+            NormalizedUsername = "legacy",
+            PasswordHash = legacyHash,
+            Roles = new List<string>(),
+            Attributes = new Dictionary<string, string?>(),
+            CreatedAt = DateTimeOffset.UtcNow.AddDays(-1),
+            UpdatedAt = DateTimeOffset.UtcNow.AddDays(-1)
+        };
+
+        await database.GetCollection<StandardUserDocument>("authority_users_standard")
+            .InsertOneAsync(document);
+
+        var result = await store.VerifyPasswordAsync("legacy", "Legacy1!", CancellationToken.None);
+
+        Assert.True(result.Succeeded);
+        Assert.Equal("legacy", result.User?.Username);
+        Assert.Contains(result.AuditProperties, property => property.Name == "plugin.rehashed");
+
+        var updated = await database.GetCollection<StandardUserDocument>("authority_users_standard")
+            .Find(u => u.NormalizedUsername == "legacy")
+            .FirstOrDefaultAsync();
+
+        Assert.NotNull(updated);
+        Assert.StartsWith("$argon2id$", updated!.PasswordHash, StringComparison.Ordinal);
     }

     public Task InitializeAsync() => Task.CompletedTask;
@@ -1,6 +1,5 @@
 using System;
-using System.Security.Cryptography;
-using System.Text;
+using StellaOps.Cryptography;

 namespace StellaOps.Authority.Plugin.Standard.Security;

@@ -18,96 +17,70 @@ internal enum PasswordVerificationResult
     SuccessRehashNeeded
 }

-internal sealed class Pbkdf2PasswordHasher : IPasswordHasher
+internal sealed class CryptoPasswordHasher : IPasswordHasher
 {
-    private const int SaltSize = 16;
-    private const int HashSize = 32;
-    private const int Iterations = 210_000;
-    private const string Header = "PBKDF2";
+    private readonly StandardPluginOptions options;
+    private readonly ICryptoProvider cryptoProvider;
+
+    public CryptoPasswordHasher(StandardPluginOptions options, ICryptoProvider cryptoProvider)
+    {
+        this.options = options ?? throw new ArgumentNullException(nameof(options));
+        this.cryptoProvider = cryptoProvider ?? throw new ArgumentNullException(nameof(cryptoProvider));
+    }

     public string Hash(string password)
     {
-        if (string.IsNullOrEmpty(password))
-        {
-            throw new ArgumentException("Password is required.", nameof(password));
-        }
-
-        Span<byte> salt = stackalloc byte[SaltSize];
-        RandomNumberGenerator.Fill(salt);
-
-        Span<byte> hash = stackalloc byte[HashSize];
-        var derived = Rfc2898DeriveBytes.Pbkdf2(password, salt.ToArray(), Iterations, HashAlgorithmName.SHA256, HashSize);
-        derived.CopyTo(hash);
-
-        var payload = new byte[1 + SaltSize + HashSize];
-        payload[0] = 0x01; // version
-        salt.CopyTo(payload.AsSpan(1));
-        hash.CopyTo(payload.AsSpan(1 + SaltSize));
-
-        var builder = new StringBuilder();
-        builder.Append(Header);
-        builder.Append('.');
-        builder.Append(Iterations);
-        builder.Append('.');
-        builder.Append(Convert.ToBase64String(payload));
-        return builder.ToString();
+        ArgumentException.ThrowIfNullOrEmpty(password);
+
+        var hashOptions = options.PasswordHashing;
+        hashOptions.Validate();
+
+        var hasher = cryptoProvider.GetPasswordHasher(hashOptions.Algorithm.ToAlgorithmId());
+        return hasher.Hash(password, hashOptions);
     }

     public PasswordVerificationResult Verify(string password, string hashedPassword)
     {
-        if (string.IsNullOrEmpty(password) || string.IsNullOrEmpty(hashedPassword))
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        var parts = hashedPassword.Split('.', StringSplitOptions.RemoveEmptyEntries);
-        if (parts.Length != 3 || !string.Equals(parts[0], Header, StringComparison.Ordinal))
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        if (!int.TryParse(parts[1], out var iterations))
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        byte[] payload;
-        try
-        {
-            payload = Convert.FromBase64String(parts[2]);
-        }
-        catch (FormatException)
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        if (payload.Length != 1 + SaltSize + HashSize)
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        var version = payload[0];
-        if (version != 0x01)
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        var salt = new byte[SaltSize];
-        Array.Copy(payload, 1, salt, 0, SaltSize);
-
-        var expectedHash = new byte[HashSize];
-        Array.Copy(payload, 1 + SaltSize, expectedHash, 0, HashSize);
-
-        var actualHash = Rfc2898DeriveBytes.Pbkdf2(password, salt, iterations, HashAlgorithmName.SHA256, HashSize);
-
-        var success = CryptographicOperations.FixedTimeEquals(expectedHash, actualHash);
-        if (!success)
-        {
-            return PasswordVerificationResult.Failed;
-        }
-
-        return iterations < Iterations
-            ? PasswordVerificationResult.SuccessRehashNeeded
-            : PasswordVerificationResult.Success;
+        ArgumentException.ThrowIfNullOrEmpty(password);
+        ArgumentException.ThrowIfNullOrEmpty(hashedPassword);
+
+        var desired = options.PasswordHashing;
+        desired.Validate();
+
+        var primaryHasher = cryptoProvider.GetPasswordHasher(desired.Algorithm.ToAlgorithmId());
+
+        if (IsArgon2Hash(hashedPassword))
+        {
+            if (!primaryHasher.Verify(password, hashedPassword))
+            {
+                return PasswordVerificationResult.Failed;
+            }
+
+            return primaryHasher.NeedsRehash(hashedPassword, desired)
+                ? PasswordVerificationResult.SuccessRehashNeeded
+                : PasswordVerificationResult.Success;
+        }
+
+        if (IsLegacyPbkdf2Hash(hashedPassword))
+        {
+            var legacyHasher = cryptoProvider.GetPasswordHasher(PasswordHashAlgorithm.Pbkdf2.ToAlgorithmId());
+            if (!legacyHasher.Verify(password, hashedPassword))
+            {
+                return PasswordVerificationResult.Failed;
+            }
+
+            return desired.Algorithm == PasswordHashAlgorithm.Pbkdf2 &&
+                   !legacyHasher.NeedsRehash(hashedPassword, desired)
+                ? PasswordVerificationResult.Success
+                : PasswordVerificationResult.SuccessRehashNeeded;
+        }
+
+        return PasswordVerificationResult.Failed;
     }
+
+    private static bool IsArgon2Hash(string value) =>
+        value.StartsWith("$argon2id$", StringComparison.Ordinal);
+
+    private static bool IsLegacyPbkdf2Hash(string value) =>
+        value.StartsWith("PBKDF2.", StringComparison.Ordinal);
 }
@@ -1,5 +1,6 @@
 using System;
 using System.IO;
+using StellaOps.Cryptography;

 namespace StellaOps.Authority.Plugin.Standard;

@@ -13,6 +14,8 @@ internal sealed class StandardPluginOptions

     public TokenSigningOptions TokenSigning { get; set; } = new();

+    public PasswordHashOptions PasswordHashing { get; set; } = new();
+
     public void Normalize(string configPath)
     {
         TokenSigning.Normalize(configPath);
@@ -23,6 +26,7 @@ internal sealed class StandardPluginOptions
         BootstrapUser?.Validate(pluginName);
         PasswordPolicy.Validate(pluginName);
         Lockout.Validate(pluginName);
+        PasswordHashing.Validate();
     }
 }

@@ -9,6 +9,8 @@ using StellaOps.Authority.Plugin.Standard.Bootstrap;
 using StellaOps.Authority.Plugin.Standard.Security;
 using StellaOps.Authority.Plugin.Standard.Storage;
 using StellaOps.Authority.Storage.Mongo.Stores;
+using StellaOps.Cryptography;
+using StellaOps.Cryptography.DependencyInjection;

 namespace StellaOps.Authority.Plugin.Standard;

@@ -25,10 +27,11 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar

         var pluginName = context.Plugin.Manifest.Name;

-        context.Services.TryAddSingleton<IPasswordHasher, Pbkdf2PasswordHasher>();
         context.Services.AddSingleton<StandardClaimsEnricher>();
         context.Services.AddSingleton<IClaimsEnricher>(sp => sp.GetRequiredService<StandardClaimsEnricher>());

+        context.Services.AddStellaOpsCrypto();
+
         var configPath = context.Plugin.Manifest.ConfigPath;

         context.Services.AddOptions<StandardPluginOptions>(pluginName)
@@ -45,7 +48,8 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
             var database = sp.GetRequiredService<IMongoDatabase>();
             var optionsMonitor = sp.GetRequiredService<IOptionsMonitor<StandardPluginOptions>>();
             var pluginOptions = optionsMonitor.Get(pluginName);
-            var passwordHasher = sp.GetRequiredService<IPasswordHasher>();
+            var cryptoProvider = sp.GetRequiredService<ICryptoProvider>();
+            var passwordHasher = new CryptoPasswordHasher(pluginOptions, cryptoProvider);
             var loggerFactory = sp.GetRequiredService<ILoggerFactory>();

             return new StandardUserCredentialStore(
@@ -59,7 +63,9 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
         context.Services.AddSingleton(sp =>
         {
             var clientStore = sp.GetRequiredService<IAuthorityClientStore>();
-            return new StandardClientProvisioningStore(pluginName, clientStore);
+            var revocationStore = sp.GetRequiredService<IAuthorityRevocationStore>();
+            var timeProvider = sp.GetRequiredService<TimeProvider>();
+            return new StandardClientProvisioningStore(pluginName, clientStore, revocationStore, timeProvider);
         });

         context.Services.AddSingleton<IIdentityProviderPlugin>(sp =>
@@ -18,5 +18,7 @@
     <ProjectReference Include="..\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" />
     <ProjectReference Include="..\..\StellaOps.Plugin\StellaOps.Plugin.csproj" />
     <ProjectReference Include="..\StellaOps.Authority.Storage.Mongo\StellaOps.Authority.Storage.Mongo.csproj" />
+    <ProjectReference Include="..\..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
+    <ProjectReference Include="..\..\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj" />
   </ItemGroup>
 </Project>
@@ -1,3 +1,4 @@
+using System.Collections.Generic;
 using System.Linq;
 using StellaOps.Authority.Plugins.Abstractions;
 using StellaOps.Authority.Storage.Mongo.Documents;
@@ -9,11 +10,19 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
 {
     private readonly string pluginName;
     private readonly IAuthorityClientStore clientStore;
+    private readonly IAuthorityRevocationStore revocationStore;
+    private readonly TimeProvider clock;

-    public StandardClientProvisioningStore(string pluginName, IAuthorityClientStore clientStore)
+    public StandardClientProvisioningStore(
+        string pluginName,
+        IAuthorityClientStore clientStore,
+        IAuthorityRevocationStore revocationStore,
+        TimeProvider clock)
     {
         this.pluginName = pluginName ?? throw new ArgumentNullException(nameof(pluginName));
         this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore));
+        this.revocationStore = revocationStore ?? throw new ArgumentNullException(nameof(revocationStore));
+        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
     }

     public async ValueTask<AuthorityPluginOperationResult<AuthorityClientDescriptor>> CreateOrUpdateAsync(
@@ -28,7 +37,7 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
         }

         var document = await clientStore.FindByClientIdAsync(registration.ClientId, cancellationToken).ConfigureAwait(false)
-            ?? new AuthorityClientDocument { ClientId = registration.ClientId, CreatedAt = DateTimeOffset.UtcNow };
+            ?? new AuthorityClientDocument { ClientId = registration.ClientId, CreatedAt = clock.GetUtcNow() };

         document.Plugin = pluginName;
         document.ClientType = registration.Confidential ? "confidential" : "public";
@@ -36,6 +45,7 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
         document.SecretHash = registration.Confidential && registration.ClientSecret is not null
             ? AuthoritySecretHasher.ComputeHash(registration.ClientSecret)
             : null;
+        document.UpdatedAt = clock.GetUtcNow();

         document.RedirectUris = registration.RedirectUris.Select(static uri => uri.ToString()).ToList();
         document.PostLogoutRedirectUris = registration.PostLogoutRedirectUris.Select(static uri => uri.ToString()).ToList();
@@ -51,6 +61,7 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
         }

         await clientStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
+        await revocationStore.RemoveAsync("client", registration.ClientId, cancellationToken).ConfigureAwait(false);

         return AuthorityPluginOperationResult<AuthorityClientDescriptor>.Success(ToDescriptor(document));
     }
@@ -64,9 +75,39 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
     public async ValueTask<AuthorityPluginOperationResult> DeleteAsync(string clientId, CancellationToken cancellationToken)
     {
         var deleted = await clientStore.DeleteByClientIdAsync(clientId, cancellationToken).ConfigureAwait(false);
-        return deleted
-            ? AuthorityPluginOperationResult.Success()
-            : AuthorityPluginOperationResult.Failure("not_found", "Client was not found.");
+        if (!deleted)
+        {
+            return AuthorityPluginOperationResult.Failure("not_found", "Client was not found.");
+        }
+
+        var now = clock.GetUtcNow();
+        var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
+        {
+            ["plugin"] = pluginName
+        };
+
+        var revocation = new AuthorityRevocationDocument
+        {
+            Category = "client",
+            RevocationId = clientId,
+            ClientId = clientId,
+            Reason = "operator_request",
+            ReasonDescription = $"Client '{clientId}' deleted via plugin '{pluginName}'.",
+            RevokedAt = now,
+            EffectiveAt = now,
+            Metadata = metadata
+        };
+
+        try
+        {
+            await revocationStore.UpsertAsync(revocation, cancellationToken).ConfigureAwait(false);
+        }
+        catch
+        {
+            // Revocation export should proceed even if the metadata write fails.
+        }
+
+        return AuthorityPluginOperationResult.Success();
     }

     private static AuthorityClientDescriptor ToDescriptor(AuthorityClientDocument document)
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.Globalization;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
@@ -8,6 +9,7 @@ using MongoDB.Bson;
 using MongoDB.Driver;
 using StellaOps.Authority.Plugins.Abstractions;
 using StellaOps.Authority.Plugin.Standard.Security;
+using StellaOps.Cryptography.Audit;

 namespace StellaOps.Authority.Plugin.Standard.Storage;

@@ -43,9 +45,11 @@ internal sealed class StandardUserCredentialStore : IUserCredentialStore
         string password,
         CancellationToken cancellationToken)
     {
+        var auditProperties = new List<AuthEventProperty>();
+
         if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password))
         {
-            return AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials);
+            return AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, auditProperties: auditProperties);
         }

         var normalized = NormalizeUsername(username);
@@ -56,17 +60,24 @@ internal sealed class StandardUserCredentialStore : IUserCredentialStore
         if (user is null)
         {
             logger.LogWarning("Plugin {PluginName} failed password verification for unknown user {Username}.", pluginName, normalized);
-            return AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials);
+            return AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, auditProperties: auditProperties);
         }

         if (options.Lockout.Enabled && user.Lockout.LockoutEnd is { } lockoutEnd && lockoutEnd > DateTimeOffset.UtcNow)
         {
             var retryAfter = lockoutEnd - DateTimeOffset.UtcNow;
             logger.LogWarning("Plugin {PluginName} denied access for {Username} due to lockout (retry after {RetryAfter}).", pluginName, normalized, retryAfter);
+            auditProperties.Add(new AuthEventProperty
+            {
+                Name = "plugin.lockout_until",
+                Value = ClassifiedString.Public(lockoutEnd.ToString("O", CultureInfo.InvariantCulture))
+            });
+
             return AuthorityCredentialVerificationResult.Failure(
                 AuthorityCredentialFailureCode.LockedOut,
                 "Account is temporarily locked.",
-                retryAfter);
+                retryAfter,
+                auditProperties);
         }

         var verification = passwordHasher.Verify(password, user.PasswordHash);
@@ -75,8 +86,14 @@ internal sealed class StandardUserCredentialStore : IUserCredentialStore
         if (verification == PasswordVerificationResult.SuccessRehashNeeded)
         {
             user.PasswordHash = passwordHasher.Hash(password);
+            auditProperties.Add(new AuthEventProperty
+            {
+                Name = "plugin.rehashed",
+                Value = ClassifiedString.Public("argon2id")
+            });
         }

+        var previousFailures = user.Lockout.FailedAttempts;
         ResetLockout(user);
         user.UpdatedAt = DateTimeOffset.UtcNow;
         await users.ReplaceOneAsync(
@@ -84,8 +101,20 @@ internal sealed class StandardUserCredentialStore : IUserCredentialStore
             user,
             cancellationToken: cancellationToken).ConfigureAwait(false);

+        if (previousFailures > 0)
+        {
+            auditProperties.Add(new AuthEventProperty
+            {
+                Name = "plugin.failed_attempts_cleared",
+                Value = ClassifiedString.Public(previousFailures.ToString(CultureInfo.InvariantCulture))
+            });
+        }
+
         var descriptor = ToDescriptor(user);
-        return AuthorityCredentialVerificationResult.Success(descriptor, descriptor.RequiresPasswordReset ? "Password reset required." : null);
+        return AuthorityCredentialVerificationResult.Success(
+            descriptor,
+            descriptor.RequiresPasswordReset ? "Password reset required." : null,
+            auditProperties);
     }

     await RegisterFailureAsync(user, cancellationToken).ConfigureAwait(false);
@@ -98,10 +127,26 @@ internal sealed class StandardUserCredentialStore : IUserCredentialStore
             ? lockoutTime - DateTimeOffset.UtcNow
             : null;

+        auditProperties.Add(new AuthEventProperty
+        {
+            Name = "plugin.failed_attempts",
+            Value = ClassifiedString.Public(user.Lockout.FailedAttempts.ToString(CultureInfo.InvariantCulture))
+        });
+
+        if (user.Lockout.LockoutEnd is { } pendingLockout)
+        {
+            auditProperties.Add(new AuthEventProperty
+            {
+                Name = "plugin.lockout_until",
+                Value = ClassifiedString.Public(pendingLockout.ToString("O", CultureInfo.InvariantCulture))
+            });
+        }
+
         return AuthorityCredentialVerificationResult.Failure(
             code,
             code == AuthorityCredentialFailureCode.LockedOut ? "Account is temporarily locked." : "Invalid credentials.",
-            retry);
+            retry,
+            auditProperties);
     }

     public async ValueTask<AuthorityPluginOperationResult<AuthorityUserDescriptor>> UpsertUserAsync(
@@ -2,14 +2,14 @@

 | ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
 |----|--------|----------|------------|-------------|---------------|
-| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide. | Docs team delivers copy-edit + exported diagrams; PR merged. |
+| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. |
-| SEC1.PLG | TODO | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. |
+| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. |
-| SEC1.OPT | TODO | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. |
+| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. |
 | SEC2.PLG | TODO | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
 | SEC3.PLG | TODO | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
-| SEC4.PLG | TODO | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. |
+| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. |
 | SEC5.PLG | TODO | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
-| PLG4-6.CAPABILITIES | DOING (2025-10-10) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. |
+| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. <br>⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. |
 | PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. |
 | PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. |

@@ -1,5 +1,6 @@
 using System;
 using StellaOps.Authority.Plugins.Abstractions;
+using StellaOps.Cryptography.Audit;

 namespace StellaOps.Authority.Plugins.Abstractions.Tests;

@@ -10,12 +11,18 @@ public class AuthorityCredentialVerificationResultTests
     {
         var user = new AuthorityUserDescriptor("subject-1", "user", "User", false);

-        var result = AuthorityCredentialVerificationResult.Success(user, "ok");
+        var auditProperties = new[]
+        {
+            new AuthEventProperty { Name = "test", Value = ClassifiedString.Public("value") }
+        };
+
+        var result = AuthorityCredentialVerificationResult.Success(user, "ok", auditProperties);

         Assert.True(result.Succeeded);
         Assert.Equal(user, result.User);
         Assert.Null(result.FailureCode);
         Assert.Equal("ok", result.Message);
+        Assert.Collection(result.AuditProperties, property => Assert.Equal("test", property.Name));
     }

     [Fact]
@@ -27,12 +34,18 @@ public class AuthorityCredentialVerificationResultTests
     [Fact]
     public void Failure_SetsFailureCode()
     {
-        var result = AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.LockedOut, "locked", TimeSpan.FromMinutes(5));
+        var auditProperties = new[]
+        {
+            new AuthEventProperty { Name = "reason", Value = ClassifiedString.Public("lockout") }
+        };
+
+        var result = AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.LockedOut, "locked", TimeSpan.FromMinutes(5), auditProperties);

         Assert.False(result.Succeeded);
         Assert.Null(result.User);
         Assert.Equal(AuthorityCredentialFailureCode.LockedOut, result.FailureCode);
         Assert.Equal("locked", result.Message);
         Assert.Equal(TimeSpan.FromMinutes(5), result.RetryAfter);
+        Assert.Collection(result.AuditProperties, property => Assert.Equal("reason", property.Name));
     }
 }
@@ -4,6 +4,7 @@ using System.Linq;
 using System.Security.Claims;
 using System.Threading;
 using System.Threading.Tasks;
+using StellaOps.Cryptography.Audit;

 namespace StellaOps.Authority.Plugins.Abstractions;

@@ -311,13 +312,15 @@ public sealed record AuthorityCredentialVerificationResult
         AuthorityUserDescriptor? user,
         AuthorityCredentialFailureCode? failureCode,
         string? message,
-        TimeSpan? retryAfter)
+        TimeSpan? retryAfter,
+        IReadOnlyList<AuthEventProperty> auditProperties)
     {
         Succeeded = succeeded;
         User = user;
         FailureCode = failureCode;
         Message = message;
         RetryAfter = retryAfter;
+        AuditProperties = auditProperties ?? Array.Empty<AuthEventProperty>();
     }

     /// <summary>
@@ -345,13 +348,19 @@ public sealed record AuthorityCredentialVerificationResult
     /// </summary>
     public TimeSpan? RetryAfter { get; }

+    /// <summary>
+    /// Additional audit properties emitted by the credential store.
+    /// </summary>
+    public IReadOnlyList<AuthEventProperty> AuditProperties { get; }
+
     /// <summary>
     /// Builds a successful verification result.
     /// </summary>
     public static AuthorityCredentialVerificationResult Success(
         AuthorityUserDescriptor user,
-        string? message = null)
-        => new(true, user ?? throw new ArgumentNullException(nameof(user)), null, message, null);
+        string? message = null,
+        IReadOnlyList<AuthEventProperty>? auditProperties = null)
+        => new(true, user ?? throw new ArgumentNullException(nameof(user)), null, message, null, auditProperties ?? Array.Empty<AuthEventProperty>());

     /// <summary>
     /// Builds a failed verification result.
@@ -359,8 +368,9 @@ public sealed record AuthorityCredentialVerificationResult
     public static AuthorityCredentialVerificationResult Failure(
         AuthorityCredentialFailureCode failureCode,
         string? message = null,
-        TimeSpan? retryAfter = null)
-        => new(false, null, failureCode, message, retryAfter);
+        TimeSpan? retryAfter = null,
+        IReadOnlyList<AuthEventProperty>? auditProperties = null)
+        => new(false, null, failureCode, message, retryAfter, auditProperties ?? Array.Empty<AuthEventProperty>());
 }

 /// <summary>
@@ -11,4 +11,7 @@
     <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
   </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
+  </ItemGroup>
 </Project>
@@ -20,5 +20,7 @@ public static class AuthorityMongoDefaults
         public const string Scopes = "authority_scopes";
         public const string Tokens = "authority_tokens";
         public const string LoginAttempts = "authority_login_attempts";
+        public const string Revocations = "authority_revocations";
+        public const string RevocationState = "authority_revocation_state";
     }
 }
@@ -13,6 +13,16 @@ public sealed class AuthorityLoginAttemptDocument
     [BsonRepresentation(BsonType.ObjectId)]
     public string Id { get; set; } = ObjectId.GenerateNewId().ToString();

+    [BsonElement("eventType")]
+    public string EventType { get; set; } = "authority.unknown";
+
+    [BsonElement("outcome")]
+    public string Outcome { get; set; } = "unknown";
+
+    [BsonElement("correlationId")]
+    [BsonIgnoreIfNull]
+    public string? CorrelationId { get; set; }
+
     [BsonElement("subjectId")]
     [BsonIgnoreIfNull]
     public string? SubjectId { get; set; }
@@ -32,6 +42,9 @@ public sealed class AuthorityLoginAttemptDocument
     [BsonElement("successful")]
     public bool Successful { get; set; }

+    [BsonElement("scopes")]
+    public List<string> Scopes { get; set; } = new();
+
     [BsonElement("reason")]
     [BsonIgnoreIfNull]
     public string? Reason { get; set; }
@@ -40,6 +53,26 @@ public sealed class AuthorityLoginAttemptDocument
     [BsonIgnoreIfNull]
     public string? RemoteAddress { get; set; }

+    [BsonElement("properties")]
+    public List<AuthorityLoginAttemptPropertyDocument> Properties { get; set; } = new();
+
     [BsonElement("occurredAt")]
     public DateTimeOffset OccurredAt { get; set; } = DateTimeOffset.UtcNow;
 }
+
+/// <summary>
+/// Represents an additional classified property captured for an authority login attempt.
+/// </summary>
+[BsonIgnoreExtraElements]
+public sealed class AuthorityLoginAttemptPropertyDocument
+{
+    [BsonElement("name")]
+    public string Name { get; set; } = string.Empty;
+
+    [BsonElement("value")]
+    [BsonIgnoreIfNull]
+    public string? Value { get; set; }
+
+    [BsonElement("classification")]
+    public string Classification { get; set; } = "none";
+}
@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Authority.Storage.Mongo.Documents;

/// <summary>
/// Represents a revocation entry emitted by Authority (subject/client/token/key).
/// </summary>
[BsonIgnoreExtraElements]
public sealed class AuthorityRevocationDocument
{
    [BsonId]
    [BsonRepresentation(BsonType.ObjectId)]
    public string Id { get; set; } = ObjectId.GenerateNewId().ToString();

    [BsonElement("category")]
    public string Category { get; set; } = string.Empty;

    [BsonElement("revocationId")]
    public string RevocationId { get; set; } = string.Empty;

    [BsonElement("tokenType")]
    [BsonIgnoreIfNull]
    public string? TokenType { get; set; }

    [BsonElement("subjectId")]
    [BsonIgnoreIfNull]
    public string? SubjectId { get; set; }

    [BsonElement("clientId")]
    [BsonIgnoreIfNull]
    public string? ClientId { get; set; }

    [BsonElement("reason")]
    [BsonIgnoreIfNull]
    public string? Reason { get; set; }

    [BsonElement("reasonDescription")]
    [BsonIgnoreIfNull]
    public string? ReasonDescription { get; set; }

    [BsonElement("revokedAt")]
    public DateTimeOffset RevokedAt { get; set; }

    [BsonElement("effectiveAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? EffectiveAt { get; set; }

    [BsonElement("expiresAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? ExpiresAt { get; set; }

    [BsonElement("scopes")]
    [BsonIgnoreIfNull]
    public List<string>? Scopes { get; set; }

    [BsonElement("fingerprint")]
    [BsonIgnoreIfNull]
    public string? Fingerprint { get; set; }

    [BsonElement("metadata")]
    [BsonIgnoreIfNull]
    public Dictionary<string, string?>? Metadata { get; set; }

    [BsonElement("createdAt")]
    public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow;

    [BsonElement("updatedAt")]
    public DateTimeOffset UpdatedAt { get; set; } = DateTimeOffset.UtcNow;
}
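Editor's note: a minimal sketch (not part of this diff) of how a revocation entry could be composed and persisted through the store added later in this commit; the revocationStore variable and all example values are illustrative assumptions.

// Illustrative only: composing a token revocation entry as defined by this document.
var entry = new AuthorityRevocationDocument
{
    Category = "token",                        // subject/client/token/key
    RevocationId = "jti-3f9a",                 // hypothetical identifier for the revoked artifact
    TokenType = "refresh_token",
    ClientId = "feedser-cli",
    Reason = "compromised",
    RevokedAt = DateTimeOffset.UtcNow,
    ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
};

await revocationStore.UpsertAsync(entry, CancellationToken.None);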
@@ -0,0 +1,23 @@
using System;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

namespace StellaOps.Authority.Storage.Mongo.Documents;

[BsonIgnoreExtraElements]
public sealed class AuthorityRevocationExportStateDocument
{
    [BsonId]
    public string Id { get; set; } = "state";

    [BsonElement("sequence")]
    public long Sequence { get; set; }

    [BsonElement("lastBundleId")]
    [BsonIgnoreIfNull]
    public string? LastBundleId { get; set; }

    [BsonElement("lastIssuedAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? LastIssuedAt { get; set; }
}
@@ -1,3 +1,4 @@
using System.Collections.Generic;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;

@@ -51,4 +52,16 @@ public sealed class AuthorityTokenDocument
    [BsonElement("revokedAt")]
    [BsonIgnoreIfNull]
    public DateTimeOffset? RevokedAt { get; set; }

    [BsonElement("revokedReason")]
    [BsonIgnoreIfNull]
    public string? RevokedReason { get; set; }

    [BsonElement("revokedReasonDescription")]
    [BsonIgnoreIfNull]
    public string? RevokedReasonDescription { get; set; }

    [BsonElement("revokedMetadata")]
    [BsonIgnoreIfNull]
    public Dictionary<string, string?>? RevokedMetadata { get; set; }
}
@@ -86,17 +86,32 @@ public static class ServiceCollectionExtensions
            return database.GetCollection<AuthorityLoginAttemptDocument>(AuthorityMongoDefaults.Collections.LoginAttempts);
        });

        services.AddSingleton(static sp =>
        {
            var database = sp.GetRequiredService<IMongoDatabase>();
            return database.GetCollection<AuthorityRevocationDocument>(AuthorityMongoDefaults.Collections.Revocations);
        });

        services.AddSingleton(static sp =>
        {
            var database = sp.GetRequiredService<IMongoDatabase>();
            return database.GetCollection<AuthorityRevocationExportStateDocument>(AuthorityMongoDefaults.Collections.RevocationState);
        });

        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityUserCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityClientCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityScopeCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityTokenCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityLoginAttemptCollectionInitializer>();
        services.TryAddSingleton<IAuthorityCollectionInitializer, AuthorityRevocationCollectionInitializer>();

        services.TryAddSingleton<IAuthorityUserStore, AuthorityUserStore>();
        services.TryAddSingleton<IAuthorityClientStore, AuthorityClientStore>();
        services.TryAddSingleton<IAuthorityScopeStore, AuthorityScopeStore>();
        services.TryAddSingleton<IAuthorityTokenStore, AuthorityTokenStore>();
        services.TryAddSingleton<IAuthorityLoginAttemptStore, AuthorityLoginAttemptStore>();
        services.TryAddSingleton<IAuthorityRevocationStore, AuthorityRevocationStore>();
        services.TryAddSingleton<IAuthorityRevocationExportStateStore, AuthorityRevocationExportStateStore>();

        return services;
    }
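Editor's note: a sketch (not part of the commit) of how a consumer could take a dependency on the two newly registered stores; the RevocationBundleJob type is a hypothetical placeholder.

// Illustrative consumer: both stores are registered as singletons by the extension method above.
public sealed class RevocationBundleJob
{
    private readonly IAuthorityRevocationStore revocations;
    private readonly IAuthorityRevocationExportStateStore exportState;

    public RevocationBundleJob(
        IAuthorityRevocationStore revocations,
        IAuthorityRevocationExportStateStore exportState)
    {
        this.revocations = revocations;
        this.exportState = exportState;
    }
}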
@@ -18,7 +18,11 @@ internal sealed class AuthorityLoginAttemptCollectionInitializer : IAuthorityCol
                new CreateIndexOptions { Name = "login_attempt_subject_time" }),
            new CreateIndexModel<AuthorityLoginAttemptDocument>(
                Builders<AuthorityLoginAttemptDocument>.IndexKeys.Descending(a => a.OccurredAt),
                new CreateIndexOptions { Name = "login_attempt_time" }),
            new CreateIndexModel<AuthorityLoginAttemptDocument>(
                Builders<AuthorityLoginAttemptDocument>.IndexKeys
                    .Ascending(a => a.CorrelationId),
                new CreateIndexOptions { Name = "login_attempt_correlation", Sparse = true })
        };

        await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false);
@@ -0,0 +1,32 @@
using System;
using System.Collections.Generic;
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Initialization;

internal sealed class AuthorityRevocationCollectionInitializer : IAuthorityCollectionInitializer
{
    public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<AuthorityRevocationDocument>(AuthorityMongoDefaults.Collections.Revocations);
        var indexModels = new List<CreateIndexModel<AuthorityRevocationDocument>>
        {
            new(
                Builders<AuthorityRevocationDocument>.IndexKeys
                    .Ascending(d => d.Category)
                    .Ascending(d => d.RevocationId),
                new CreateIndexOptions<AuthorityRevocationDocument> { Name = "revocation_identity_unique", Unique = true }),
            new(
                Builders<AuthorityRevocationDocument>.IndexKeys.Ascending(d => d.RevokedAt),
                new CreateIndexOptions<AuthorityRevocationDocument> { Name = "revocation_revokedAt" }),
            new(
                Builders<AuthorityRevocationDocument>.IndexKeys.Ascending(d => d.ExpiresAt),
                new CreateIndexOptions<AuthorityRevocationDocument> { Name = "revocation_expiresAt" })
        };

        await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false);
    }
}
@@ -22,7 +22,12 @@ internal sealed class AuthorityTokenCollectionInitializer : IAuthorityCollection
                new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_subject" }),
            new(
                Builders<AuthorityTokenDocument>.IndexKeys.Ascending(t => t.ClientId),
                new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_client" }),
            new(
                Builders<AuthorityTokenDocument>.IndexKeys
                    .Ascending(t => t.Status)
                    .Ascending(t => t.RevokedAt),
                new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_status_revokedAt" })
        };

        var expirationFilter = Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ExpiresAt, true);
@@ -23,9 +23,10 @@ internal sealed class AuthorityLoginAttemptStore : IAuthorityLoginAttemptStore

        await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
        logger.LogDebug(
            "Recorded authority audit event {EventType} for subject '{SubjectId}' with outcome {Outcome}.",
            document.EventType,
            document.SubjectId ?? document.Username ?? "<unknown>",
            document.Outcome);
    }

    public async ValueTask<IReadOnlyList<AuthorityLoginAttemptDocument>> ListRecentAsync(string subjectId, int limit, CancellationToken cancellationToken)
@@ -0,0 +1,83 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

internal sealed class AuthorityRevocationExportStateStore : IAuthorityRevocationExportStateStore
{
    private const string StateId = "state";

    private readonly IMongoCollection<AuthorityRevocationExportStateDocument> collection;
    private readonly ILogger<AuthorityRevocationExportStateStore> logger;

    public AuthorityRevocationExportStateStore(
        IMongoCollection<AuthorityRevocationExportStateDocument> collection,
        ILogger<AuthorityRevocationExportStateStore> logger)
    {
        this.collection = collection ?? throw new ArgumentNullException(nameof(collection));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async ValueTask<AuthorityRevocationExportStateDocument?> GetAsync(CancellationToken cancellationToken)
    {
        var filter = Builders<AuthorityRevocationExportStateDocument>.Filter.Eq(d => d.Id, StateId);
        return await collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
    }

    public async ValueTask<AuthorityRevocationExportStateDocument> UpdateAsync(
        long expectedSequence,
        long newSequence,
        string bundleId,
        DateTimeOffset issuedAt,
        CancellationToken cancellationToken)
    {
        if (newSequence <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(newSequence), "Sequence must be positive.");
        }

        var filter = Builders<AuthorityRevocationExportStateDocument>.Filter.Eq(d => d.Id, StateId);

        if (expectedSequence > 0)
        {
            filter &= Builders<AuthorityRevocationExportStateDocument>.Filter.Eq(d => d.Sequence, expectedSequence);
        }
        else
        {
            filter &= Builders<AuthorityRevocationExportStateDocument>.Filter.Or(
                Builders<AuthorityRevocationExportStateDocument>.Filter.Exists(d => d.Sequence, false),
                Builders<AuthorityRevocationExportStateDocument>.Filter.Eq(d => d.Sequence, 0));
        }

        var update = Builders<AuthorityRevocationExportStateDocument>.Update
            .Set(d => d.Sequence, newSequence)
            .Set(d => d.LastBundleId, bundleId)
            .Set(d => d.LastIssuedAt, issuedAt);

        var options = new FindOneAndUpdateOptions<AuthorityRevocationExportStateDocument>
        {
            IsUpsert = expectedSequence == 0,
            ReturnDocument = ReturnDocument.After
        };

        try
        {
            var result = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false);
            if (result is null)
            {
                throw new InvalidOperationException("Revocation export state update conflict.");
            }

            logger.LogDebug("Updated revocation export state to sequence {Sequence}.", result.Sequence);
            return result;
        }
        catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "DuplicateKey", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Revocation export state update conflict due to concurrent writer.", ex);
        }
    }
}
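Editor's note: the store treats the stored sequence as an optimistic-concurrency token. A minimal caller sketch (not part of the diff); the exportStateStore variable and the bundle-id naming scheme are illustrative assumptions.

// Read the current export state (null before any bundle has been issued),
// then advance the sequence; a concurrent writer causes UpdateAsync to throw.
var state = await exportStateStore.GetAsync(CancellationToken.None);
var expected = state?.Sequence ?? 0;   // 0 selects the upsert path in UpdateAsync
var next = expected + 1;

var updated = await exportStateStore.UpdateAsync(
    expectedSequence: expected,
    newSequence: next,
    bundleId: $"revocation-bundle-{next:D6}",   // illustrative naming only
    issuedAt: DateTimeOffset.UtcNow,
    cancellationToken: CancellationToken.None);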
@@ -0,0 +1,143 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

internal sealed class AuthorityRevocationStore : IAuthorityRevocationStore
{
    private readonly IMongoCollection<AuthorityRevocationDocument> collection;
    private readonly ILogger<AuthorityRevocationStore> logger;

    public AuthorityRevocationStore(
        IMongoCollection<AuthorityRevocationDocument> collection,
        ILogger<AuthorityRevocationStore> logger)
    {
        this.collection = collection ?? throw new ArgumentNullException(nameof(collection));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(document);

        if (string.IsNullOrWhiteSpace(document.Category))
        {
            throw new ArgumentException("Revocation category is required.", nameof(document));
        }

        if (string.IsNullOrWhiteSpace(document.RevocationId))
        {
            throw new ArgumentException("Revocation identifier is required.", nameof(document));
        }

        document.Category = document.Category.Trim();
        document.RevocationId = document.RevocationId.Trim();
        document.Scopes = NormalizeScopes(document.Scopes);
        document.Metadata = NormalizeMetadata(document.Metadata);

        var filter = Builders<AuthorityRevocationDocument>.Filter.And(
            Builders<AuthorityRevocationDocument>.Filter.Eq(d => d.Category, document.Category),
            Builders<AuthorityRevocationDocument>.Filter.Eq(d => d.RevocationId, document.RevocationId));

        var now = DateTimeOffset.UtcNow;
        document.UpdatedAt = now;

        var existing = await collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        if (existing is null)
        {
            document.CreatedAt = now;
        }
        else
        {
            document.Id = existing.Id;
            document.CreatedAt = existing.CreatedAt;
        }

        await collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
        logger.LogDebug("Upserted Authority revocation entry {Category}:{RevocationId}.", document.Category, document.RevocationId);
    }

    public async ValueTask<bool> RemoveAsync(string category, string revocationId, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(category) || string.IsNullOrWhiteSpace(revocationId))
        {
            return false;
        }

        var filter = Builders<AuthorityRevocationDocument>.Filter.And(
            Builders<AuthorityRevocationDocument>.Filter.Eq(d => d.Category, category.Trim()),
            Builders<AuthorityRevocationDocument>.Filter.Eq(d => d.RevocationId, revocationId.Trim()));

        var result = await collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
        if (result.DeletedCount > 0)
        {
            logger.LogInformation("Removed Authority revocation entry {Category}:{RevocationId}.", category, revocationId);
            return true;
        }

        return false;
    }

    public async ValueTask<IReadOnlyList<AuthorityRevocationDocument>> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
    {
        var filter = Builders<AuthorityRevocationDocument>.Filter.Or(
            Builders<AuthorityRevocationDocument>.Filter.Eq(d => d.ExpiresAt, null),
            Builders<AuthorityRevocationDocument>.Filter.Gt(d => d.ExpiresAt, asOf));

        var documents = await collection
            .Find(filter)
            .Sort(Builders<AuthorityRevocationDocument>.Sort.Ascending(d => d.Category).Ascending(d => d.RevocationId))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents;
    }

    private static List<string>? NormalizeScopes(List<string>? scopes)
    {
        if (scopes is null || scopes.Count == 0)
        {
            return null;
        }

        var distinct = scopes
            .Where(scope => !string.IsNullOrWhiteSpace(scope))
            .Select(scope => scope.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(scope => scope, StringComparer.Ordinal)
            .ToList();

        return distinct.Count == 0 ? null : distinct;
    }

    private static Dictionary<string, string?>? NormalizeMetadata(Dictionary<string, string?>? metadata)
    {
        if (metadata is null || metadata.Count == 0)
        {
            return null;
        }

        var result = new SortedDictionary<string, string?>(StringComparer.OrdinalIgnoreCase);
        foreach (var pair in metadata)
        {
            if (string.IsNullOrWhiteSpace(pair.Key))
            {
                continue;
            }

            result[pair.Key.Trim()] = pair.Value;
        }

        return result.Count == 0 ? null : new Dictionary<string, string?>(result, StringComparer.OrdinalIgnoreCase);
    }
}
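Editor's note: a sketch (not part of the diff) of how an export path could combine GetActiveAsync with the export state; the revocationStore variable and the console output are illustrative assumptions.

// Snapshot every revocation still in force at the time of export.
var asOf = DateTimeOffset.UtcNow;
var active = await revocationStore.GetActiveAsync(asOf, CancellationToken.None);

foreach (var revocation in active)
{
    // Entries come back sorted by (Category, RevocationId), matching the unique index above.
    Console.WriteLine($"{revocation.Category}:{revocation.RevocationId} revoked at {revocation.RevokedAt:O}");
}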
@@ -1,3 +1,4 @@
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Authority.Storage.Mongo.Documents;
@@ -51,7 +52,14 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore
            .ConfigureAwait(false);
    }

    public async ValueTask UpdateStatusAsync(
        string tokenId,
        string status,
        DateTimeOffset? revokedAt,
        string? reason,
        string? reasonDescription,
        IReadOnlyDictionary<string, string?>? metadata,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(tokenId))
        {
@@ -65,7 +73,10 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore

        var update = Builders<AuthorityTokenDocument>.Update
            .Set(t => t.Status, status)
            .Set(t => t.RevokedAt, revokedAt)
            .Set(t => t.RevokedReason, reason)
            .Set(t => t.RevokedReasonDescription, reasonDescription)
            .Set(t => t.RevokedMetadata, metadata is null ? null : new Dictionary<string, string?>(metadata, StringComparer.OrdinalIgnoreCase));

        var result = await collection.UpdateOneAsync(
            Builders<AuthorityTokenDocument>.Filter.Eq(t => t.TokenId, tokenId.Trim()),
@@ -90,4 +101,24 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore

        return result.DeletedCount;
    }

    public async ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken)
    {
        var filter = Builders<AuthorityTokenDocument>.Filter.Eq(t => t.Status, "revoked");

        if (issuedAfter is DateTimeOffset threshold)
        {
            filter = Builders<AuthorityTokenDocument>.Filter.And(
                filter,
                Builders<AuthorityTokenDocument>.Filter.Gt(t => t.RevokedAt, threshold));
        }

        var documents = await collection
            .Find(filter)
            .Sort(Builders<AuthorityTokenDocument>.Sort.Ascending(t => t.RevokedAt).Ascending(t => t.TokenId))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents;
    }
}
@@ -0,0 +1,18 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

public interface IAuthorityRevocationExportStateStore
{
    ValueTask<AuthorityRevocationExportStateDocument?> GetAsync(CancellationToken cancellationToken);

    ValueTask<AuthorityRevocationExportStateDocument> UpdateAsync(
        long expectedSequence,
        long newSequence,
        string bundleId,
        DateTimeOffset issuedAt,
        CancellationToken cancellationToken);
}
@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Storage.Mongo.Documents;

namespace StellaOps.Authority.Storage.Mongo.Stores;

public interface IAuthorityRevocationStore
{
    ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken);

    ValueTask<bool> RemoveAsync(string category, string revocationId, CancellationToken cancellationToken);

    ValueTask<IReadOnlyList<AuthorityRevocationDocument>> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken);
}
@@ -10,7 +10,16 @@ public interface IAuthorityTokenStore

    ValueTask<AuthorityTokenDocument?> FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken);

    ValueTask UpdateStatusAsync(
        string tokenId,
        string status,
        DateTimeOffset? revokedAt,
        string? reason,
        string? reasonDescription,
        IReadOnlyDictionary<string, string?>? metadata,
        CancellationToken cancellationToken);

    ValueTask<long> DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken);

    ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken);
}
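Editor's note: a sketch of revoking a token through the widened UpdateStatusAsync signature (not part of the diff); the tokenStore variable and all values are illustrative assumptions.

// Mark a token as revoked and record why; metadata keys are free-form strings.
await tokenStore.UpdateStatusAsync(
    tokenId: "jti-1234",
    status: "revoked",
    revokedAt: DateTimeOffset.UtcNow,
    reason: "compromised",
    reasonDescription: "Reported by the client owner.",
    metadata: new Dictionary<string, string?> { ["ticket"] = "OPS-42" },
    cancellationToken: CancellationToken.None);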
@@ -1,3 +1,5 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Security.Claims;
using Microsoft.Extensions.Configuration;
@@ -12,6 +14,8 @@ using StellaOps.Authority.OpenIddict.Handlers;
using StellaOps.Authority.Plugins.Abstractions;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.RateLimiting;
using StellaOps.Cryptography.Audit;
using Xunit;
using static StellaOps.Authority.Tests.OpenIddict.TestHelpers;

@@ -30,7 +34,14 @@ public class ClientCredentialsHandlersTests
            allowedScopes: "jobs:read");

        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
        var handler = new ValidateClientCredentialsHandler(
            new TestClientStore(clientDocument),
            registry,
            TestActivitySource,
            new TestAuthEventSink(),
            new TestRateLimiterMetadataAccessor(),
            TimeProvider.System,
            NullLogger<ValidateClientCredentialsHandler>.Instance);

        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write");
        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
@@ -51,7 +62,14 @@ public class ClientCredentialsHandlersTests
            allowedScopes: "jobs:read jobs:trigger");

        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
        var handler = new ValidateClientCredentialsHandler(
            new TestClientStore(clientDocument),
            registry,
            TestActivitySource,
            new TestAuthEventSink(),
            new TestRateLimiterMetadataAccessor(),
            TimeProvider.System,
            NullLogger<ValidateClientCredentialsHandler>.Instance);

        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
@@ -78,7 +96,17 @@ public class ClientCredentialsHandlersTests
        var descriptor = CreateDescriptor(clientDocument);
        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor);
        var tokenStore = new TestTokenStore();
        var authSink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var handler = new HandleClientCredentialsHandler(
            registry,
            tokenStore,
            TimeProvider.System,
            TestActivitySource,
            authSink,
            metadataAccessor,
            NullLogger<HandleClientCredentialsHandler>.Instance);
        var persistHandler = new PersistTokensHandler(tokenStore, TimeProvider.System, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);

        var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger");
        transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30);
@@ -93,12 +121,23 @@ public class ClientCredentialsHandlersTests
        Assert.True(context.IsRequestHandled);
        Assert.NotNull(context.Principal);

        Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success);

        var identityProviderClaim = context.Principal?.GetClaim(StellaOpsClaimTypes.IdentityProvider);
        Assert.Equal(clientDocument.Plugin, identityProviderClaim);

        var principal = context.Principal ?? throw new InvalidOperationException("Principal missing");
        var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId);
        Assert.False(string.IsNullOrWhiteSpace(tokenId));

        var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
        {
            Principal = principal,
            AccessTokenPrincipal = principal
        };

        await persistHandler.HandleAsync(signInContext);

        var persisted = Assert.IsType<AuthorityTokenDocument>(tokenStore.Inserted);
        Assert.Equal(tokenId, persisted.TokenId);
        Assert.Equal(clientDocument.ClientId, persisted.ClientId);
@@ -236,11 +275,14 @@ internal sealed class TestTokenStore : IAuthorityTokenStore
    public ValueTask<AuthorityTokenDocument?> FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken)
        => ValueTask.FromResult<AuthorityTokenDocument?>(null);

    public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary<string, string?>? metadata, CancellationToken cancellationToken)
        => ValueTask.CompletedTask;

    public ValueTask<long> DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken)
        => ValueTask.FromResult(0L);

    public ValueTask<IReadOnlyList<AuthorityTokenDocument>> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken)
        => ValueTask.FromResult<IReadOnlyList<AuthorityTokenDocument>>(Array.Empty<AuthorityTokenDocument>());
}

internal sealed class TestClaimsEnricher : IClaimsEnricher
@@ -328,6 +370,30 @@ internal sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin
        => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy());
}

internal sealed class TestAuthEventSink : IAuthEventSink
{
    public List<AuthEventRecord> Events { get; } = new();

    public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
    {
        Events.Add(record);
        return ValueTask.CompletedTask;
    }
}

internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor
{
    private readonly AuthorityRateLimiterMetadata metadata = new();

    public AuthorityRateLimiterMetadata? GetMetadata() => metadata;

    public void SetClientId(string? clientId) => metadata.ClientId = clientId;

    public void SetSubjectId(string? subjectId) => metadata.SubjectId = subjectId;

    public void SetTag(string name, string? value) => metadata.SetTag(name, value);
}

internal static class TestHelpers
{
    public static AuthorityClientDocument CreateClient(
@@ -0,0 +1,196 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using OpenIddict.Server.AspNetCore;
using StellaOps.Authority.OpenIddict;
using StellaOps.Authority.OpenIddict.Handlers;
using StellaOps.Authority.Plugins.Abstractions;
using StellaOps.Authority.RateLimiting;
using StellaOps.Cryptography.Audit;
using Xunit;

namespace StellaOps.Authority.Tests.OpenIddict;

public class PasswordGrantHandlersTests
{
    private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests");

    [Fact]
    public async Task HandlePasswordGrant_EmitsSuccessAuditEvent()
    {
        var sink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var registry = CreateRegistry(new SuccessCredentialStore());
        var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<ValidatePasswordGrantHandler>.Instance);
        var handle = new HandlePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<HandlePasswordGrantHandler>.Instance);

        var transaction = CreatePasswordTransaction("alice", "Password1!");

        await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));
        await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction));

        Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success);
    }

    [Fact]
    public async Task HandlePasswordGrant_EmitsFailureAuditEvent()
    {
        var sink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var registry = CreateRegistry(new FailureCredentialStore());
        var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<ValidatePasswordGrantHandler>.Instance);
        var handle = new HandlePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<HandlePasswordGrantHandler>.Instance);

        var transaction = CreatePasswordTransaction("alice", "BadPassword!");

        await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));
        await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction));

        Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure);
    }

    [Fact]
    public async Task HandlePasswordGrant_EmitsLockoutAuditEvent()
    {
        var sink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var registry = CreateRegistry(new LockoutCredentialStore());
        var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<ValidatePasswordGrantHandler>.Instance);
        var handle = new HandlePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger<HandlePasswordGrantHandler>.Instance);

        var transaction = CreatePasswordTransaction("alice", "Locked!");

        await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));
        await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction));

        Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut);
    }

    private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store)
    {
        var plugin = new StubIdentityProviderPlugin("stub", store);
        return new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
    }

    private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password)
    {
        var request = new OpenIddictRequest
        {
            GrantType = OpenIddictConstants.GrantTypes.Password,
            Username = username,
            Password = password
        };

        return new OpenIddictServerTransaction
        {
            EndpointType = OpenIddictServerEndpointType.Token,
            Options = new OpenIddictServerOptions(),
            Request = request
        };
    }

    private sealed class StubIdentityProviderPlugin : IIdentityProviderPlugin
    {
        public StubIdentityProviderPlugin(string name, IUserCredentialStore store)
        {
            Name = name;
            Type = "stub";
            var manifest = new AuthorityPluginManifest(
                name,
                "stub",
                enabled: true,
                version: null,
                description: null,
                capabilities: new[] { AuthorityPluginCapabilities.Password },
                configuration: new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase),
                configPath: $"{name}.yaml");
            Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build());
            Credentials = store;
            ClaimsEnricher = new NoopClaimsEnricher();
            Capabilities = new AuthorityIdentityProviderCapabilities(SupportsPassword: true, SupportsMfa: false, SupportsClientProvisioning: false);
        }

        public string Name { get; }
        public string Type { get; }
        public AuthorityPluginContext Context { get; }
        public IUserCredentialStore Credentials { get; }
        public IClaimsEnricher ClaimsEnricher { get; }
        public IClientProvisioningStore? ClientProvisioning => null;
        public AuthorityIdentityProviderCapabilities Capabilities { get; }

        public ValueTask<AuthorityPluginHealthResult> CheckHealthAsync(CancellationToken cancellationToken)
            => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy());
    }

    private sealed class NoopClaimsEnricher : IClaimsEnricher
    {
        public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;
    }

    private sealed class SuccessCredentialStore : IUserCredentialStore
    {
        public ValueTask<AuthorityCredentialVerificationResult> VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken)
        {
            var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false);
            return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor));
        }

        public ValueTask<AuthorityPluginOperationResult<AuthorityUserDescriptor>> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken)
            => throw new NotImplementedException();

        public ValueTask<AuthorityUserDescriptor?> FindBySubjectAsync(string subjectId, CancellationToken cancellationToken)
            => ValueTask.FromResult<AuthorityUserDescriptor?>(null);
    }

    private sealed class FailureCredentialStore : IUserCredentialStore
    {
        public ValueTask<AuthorityCredentialVerificationResult> VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken)
            => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, "Invalid username or password."));

        public ValueTask<AuthorityPluginOperationResult<AuthorityUserDescriptor>> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken)
            => throw new NotImplementedException();

        public ValueTask<AuthorityUserDescriptor?> FindBySubjectAsync(string subjectId, CancellationToken cancellationToken)
            => ValueTask.FromResult<AuthorityUserDescriptor?>(null);
    }

    private sealed class LockoutCredentialStore : IUserCredentialStore
    {
        private static readonly TimeProvider timeProvider = TimeProvider.System;

        public ValueTask<AuthorityCredentialVerificationResult> VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken)
        {
            var retry = TimeSpan.FromMinutes(5);
            var properties = new[]
            {
                new AuthEventProperty
                {
                    Name = "plugin.lockout_until",
                    Value = ClassifiedString.Public(timeProvider.GetUtcNow().Add(retry).ToString("O", CultureInfo.InvariantCulture))
                }
            };

            return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(
                AuthorityCredentialFailureCode.LockedOut,
                "Account locked.",
                retryAfter: retry,
                auditProperties: properties));
        }

        public ValueTask<AuthorityPluginOperationResult<AuthorityUserDescriptor>> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken)
            => throw new NotImplementedException();

        public ValueTask<AuthorityUserDescriptor?> FindBySubjectAsync(string subjectId, CancellationToken cancellationToken)
            => ValueTask.FromResult<AuthorityUserDescriptor?>(null);
    }
}
@@ -17,6 +17,8 @@ using StellaOps.Authority.Storage.Mongo.Extensions;
using StellaOps.Authority.Storage.Mongo.Initialization;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Feedser.Testing;
using StellaOps.Authority.RateLimiting;
using StellaOps.Cryptography.Audit;
using Xunit;

namespace StellaOps.Authority.Tests.OpenIddict;
@@ -55,7 +57,10 @@ public sealed class TokenPersistenceIntegrationTests
            clientDescriptor: TestHelpers.CreateDescriptor(clientDocument));

        var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, NullLogger<ValidateClientCredentialsHandler>.Instance);
        var authSink = new TestAuthEventSink();
        var metadataAccessor = new TestRateLimiterMetadataAccessor();
        var handleHandler = new HandleClientCredentialsHandler(registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger<HandleClientCredentialsHandler>.Instance);
        var persistHandler = new PersistTokensHandler(tokenStore, clock, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);

        var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger");
        transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(15);
@@ -72,6 +77,14 @@ public sealed class TokenPersistenceIntegrationTests
        var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId);
        Assert.False(string.IsNullOrWhiteSpace(tokenId));

        var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
        {
            Principal = principal,
            AccessTokenPrincipal = principal
        };

        await persistHandler.HandleAsync(signInContext);

        var stored = await tokenStore.FindByTokenIdAsync(tokenId!, CancellationToken.None);
        Assert.NotNull(stored);
        Assert.Equal(clientDocument.ClientId, stored!.ClientId);
@@ -133,7 +146,7 @@ public sealed class TokenPersistenceIntegrationTests
        await tokenStore.InsertAsync(refreshToken, CancellationToken.None);

        var revokedAt = now.AddMinutes(1);
        await tokenStore.UpdateStatusAsync(revokedTokenId, "revoked", revokedAt, "manual", null, null, CancellationToken.None);

        var handler = new ValidateAccessTokenHandler(
            tokenStore,
@@ -174,6 +187,7 @@ public sealed class TokenPersistenceIntegrationTests
        Assert.NotNull(stored);
        Assert.Equal("revoked", stored!.Status);
        Assert.Equal(revokedAt, stored.RevokedAt);
        Assert.Equal("manual", stored.RevokedReason);
    }

    private async Task ResetCollectionsAsync()
@@ -206,3 +220,27 @@ public sealed class TokenPersistenceIntegrationTests
        return provider;
    }
}

internal sealed class TestAuthEventSink : IAuthEventSink
{
    public List<AuthEventRecord> Records { get; } = new();

    public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
    {
        Records.Add(record);
        return ValueTask.CompletedTask;
    }
}

internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor
{
    private readonly AuthorityRateLimiterMetadata metadata = new();

    public AuthorityRateLimiterMetadata? GetMetadata() => metadata;

    public void SetClientId(string? clientId) => metadata.ClientId = string.IsNullOrWhiteSpace(clientId) ? null : clientId;

    public void SetSubjectId(string? subjectId) => metadata.SubjectId = string.IsNullOrWhiteSpace(subjectId) ? null : subjectId;

    public void SetTag(string name, string? value) => metadata.SetTag(name, value);
}
@@ -0,0 +1,137 @@
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.FileProviders;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Signing;
using StellaOps.Configuration;
using StellaOps.Cryptography;
using StellaOps.Cryptography.DependencyInjection;
using Xunit;

namespace StellaOps.Authority.Tests.Signing;

public sealed class AuthoritySigningKeyManagerTests
{
    [Fact]
    public void Rotate_ReplacesActiveKeyAndRetiresPreviousKey()
    {
        var tempDir = Directory.CreateTempSubdirectory("authority-signing-tests").FullName;
        var key1Relative = "key-1.pem";
        var key2Relative = "key-2.pem";

        try
        {
            CreateEcPrivateKey(Path.Combine(tempDir, key1Relative));

            var options = new StellaOpsAuthorityOptions
            {
                Issuer = new Uri("https://authority.test"),
                Storage = { ConnectionString = "mongodb://localhost/test" },
                Signing =
                {
                    Enabled = true,
                    ActiveKeyId = "key-1",
                    KeyPath = key1Relative,
                    Algorithm = SignatureAlgorithms.Es256,
                    KeySource = "file",
                    Provider = "default"
                }
            };

            using var provider = BuildProvider(tempDir, options);
            var manager = provider.GetRequiredService<AuthoritySigningKeyManager>();
            var jwksService = provider.GetRequiredService<AuthorityJwksService>();

            var initial = jwksService.Build();
            var initialKey = Assert.Single(initial.Keys);
            Assert.Equal("key-1", initialKey.Kid);
            Assert.Equal(AuthoritySigningKeyStatus.Active, initialKey.Status);

            CreateEcPrivateKey(Path.Combine(tempDir, key2Relative));

            var result = manager.Rotate(new SigningRotationRequest
            {
                KeyId = "key-2",
                Location = key2Relative
            });

            Assert.Equal("key-2", result.ActiveKeyId);
            Assert.Equal("key-1", result.PreviousKeyId);
            Assert.Contains("key-1", result.RetiredKeyIds);

            Assert.Equal("key-2", options.Signing.ActiveKeyId);
            var additional = Assert.Single(options.Signing.AdditionalKeys);
            Assert.Equal("key-1", additional.KeyId);
            Assert.Equal(key1Relative, additional.Path);
            Assert.Equal("file", additional.Source);

            var afterRotation = jwksService.Build();
            Assert.Equal(2, afterRotation.Keys.Count);

            var activeEntry = Assert.Single(afterRotation.Keys.Where(key => key.Status == AuthoritySigningKeyStatus.Active));
            Assert.Equal("key-2", activeEntry.Kid);

            var retiredEntry = Assert.Single(afterRotation.Keys.Where(key => key.Status == AuthoritySigningKeyStatus.Retired));
            Assert.Equal("key-1", retiredEntry.Kid);
        }
        finally
        {
            try
            {
                Directory.Delete(tempDir, recursive: true);
            }
            catch
            {
                // ignore cleanup failures
            }
        }
    }

    private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options)
    {
        var services = new ServiceCollection();
        services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug));
        services.AddSingleton<IHostEnvironment>(new TestHostEnvironment(basePath));
        services.AddSingleton(options);
        services.AddSingleton<IOptions<StellaOpsAuthorityOptions>>(Options.Create(options));
        services.AddStellaOpsCrypto();
        services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>());
        services.AddSingleton<AuthoritySigningKeyManager>();
        services.AddSingleton<AuthorityJwksService>();

        return services.BuildServiceProvider();
    }

    private static void CreateEcPrivateKey(string path)
    {
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var pem = ecdsa.ExportECPrivateKeyPem();
        File.WriteAllText(path, pem);
    }

    private sealed class TestHostEnvironment : IHostEnvironment
    {
        public TestHostEnvironment(string contentRoot)
        {
            ContentRootPath = contentRoot;
            ContentRootFileProvider = new PhysicalFileProvider(contentRoot);
            EnvironmentName = Environments.Development;
            ApplicationName = "StellaOps.Authority.Tests";
        }

        public string EnvironmentName { get; set; }

        public string ApplicationName { get; set; }

        public string ContentRootPath { get; set; }

        public IFileProvider ContentRootFileProvider { get; set; }
    }
}

@@ -53,6 +53,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", ".
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Tests", "..\StellaOps.Cryptography.Tests\StellaOps.Cryptography.Tests.csproj", "{84AEC0C8-EE60-4AB1-A59B-B8E7CCFC0A25}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "..\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}"
+EndProject
 Global
 GlobalSection(SolutionConfigurationPlatforms) = preSolution
 Debug|Any CPU = Debug|Any CPU
@@ -363,6 +365,18 @@ Global
 {84AEC0C8-EE60-4AB1-A59B-B8E7CCFC0A25}.Release|x64.Build.0 = Release|Any CPU
 {84AEC0C8-EE60-4AB1-A59B-B8E7CCFC0A25}.Release|x86.ActiveCfg = Release|Any CPU
 {84AEC0C8-EE60-4AB1-A59B-B8E7CCFC0A25}.Release|x86.Build.0 = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|x64.ActiveCfg = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|x64.Build.0 = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|x86.ActiveCfg = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Debug|x86.Build.0 = Debug|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|Any CPU.Build.0 = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x64.ActiveCfg = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x64.Build.0 = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x86.ActiveCfg = Release|Any CPU
+{159A9B4E-61F8-4A82-8F6E-D01E3FB7E18F}.Release|x86.Build.0 = Release|Any CPU
 EndGlobalSection
 GlobalSection(SolutionProperties) = preSolution
 HideSolutionNode = FALSE

@@ -0,0 +1,230 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Cryptography.Audit;

namespace StellaOps.Authority.Audit;

internal sealed class AuthorityAuditSink : IAuthEventSink
{
    private static readonly StringComparer OrdinalComparer = StringComparer.Ordinal;

    private readonly IAuthorityLoginAttemptStore loginAttemptStore;
    private readonly ILogger<AuthorityAuditSink> logger;

    public AuthorityAuditSink(
        IAuthorityLoginAttemptStore loginAttemptStore,
        ILogger<AuthorityAuditSink> logger)
    {
        this.loginAttemptStore = loginAttemptStore ?? throw new ArgumentNullException(nameof(loginAttemptStore));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var logState = BuildLogScope(record);
        using (logger.BeginScope(logState))
        {
            logger.LogInformation(
                "Authority audit event {EventType} emitted with outcome {Outcome}.",
                record.EventType,
                NormalizeOutcome(record.Outcome));
        }

        var document = MapToDocument(record);
        await loginAttemptStore.InsertAsync(document, cancellationToken).ConfigureAwait(false);
    }

    private static AuthorityLoginAttemptDocument MapToDocument(AuthEventRecord record)
    {
        var document = new AuthorityLoginAttemptDocument
        {
            EventType = record.EventType,
            Outcome = NormalizeOutcome(record.Outcome),
            CorrelationId = Normalize(record.CorrelationId),
            SubjectId = record.Subject?.SubjectId.Value,
            Username = record.Subject?.Username.Value,
            ClientId = record.Client?.ClientId.Value,
            Plugin = record.Client?.Provider.Value,
            Successful = record.Outcome == AuthEventOutcome.Success,
            Reason = Normalize(record.Reason),
            RemoteAddress = record.Network?.RemoteAddress.Value ?? record.Network?.ForwardedFor.Value,
            OccurredAt = record.OccurredAt
        };

        if (record.Scopes is { Count: > 0 })
        {
            document.Scopes = record.Scopes
                .Where(static scope => !string.IsNullOrWhiteSpace(scope))
                .Select(static scope => scope.Trim())
                .Where(static scope => scope.Length > 0)
                .Distinct(OrdinalComparer)
                .OrderBy(static scope => scope, OrdinalComparer)
                .ToList();
        }

        var properties = new List<AuthorityLoginAttemptPropertyDocument>();

        if (record.Subject is { } subject)
        {
            AddProperty(properties, "subject.display_name", subject.DisplayName);
            AddProperty(properties, "subject.realm", subject.Realm);

            if (subject.Attributes is { Count: > 0 })
            {
                foreach (var attribute in subject.Attributes)
                {
                    AddProperty(properties, $"subject.attr.{attribute.Name}", attribute.Value);
                }
            }
        }

        if (record.Client is { } client)
        {
            AddProperty(properties, "client.name", client.Name);
        }

        if (record.Network is { } network)
        {
            AddProperty(properties, "network.remote", network.RemoteAddress);
            AddProperty(properties, "network.forwarded_for", network.ForwardedFor);
            AddProperty(properties, "network.user_agent", network.UserAgent);
        }

        if (record.Properties is { Count: > 0 })
        {
            foreach (var property in record.Properties)
            {
                AddProperty(properties, property.Name, property.Value);
            }
        }

        if (properties.Count > 0)
        {
            document.Properties = properties;
        }

        return document;
    }

    private static IReadOnlyCollection<KeyValuePair<string, object?>> BuildLogScope(AuthEventRecord record)
    {
        var entries = new List<KeyValuePair<string, object?>>
        {
            new("audit.event_type", record.EventType),
            new("audit.outcome", NormalizeOutcome(record.Outcome)),
            new("audit.timestamp", record.OccurredAt.ToString("O", CultureInfo.InvariantCulture))
        };

        AddValue(entries, "audit.correlation_id", Normalize(record.CorrelationId));
        AddValue(entries, "audit.reason", Normalize(record.Reason));

        if (record.Subject is { } subject)
        {
            AddClassified(entries, "audit.subject.id", subject.SubjectId);
            AddClassified(entries, "audit.subject.username", subject.Username);
            AddClassified(entries, "audit.subject.display_name", subject.DisplayName);
            AddClassified(entries, "audit.subject.realm", subject.Realm);
        }

        if (record.Client is { } client)
        {
            AddClassified(entries, "audit.client.id", client.ClientId);
            AddClassified(entries, "audit.client.name", client.Name);
            AddClassified(entries, "audit.client.provider", client.Provider);
        }

        if (record.Network is { } network)
        {
            AddClassified(entries, "audit.network.remote", network.RemoteAddress);
            AddClassified(entries, "audit.network.forwarded_for", network.ForwardedFor);
            AddClassified(entries, "audit.network.user_agent", network.UserAgent);
        }

        if (record.Scopes is { Count: > 0 })
        {
            entries.Add(new KeyValuePair<string, object?>(
                "audit.scopes",
                record.Scopes.Where(static scope => !string.IsNullOrWhiteSpace(scope)).ToArray()));
        }

        if (record.Properties is { Count: > 0 })
        {
            foreach (var property in record.Properties)
            {
                AddClassified(entries, $"audit.property.{property.Name}", property.Value);
            }
        }

        return entries;
    }

    private static void AddProperty(ICollection<AuthorityLoginAttemptPropertyDocument> properties, string name, ClassifiedString value)
    {
        if (!value.HasValue || string.IsNullOrWhiteSpace(name))
        {
            return;
        }

        properties.Add(new AuthorityLoginAttemptPropertyDocument
        {
            Name = name,
            Value = value.Value,
            Classification = NormalizeClassification(value.Classification)
        });
    }

    private static void AddValue(ICollection<KeyValuePair<string, object?>> entries, string key, string? value)
    {
        if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value))
        {
            return;
        }

        entries.Add(new KeyValuePair<string, object?>(key, value));
    }

    private static void AddClassified(ICollection<KeyValuePair<string, object?>> entries, string key, ClassifiedString value)
    {
        if (!value.HasValue || string.IsNullOrWhiteSpace(key))
        {
            return;
        }

        entries.Add(new KeyValuePair<string, object?>(key, new
        {
            value.Value,
            classification = NormalizeClassification(value.Classification)
        }));
    }

    private static string NormalizeOutcome(AuthEventOutcome outcome)
        => outcome switch
        {
            AuthEventOutcome.Success => "success",
            AuthEventOutcome.Failure => "failure",
            AuthEventOutcome.LockedOut => "locked_out",
            AuthEventOutcome.RateLimited => "rate_limited",
            AuthEventOutcome.Error => "error",
            _ => "unknown"
        };

    private static string NormalizeClassification(AuthEventDataClassification classification)
        => classification switch
        {
            AuthEventDataClassification.Personal => "personal",
            AuthEventDataClassification.Sensitive => "sensitive",
            _ => "none"
        };

    private static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}

@@ -8,4 +8,11 @@ internal static class AuthorityOpenIddictConstants
 internal const string ClientProviderTransactionProperty = "authority:client_provider";
 internal const string ClientGrantedScopesProperty = "authority:client_granted_scopes";
 internal const string TokenTransactionProperty = "authority:token";
+internal const string AuditCorrelationProperty = "authority:audit_correlation_id";
+internal const string AuditClientIdProperty = "authority:audit_client_id";
+internal const string AuditProviderProperty = "authority:audit_provider";
+internal const string AuditConfidentialProperty = "authority:audit_confidential";
+internal const string AuditRequestedScopesProperty = "authority:audit_requested_scopes";
+internal const string AuditGrantedScopesProperty = "authority:audit_granted_scopes";
+internal const string AuditInvalidScopeProperty = "authority:audit_invalid_scope";
 }

@@ -1,4 +1,7 @@
+using System.Collections.Generic;
+using System.Collections.Immutable;
 using System.Diagnostics;
+using System.Globalization;
 using System.Linq;
 using System.Security.Claims;
 using System.Security.Cryptography;
@@ -12,6 +15,8 @@ using StellaOps.Authority.OpenIddict;
 using StellaOps.Authority.Plugins.Abstractions;
 using StellaOps.Authority.Storage.Mongo.Documents;
 using StellaOps.Authority.Storage.Mongo.Stores;
+using StellaOps.Authority.RateLimiting;
+using StellaOps.Cryptography.Audit;

 namespace StellaOps.Authority.OpenIddict.Handlers;

@@ -20,17 +25,26 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 private readonly IAuthorityClientStore clientStore;
 private readonly IAuthorityIdentityProviderRegistry registry;
 private readonly ActivitySource activitySource;
+private readonly IAuthEventSink auditSink;
+private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
+private readonly TimeProvider timeProvider;
 private readonly ILogger<ValidateClientCredentialsHandler> logger;

 public ValidateClientCredentialsHandler(
 IAuthorityClientStore clientStore,
 IAuthorityIdentityProviderRegistry registry,
 ActivitySource activitySource,
+IAuthEventSink auditSink,
+IAuthorityRateLimiterMetadataAccessor metadataAccessor,
+TimeProvider timeProvider,
 ILogger<ValidateClientCredentialsHandler> logger)
 {
 this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore));
 this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
 this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
+this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
+this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
+this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
 this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
 }

@@ -48,6 +62,22 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.ClientCredentials);
 activity?.SetTag("authority.client_id", context.ClientId ?? string.Empty);

+ClientCredentialsAuditHelper.EnsureCorrelationId(context.Transaction);
+
+var metadata = metadataAccessor.GetMetadata();
+var clientId = context.ClientId ?? context.Request.ClientId;
+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditClientIdProperty] = clientId;
+if (!string.IsNullOrWhiteSpace(clientId))
+{
+metadataAccessor.SetClientId(clientId);
+}
+
+var requestedScopeInput = context.Request.GetScopes();
+var requestedScopes = requestedScopeInput.IsDefaultOrEmpty ? Array.Empty<string>() : requestedScopeInput.ToArray();
+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes;
+
+try
+{
 if (string.IsNullOrWhiteSpace(context.ClientId))
 {
 context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client identifier is required.");
@@ -63,6 +93,8 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 return;
 }

+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditConfidentialProperty] = string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);
+
 IIdentityProviderPlugin? provider = null;
 if (!string.IsNullOrWhiteSpace(document.Plugin))
 {
@@ -73,6 +105,8 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 return;
 }

+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditProviderProperty] = provider.Name;
+
 if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null)
 {
 context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Associated identity provider does not support client provisioning.");
@@ -123,11 +157,14 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle

 if (resolvedScopes.InvalidScope is not null)
 {
+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = resolvedScopes.InvalidScope;
 context.Reject(OpenIddictConstants.Errors.InvalidScope, $"Scope '{resolvedScopes.InvalidScope}' is not allowed for this client.");
 logger.LogWarning("Client credentials validation failed for {ClientId}: scope {Scope} not permitted.", document.ClientId, resolvedScopes.InvalidScope);
 return;
 }

+context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes;
+
 context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = document;
 if (provider is not null)
 {
@@ -138,6 +175,46 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
 context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes;
 logger.LogInformation("Client credentials validated for {ClientId}.", document.ClientId);
 }
+finally
+{
+var outcome = context.IsRejected ? AuthEventOutcome.Failure : AuthEventOutcome.Success;
+var reason = context.IsRejected ? context.ErrorDescription : null;
+var auditClientId = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditClientIdProperty, out var clientValue)
+? clientValue as string
+: clientId;
+var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditProviderProperty, out var providerValue)
+? providerValue as string
+: null;
+var confidentialValue = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditConfidentialProperty, out var confidentialValueObj) && confidentialValueObj is bool conf
+? (bool?)conf
+: null;
+var requested = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditRequestedScopesProperty, out var requestedValue) && requestedValue is string[] requestedArray
+? (IReadOnlyList<string>)requestedArray
+: requestedScopes;
+var granted = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditGrantedScopesProperty, out var grantedValue) && grantedValue is string[] grantedArray
+? (IReadOnlyList<string>)grantedArray
+: Array.Empty<string>();
+var invalidScope = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditInvalidScopeProperty, out var invalidValue)
+? invalidValue as string
+: null;
+
+var record = ClientCredentialsAuditHelper.CreateRecord(
+timeProvider,
+context.Transaction,
+metadata,
+null,
+outcome,
+reason,
+auditClientId,
+providerName,
+confidentialValue,
+requested,
+granted,
+invalidScope);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+}
+}
 }

 internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<OpenIddictServerEvents.HandleTokenRequestContext>
@@ -1,5 +1,10 @@
+using System.Collections.Generic;
+using System.Collections.Immutable;
 using System.Diagnostics;
+using System.Globalization;
+using System.Linq;
 using System.Security.Claims;
+using Microsoft.AspNetCore.Http;
 using Microsoft.Extensions.Logging;
 using OpenIddict.Abstractions;
 using OpenIddict.Extensions;
@@ -7,6 +12,8 @@ using OpenIddict.Server;
 using OpenIddict.Server.AspNetCore;
 using StellaOps.Authority.OpenIddict;
 using StellaOps.Authority.Plugins.Abstractions;
+using StellaOps.Authority.RateLimiting;
+using StellaOps.Cryptography.Audit;

 namespace StellaOps.Authority.OpenIddict.Handlers;

@@ -14,25 +21,34 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
 {
 private readonly IAuthorityIdentityProviderRegistry registry;
 private readonly ActivitySource activitySource;
+private readonly IAuthEventSink auditSink;
+private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
+private readonly TimeProvider timeProvider;
 private readonly ILogger<ValidatePasswordGrantHandler> logger;

 public ValidatePasswordGrantHandler(
 IAuthorityIdentityProviderRegistry registry,
 ActivitySource activitySource,
+IAuthEventSink auditSink,
+IAuthorityRateLimiterMetadataAccessor metadataAccessor,
+TimeProvider timeProvider,
 ILogger<ValidatePasswordGrantHandler> logger)
 {
 this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
 this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
+this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
+this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
+this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
 this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
 }

-public ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context)
+public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context)
 {
 ArgumentNullException.ThrowIfNull(context);

 if (!context.Request.IsPasswordGrantType())
 {
-return default;
+return;
 }

 using var activity = activitySource.StartActivity("authority.token.validate_password_grant", ActivityKind.Internal);
@@ -40,25 +56,72 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
 activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password);
 activity?.SetTag("authority.username", context.Request.Username ?? string.Empty);

+PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction);
+
+var metadata = metadataAccessor.GetMetadata();
+var clientId = context.ClientId ?? context.Request.ClientId;
+if (!string.IsNullOrWhiteSpace(clientId))
+{
+metadataAccessor.SetClientId(clientId);
+}
+
+var requestedScopesInput = context.Request.GetScopes();
+var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty<string>() : requestedScopesInput.ToArray();
+
 var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry);
 if (!selection.Succeeded)
 {
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+null,
+AuthEventOutcome.Failure,
+selection.Description,
+clientId,
+providerName: null,
+user: null,
+username: context.Request.Username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: AuthorityCredentialFailureCode.InvalidCredentials,
+extraProperties: null);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(selection.Error!, selection.Description);
 logger.LogWarning("Password grant validation failed for {Username}: {Reason}.", context.Request.Username, selection.Description);
-return default;
+return;
 }

 if (string.IsNullOrWhiteSpace(context.Request.Username) || string.IsNullOrEmpty(context.Request.Password))
 {
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+httpContext,
+AuthEventOutcome.Failure,
+"Both username and password must be provided.",
+clientId,
+providerName: selection.Provider?.Name,
+user: null,
+username: context.Request.Username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: AuthorityCredentialFailureCode.InvalidCredentials,
+extraProperties: null);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided.");
 logger.LogWarning("Password grant validation failed: missing credentials for {Username}.", context.Request.Username);
-return default;
 }

 context.Transaction.Properties[AuthorityOpenIddictConstants.ProviderTransactionProperty] = selection.Provider!.Name;
 activity?.SetTag("authority.identity_provider", selection.Provider.Name);
 logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selection.Provider.Name);
-return default;
 }
 }
@@ -66,15 +129,24 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 {
 private readonly IAuthorityIdentityProviderRegistry registry;
 private readonly ActivitySource activitySource;
+private readonly IAuthEventSink auditSink;
+private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
+private readonly TimeProvider timeProvider;
 private readonly ILogger<HandlePasswordGrantHandler> logger;

 public HandlePasswordGrantHandler(
 IAuthorityIdentityProviderRegistry registry,
 ActivitySource activitySource,
+IAuthEventSink auditSink,
+IAuthorityRateLimiterMetadataAccessor metadataAccessor,
+TimeProvider timeProvider,
 ILogger<HandlePasswordGrantHandler> logger)
 {
 this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
 this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
+this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
+this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
+this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
 this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
 }

@@ -92,6 +164,18 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password);
 activity?.SetTag("authority.username", context.Request.Username ?? string.Empty);

+PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction);
+
+var metadata = metadataAccessor.GetMetadata();
+var clientId = context.ClientId ?? context.Request.ClientId;
+if (!string.IsNullOrWhiteSpace(clientId))
+{
+metadataAccessor.SetClientId(clientId);
+}
+
+var requestedScopesInput = context.Request.GetScopes();
+var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty<string>() : requestedScopesInput.ToArray();
+
 var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ProviderTransactionProperty, out var value)
 ? value as string
 : null;
@@ -101,6 +185,23 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 {
 if (!registry.TryGet(providerName!, out var explicitProvider))
 {
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+AuthEventOutcome.Failure,
+"Unable to resolve the requested identity provider.",
+clientId,
+providerName,
+user: null,
+username: context.Request.Username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: AuthorityCredentialFailureCode.UnknownError,
+extraProperties: null);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to resolve the requested identity provider.");
 logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username);
 return;
@@ -113,12 +214,30 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry);
 if (!selection.Succeeded)
 {
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+AuthEventOutcome.Failure,
+selection.Description,
+clientId,
+providerName: null,
+user: null,
+username: context.Request.Username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: AuthorityCredentialFailureCode.InvalidCredentials,
+extraProperties: null);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(selection.Error!, selection.Description);
 logger.LogWarning("Password grant handling rejected {Username}: {Reason}.", context.Request.Username, selection.Description);
 return;
 }

 resolvedProvider = selection.Provider;
+providerName = selection.Provider?.Name;
 }

 var provider = resolvedProvider ?? throw new InvalidOperationException("No identity provider resolved for password grant.");
@@ -127,6 +246,24 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 var password = context.Request.Password;
 if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password))
 {
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+httpContext,
+AuthEventOutcome.Failure,
+"Both username and password must be provided.",
+clientId,
+provider.Name,
+user: null,
+username: username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: AuthorityCredentialFailureCode.InvalidCredentials,
+extraProperties: null);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided.");
 logger.LogWarning("Password grant handling rejected: missing credentials for {Username}.", username);
 return;
@@ -139,6 +276,27 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open

 if (!verification.Succeeded || verification.User is null)
 {
+var outcome = verification.FailureCode == AuthorityCredentialFailureCode.LockedOut
+? AuthEventOutcome.LockedOut
+: AuthEventOutcome.Failure;
+
+var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+outcome,
+verification.Message,
+clientId,
+provider.Name,
+verification.User,
+username,
+scopes: requestedScopes,
+retryAfter: verification.RetryAfter,
+failureCode: verification.FailureCode,
+extraProperties: verification.AuditProperties);
+
+await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
+
 context.Reject(
 OpenIddictConstants.Errors.InvalidGrant,
 verification.Message ?? "Invalid username or password.");
@@ -146,6 +304,8 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 return;
 }

+metadataAccessor.SetSubjectId(verification.User.SubjectId);
+
 var identity = new ClaimsIdentity(
 OpenIddictServerAspNetCoreDefaults.AuthenticationScheme,
 OpenIddictConstants.Claims.Name,
@@ -179,9 +339,246 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
 var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, verification.User, null);
 await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false);
+
+var successRecord = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
+timeProvider,
+context.Transaction,
+metadata,
+AuthEventOutcome.Success,
+verification.Message,
+clientId,
+provider.Name,
+verification.User,
+username,
+scopes: requestedScopes,
+retryAfter: null,
+failureCode: null,
+extraProperties: verification.AuditProperties);
+
+await auditSink.WriteAsync(successRecord, context.CancellationToken).ConfigureAwait(false);
+
 context.Principal = principal;
 context.HandleRequest();
 activity?.SetTag("authority.subject_id", verification.User.SubjectId);
 logger.LogInformation("Password grant issued for {Username} with subject {SubjectId}.", verification.User.Username, verification.User.SubjectId);
 }
 }
+
+internal static class PasswordGrantAuditHelper
+{
+internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction)
+{
+ArgumentNullException.ThrowIfNull(transaction);
+
+if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) &&
+value is string existing && !string.IsNullOrWhiteSpace(existing))
+{
+return existing;
+}
+
+var correlation = Activity.Current?.TraceId.ToString() ??
+Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
+
+transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation;
+return correlation;
+}
+
+internal static AuthEventRecord CreatePasswordGrantRecord(
+TimeProvider timeProvider,
+OpenIddictServerTransaction transaction,
+AuthorityRateLimiterMetadata? metadata,
+AuthEventOutcome outcome,
+string? reason,
+string? clientId,
+string? providerName,
+AuthorityUserDescriptor? user,
+string? username,
+IEnumerable<string>? scopes,
+TimeSpan? retryAfter,
+AuthorityCredentialFailureCode? failureCode,
+IEnumerable<AuthEventProperty>? extraProperties)
+{
+ArgumentNullException.ThrowIfNull(timeProvider);
+ArgumentNullException.ThrowIfNull(transaction);
+
+var correlationId = EnsureCorrelationId(transaction);
+var normalizedScopes = NormalizeScopes(scopes);
+var subject = BuildSubject(user, username, providerName);
+var client = BuildClient(clientId, providerName);
+var network = BuildNetwork(metadata);
+var properties = BuildProperties(user, retryAfter, failureCode, extraProperties);
+
+return new AuthEventRecord
+{
+EventType = "authority.password.grant",
+OccurredAt = timeProvider.GetUtcNow(),
+CorrelationId = correlationId,
+Outcome = outcome,
+Reason = Normalize(reason),
+Subject = subject,
+Client = client,
+Scopes = normalizedScopes,
+Network = network,
+Properties = properties
+};
+}
+
+private static AuthEventSubject? BuildSubject(AuthorityUserDescriptor? user, string? username, string? providerName)
+{
+var attributes = user?.Attributes;
+var normalizedUsername = Normalize(username) ?? Normalize(user?.Username);
+var subjectId = Normalize(user?.SubjectId);
+var displayName = Normalize(user?.DisplayName);
+var attributeProperties = BuildSubjectAttributes(attributes);
+
+if (string.IsNullOrWhiteSpace(subjectId) &&
+string.IsNullOrWhiteSpace(normalizedUsername) &&
+string.IsNullOrWhiteSpace(displayName) &&
+attributeProperties.Count == 0 &&
+string.IsNullOrWhiteSpace(providerName))
+{
+return null;
+}
+
+return new AuthEventSubject
+{
+SubjectId = ClassifiedString.Personal(subjectId),
+Username = ClassifiedString.Personal(normalizedUsername),
+DisplayName = ClassifiedString.Personal(displayName),
+Realm = ClassifiedString.Public(Normalize(providerName)),
+Attributes = attributeProperties
+};
+}
+
+private static IReadOnlyList<AuthEventProperty> BuildSubjectAttributes(IReadOnlyDictionary<string, string?>? attributes)
+{
+if (attributes is null || attributes.Count == 0)
+{
+return Array.Empty<AuthEventProperty>();
+}
+
+var items = new List<AuthEventProperty>(attributes.Count);
+foreach (var pair in attributes)
+{
+if (string.IsNullOrWhiteSpace(pair.Key))
+{
+continue;
+}
+
+items.Add(new AuthEventProperty
+{
+Name = pair.Key,
+Value = ClassifiedString.Personal(Normalize(pair.Value))
+});
+}
+
+return items.Count == 0 ? Array.Empty<AuthEventProperty>() : items;
+}
+
+private static AuthEventClient? BuildClient(string? clientId, string? providerName)
+{
+var normalizedClientId = Normalize(clientId);
+var provider = Normalize(providerName);
+
+if (string.IsNullOrWhiteSpace(normalizedClientId) && string.IsNullOrWhiteSpace(provider))
+{
+return null;
+}
+
+return new AuthEventClient
+{
+ClientId = ClassifiedString.Personal(normalizedClientId),
+Name = ClassifiedString.Empty,
+Provider = ClassifiedString.Public(provider)
+};
+}
+
+private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata)
+{
+var remote = Normalize(metadata?.RemoteIp);
+var forwarded = Normalize(metadata?.ForwardedFor);
+
+if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded))
+{
+return null;
+}
+
+return new AuthEventNetwork
+{
+RemoteAddress = ClassifiedString.Personal(remote),
+ForwardedFor = ClassifiedString.Personal(forwarded)
+};
+}
+
+private static IReadOnlyList<AuthEventProperty> BuildProperties(
+AuthorityUserDescriptor? user,
+TimeSpan? retryAfter,
+AuthorityCredentialFailureCode? failureCode,
+IEnumerable<AuthEventProperty>? extraProperties)
+{
+var properties = new List<AuthEventProperty>();
+
+if (failureCode is { } code)
+{
+properties.Add(new AuthEventProperty
+{
+Name = "failure.code",
+Value = ClassifiedString.Public(code.ToString())
+});
+}
+
+if (retryAfter is { } retry && retry > TimeSpan.Zero)
+{
+var seconds = Math.Ceiling(retry.TotalSeconds).ToString(CultureInfo.InvariantCulture);
+properties.Add(new AuthEventProperty
+{
+Name = "policy.retry_after_seconds",
+Value = ClassifiedString.Public(seconds)
+});
+}
+
+if (user is not null)
+{
+properties.Add(new AuthEventProperty
+{
+Name = "subject.requires_password_reset",
+Value = ClassifiedString.Public(user.RequiresPasswordReset ? "true" : "false")
+});
+}
+
+if (extraProperties is not null)
+{
+foreach (var property in extraProperties)
+{
+if (property is null || string.IsNullOrWhiteSpace(property.Name))
+{
+continue;
+}
+
+properties.Add(property);
+}
+}
+
+return properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties;
+}
+
+private static IReadOnlyList<string> NormalizeScopes(IEnumerable<string>? scopes)
+{
+if (scopes is null)
+{
+return Array.Empty<string>();
+}
+
+var normalized = scopes
+.Where(static scope => !string.IsNullOrWhiteSpace(scope))
+.Select(static scope => scope.Trim())
+.Where(static scope => scope.Length > 0)
+.Distinct(StringComparer.Ordinal)
+.OrderBy(static scope => scope, StringComparer.Ordinal)
+.ToArray();
+
+return normalized.Length == 0 ? Array.Empty<string>() : normalized;
+}
+
+private static string? Normalize(string? value)
+=> string.IsNullOrWhiteSpace(value) ? null : value.Trim();
+}

@@ -0,0 +1,124 @@
using System;
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using StellaOps.Authority.Storage.Mongo.Stores;

namespace StellaOps.Authority.OpenIddict.Handlers;

internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler<OpenIddictServerEvents.HandleRevocationRequestContext>
{
    private readonly IAuthorityTokenStore tokenStore;
    private readonly TimeProvider clock;
    private readonly ILogger<HandleRevocationRequestHandler> logger;
    private readonly ActivitySource activitySource;

    public HandleRevocationRequestHandler(
        IAuthorityTokenStore tokenStore,
        TimeProvider clock,
        ActivitySource activitySource,
        ILogger<HandleRevocationRequestHandler> logger)
    {
        this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async ValueTask HandleAsync(OpenIddictServerEvents.HandleRevocationRequestContext context)
    {
        ArgumentNullException.ThrowIfNull(context);

        using var activity = activitySource.StartActivity("authority.token.revoke", ActivityKind.Internal);

        var request = context.Request;
        if (request is null || string.IsNullOrWhiteSpace(request.Token))
        {
            context.Reject(OpenIddictConstants.Errors.InvalidRequest, "The revocation request is missing the token parameter.");
            return;
        }

        var token = request.Token.Trim();
        var document = await tokenStore.FindByTokenIdAsync(token, context.CancellationToken).ConfigureAwait(false);

        if (document is null)
        {
            var tokenId = TryExtractTokenId(token);
            if (!string.IsNullOrWhiteSpace(tokenId))
            {
                document = await tokenStore.FindByTokenIdAsync(tokenId!, context.CancellationToken).ConfigureAwait(false);
            }
        }

        if (document is null)
        {
            logger.LogDebug("Revocation request for unknown token ignored.");
            context.HandleRequest();
            return;
        }

        if (!string.Equals(document.Status, "revoked", StringComparison.OrdinalIgnoreCase))
        {
            await tokenStore.UpdateStatusAsync(
                document.TokenId,
                "revoked",
                clock.GetUtcNow(),
                "client_request",
                null,
                null,
                context.CancellationToken).ConfigureAwait(false);

            logger.LogInformation("Token {TokenId} revoked via revocation endpoint.", document.TokenId);
            activity?.SetTag("authority.token_id", document.TokenId);
        }

        context.HandleRequest();
    }

    private static string? TryExtractTokenId(string token)
    {
        var parts = token.Split('.');
        if (parts.Length < 2)
        {
            return null;
        }

        try
        {
            var payload = Base64UrlDecode(parts[1]);
            using var document = JsonDocument.Parse(payload);
            if (document.RootElement.TryGetProperty("jti", out var jti) && jti.ValueKind == JsonValueKind.String)
            {
                var value = jti.GetString();
                return string.IsNullOrWhiteSpace(value) ? null : value;
            }
        }
        catch (JsonException)
        {
            return null;
        }
        catch (FormatException)
        {
            return null;
        }

        return null;
    }

    private static byte[] Base64UrlDecode(string value)
    {
        var padded = value.Length % 4 switch
        {
            2 => value + "==",
            3 => value + "=",
            _ => value
        };

        padded = padded.Replace('-', '+').Replace('_', '/');
        return Convert.FromBase64String(padded);
    }
}
@@ -0,0 +1,135 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using OpenIddict.Abstractions;
using OpenIddict.Extensions;
using OpenIddict.Server;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;

namespace StellaOps.Authority.OpenIddict.Handlers;

internal sealed class PersistTokensHandler : IOpenIddictServerHandler<OpenIddictServerEvents.ProcessSignInContext>
{
    private readonly IAuthorityTokenStore tokenStore;
    private readonly TimeProvider clock;
    private readonly ActivitySource activitySource;
    private readonly ILogger<PersistTokensHandler> logger;

    public PersistTokensHandler(
        IAuthorityTokenStore tokenStore,
        TimeProvider clock,
        ActivitySource activitySource,
        ILogger<PersistTokensHandler> logger)
    {
        this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async ValueTask HandleAsync(OpenIddictServerEvents.ProcessSignInContext context)
    {
        ArgumentNullException.ThrowIfNull(context);

        if (context.AccessTokenPrincipal is null &&
            context.RefreshTokenPrincipal is null &&
            context.AuthorizationCodePrincipal is null &&
            context.DeviceCodePrincipal is null)
        {
            return;
        }

        using var activity = activitySource.StartActivity("authority.token.persist", ActivityKind.Internal);
        var issuedAt = clock.GetUtcNow();

        if (context.AccessTokenPrincipal is ClaimsPrincipal accessPrincipal)
        {
            await PersistAsync(accessPrincipal, OpenIddictConstants.TokenTypeHints.AccessToken, issuedAt, context.CancellationToken).ConfigureAwait(false);
        }

        if (context.RefreshTokenPrincipal is ClaimsPrincipal refreshPrincipal)
        {
            await PersistAsync(refreshPrincipal, OpenIddictConstants.TokenTypeHints.RefreshToken, issuedAt, context.CancellationToken).ConfigureAwait(false);
        }

        if (context.AuthorizationCodePrincipal is ClaimsPrincipal authorizationPrincipal)
        {
            await PersistAsync(authorizationPrincipal, OpenIddictConstants.TokenTypeHints.AuthorizationCode, issuedAt, context.CancellationToken).ConfigureAwait(false);
        }

        if (context.DeviceCodePrincipal is ClaimsPrincipal devicePrincipal)
        {
            await PersistAsync(devicePrincipal, OpenIddictConstants.TokenTypeHints.DeviceCode, issuedAt, context.CancellationToken).ConfigureAwait(false);
        }
    }

    private async ValueTask PersistAsync(ClaimsPrincipal principal, string tokenType, DateTimeOffset issuedAt, CancellationToken cancellationToken)
    {
        var tokenId = EnsureTokenId(principal);
        var scopes = ExtractScopes(principal);
        var document = new AuthorityTokenDocument
        {
            TokenId = tokenId,
            Type = tokenType,
            SubjectId = principal.GetClaim(OpenIddictConstants.Claims.Subject),
            ClientId = principal.GetClaim(OpenIddictConstants.Claims.ClientId),
            Scope = scopes,
            Status = "valid",
            CreatedAt = issuedAt,
            ExpiresAt = TryGetExpiration(principal)
        };

        try
        {
            await tokenStore.InsertAsync(document, cancellationToken).ConfigureAwait(false);
            logger.LogDebug("Persisted {Type} token {TokenId} for client {ClientId}.", tokenType, tokenId, document.ClientId ?? "<none>");
        }
        catch (Exception ex)
        {
            logger.LogWarning(ex, "Failed to persist {Type} token {TokenId}.", tokenType, tokenId);
        }
    }

    private static string EnsureTokenId(ClaimsPrincipal principal)
    {
        var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId);
        if (string.IsNullOrWhiteSpace(tokenId))
        {
            tokenId = Guid.NewGuid().ToString("N");
            principal.SetClaim(OpenIddictConstants.Claims.JwtId, tokenId);
        }

        return tokenId;
    }

    private static List<string> ExtractScopes(ClaimsPrincipal principal)
        => principal.GetScopes()
            .Where(scope => !string.IsNullOrWhiteSpace(scope))
            .Select(scope => scope.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(scope => scope, StringComparer.Ordinal)
            .ToList();

    private static DateTimeOffset? TryGetExpiration(ClaimsPrincipal principal)
    {
        var value = principal.GetClaim(OpenIddictConstants.Claims.Exp);
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds))
        {
            return DateTimeOffset.FromUnixTimeSeconds(seconds);
        }

        return null;
    }
}
@@ -1,9 +1,13 @@
+using System;
 using System.Diagnostics;
+using System.Globalization;
 using Microsoft.AspNetCore.Builder;
 using Microsoft.AspNetCore.Http;
-using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
 using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
 using Microsoft.AspNetCore.RateLimiting;
 using Microsoft.Extensions.Logging.Abstractions;
@@ -14,16 +18,23 @@ using MongoDB.Driver;
 using Serilog;
 using Serilog.Events;
 using StellaOps.Authority;
+using StellaOps.Authority.Audit;
 using StellaOps.Authority.Plugins.Abstractions;
 using StellaOps.Authority.Plugins;
 using StellaOps.Authority.Bootstrap;
 using StellaOps.Authority.Storage.Mongo.Extensions;
 using StellaOps.Authority.Storage.Mongo.Initialization;
+using StellaOps.Authority.RateLimiting;
 using StellaOps.Configuration;
 using StellaOps.Plugin.DependencyInjection;
 using StellaOps.Plugin.Hosting;
 using StellaOps.Authority.OpenIddict.Handlers;
 using System.Linq;
+using StellaOps.Cryptography.Audit;
+using StellaOps.Cryptography.DependencyInjection;
+using StellaOps.Authority.Revocation;
+using StellaOps.Authority.Signing;
+using StellaOps.Cryptography;

 var builder = WebApplication.CreateBuilder(args);

@@ -68,12 +79,20 @@ var authorityOptions = authorityConfiguration.Options;
 var issuer = authorityOptions.Issuer ?? throw new InvalidOperationException("Authority issuer configuration is required.");
 builder.Services.AddSingleton(authorityOptions);
 builder.Services.AddSingleton<IOptions<StellaOpsAuthorityOptions>>(Options.Create(authorityOptions));
+builder.Services.AddHttpContextAccessor();
+builder.Services.TryAddSingleton<TimeProvider>(_ => TimeProvider.System);
+builder.Services.TryAddSingleton<IAuthorityRateLimiterMetadataAccessor, AuthorityRateLimiterMetadataAccessor>();
+builder.Services.TryAddSingleton<IAuthorityRateLimiterPartitionKeyResolver, DefaultAuthorityRateLimiterPartitionKeyResolver>();

 builder.Services.AddRateLimiter(rateLimiterOptions =>
 {
     AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions);
 });

+builder.Services.AddStellaOpsCrypto();
+builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>());
+builder.Services.AddSingleton<AuthoritySigningKeyManager>();
+
 AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader
     .Load(authorityOptions, builder.Environment.ContentRootPath)
     .ToArray();
@@ -93,11 +112,18 @@ builder.Services.AddAuthorityMongoStorage(storageOptions =>
 });

 builder.Services.AddSingleton<IAuthorityIdentityProviderRegistry, AuthorityIdentityProviderRegistry>();
+builder.Services.AddSingleton<IAuthEventSink, AuthorityAuditSink>();
 builder.Services.AddScoped<ValidatePasswordGrantHandler>();
 builder.Services.AddScoped<HandlePasswordGrantHandler>();
 builder.Services.AddScoped<ValidateClientCredentialsHandler>();
 builder.Services.AddScoped<HandleClientCredentialsHandler>();
 builder.Services.AddScoped<ValidateAccessTokenHandler>();
+builder.Services.AddScoped<PersistTokensHandler>();
+builder.Services.AddScoped<HandleRevocationRequestHandler>();
+builder.Services.AddSingleton<RevocationBundleBuilder>();
+builder.Services.AddSingleton<RevocationBundleSigner>();
+builder.Services.AddSingleton<AuthorityRevocationExportService>();
+builder.Services.AddSingleton<AuthorityJwksService>();

 var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins(
     builder.Services,
@@ -179,6 +205,16 @@ builder.Services.AddOpenIddict()
     {
         descriptor.UseScopedHandler<ValidateAccessTokenHandler>();
     });
+
+    options.AddEventHandler<OpenIddictServerEvents.ProcessSignInContext>(descriptor =>
+    {
+        descriptor.UseScopedHandler<PersistTokensHandler>();
+    });
+
+    options.AddEventHandler<OpenIddictServerEvents.HandleRevocationRequestContext>(descriptor =>
+    {
+        descriptor.UseScopedHandler<HandleRevocationRequestHandler>();
+    });
 });

 builder.Services.Configure<OpenIddictServerOptions>(options =>
@@ -242,12 +278,16 @@ if (authorityOptions.Bootstrap.Enabled)
     bootstrapGroup.AddEndpointFilter(new BootstrapApiKeyFilter(authorityOptions));

     bootstrapGroup.MapPost("/users", async (
+        HttpContext httpContext,
         BootstrapUserRequest request,
         IAuthorityIdentityProviderRegistry registry,
+        IAuthEventSink auditSink,
+        TimeProvider timeProvider,
         CancellationToken cancellationToken) =>
     {
         if (request is null)
         {
+            await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>()).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
         }

@@ -257,16 +297,19 @@ if (authorityOptions.Bootstrap.Enabled)

         if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
         {
+            await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
         }

         if (!provider.Capabilities.SupportsPassword)
         {
+            await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
             return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." });
         }

         if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password))
         {
+            await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>()).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." });
         }

@@ -288,24 +331,88 @@ if (authorityOptions.Bootstrap.Enabled)

         if (!result.Succeeded || result.Value is null)
         {
+            await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles).ConfigureAwait(false);
             return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." });
         }

+        await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles).ConfigureAwait(false);
+
         return Results.Ok(new
         {
             provider = provider.Name,
             subjectId = result.Value.SubjectId,
             username = result.Value.Username
         });

+        async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection<string> rolesValue)
+        {
+            var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
+            AuthEventNetwork? network = null;
+            var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString();
+            var userAgent = httpContext.Request.Headers.UserAgent.ToString();
+
+            if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent))
+            {
+                network = new AuthEventNetwork
+                {
+                    RemoteAddress = ClassifiedString.Personal(remoteAddress),
+                    UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent)
+                };
+            }
+
+            var subject = subjectId is null && string.IsNullOrWhiteSpace(usernameValue) && string.IsNullOrWhiteSpace(providerValue)
+                ? null
+                : new AuthEventSubject
+                {
+                    SubjectId = ClassifiedString.Personal(subjectId),
+                    Username = ClassifiedString.Personal(usernameValue),
+                    Realm = ClassifiedString.Public(providerValue)
+                };
+
+            var properties = string.IsNullOrWhiteSpace(providerValue)
+                ? Array.Empty<AuthEventProperty>()
+                : new[]
+                {
+                    new AuthEventProperty
+                    {
+                        Name = "bootstrap.provider",
+                        Value = ClassifiedString.Public(providerValue)
+                    }
+                };
+
+            var scopes = rolesValue is { Count: > 0 }
+                ? rolesValue.ToArray()
+                : Array.Empty<string>();
+
+            var record = new AuthEventRecord
+            {
+                EventType = "authority.bootstrap.user",
+                OccurredAt = timeProvider.GetUtcNow(),
+                CorrelationId = correlationId,
+                Outcome = outcome,
+                Reason = reason,
+                Subject = subject,
+                Client = null,
+                Scopes = scopes,
+                Network = network,
+                Properties = properties
+            };
+
+            await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
+        }
     });

     bootstrapGroup.MapPost("/clients", async (
+        HttpContext httpContext,
         BootstrapClientRequest request,
         IAuthorityIdentityProviderRegistry registry,
+        IAuthEventSink auditSink,
+        TimeProvider timeProvider,
         CancellationToken cancellationToken) =>
     {
         if (request is null)
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty<string>(), null).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
         }

@@ -315,31 +422,37 @@ if (authorityOptions.Bootstrap.Enabled)

         if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty<string>(), request?.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
         }

         if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null)
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." });
         }

         if (string.IsNullOrWhiteSpace(request.ClientId))
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." });
         }

         if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret))
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." });
         }

         if (!TryParseUris(request.RedirectUris, out var redirectUris, out var redirectError))
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, redirectError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = redirectError });
         }

         if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError))
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, postLogoutError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = "invalid_request", message = postLogoutError });
         }

@@ -362,15 +475,151 @@ if (authorityOptions.Bootstrap.Enabled)

         if (!result.Succeeded || result.Value is null)
         {
+            await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
             return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." });
         }

+        await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential).ConfigureAwait(false);
+
         return Results.Ok(new
         {
             provider = provider.Name,
             clientId = result.Value.ClientId,
             confidential = result.Value.Confidential
         });

+        async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection<string> scopes, bool? confidentialFlag)
+        {
+            var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
+            AuthEventNetwork? network = null;
+            var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString();
+            var userAgent = httpContext.Request.Headers.UserAgent.ToString();
+
+            if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent))
+            {
+                network = new AuthEventNetwork
+                {
+                    RemoteAddress = ClassifiedString.Personal(remoteAddress),
+                    UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent)
+                };
+            }
+
+            var clientIdValue = assignedClientId ?? requestedClientId;
+            var client = clientIdValue is null && string.IsNullOrWhiteSpace(providerValue)
+                ? null
+                : new AuthEventClient
+                {
+                    ClientId = ClassifiedString.Personal(clientIdValue),
+                    Name = ClassifiedString.Empty,
+                    Provider = ClassifiedString.Public(providerValue)
+                };
+
+            var properties = new List<AuthEventProperty>();
+            if (!string.IsNullOrWhiteSpace(requestedClientId) && !string.Equals(requestedClientId, assignedClientId, StringComparison.Ordinal))
+            {
+                properties.Add(new AuthEventProperty
+                {
+                    Name = "bootstrap.requested_client_id",
+                    Value = ClassifiedString.Public(requestedClientId)
+                });
+            }
+
+            if (confidentialFlag == true)
+            {
+                properties.Add(new AuthEventProperty
+                {
+                    Name = "bootstrap.confidential",
+                    Value = ClassifiedString.Public("true")
+                });
+            }
+
+            var record = new AuthEventRecord
+            {
+                EventType = "authority.bootstrap.client",
+                OccurredAt = timeProvider.GetUtcNow(),
+                CorrelationId = correlationId,
+                Outcome = outcome,
+                Reason = reason,
+                Subject = null,
+                Client = client,
+                Scopes = scopes is { Count: > 0 } ? scopes.ToArray() : Array.Empty<string>(),
+                Network = network,
+                Properties = properties.Count == 0 ? Array.Empty<AuthEventProperty>() : properties.ToArray()
+            };
+
+            await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false);
+        }
+    });
+
+    bootstrapGroup.MapGet("/revocations/export", async (
+        AuthorityRevocationExportService exportService,
+        CancellationToken cancellationToken) =>
+    {
+        var package = await exportService.ExportAsync(cancellationToken).ConfigureAwait(false);
+        var build = package.Bundle;
+
+        var response = new RevocationExportResponse
+        {
+            SchemaVersion = build.Bundle.SchemaVersion,
+            BundleId = build.Bundle.BundleId ?? build.Sha256,
+            Sequence = build.Sequence,
+            IssuedAt = build.IssuedAt,
+            SigningKeyId = package.Signature.KeyId,
+            Bundle = new RevocationExportPayload
+            {
+                Data = Convert.ToBase64String(build.CanonicalJson)
+            },
+            Signature = new RevocationExportSignature
+            {
+                Algorithm = package.Signature.Algorithm,
+                KeyId = package.Signature.KeyId,
+                Value = package.Signature.Value
+            },
+            Digest = new RevocationExportDigest
+            {
+                Value = build.Sha256
+            }
+        };
+
+        return Results.Ok(response);
+    });
+
+    bootstrapGroup.MapPost("/signing/rotate", (
+        SigningRotationRequest? request,
+        AuthoritySigningKeyManager signingManager,
+        ILogger<AuthoritySigningKeyManager> signingLogger) =>
+    {
+        if (request is null)
+        {
+            signingLogger.LogWarning("Signing rotation request payload missing.");
+            return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." });
+        }
+
+        try
+        {
+            var result = signingManager.Rotate(request);
+            signingLogger.LogInformation("Signing key rotation completed. Active key {KeyId}.", result.ActiveKeyId);
+
+            return Results.Ok(new
+            {
+                activeKeyId = result.ActiveKeyId,
+                provider = result.ActiveProvider,
+                source = result.ActiveSource,
+                location = result.ActiveLocation,
+                previousKeyId = result.PreviousKeyId,
+                retiredKeyIds = result.RetiredKeyIds
+            });
+        }
+        catch (InvalidOperationException ex)
+        {
+            signingLogger.LogWarning(ex, "Signing rotation failed due to invalid input.");
+            return Results.BadRequest(new { error = "rotation_failed", message = ex.Message });
+        }
+        catch (Exception ex)
+        {
+            signingLogger.LogError(ex, "Unexpected failure rotating signing key.");
+            return Results.Problem("Failed to rotate signing key.");
+        }
     });
 }

@@ -398,6 +647,7 @@ app.UseExceptionHandler(static errorApp =>
 });

 app.UseRouting();
+app.UseAuthorityRateLimiterContext();
 app.UseRateLimiter();
 app.UseAuthentication();
 app.UseAuthorization();
@@ -432,6 +682,12 @@ app.MapGet("/ready", (IAuthorityIdentityProviderRegistry registry) =>
 }))
     .WithName("ReadinessCheck");

+app.MapGet("/jwks", (AuthorityJwksService jwksService) => Results.Ok(jwksService.Build()))
+    .WithName("JsonWebKeySet");
+
+// Ensure signing key manager initialises key material on startup.
+app.Services.GetRequiredService<AuthoritySigningKeyManager>();
+
 app.Run();

 static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions options, string basePath)
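Note: the `/revocations/export` endpoint above returns the canonical bundle bytes base64-encoded in `bundle.data`, next to a detached JWS and a SHA-256 digest. A minimal consumer-side sketch (not part of this commit; it only assumes the `RevocationExportResponse` field names shown above and that the JSON body has been saved to `response.json`) that decodes the payload and checks the advertised digest:

// Sketch only: decode the exported revocation bundle and confirm its digest.
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text.Json;

var json = File.ReadAllText("response.json");
using var doc = JsonDocument.Parse(json);

var data = doc.RootElement.GetProperty("bundle").GetProperty("data").GetString()!;
var expectedDigest = doc.RootElement.GetProperty("digest").GetProperty("value").GetString()!;

// The decoded bytes are the canonical JSON that was hashed and signed server-side.
var bundleBytes = Convert.FromBase64String(data);
var actualDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();

Console.WriteLine(actualDigest == expectedDigest
    ? "digest matches"
    : $"digest mismatch: {actualDigest} != {expectedDigest}");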
@@ -0,0 +1,37 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

namespace StellaOps.Authority.Revocation;

internal sealed class AuthorityRevocationExportService
{
    private readonly RevocationBundleBuilder bundleBuilder;
    private readonly RevocationBundleSigner signer;
    private readonly ILogger<AuthorityRevocationExportService> logger;

    public AuthorityRevocationExportService(
        RevocationBundleBuilder bundleBuilder,
        RevocationBundleSigner signer,
        ILogger<AuthorityRevocationExportService> logger)
    {
        this.bundleBuilder = bundleBuilder ?? throw new ArgumentNullException(nameof(bundleBuilder));
        this.signer = signer ?? throw new ArgumentNullException(nameof(signer));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<RevocationExportPackage> ExportAsync(CancellationToken cancellationToken)
    {
        var buildResult = await bundleBuilder.BuildAsync(cancellationToken).ConfigureAwait(false);
        var signature = await signer.SignAsync(buildResult.CanonicalJson, cancellationToken).ConfigureAwait(false);

        logger.LogInformation(
            "Generated revocation bundle sequence {Sequence} with {EntryCount} entries (sha256:{Hash}).",
            buildResult.Sequence,
            buildResult.Bundle.Revocations.Count,
            buildResult.Sha256);

        return new RevocationExportPackage(buildResult, signature);
    }
}
@@ -0,0 +1,10 @@
using System;

namespace StellaOps.Authority.Revocation;

internal sealed record RevocationBundleBuildResult(
    RevocationBundleModel Bundle,
    byte[] CanonicalJson,
    string Sha256,
    long Sequence,
    DateTimeOffset IssuedAt);
@@ -0,0 +1,220 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Configuration;

namespace StellaOps.Authority.Revocation;

internal sealed class RevocationBundleBuilder
{
    private const string SchemaVersion = "1.0.0";
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.General)
    {
        PropertyNamingPolicy = null,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = true
    };

    private readonly IAuthorityTokenStore tokenStore;
    private readonly IAuthorityRevocationStore revocationStore;
    private readonly IAuthorityRevocationExportStateStore stateStore;
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly TimeProvider clock;
    private readonly ILogger<RevocationBundleBuilder> logger;

    public RevocationBundleBuilder(
        IAuthorityTokenStore tokenStore,
        IAuthorityRevocationStore revocationStore,
        IAuthorityRevocationExportStateStore stateStore,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        TimeProvider clock,
        ILogger<RevocationBundleBuilder> logger)
    {
        this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore));
        this.revocationStore = revocationStore ?? throw new ArgumentNullException(nameof(revocationStore));
        this.stateStore = stateStore ?? throw new ArgumentNullException(nameof(stateStore));
        this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<RevocationBundleBuildResult> BuildAsync(CancellationToken cancellationToken)
    {
        var issuer = authorityOptions.Issuer?.ToString()?.TrimEnd('/')
            ?? throw new InvalidOperationException("Authority issuer configuration is required before exporting revocations.");

        var state = await stateStore.GetAsync(cancellationToken).ConfigureAwait(false);
        var previousSequence = state?.Sequence ?? 0;
        var sequence = previousSequence + 1;
        var issuedAt = clock.GetUtcNow();

        var tokenDocuments = await tokenStore.ListRevokedAsync(null, cancellationToken).ConfigureAwait(false);
        var manualDocuments = await revocationStore.GetActiveAsync(issuedAt, cancellationToken).ConfigureAwait(false);

        var entries = new List<RevocationEntryModel>();
        entries.AddRange(BuildTokenEntries(tokenDocuments, issuedAt));
        entries.AddRange(BuildManualEntries(manualDocuments));

        entries.Sort(static (left, right) =>
        {
            var categoryCompare = string.CompareOrdinal(left.Category, right.Category);
            if (categoryCompare != 0)
            {
                return categoryCompare;
            }

            var idCompare = string.CompareOrdinal(left.Id, right.Id);
            if (idCompare != 0)
            {
                return idCompare;
            }

            return DateTimeOffset.Compare(left.RevokedAt, right.RevokedAt);
        });

        var metadata = new SortedDictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
        {
            ["entryCount"] = entries.Count.ToString(CultureInfo.InvariantCulture)
        };

        var bundle = new RevocationBundleModel
        {
            SchemaVersion = SchemaVersion,
            Issuer = issuer,
            IssuedAt = issuedAt,
            ValidFrom = issuedAt,
            Sequence = sequence,
            SigningKeyId = authorityOptions.Signing?.ActiveKeyId,
            Revocations = entries,
            Metadata = metadata
        };

        var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(bundle, SerializerOptions);
        var sha256 = Convert.ToHexString(SHA256.HashData(jsonBytes)).ToLowerInvariant();
        bundle.BundleId = sha256;

        await PersistStateAsync(previousSequence, sequence, sha256, issuedAt, cancellationToken).ConfigureAwait(false);

        return new RevocationBundleBuildResult(bundle, jsonBytes, sha256, sequence, issuedAt);
    }

    private IEnumerable<RevocationEntryModel> BuildTokenEntries(IReadOnlyCollection<AuthorityTokenDocument> documents, DateTimeOffset issuedAt)
    {
        foreach (var document in documents)
        {
            if (!string.Equals(document.Status, "revoked", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            if (document.ExpiresAt is { } expires && expires <= issuedAt)
            {
                continue;
            }

            var revocationId = document.TokenId;
            if (string.IsNullOrWhiteSpace(revocationId))
            {
                continue;
            }

            var scopes = document.Scope.Count > 0
                ? document.Scope
                    .Where(scope => !string.IsNullOrWhiteSpace(scope))
                    .Select(scope => scope.Trim())
                    .Distinct(StringComparer.Ordinal)
                    .OrderBy(scope => scope, StringComparer.Ordinal)
                    .ToList()
                : null;

            var metadata = document.RevokedMetadata is null
                ? null
                : new SortedDictionary<string, string?>(document.RevokedMetadata, StringComparer.OrdinalIgnoreCase);

            yield return new RevocationEntryModel
            {
                Id = revocationId,
                Category = "token",
                TokenType = document.Type,
                SubjectId = Normalize(document.SubjectId),
                ClientId = Normalize(document.ClientId),
                Reason = NormalizeReason(document.RevokedReason) ?? "unspecified",
                ReasonDescription = Normalize(document.RevokedReasonDescription),
                RevokedAt = document.RevokedAt ?? document.CreatedAt,
                EffectiveAt = document.RevokedAt ?? document.CreatedAt,
                ExpiresAt = document.ExpiresAt,
                Scopes = scopes,
                Metadata = metadata
            };
        }
    }

    private static IEnumerable<RevocationEntryModel> BuildManualEntries(IReadOnlyCollection<AuthorityRevocationDocument> documents)
    {
        foreach (var document in documents)
        {
            var metadata = document.Metadata is null
                ? null
                : new SortedDictionary<string, string?>(document.Metadata, StringComparer.OrdinalIgnoreCase);

            var scopes = document.Scopes is null
                ? null
                : document.Scopes
                    .Where(scope => !string.IsNullOrWhiteSpace(scope))
                    .Select(scope => scope.Trim())
                    .Distinct(StringComparer.Ordinal)
                    .OrderBy(scope => scope, StringComparer.Ordinal)
                    .ToList();

            yield return new RevocationEntryModel
            {
                Id = document.RevocationId,
                Category = document.Category,
                TokenType = Normalize(document.TokenType),
                SubjectId = Normalize(document.SubjectId),
                ClientId = Normalize(document.ClientId),
                Reason = NormalizeReason(document.Reason) ?? "unspecified",
                ReasonDescription = Normalize(document.ReasonDescription),
                RevokedAt = document.RevokedAt,
                EffectiveAt = document.EffectiveAt ?? document.RevokedAt,
                ExpiresAt = document.ExpiresAt,
                Scopes = scopes,
                Fingerprint = Normalize(document.Fingerprint),
                Metadata = metadata
            };
        }
    }

    private async Task PersistStateAsync(long previousSequence, long newSequence, string bundleId, DateTimeOffset issuedAt, CancellationToken cancellationToken)
    {
        try
        {
            await stateStore.UpdateAsync(previousSequence, newSequence, bundleId, issuedAt, cancellationToken).ConfigureAwait(false);
        }
        catch (InvalidOperationException ex)
        {
            logger.LogError(ex, "Failed to update revocation export state (expected sequence {Expected}, new sequence {Sequence}).", previousSequence, newSequence);
            throw;
        }
    }

    private static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();

    private static string? NormalizeReason(string? reason)
    {
        var normalized = Normalize(reason);
        return normalized?.ToLowerInvariant();
    }
}
@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Authority.Revocation;

internal sealed class RevocationBundleModel
{
    [JsonPropertyName("schemaVersion")]
    [JsonPropertyOrder(1)]
    public required string SchemaVersion { get; init; }

    [JsonPropertyName("issuer")]
    [JsonPropertyOrder(2)]
    public required string Issuer { get; init; }

    [JsonPropertyName("bundleId")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? BundleId { get; set; }

    [JsonPropertyName("issuedAt")]
    [JsonPropertyOrder(4)]
    public required DateTimeOffset IssuedAt { get; init; }

    [JsonPropertyName("validFrom")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? ValidFrom { get; init; }

    [JsonPropertyName("expiresAt")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? ExpiresAt { get; init; }

    [JsonPropertyName("sequence")]
    [JsonPropertyOrder(7)]
    public required long Sequence { get; init; }

    [JsonPropertyName("signingKeyId")]
    [JsonPropertyOrder(8)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SigningKeyId { get; set; }

    [JsonPropertyName("revocations")]
    [JsonPropertyOrder(9)]
    public required List<RevocationEntryModel> Revocations { get; init; }

    [JsonPropertyName("metadata")]
    [JsonPropertyOrder(10)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public SortedDictionary<string, string?>? Metadata { get; init; }
}
@@ -0,0 +1,3 @@
namespace StellaOps.Authority.Revocation;

internal sealed record RevocationBundleSignature(string Algorithm, string KeyId, string Value);
@@ -0,0 +1,112 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Configuration;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Revocation;

internal sealed class RevocationBundleSigner
{
    private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.General)
    {
        PropertyNamingPolicy = null,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    private readonly ICryptoProviderRegistry providerRegistry;
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly ILogger<RevocationBundleSigner> logger;

    public RevocationBundleSigner(
        ICryptoProviderRegistry providerRegistry,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        ILogger<RevocationBundleSigner> logger)
    {
        this.providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry));
        this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<RevocationBundleSignature> SignAsync(byte[] payload, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(payload);

        var signing = authorityOptions.Signing ?? throw new InvalidOperationException("Authority signing configuration is required to export revocations.");
        if (string.IsNullOrWhiteSpace(signing.ActiveKeyId))
        {
            throw new InvalidOperationException("Authority signing configuration requires an active key identifier.");
        }

        var algorithm = string.IsNullOrWhiteSpace(signing.Algorithm)
            ? SignatureAlgorithms.Es256
            : signing.Algorithm.Trim();

        var keyReference = new CryptoKeyReference(signing.ActiveKeyId, signing.Provider);
        var signer = providerRegistry.ResolveSigner(CryptoCapability.Signing, algorithm, keyReference, signing.Provider);

        var header = new Dictionary<string, object>
        {
            ["alg"] = algorithm,
            ["kid"] = signing.ActiveKeyId,
            ["typ"] = "application/vnd.stellaops.revocation-bundle+jws",
            ["b64"] = false,
            ["crit"] = new[] { "b64" }
        };

        var headerJson = JsonSerializer.Serialize(header, HeaderSerializerOptions);
        var protectedHeader = Base64UrlEncode(Encoding.UTF8.GetBytes(headerJson));

        var signingInputLength = protectedHeader.Length + 1 + payload.Length;
        var buffer = ArrayPool<byte>.Shared.Rent(signingInputLength);
        try
        {
            var headerBytes = Encoding.ASCII.GetBytes(protectedHeader);
            Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length);
            buffer[headerBytes.Length] = (byte)'.';
            Buffer.BlockCopy(payload, 0, buffer, headerBytes.Length + 1, payload.Length);

            var signingInput = new ReadOnlyMemory<byte>(buffer, 0, signingInputLength);
            var signatureBytes = await signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false);
            var encodedSignature = Base64UrlEncode(signatureBytes);
            return new RevocationBundleSignature(algorithm, signing.ActiveKeyId, string.Concat(protectedHeader, "..", encodedSignature));
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    private static string Base64UrlEncode(ReadOnlySpan<byte> value)
    {
        var encoded = Convert.ToBase64String(value);
        var builder = new StringBuilder(encoded.Length);
        foreach (var ch in encoded)
        {
            switch (ch)
            {
                case '+':
                    builder.Append('-');
                    break;
                case '/':
                    builder.Append('_');
                    break;
                case '=':
                    break;
                default:
                    builder.Append(ch);
                    break;
            }
        }

        return builder.ToString();
    }
}
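Note: RevocationBundleSigner emits a detached JWS with an unencoded payload (`"b64": false`, RFC 7797), so the signature value has the shape `<protectedHeader>..<signature>` and the signing input is the ASCII protected header, a '.', and the raw bundle bytes. A minimal verification sketch, assuming an ES256 key whose public half is already loaded into an `ECDsa` instance (key distribution and loading are outside this commit):

// Sketch only: verify the detached JWS produced above.
// "signatureValue" is the exported signature string; "bundleBytes" the decoded bundle JSON.
using System;
using System.Security.Cryptography;
using System.Text;

static byte[] Base64UrlDecode(string value)
{
    var padded = value.Replace('-', '+').Replace('_', '/');
    padded = padded.PadRight(padded.Length + (4 - padded.Length % 4) % 4, '=');
    return Convert.FromBase64String(padded);
}

static bool VerifyDetachedJws(string signatureValue, byte[] bundleBytes, ECDsa publicKey)
{
    // Detached form: <protected header>..<signature> (empty middle segment).
    var parts = signatureValue.Split('.');
    if (parts.Length != 3 || parts[1].Length != 0)
    {
        return false;
    }

    var headerBytes = Encoding.ASCII.GetBytes(parts[0]);
    var signingInput = new byte[headerBytes.Length + 1 + bundleBytes.Length];
    Buffer.BlockCopy(headerBytes, 0, signingInput, 0, headerBytes.Length);
    signingInput[headerBytes.Length] = (byte)'.';
    Buffer.BlockCopy(bundleBytes, 0, signingInput, headerBytes.Length + 1, bundleBytes.Length);

    // ES256 JWS signatures are the raw r||s concatenation (IEEE P1363 format).
    var signature = Base64UrlDecode(parts[2]);
    return publicKey.VerifyData(
        signingInput,
        signature,
        HashAlgorithmName.SHA256,
        DSASignatureFormat.IeeeP1363FixedFieldConcatenation);
}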
@@ -0,0 +1,70 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Authority.Revocation;

internal sealed class RevocationEntryModel
{
    [JsonPropertyName("id")]
    [JsonPropertyOrder(1)]
    public required string Id { get; init; }

    [JsonPropertyName("category")]
    [JsonPropertyOrder(2)]
    public required string Category { get; init; }

    [JsonPropertyName("tokenType")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? TokenType { get; init; }

    [JsonPropertyName("subjectId")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SubjectId { get; init; }

    [JsonPropertyName("clientId")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ClientId { get; init; }

    [JsonPropertyName("reason")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Reason { get; init; }

    [JsonPropertyName("reasonDescription")]
    [JsonPropertyOrder(7)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ReasonDescription { get; init; }

    [JsonPropertyName("revokedAt")]
    [JsonPropertyOrder(8)]
    public DateTimeOffset RevokedAt { get; init; }

    [JsonPropertyName("effectiveAt")]
    [JsonPropertyOrder(9)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? EffectiveAt { get; init; }

    [JsonPropertyName("expiresAt")]
    [JsonPropertyOrder(10)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? ExpiresAt { get; init; }

    [JsonPropertyName("scopes")]
    [JsonPropertyOrder(11)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public List<string>? Scopes { get; init; }

    [JsonPropertyName("fingerprint")]
    [JsonPropertyOrder(12)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Fingerprint { get; init; }

    [JsonPropertyName("metadata")]
    [JsonPropertyOrder(13)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public SortedDictionary<string, string?>? Metadata { get; init; }
}
@@ -0,0 +1,5 @@
namespace StellaOps.Authority.Revocation;

internal sealed record RevocationExportPackage(
    RevocationBundleBuildResult Bundle,
    RevocationBundleSignature Signature);
@@ -0,0 +1,70 @@
using System;
using System.Text.Json.Serialization;

namespace StellaOps.Authority.Revocation;

internal sealed class RevocationExportResponse
{
    [JsonPropertyName("schemaVersion")]
    public required string SchemaVersion { get; init; }

    [JsonPropertyName("bundleId")]
    public required string BundleId { get; init; }

    [JsonPropertyName("sequence")]
    public required long Sequence { get; init; }

    [JsonPropertyName("issuedAt")]
    public required DateTimeOffset IssuedAt { get; init; }

    [JsonPropertyName("signingKeyId")]
    public string? SigningKeyId { get; init; }

    [JsonPropertyName("bundle")]
    public required RevocationExportPayload Bundle { get; init; }

    [JsonPropertyName("signature")]
    public required RevocationExportSignature Signature { get; init; }

    [JsonPropertyName("digest")]
    public required RevocationExportDigest Digest { get; init; }
}

internal sealed class RevocationExportPayload
{
    [JsonPropertyName("fileName")]
    public string FileName { get; init; } = "revocation-bundle.json";

    [JsonPropertyName("contentType")]
    public string ContentType { get; init; } = "application/json";

    [JsonPropertyName("encoding")]
    public string Encoding { get; init; } = "base64";

    [JsonPropertyName("data")]
    public required string Data { get; init; }
}

internal sealed class RevocationExportSignature
{
    [JsonPropertyName("fileName")]
    public string FileName { get; init; } = "revocation-bundle.json.jws";

    [JsonPropertyName("algorithm")]
    public required string Algorithm { get; init; }

    [JsonPropertyName("keyId")]
    public required string KeyId { get; init; }

    [JsonPropertyName("value")]
    public required string Value { get; init; }
}

internal sealed class RevocationExportDigest
{
    [JsonPropertyName("algorithm")]
    public string Algorithm { get; init; } = "sha256";

    [JsonPropertyName("value")]
    public required string Value { get; init; }
}
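As a consumption aid (not part of the diff), the base64 payload carried by this response can be decoded and cross-checked against the advertised digest before the bundle is trusted. A sketch, assuming the digest value is hex-encoded SHA-256 as the default `sha256` algorithm suggests; the helper class name is illustrative.

using System;
using System.Security.Cryptography;

// Illustrative helper: decode the bundle and confirm it matches the digest.
internal static class RevocationExportReader
{
    public static byte[] ExtractBundle(RevocationExportResponse response)
    {
        var bundleBytes = Convert.FromBase64String(response.Bundle.Data);

        var computed = Convert.ToHexString(SHA256.HashData(bundleBytes));
        if (!string.Equals(computed, response.Digest.Value, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Revocation bundle digest mismatch.");
        }

        return bundleBytes;
    }
}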
@@ -0,0 +1,93 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Signing;

internal sealed class AuthorityJwksService
{
    private readonly ICryptoProviderRegistry registry;
    private readonly ILogger<AuthorityJwksService> logger;

    public AuthorityJwksService(ICryptoProviderRegistry registry, ILogger<AuthorityJwksService> logger)
    {
        this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public AuthorityJwksResponse Build() => new(BuildKeys());

    private IReadOnlyCollection<JwksKeyEntry> BuildKeys()
    {
        var keys = new List<JwksKeyEntry>();
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var provider in registry.Providers)
        {
            foreach (var signingKey in provider.GetSigningKeys())
            {
                var keyId = signingKey.Reference.KeyId;
                if (!seen.Add(keyId))
                {
                    continue;
                }

                try
                {
                    var signer = provider.GetSigner(signingKey.AlgorithmId, signingKey.Reference);
                    var jwk = signer.ExportPublicJsonWebKey();
                    var entry = new JwksKeyEntry
                    {
                        Kid = jwk.Kid,
                        Kty = jwk.Kty,
                        Use = string.IsNullOrWhiteSpace(jwk.Use) ? "sig" : jwk.Use,
                        Alg = jwk.Alg,
                        Crv = jwk.Crv,
                        X = jwk.X,
                        Y = jwk.Y,
                        Status = signingKey.Metadata.TryGetValue("status", out var status) ? status : "active"
                    };
                    keys.Add(entry);
                }
                catch (Exception ex)
                {
                    logger.LogWarning(ex, "Failed to export JWKS entry for key {KeyId}.", keyId);
                }
            }
        }

        return keys;
    }
}

internal sealed record AuthorityJwksResponse([property: JsonPropertyName("keys")] IReadOnlyCollection<JwksKeyEntry> Keys);

internal sealed class JwksKeyEntry
{
    [JsonPropertyName("kty")]
    public string? Kty { get; set; }

    [JsonPropertyName("use")]
    public string? Use { get; set; }

    [JsonPropertyName("kid")]
    public string? Kid { get; set; }

    [JsonPropertyName("alg")]
    public string? Alg { get; set; }

    [JsonPropertyName("crv")]
    public string? Crv { get; set; }

    [JsonPropertyName("x")]
    public string? X { get; set; }

    [JsonPropertyName("y")]
    public string? Y { get; set; }

    [JsonPropertyName("status")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Status { get; set; }
}
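For callers of the JWKS document, an EC entry can be turned back into a verification key by decoding its coordinates. A rough sketch, assuming P-256 keys (ES256) and base64url-encoded `x`/`y` values; the extension class is illustrative and not part of this change.

using System;
using System.Security.Cryptography;

// Illustrative helper: materialise an ECDsa public key from a JWKS entry.
internal static class JwksKeyEntryExtensions
{
    public static ECDsa ToECDsa(this JwksKeyEntry entry)
    {
        var parameters = new ECParameters
        {
            Curve = ECCurve.NamedCurves.nistP256,
            Q = new ECPoint
            {
                X = Base64UrlDecode(entry.X!),
                Y = Base64UrlDecode(entry.Y!)
            }
        };

        return ECDsa.Create(parameters);
    }

    private static byte[] Base64UrlDecode(string value)
    {
        var padded = value.Replace('-', '+').Replace('_', '/');
        padded = padded.PadRight(padded.Length + (4 - padded.Length % 4) % 4, '=');
        return Convert.FromBase64String(padded);
    }
}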
@@ -0,0 +1,392 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Configuration;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Signing;

internal sealed class AuthoritySigningKeyManager
{
    private readonly object syncRoot = new();
    private readonly ICryptoProviderRegistry registry;
    private readonly IReadOnlyList<IAuthoritySigningKeySource> keySources;
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly string basePath;
    private readonly ILogger<AuthoritySigningKeyManager> logger;
    private RegisteredSigningKey? activeKey;
    private readonly Dictionary<string, RegisteredSigningKey> retiredKeys = new(StringComparer.OrdinalIgnoreCase);

    public AuthoritySigningKeyManager(
        ICryptoProviderRegistry registry,
        IEnumerable<IAuthoritySigningKeySource> keySources,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        IHostEnvironment environment,
        ILogger<AuthoritySigningKeyManager> logger)
    {
        this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
        if (keySources is null)
        {
            throw new ArgumentNullException(nameof(keySources));
        }

        this.keySources = keySources.ToArray();
        if (this.keySources.Count == 0)
        {
            throw new InvalidOperationException("At least one Authority signing key source must be registered.");
        }

        this.authorityOptions = authorityOptions?.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
        basePath = environment?.ContentRootPath ?? throw new ArgumentNullException(nameof(environment));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));

        LoadInitialKeys();
    }

    public SigningRotationResult Rotate(SigningRotationRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        lock (syncRoot)
        {
            var signing = authorityOptions.Signing ?? throw new InvalidOperationException("Authority signing configuration is not available.");
            if (!signing.Enabled)
            {
                throw new InvalidOperationException("Signing is disabled. Enable signing before rotating keys.");
            }

            var keyId = (request.KeyId ?? string.Empty).Trim();
            if (string.IsNullOrWhiteSpace(keyId))
            {
                throw new InvalidOperationException("Rotation requires a keyId.");
            }

            var location = request.Location?.Trim();
            if (string.IsNullOrWhiteSpace(location))
            {
                throw new InvalidOperationException("Rotation requires a keyPath/location for the new signing key.");
            }

            var algorithm = NormaliseAlgorithm(string.IsNullOrWhiteSpace(request.Algorithm)
                ? signing.Algorithm
                : request.Algorithm);
            var source = NormaliseSource(string.IsNullOrWhiteSpace(request.Source)
                ? signing.KeySource
                : request.Source);
            var providerName = NormaliseProviderName(request.Provider ?? signing.Provider);

            IReadOnlyDictionary<string, string?>? metadata = null;
            if (request.Metadata is not null && request.Metadata.Count > 0)
            {
                metadata = new Dictionary<string, string?>(request.Metadata, StringComparer.OrdinalIgnoreCase);
            }

            var provider = ResolveProvider(providerName, algorithm);
            var loader = ResolveSource(source);
            var loadRequest = new AuthoritySigningKeyRequest(
                keyId,
                algorithm,
                source,
                location,
                AuthoritySigningKeyStatus.Active,
                basePath,
                provider.Name,
                additionalMetadata: metadata);
            var newKey = loader.Load(loadRequest);
            provider.UpsertSigningKey(newKey);

            if (retiredKeys.Remove(keyId))
            {
                logger.LogInformation("Promoted retired signing key {KeyId} to active status.", keyId);
            }

            string? previousKeyId = null;
            if (activeKey is not null)
            {
                previousKeyId = activeKey.Key.Reference.KeyId;
                if (!string.Equals(previousKeyId, keyId, StringComparison.OrdinalIgnoreCase))
                {
                    RetireCurrentActive();
                }
            }

            activeKey = new RegisteredSigningKey(newKey, provider.Name, source, location);
            signing.ActiveKeyId = keyId;
            signing.KeyPath = location;
            signing.KeySource = source;
            signing.Provider = provider.Name;

            RemoveAdditionalOption(keyId);

            logger.LogInformation("Authority signing key rotated. Active key is now {KeyId} via provider {Provider}.", keyId, provider.Name);

            return new SigningRotationResult(
                keyId,
                provider.Name,
                source,
                location,
                previousKeyId,
                retiredKeys.Keys.ToArray());
        }
    }

    public SigningKeySnapshot Snapshot
    {
        get
        {
            lock (syncRoot)
            {
                var active = activeKey;
                return new SigningKeySnapshot(
                    active?.Key.Reference.KeyId,
                    active?.ProviderName,
                    active?.Source,
                    active?.Location,
                    retiredKeys.Values
                        .Select(static registration => new SigningKeySnapshot.RetiredKey(
                            registration.Key.Reference.KeyId,
                            registration.ProviderName,
                            registration.Source,
                            registration.Location))
                        .ToArray());
            }
        }
    }

    private void LoadInitialKeys()
    {
        var signing = authorityOptions.Signing;
        if (signing is null || !signing.Enabled)
        {
            logger.LogInformation("Authority signing is disabled; JWKS will expose ephemeral keys until signing is enabled.");
            return;
        }

        var algorithm = NormaliseAlgorithm(signing.Algorithm);
        var source = NormaliseSource(signing.KeySource);
        var activeRequest = new AuthoritySigningKeyRequest(
            signing.ActiveKeyId,
            algorithm,
            source,
            signing.KeyPath,
            AuthoritySigningKeyStatus.Active,
            basePath,
            NormaliseProviderName(signing.Provider));
        activeKey = LoadAndRegister(activeRequest);
        signing.KeySource = source;
        signing.Provider = activeKey.ProviderName;

        foreach (var additional in signing.AdditionalKeys)
        {
            var keyId = (additional.KeyId ?? string.Empty).Trim();
            if (string.IsNullOrWhiteSpace(keyId))
            {
                logger.LogWarning("Skipped additional signing key with empty keyId.");
                continue;
            }

            if (string.Equals(keyId, activeKey.Key.Reference.KeyId, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            var additionalLocation = additional.Path?.Trim();
            if (string.IsNullOrWhiteSpace(additionalLocation))
            {
                logger.LogWarning("Additional signing key {KeyId} is missing a path. Skipping.", keyId);
                continue;
            }

            var additionalSource = NormaliseSource(additional.Source ?? source);
            var request = new AuthoritySigningKeyRequest(
                keyId,
                algorithm,
                additionalSource,
                additionalLocation,
                AuthoritySigningKeyStatus.Retired,
                basePath,
                NormaliseProviderName(signing.Provider));

            try
            {
                var registration = LoadAndRegister(request);
                retiredKeys[registration.Key.Reference.KeyId] = registration;
                additional.Source = additionalSource;
            }
            catch (Exception ex)
            {
                logger.LogWarning(ex, "Failed to load retired signing key {KeyId}. It will be ignored for JWKS responses.", keyId);
            }
        }
    }

    private void RetireCurrentActive()
    {
        if (activeKey is null)
        {
            return;
        }

        var previous = activeKey;
        var metadata = new Dictionary<string, string?>(previous.Key.Metadata, StringComparer.OrdinalIgnoreCase)
        {
            ["status"] = AuthoritySigningKeyStatus.Retired
        };

        var retiredKey = new CryptoSigningKey(
            previous.Key.Reference,
            previous.Key.AlgorithmId,
            in previous.Key.PrivateParameters,
            previous.Key.CreatedAt,
            previous.Key.ExpiresAt,
            metadata);

        var provider = ResolveProvider(previous.ProviderName, retiredKey.AlgorithmId);
        provider.UpsertSigningKey(retiredKey);

        var registration = new RegisteredSigningKey(retiredKey, provider.Name, previous.Source, previous.Location);
        retiredKeys[registration.Key.Reference.KeyId] = registration;
        UpsertAdditionalOption(registration);

        logger.LogInformation("Moved signing key {KeyId} to retired set (provider {Provider}).", registration.Key.Reference.KeyId, provider.Name);
    }

    private RegisteredSigningKey LoadAndRegister(AuthoritySigningKeyRequest request)
    {
        var source = ResolveSource(request.Source);
        var provider = ResolveProvider(request.Provider, request.Algorithm);
        var key = source.Load(request);
        provider.UpsertSigningKey(key);

        logger.LogDebug("Loaded signing key {KeyId} (status {Status}) via provider {Provider}.", key.Reference.KeyId, request.Status, provider.Name);

        return new RegisteredSigningKey(key, provider.Name, request.Source, request.Location);
    }

    private IAuthoritySigningKeySource ResolveSource(string source)
    {
        foreach (var loader in keySources)
        {
            if (loader.CanLoad(source))
            {
                return loader;
            }
        }

        throw new InvalidOperationException($"No signing key source registered for '{source}'.");
    }

    private ICryptoProvider ResolveProvider(string? preferredProvider, string algorithm)
    {
        var normalised = NormaliseProviderName(preferredProvider);
        if (!string.IsNullOrWhiteSpace(normalised) &&
            registry.TryResolve(normalised!, out var provider) &&
            provider.Supports(CryptoCapability.Signing, algorithm))
        {
            return provider;
        }

        return registry.ResolveOrThrow(CryptoCapability.Signing, algorithm);
    }

    private void UpsertAdditionalOption(RegisteredSigningKey registration)
    {
        var additional = authorityOptions.Signing.AdditionalKeys;
        for (var index = 0; index < additional.Count; index++)
        {
            var entry = additional[index];
            if (string.Equals(entry.KeyId, registration.Key.Reference.KeyId, StringComparison.OrdinalIgnoreCase))
            {
                entry.Path = registration.Location;
                entry.Source = registration.Source;
                return;
            }
        }

        additional.Add(new AuthoritySigningAdditionalKeyOptions
        {
            KeyId = registration.Key.Reference.KeyId,
            Path = registration.Location,
            Source = registration.Source
        });
    }

    private void RemoveAdditionalOption(string keyId)
    {
        var additional = authorityOptions.Signing.AdditionalKeys;
        for (var index = additional.Count - 1; index >= 0; index--)
        {
            if (string.Equals(additional[index].KeyId, keyId, StringComparison.OrdinalIgnoreCase))
            {
                additional.RemoveAt(index);
            }
        }
    }

    private static string NormaliseAlgorithm(string? algorithm)
    {
        return string.IsNullOrWhiteSpace(algorithm)
            ? SignatureAlgorithms.Es256
            : algorithm.Trim();
    }

    private static string NormaliseSource(string? source)
    {
        return string.IsNullOrWhiteSpace(source) ? "file" : source.Trim();
    }

    private static string? NormaliseProviderName(string? provider)
    {
        return string.IsNullOrWhiteSpace(provider) ? null : provider.Trim();
    }

    private sealed record RegisteredSigningKey(
        CryptoSigningKey Key,
        string ProviderName,
        string Source,
        string Location);
}

internal sealed record SigningRotationResult(
    string ActiveKeyId,
    string ActiveProvider,
    string ActiveSource,
    string ActiveLocation,
    string? PreviousKeyId,
    IReadOnlyCollection<string> RetiredKeyIds);

internal sealed class SigningKeySnapshot
{
    public SigningKeySnapshot(
        string? activeKeyId,
        string? activeProvider,
        string? activeSource,
        string? activeLocation,
        IReadOnlyCollection<RetiredKey> retired)
    {
        ActiveKeyId = activeKeyId;
        ActiveProvider = activeProvider;
        ActiveSource = activeSource;
        ActiveLocation = activeLocation;
        Retired = retired ?? Array.Empty<RetiredKey>();
    }

    public string? ActiveKeyId { get; }

    public string? ActiveProvider { get; }

    public string? ActiveSource { get; }

    public string? ActiveLocation { get; }

    public IReadOnlyCollection<RetiredKey> Retired { get; }

    public sealed record RetiredKey(
        string KeyId,
        string Provider,
        string Source,
        string Location);
}
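To show how the manager is expected to be driven, here is a hedged usage sketch. The `SigningRotationRequest` shape (KeyId, Location, Algorithm, Source, Provider, Metadata) is inferred from the reads in `Rotate` above; the object-initializer syntax, the example values, and the `manager`/`logger` instances are assumptions, not part of this change.

// Illustrative only — the request's initializer syntax and all values are assumptions.
var result = manager.Rotate(new SigningRotationRequest
{
    KeyId = "authority-signing-2026",
    Location = "../certificates/authority-signing-2026.pem",
    Algorithm = "ES256",
    Source = "file",
    Metadata = new Dictionary<string, string?> { ["rotatedBy"] = "ops" }
});

logger.LogInformation(
    "Rotation complete: active={Active}, previous={Previous}, retired={Retired}",
    result.ActiveKeyId,
    result.PreviousKeyId,
    string.Join(", ", result.RetiredKeyIds));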
@@ -0,0 +1,57 @@
using System;
using System.Collections.Generic;

namespace StellaOps.Authority.Signing;

internal sealed class AuthoritySigningKeyRequest
{
    public AuthoritySigningKeyRequest(
        string keyId,
        string algorithm,
        string source,
        string location,
        string status,
        string basePath,
        string? provider = null,
        DateTimeOffset? createdAt = null,
        DateTimeOffset? expiresAt = null,
        IReadOnlyDictionary<string, string?>? additionalMetadata = null)
    {
        KeyId = keyId ?? throw new ArgumentNullException(nameof(keyId));
        Algorithm = string.IsNullOrWhiteSpace(algorithm)
            ? throw new ArgumentException("Algorithm identifier is required.", nameof(algorithm))
            : algorithm;
        Source = string.IsNullOrWhiteSpace(source)
            ? throw new ArgumentException("Signing key source is required.", nameof(source))
            : source;
        Location = location ?? throw new ArgumentNullException(nameof(location));
        Status = string.IsNullOrWhiteSpace(status)
            ? throw new ArgumentException("Signing key status is required.", nameof(status))
            : status;
        BasePath = basePath ?? throw new ArgumentNullException(nameof(basePath));
        Provider = provider;
        CreatedAt = createdAt;
        ExpiresAt = expiresAt;
        AdditionalMetadata = additionalMetadata;
    }

    public string KeyId { get; }

    public string Algorithm { get; }

    public string Source { get; }

    public string Location { get; }

    public string Status { get; }

    public string BasePath { get; }

    public string? Provider { get; }

    public DateTimeOffset? CreatedAt { get; }

    public DateTimeOffset? ExpiresAt { get; }

    public IReadOnlyDictionary<string, string?>? AdditionalMetadata { get; }
}
@@ -0,0 +1,8 @@
namespace StellaOps.Authority.Signing;

internal static class AuthoritySigningKeyStatus
{
    public const string Active = "active";
    public const string Retired = "retired";
    public const string Disabled = "disabled";
}
@@ -0,0 +1,99 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Signing;

internal sealed class FileAuthoritySigningKeySource : IAuthoritySigningKeySource
{
    private readonly ILogger<FileAuthoritySigningKeySource> logger;

    public FileAuthoritySigningKeySource(ILogger<FileAuthoritySigningKeySource> logger)
    {
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public bool CanLoad(string source)
        => string.Equals(source, "file", StringComparison.OrdinalIgnoreCase);

    public CryptoSigningKey Load(AuthoritySigningKeyRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        var path = ResolvePath(request.BasePath, request.Location);
        if (!File.Exists(path))
        {
            throw new FileNotFoundException($"Authority signing key '{request.KeyId}' not found.", path);
        }

        var pem = File.ReadAllText(path);

        using var ecdsa = ECDsa.Create();
        try
        {
            ecdsa.ImportFromPem(pem);
        }
        catch (CryptographicException ex)
        {
            logger.LogError(ex, "Failed to load Authority signing key {KeyId} from {Path}.", request.KeyId, path);
            throw new InvalidOperationException("Failed to import Authority signing key. Ensure the PEM is an unencrypted EC private key.", ex);
        }

        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);

        var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
        {
            ["source"] = Path.GetFullPath(path),
            ["loader"] = "file",
            ["status"] = request.Status
        };

        if (!string.IsNullOrWhiteSpace(request.Provider))
        {
            metadata["provider"] = request.Provider;
        }

        if (request.AdditionalMetadata is not null)
        {
            foreach (var pair in request.AdditionalMetadata)
            {
                metadata[pair.Key] = pair.Value;
            }
        }

        metadata["status"] = request.Status;

        logger.LogInformation("Loaded Authority signing key {KeyId} from {Path}.", request.KeyId, path);

        return new CryptoSigningKey(
            new CryptoKeyReference(request.KeyId, request.Provider),
            request.Algorithm,
            in parameters,
            request.CreatedAt ?? DateTimeOffset.UtcNow,
            request.ExpiresAt,
            metadata);
    }

    private static string ResolvePath(string basePath, string location)
    {
        if (string.IsNullOrWhiteSpace(location))
        {
            throw new InvalidOperationException("Signing key location is required.");
        }

        if (Path.IsPathRooted(location))
        {
            return location;
        }

        if (string.IsNullOrWhiteSpace(basePath))
        {
            return Path.GetFullPath(location);
        }

        return Path.GetFullPath(Path.Combine(basePath, location));
    }
}
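For completeness, a key file this loader can import may also be produced in-process (operations tooling or openssl would normally handle this). A minimal sketch, assuming an unencrypted EC P-256 key and an example output path; none of the names below come from the diff itself.

using System;
using System.IO;
using System.Security.Cryptography;

// Illustrative: generate an ES256-compatible private key as SEC1 "EC PRIVATE KEY" PEM,
// which ImportFromPem in the loader above accepts.
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var pem = new string(PemEncoding.Write("EC PRIVATE KEY", ecdsa.ExportECPrivateKey()));
File.WriteAllText("certificates/authority-signing-2026.pem", pem);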