Compare commits
65 Commits
feature/do
...
3d01bf9edc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3d01bf9edc | ||
|
|
68bc53a07b | ||
|
|
4b124fb056 | ||
|
|
7c24ed96ee | ||
|
|
11597679ed | ||
|
|
e3f28a21ab | ||
|
|
a403979177 | ||
|
|
b8641b1959 | ||
|
|
98e6b76584 | ||
|
|
862bb6ed80 | ||
|
|
bd2529502e | ||
|
|
965cbf9574 | ||
|
|
af30fc322f | ||
|
|
e53a282fbe | ||
|
|
d907729778 | ||
|
|
8a72779c16 | ||
|
|
e0f6efecce | ||
|
|
98934170ca | ||
|
|
69651212ec | ||
|
|
53889d85e7 | ||
|
|
0de92144d2 | ||
|
|
9bd6a73926 | ||
|
|
4042fc2184 | ||
|
|
dd0067ea0b | ||
|
|
f6c22854a4 | ||
|
|
05597616d6 | ||
|
|
a6f1406509 | ||
|
|
0a8f8c14af | ||
|
|
7efee7dd41 | ||
|
|
952ba77924 | ||
|
|
23e463e346 | ||
|
|
849a70f9d1 | ||
|
|
868f8e0bb6 | ||
|
|
84c42ca2d8 | ||
|
|
efd6850c38 | ||
|
|
2b892ad1b2 | ||
|
|
e16d2b5224 | ||
|
|
5e514532df | ||
|
|
2141196496 | ||
|
|
bca02ec295 | ||
|
|
8cabdce3b6 | ||
|
|
6145d89468 | ||
|
|
ee317d3f61 | ||
|
|
4cc8bdb460 | ||
|
|
95ff83e0f0 | ||
|
|
3954615e81 | ||
|
|
8948b1a3e2 | ||
|
|
5cfcf0723a | ||
|
|
ba733b9f69 | ||
|
|
79d562ea5d | ||
|
|
a7cd10020a | ||
|
|
b978ae399f | ||
|
|
570746b7d9 | ||
|
|
8318b26370 | ||
|
|
1f76650b7e | ||
|
|
37304cf819 | ||
|
|
6beb9d7c4e | ||
|
|
be8c623e04 | ||
|
|
dd4bb50076 | ||
|
|
bf6ab6ba6f | ||
|
|
02849cc955 | ||
|
|
2eaf0f699b | ||
|
|
6c1177a6ce | ||
|
|
582a88e8f8 | ||
|
|
f0662dd45f |
@@ -1,8 +1,31 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(dotnet --list-sdks:*)",
|
||||
"Bash(winget install:*)",
|
||||
"Bash(dotnet restore:*)",
|
||||
"Bash(dotnet nuget:*)",
|
||||
"Bash(csc -parse:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(dotnet build:*)",
|
||||
"Bash(cat:*)",
|
||||
"Bash(copy:*)",
|
||||
"Bash(dotnet test:*)",
|
||||
"Bash(dir:*)",
|
||||
"Bash(Select-Object -ExpandProperty FullName)",
|
||||
"Bash(echo:*)",
|
||||
"Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
|
||||
"Bash(wc:*)",
|
||||
"Bash(sort:*)"
|
||||
"Bash(find:*)",
|
||||
"WebFetch(domain:docs.gradle.org)",
|
||||
"WebSearch",
|
||||
"Bash(dotnet msbuild:*)",
|
||||
"Bash(test:*)",
|
||||
"Bash(taskkill:*)",
|
||||
"Bash(timeout /t)",
|
||||
"Bash(dotnet clean:*)",
|
||||
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
|
||||
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")"
|
||||
],
|
||||
"deny": [],
|
||||
"ask": []
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
aoc-guard:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
steps:
|
||||
- name: Checkout
|
||||
@@ -72,7 +72,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
if: github.event_name != 'schedule'
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
|
||||
steps:
|
||||
|
||||
128
.gitea/workflows/artifact-signing.yml
Normal file
128
.gitea/workflows/artifact-signing.yml
Normal file
@@ -0,0 +1,128 @@
|
||||
name: Artifact Signing
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
artifact_path:
|
||||
description: 'Path to artifact to sign'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
env:
|
||||
COSIGN_VERSION: 'v2.2.0'
|
||||
|
||||
jobs:
|
||||
sign-containers:
|
||||
name: Sign Container Images
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3
|
||||
with:
|
||||
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||
|
||||
- name: Log in to registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Sign images (keyless)
|
||||
if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
|
||||
env:
|
||||
COSIGN_EXPERIMENTAL: "1"
|
||||
run: |
|
||||
IMAGES=(
|
||||
"ghcr.io/${{ github.repository }}/concelier"
|
||||
"ghcr.io/${{ github.repository }}/scanner"
|
||||
"ghcr.io/${{ github.repository }}/authority"
|
||||
)
|
||||
for img in "${IMAGES[@]}"; do
|
||||
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||
cosign sign --yes "${img}:${{ github.ref_name }}"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Sign images (with key)
|
||||
if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
|
||||
env:
|
||||
COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||
run: |
|
||||
echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
|
||||
IMAGES=(
|
||||
"ghcr.io/${{ github.repository }}/concelier"
|
||||
"ghcr.io/${{ github.repository }}/scanner"
|
||||
"ghcr.io/${{ github.repository }}/authority"
|
||||
)
|
||||
for img in "${IMAGES[@]}"; do
|
||||
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||
cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
|
||||
fi
|
||||
done
|
||||
rm -f /tmp/cosign.key
|
||||
|
||||
sign-sbom:
|
||||
name: Sign SBOM Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3
|
||||
with:
|
||||
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||
|
||||
- name: Generate and sign SBOM
|
||||
run: |
|
||||
# Generate SBOM using syft
|
||||
if command -v syft &> /dev/null; then
|
||||
syft . -o cyclonedx-json > sbom.cdx.json
|
||||
cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
|
||||
else
|
||||
echo "syft not installed, skipping SBOM generation"
|
||||
fi
|
||||
|
||||
- name: Upload signed artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: signed-sbom
|
||||
path: |
|
||||
sbom.cdx.json
|
||||
sbom.cdx.json.sig
|
||||
if-no-files-found: ignore
|
||||
|
||||
verify-signatures:
|
||||
name: Verify Existing Signatures
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3
|
||||
with:
|
||||
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||
|
||||
- name: Verify DSSE envelopes
|
||||
run: |
|
||||
find . -name "*.dsse" -o -name "*.dsse.json" | while read f; do
|
||||
echo "Checking $f..."
|
||||
# Basic JSON validation
|
||||
if ! jq empty "$f" 2>/dev/null; then
|
||||
echo "Warning: Invalid JSON in $f"
|
||||
fi
|
||||
done
|
||||
@@ -37,7 +37,7 @@ on:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
BUILD_CONFIGURATION: Release
|
||||
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
|
||||
RUNNER_TOOL_CACHE: /toolcache
|
||||
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
||||
dotnet-version: "10.0.100"
|
||||
|
||||
- name: Install syft (SBOM)
|
||||
uses: anchore/sbom-action/download-syft@v0
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
||||
dotnet-version: "10.0.100"
|
||||
|
||||
- name: Chaos smoke
|
||||
if: ${{ github.event.inputs.chaos == 'true' }}
|
||||
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
- name: Setup .NET 10 preview
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.100-rc.2.25502.107'
|
||||
dotnet-version: '10.0.100'
|
||||
|
||||
- name: Restore Concelier solution
|
||||
run: dotnet restore src/Concelier/StellaOps.Concelier.sln
|
||||
|
||||
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
name: Concelier STORE-AOC-19-005 Dataset
|
||||
|
||||
on:
|
||||
workflow_dispatch: {}
|
||||
|
||||
jobs:
|
||||
build-dataset:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get update && sudo apt-get install -y zstd
|
||||
|
||||
- name: Build dataset tarball
|
||||
run: |
|
||||
chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
|
||||
scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||
|
||||
- name: Validate dataset
|
||||
run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||
|
||||
- name: Upload dataset artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: concelier-store-aoc-19-005-dataset
|
||||
path: |
|
||||
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
|
||||
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
|
||||
@@ -1,86 +1,58 @@
|
||||
name: Console CI
|
||||
name: console-ci
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'src/UI/**'
|
||||
- '.gitea/workflows/console-ci.yml'
|
||||
- 'docs/modules/devops/console-ci-contract.md'
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
paths:
|
||||
- 'src/UI/**'
|
||||
- 'src/Web/**'
|
||||
- '.gitea/workflows/console-ci.yml'
|
||||
- 'docs/modules/devops/console-ci-contract.md'
|
||||
- 'ops/devops/console/**'
|
||||
|
||||
jobs:
|
||||
console-ci:
|
||||
runs-on: ubuntu-22.04
|
||||
lint-test-build:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
working-directory: src/Web
|
||||
env:
|
||||
PNPM_HOME: ~/.pnpm
|
||||
PLAYWRIGHT_BROWSERS_PATH: ./.playwright
|
||||
SOURCE_DATE_EPOCH: ${{ github.run_id }}
|
||||
PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
|
||||
CI: true
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Node.js 20
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: npm
|
||||
cache-dependency-path: src/Web/package-lock.json
|
||||
|
||||
- name: Enable pnpm
|
||||
- name: Install deps (offline-friendly)
|
||||
run: npm ci --prefer-offline --no-audit --progress=false
|
||||
|
||||
- name: Lint
|
||||
run: npm run lint -- --no-progress
|
||||
|
||||
- name: Console export specs (targeted)
|
||||
run: bash ./scripts/ci-console-exports.sh
|
||||
|
||||
- name: Build
|
||||
run: npm run build -- --configuration=production --progress=false
|
||||
|
||||
- name: Collect artifacts
|
||||
if: always()
|
||||
run: |
|
||||
corepack enable
|
||||
corepack prepare pnpm@9 --activate
|
||||
|
||||
- name: Cache pnpm store & node_modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.pnpm-store
|
||||
node_modules
|
||||
./.pnpm-store
|
||||
./.playwright
|
||||
key: console-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Install dependencies (offline-first)
|
||||
env:
|
||||
PNPM_FETCH_RETRIES: 0
|
||||
PNPM_OFFLINE: 1
|
||||
run: |
|
||||
pnpm install --frozen-lockfile || PNPM_OFFLINE=0 pnpm install --frozen-lockfile --prefer-offline
|
||||
|
||||
- name: Lint / Types
|
||||
run: pnpm lint && pnpm format:check && pnpm typecheck
|
||||
|
||||
- name: Unit tests
|
||||
run: pnpm test -- --runInBand --reporter=junit --outputFile=.artifacts/junit.xml
|
||||
|
||||
- name: Storybook a11y
|
||||
run: |
|
||||
pnpm storybook:build
|
||||
pnpm storybook:a11y --ci --output .artifacts/storybook-a11y.json
|
||||
|
||||
- name: Playwright smoke
|
||||
run: pnpm playwright test --config=playwright.config.ci.ts --reporter=list,junit=.artifacts/playwright.xml
|
||||
|
||||
- name: Lighthouse (CI budgets)
|
||||
run: |
|
||||
pnpm serve --port 4173 &
|
||||
pnpm lhci autorun --config=lighthouserc.ci.js --upload.target=filesystem --upload.outputDir=.artifacts/lhci
|
||||
|
||||
- name: SBOM
|
||||
run: pnpm exec syft packages dir:dist --output=spdx-json=.artifacts/console.spdx.json
|
||||
mkdir -p ../artifacts
|
||||
cp -r dist ../artifacts/dist || true
|
||||
cp -r coverage ../artifacts/coverage || true
|
||||
find . -maxdepth 3 -type f -name "*.xml" -o -name "*.trx" -o -name "*.json" -path "*test*" -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true
|
||||
|
||||
- name: Upload artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: console-ci-artifacts
|
||||
path: .artifacts
|
||||
name: console-ci-${{ github.run_id }}
|
||||
path: artifacts
|
||||
retention-days: 14
|
||||
|
||||
32
.gitea/workflows/console-runner-image.yml
Normal file
32
.gitea/workflows/console-runner-image.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
name: console-runner-image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- 'ops/devops/console/**'
|
||||
- '.gitea/workflows/console-runner-image.yml'
|
||||
|
||||
jobs:
|
||||
build-runner-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Build runner image tarball (baked caches)
|
||||
env:
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
|
||||
ops/devops/console/build-runner-image-ci.sh
|
||||
|
||||
- name: Upload runner image artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: console-runner-image-${{ github.run_id }}
|
||||
path: ops/devops/artifacts/console-runner/
|
||||
retention-days: 14
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
- name: Setup .NET 10 (preview)
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
|
||||
- name: Build CryptoPro plugin
|
||||
run: |
|
||||
|
||||
@@ -47,7 +47,7 @@ jobs:
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.100-rc.2.25502.107'
|
||||
dotnet-version: '10.0.100'
|
||||
|
||||
- name: Link check
|
||||
run: |
|
||||
|
||||
@@ -20,7 +20,7 @@ jobs:
|
||||
export-ci:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
MINIO_ACCESS_KEY: exportci
|
||||
MINIO_SECRET_KEY: exportci123
|
||||
BUCKET: export-ci
|
||||
|
||||
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
@@ -0,0 +1,325 @@
|
||||
# .gitea/workflows/findings-ledger-ci.yml
|
||||
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)
|
||||
|
||||
name: Findings Ledger CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/Findings/**'
|
||||
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||
- 'deploy/releases/2025.09-stable.yaml'
|
||||
- 'deploy/releases/2025.09-airgap.yaml'
|
||||
- 'deploy/downloads/manifest.json'
|
||||
- 'ops/devops/release/check_release_manifest.py'
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
paths:
|
||||
- 'src/Findings/**'
|
||||
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
POSTGRES_IMAGE: postgres:16-alpine
|
||||
BUILD_CONFIGURATION: Release
|
||||
|
||||
jobs:
|
||||
build-test:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore dependencies
|
||||
run: |
|
||||
dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
|
||||
dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
|
||||
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||
/p:ContinuousIntegrationBuild=true
|
||||
|
||||
- name: Run unit tests
|
||||
run: |
|
||||
mkdir -p $TEST_RESULTS_DIR
|
||||
dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
|
||||
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||
--logger "trx;LogFileName=ledger-tests.trx" \
|
||||
--results-directory $TEST_RESULTS_DIR
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: ledger-test-results
|
||||
path: ${{ env.TEST_RESULTS_DIR }}
|
||||
|
||||
migration-validation:
|
||||
runs-on: ubuntu-22.04
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env:
|
||||
POSTGRES_USER: ledgertest
|
||||
POSTGRES_PASSWORD: ledgertest
|
||||
POSTGRES_DB: ledger_test
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
env:
|
||||
PGHOST: localhost
|
||||
PGPORT: 5432
|
||||
PGUSER: ledgertest
|
||||
PGPASSWORD: ledgertest
|
||||
PGDATABASE: ledger_test
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Install PostgreSQL client
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y postgresql-client
|
||||
|
||||
- name: Wait for PostgreSQL
|
||||
run: |
|
||||
until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
|
||||
echo "Waiting for PostgreSQL..."
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Apply prerequisite migrations (001-006)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
|
||||
for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
|
||||
003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
|
||||
005_risk_fields.sql 006_orchestrator_airgap.sql; do
|
||||
if [ -f "$MIGRATION_DIR/$migration" ]; then
|
||||
echo "Applying migration: $migration"
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Apply RLS migration (007_enable_rls.sql)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Applying RLS migration..."
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||
|
||||
- name: Validate RLS configuration
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Validating RLS is enabled on all protected tables..."
|
||||
|
||||
# Check RLS enabled
|
||||
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||
WHERE n.nspname = 'public'
|
||||
AND c.relrowsecurity = true
|
||||
AND c.relname IN (
|
||||
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||
'orchestrator_exports', 'airgap_imports'
|
||||
);
|
||||
")
|
||||
|
||||
if [ "$TABLES_WITH_RLS" -ne 8 ]; then
|
||||
echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ All 8 tables have RLS enabled"
|
||||
|
||||
# Check policies exist
|
||||
POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(DISTINCT tablename)
|
||||
FROM pg_policies
|
||||
WHERE schemaname = 'public'
|
||||
AND policyname LIKE '%_tenant_isolation';
|
||||
")
|
||||
|
||||
if [ "$POLICIES" -ne 8 ]; then
|
||||
echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ All 8 tenant isolation policies created"
|
||||
|
||||
# Check tenant function exists
|
||||
FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_proc p
|
||||
JOIN pg_namespace n ON p.pronamespace = n.oid
|
||||
WHERE p.proname = 'require_current_tenant'
|
||||
AND n.nspname = 'findings_ledger_app';
|
||||
")
|
||||
|
||||
if [ "$FUNC_EXISTS" -ne 1 ]; then
|
||||
echo "::error::Tenant function 'require_current_tenant' not found"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"
|
||||
|
||||
echo ""
|
||||
echo "=== RLS Migration Validation PASSED ==="
|
||||
|
||||
- name: Test rollback migration
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Testing rollback migration..."
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql
|
||||
|
||||
# Verify RLS is disabled
|
||||
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||
WHERE n.nspname = 'public'
|
||||
AND c.relrowsecurity = true
|
||||
AND c.relname IN (
|
||||
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||
'orchestrator_exports', 'airgap_imports'
|
||||
);
|
||||
")
|
||||
|
||||
if [ "$TABLES_WITH_RLS" -ne 0 ]; then
|
||||
echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Rollback successful - RLS disabled on all tables"
|
||||
- name: Validate release manifests (production)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python ops/devops/release/check_release_manifest.py
|
||||
|
||||
- name: Re-apply RLS migration (idempotency check)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Re-applying RLS migration to verify idempotency..."
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||
echo "✓ Migration is idempotent"
|
||||
|
||||
generate-manifest:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [build-test, migration-validation]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Generate migration manifest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
|
||||
ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
|
||||
MANIFEST_DIR="out/findings-ledger/migrations"
|
||||
mkdir -p "$MANIFEST_DIR"
|
||||
|
||||
# Compute SHA256 hashes
|
||||
MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
|
||||
ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
|
||||
CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||
|
||||
cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
|
||||
{
|
||||
"\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
|
||||
"schemaVersion": "1.0.0",
|
||||
"migrationId": "007_enable_rls",
|
||||
"module": "findings-ledger",
|
||||
"version": "2025.12.0",
|
||||
"createdAt": "$CREATED_AT",
|
||||
"description": "Enable Row-Level Security for Findings Ledger tenant isolation",
|
||||
"taskId": "LEDGER-TEN-48-001-DEV",
|
||||
"contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
|
||||
"database": {
|
||||
"engine": "postgresql",
|
||||
"minVersion": "16.0"
|
||||
},
|
||||
"files": {
|
||||
"apply": {
|
||||
"path": "007_enable_rls.sql",
|
||||
"sha256": "$MIGRATION_SHA"
|
||||
},
|
||||
"rollback": {
|
||||
"path": "007_enable_rls_rollback.sql",
|
||||
"sha256": "$ROLLBACK_SHA"
|
||||
}
|
||||
},
|
||||
"affects": {
|
||||
"tables": [
|
||||
"ledger_events",
|
||||
"ledger_merkle_roots",
|
||||
"findings_projection",
|
||||
"finding_history",
|
||||
"triage_actions",
|
||||
"ledger_attestations",
|
||||
"orchestrator_exports",
|
||||
"airgap_imports"
|
||||
],
|
||||
"schemas": ["public", "findings_ledger_app"],
|
||||
"roles": ["findings_ledger_admin"]
|
||||
},
|
||||
"prerequisites": [
|
||||
"006_orchestrator_airgap"
|
||||
],
|
||||
"validation": {
|
||||
"type": "rls-check",
|
||||
"expectedTables": 8,
|
||||
"expectedPolicies": 8,
|
||||
"tenantFunction": "findings_ledger_app.require_current_tenant"
|
||||
},
|
||||
"offlineKit": {
|
||||
"includedInBundle": true,
|
||||
"requiresManualApply": true,
|
||||
"applyOrder": 7
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||
cat "$MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||
|
||||
- name: Copy migration files for offline-kit
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
|
||||
mkdir -p "$OFFLINE_DIR"
|
||||
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
|
||||
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
|
||||
cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
|
||||
echo "Offline-kit migration files prepared"
|
||||
ls -la "$OFFLINE_DIR"
|
||||
|
||||
- name: Upload migration artefacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: findings-ledger-migrations
|
||||
path: out/findings-ledger/
|
||||
if-no-files-found: error
|
||||
@@ -21,7 +21,7 @@ jobs:
|
||||
lnm-backfill:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
vex-backfill:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
||||
125
.gitea/workflows/manifest-integrity.yml
Normal file
125
.gitea/workflows/manifest-integrity.yml
Normal file
@@ -0,0 +1,125 @@
|
||||
name: Manifest Integrity
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'docs/**/*.schema.json'
|
||||
- 'docs/contracts/**'
|
||||
- 'docs/schemas/**'
|
||||
- 'scripts/packs/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'docs/**/*.schema.json'
|
||||
- 'docs/contracts/**'
|
||||
- 'docs/schemas/**'
|
||||
- 'scripts/packs/**'
|
||||
|
||||
jobs:
|
||||
validate-schemas:
|
||||
name: Validate Schema Integrity
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install -g ajv-cli ajv-formats
|
||||
|
||||
- name: Validate JSON schemas
|
||||
run: |
|
||||
EXIT_CODE=0
|
||||
for schema in docs/schemas/*.schema.json; do
|
||||
echo "Validating $schema..."
|
||||
if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
|
||||
echo "Error: $schema is invalid"
|
||||
EXIT_CODE=1
|
||||
fi
|
||||
done
|
||||
exit $EXIT_CODE
|
||||
|
||||
validate-contracts:
|
||||
name: Validate Contract Documents
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check contract structure
|
||||
run: |
|
||||
for contract in docs/contracts/*.md; do
|
||||
echo "Checking $contract..."
|
||||
# Verify required sections exist
|
||||
if ! grep -q "^## " "$contract"; then
|
||||
echo "Warning: $contract missing section headers"
|
||||
fi
|
||||
# Check for decision ID
|
||||
if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
|
||||
echo "Warning: $contract missing decision ID format"
|
||||
fi
|
||||
done
|
||||
|
||||
validate-pack-fixtures:
|
||||
name: Validate Pack Fixtures
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install jsonschema
|
||||
|
||||
- name: Run fixture validation
|
||||
run: |
|
||||
if [ -f scripts/packs/run-fixtures-check.sh ]; then
|
||||
chmod +x scripts/packs/run-fixtures-check.sh
|
||||
./scripts/packs/run-fixtures-check.sh
|
||||
fi
|
||||
|
||||
checksum-audit:
|
||||
name: Audit SHA256SUMS Files
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate checksums
|
||||
run: |
|
||||
find . -name "SHA256SUMS" -type f | while read f; do
|
||||
dir=$(dirname "$f")
|
||||
echo "Validating checksums in $dir..."
|
||||
cd "$dir"
|
||||
# Check if all referenced files exist
|
||||
while read hash file; do
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file referenced in SHA256SUMS but not found"
|
||||
fi
|
||||
done < SHA256SUMS
|
||||
cd - > /dev/null
|
||||
done
|
||||
|
||||
merkle-consistency:
|
||||
name: Verify Merkle Roots
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check DSSE Merkle roots
|
||||
run: |
|
||||
find . -name "*.dsse.json" -type f | while read f; do
|
||||
echo "Checking Merkle root in $f..."
|
||||
# Extract and validate Merkle root if present
|
||||
if jq -e '.payload' "$f" > /dev/null 2>&1; then
|
||||
PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
|
||||
if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
|
||||
MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
|
||||
echo " Merkle root: $MERKLE"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
@@ -18,10 +18,18 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Fallback to dev signing key when secret is absent (non-prod only)
|
||||
run: |
|
||||
if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
|
||||
echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
|
||||
echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
|
||||
echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
|
||||
44
.gitea/workflows/mock-dev-release.yml
Normal file
44
.gitea/workflows/mock-dev-release.yml
Normal file
@@ -0,0 +1,44 @@
|
||||
name: mock-dev-release
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- deploy/releases/2025.09-mock-dev.yaml
|
||||
- deploy/downloads/manifest.json
|
||||
- ops/devops/mock-release/**
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
package-mock-release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Package mock dev artefacts
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p out/mock-release
|
||||
cp deploy/releases/2025.09-mock-dev.yaml out/mock-release/
|
||||
cp deploy/downloads/manifest.json out/mock-release/
|
||||
tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
|
||||
|
||||
- name: Compose config (dev + mock overlay)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ops/devops/mock-release/config_check.sh
|
||||
|
||||
- name: Helm template (mock overlay)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
|
||||
ls -lh /tmp/helm-mock.yaml
|
||||
|
||||
- name: Upload mock release bundle
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: mock-dev-release
|
||||
path: |
|
||||
out/mock-release/mock-dev-release.tgz
|
||||
/tmp/compose-mock-config.yaml
|
||||
/tmp/helm-mock.yaml
|
||||
102
.gitea/workflows/notify-smoke-test.yml
Normal file
102
.gitea/workflows/notify-smoke-test.yml
Normal file
@@ -0,0 +1,102 @@
|
||||
name: Notify Smoke Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/Notify/**'
|
||||
- 'src/Notifier/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'src/Notify/**'
|
||||
- 'src/Notifier/**'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.x'
|
||||
|
||||
jobs:
|
||||
unit-tests:
|
||||
name: Notify Unit Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore src/Notify/
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/Notify/ --no-restore
|
||||
|
||||
- name: Run tests
|
||||
run: dotnet test src/Notify/ --no-build --verbosity normal
|
||||
|
||||
notifier-tests:
|
||||
name: Notifier Service Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore src/Notifier/
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/Notifier/ --no-restore
|
||||
|
||||
- name: Run tests
|
||||
run: dotnet test src/Notifier/ --no-build --verbosity normal
|
||||
|
||||
smoke-test:
|
||||
name: Notification Smoke Test
|
||||
runs-on: ubuntu-latest
|
||||
needs: [unit-tests, notifier-tests]
|
||||
services:
|
||||
mongodb:
|
||||
image: mongo:7.0
|
||||
ports:
|
||||
- 27017:27017
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Build Notifier
|
||||
run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/
|
||||
|
||||
- name: Start service
|
||||
run: |
|
||||
dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
|
||||
sleep 10
|
||||
|
||||
- name: Health check
|
||||
run: |
|
||||
for i in {1..30}; do
|
||||
if curl -s http://localhost:5000/health > /dev/null; then
|
||||
echo "Service is healthy"
|
||||
exit 0
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
echo "Service failed to start"
|
||||
exit 1
|
||||
|
||||
- name: Test notification endpoint
|
||||
run: |
|
||||
# Test dry-run notification
|
||||
curl -X POST http://localhost:5000/api/v1/notifications/test \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
|
||||
|| echo "Warning: Notification test endpoint not available"
|
||||
@@ -35,7 +35,7 @@ jobs:
|
||||
- name: Setup .NET 10 RC
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Cache NuGet packages
|
||||
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
- name: Setup .NET 10 RC
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Install Cosign
|
||||
|
||||
19
.gitea/workflows/release-manifest-verify.yml
Normal file
19
.gitea/workflows/release-manifest-verify.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
name: release-manifest-verify
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- deploy/releases/2025.09-stable.yaml
|
||||
- deploy/releases/2025.09-airgap.yaml
|
||||
- deploy/downloads/manifest.json
|
||||
- ops/devops/release/check_release_manifest.py
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
verify:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Validate release & downloads manifests
|
||||
run: |
|
||||
python ops/devops/release/check_release_manifest.py
|
||||
120
.gitea/workflows/release-validation.yml
Normal file
120
.gitea/workflows/release-validation.yml
Normal file
@@ -0,0 +1,120 @@
|
||||
name: Release Validation
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'deploy/**'
|
||||
- 'scripts/release/**'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.x'
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_PREFIX: stellaops
|
||||
|
||||
jobs:
|
||||
validate-manifests:
|
||||
name: Validate Release Manifests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate Helm charts
|
||||
run: |
|
||||
helm lint deploy/helm/stellaops
|
||||
helm template stellaops deploy/helm/stellaops --dry-run
|
||||
|
||||
- name: Validate Kubernetes manifests
|
||||
run: |
|
||||
for f in deploy/k8s/*.yaml; do
|
||||
kubectl apply --dry-run=client -f "$f" || exit 1
|
||||
done
|
||||
|
||||
- name: Check required images exist
|
||||
run: |
|
||||
REQUIRED_IMAGES=(
|
||||
"concelier"
|
||||
"scanner"
|
||||
"authority"
|
||||
"signer"
|
||||
"attestor"
|
||||
"excititor"
|
||||
"policy"
|
||||
"scheduler"
|
||||
"notify"
|
||||
)
|
||||
for img in "${REQUIRED_IMAGES[@]}"; do
|
||||
echo "Checking $img..."
|
||||
# Validate Dockerfile exists
|
||||
if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "deploy/docker/${img}/Dockerfile" ]; then
|
||||
echo "Warning: Dockerfile not found for $img"
|
||||
fi
|
||||
done
|
||||
|
||||
validate-checksums:
|
||||
name: Validate Artifact Checksums
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Verify SHA256SUMS files
|
||||
run: |
|
||||
find . -name "SHA256SUMS" -type f | while read f; do
|
||||
dir=$(dirname "$f")
|
||||
echo "Validating $f..."
|
||||
cd "$dir"
|
||||
if ! sha256sum -c SHA256SUMS --quiet 2>/dev/null; then
|
||||
echo "Warning: Checksum mismatch in $dir"
|
||||
fi
|
||||
cd - > /dev/null
|
||||
done
|
||||
|
||||
validate-schemas:
|
||||
name: Validate Schema Integrity
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install ajv-cli
|
||||
run: npm install -g ajv-cli ajv-formats
|
||||
|
||||
- name: Validate JSON schemas
|
||||
run: |
|
||||
for schema in docs/schemas/*.schema.json; do
|
||||
echo "Validating $schema..."
|
||||
ajv compile -s "$schema" --spec=draft2020 || echo "Warning: $schema validation issue"
|
||||
done
|
||||
|
||||
release-notes:
|
||||
name: Generate Release Notes
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
needs: [validate-manifests, validate-checksums, validate-schemas]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate changelog
|
||||
run: |
|
||||
PREV_TAG=$(git describe --abbrev=0 --tags HEAD^ 2>/dev/null || echo "")
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
echo "## Changes since $PREV_TAG" > RELEASE_NOTES.md
|
||||
git log --pretty=format:"- %s (%h)" "$PREV_TAG"..HEAD >> RELEASE_NOTES.md
|
||||
else
|
||||
echo "## Initial Release" > RELEASE_NOTES.md
|
||||
fi
|
||||
|
||||
- name: Upload release notes
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release-notes
|
||||
path: RELEASE_NOTES.md
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
build-release:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
REGISTRY: registry.stella-ops.org
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
|
||||
198
.gitea/workflows/risk-bundle-ci.yml
Normal file
198
.gitea/workflows/risk-bundle-ci.yml
Normal file
@@ -0,0 +1,198 @@
|
||||
name: Risk Bundle CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||
- 'ops/devops/risk-bundle/**'
|
||||
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
paths:
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||
- 'ops/devops/risk-bundle/**'
|
||||
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
include_osv:
|
||||
description: 'Include OSV providers (larger bundle)'
|
||||
type: boolean
|
||||
default: false
|
||||
publish_checksums:
|
||||
description: 'Publish checksums to artifact store'
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
risk-bundle-build:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true
|
||||
|
||||
- name: Test RiskBundle unit tests
|
||||
run: |
|
||||
mkdir -p $ARTIFACT_DIR
|
||||
dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
|
||||
-c Release \
|
||||
--filter "FullyQualifiedName~RiskBundle" \
|
||||
--logger "trx;LogFileName=risk-bundle-tests.trx" \
|
||||
--results-directory $ARTIFACT_DIR
|
||||
|
||||
- name: Build risk bundle (fixtures)
|
||||
run: |
|
||||
mkdir -p $BUNDLE_OUTPUT
|
||||
ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only
|
||||
|
||||
- name: Verify bundle integrity
|
||||
run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"
|
||||
|
||||
- name: Generate checksums
|
||||
run: |
|
||||
cd $BUNDLE_OUTPUT
|
||||
sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
|
||||
sha256sum manifest.json > manifest.json.sha256
|
||||
cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
|
||||
echo "Bundle checksums:"
|
||||
cat checksums.txt
|
||||
|
||||
- name: Upload risk bundle artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: |
|
||||
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
|
||||
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
|
||||
${{ env.BUNDLE_OUTPUT }}/manifest.json
|
||||
${{ env.BUNDLE_OUTPUT }}/checksums.txt
|
||||
${{ env.ARTIFACT_DIR }}/*.trx
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: risk-bundle-test-results
|
||||
path: ${{ env.ARTIFACT_DIR }}/*.trx
|
||||
|
||||
risk-bundle-offline-kit:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: risk-bundle-build
|
||||
env:
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download risk bundle artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: ${{ env.ARTIFACT_DIR }}
|
||||
|
||||
- name: Package for offline kit
|
||||
run: |
|
||||
mkdir -p $OFFLINE_KIT_DIR/risk-bundles
|
||||
cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
|
||||
cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
|
||||
cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
|
||||
cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/
|
||||
|
||||
# Create offline kit manifest entry
|
||||
cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
|
||||
{
|
||||
"component": "risk-bundle",
|
||||
"version": "$(date -u +%Y%m%d-%H%M%S)",
|
||||
"files": [
|
||||
{"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
|
||||
{"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
|
||||
],
|
||||
"verification": {
|
||||
"checksums": "checksums.txt",
|
||||
"signature": "risk-bundle.tar.gz.sig"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Verify offline kit structure
|
||||
run: |
|
||||
echo "Offline kit structure:"
|
||||
find $OFFLINE_KIT_DIR -type f
|
||||
echo ""
|
||||
echo "Checksum verification:"
|
||||
cd $OFFLINE_KIT_DIR/risk-bundles
|
||||
sha256sum -c checksums.txt
|
||||
|
||||
- name: Upload offline kit
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-offline-kit
|
||||
path: ${{ env.OFFLINE_KIT_DIR }}
|
||||
|
||||
publish-checksums:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: risk-bundle-build
|
||||
if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
|
||||
env:
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download risk bundle artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: ${{ env.ARTIFACT_DIR }}
|
||||
|
||||
- name: Publish checksums
|
||||
run: |
|
||||
echo "Publishing checksums for risk bundle..."
|
||||
CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
|
||||
mkdir -p $CHECKSUM_DIR
|
||||
cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
|
||||
cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/
|
||||
|
||||
# Create latest symlink manifest
|
||||
cat > out/checksums/risk-bundle/latest.json <<EOF
|
||||
{
|
||||
"date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||
"path": "$(date -u +%Y-%m-%d)/checksums.txt",
|
||||
"manifest": "$(date -u +%Y-%m-%d)/manifest.json"
|
||||
}
|
||||
EOF
|
||||
|
||||
echo "Checksums published to $CHECKSUM_DIR"
|
||||
cat $CHECKSUM_DIR/checksums.txt
|
||||
|
||||
- name: Upload published checksums
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-published-checksums
|
||||
path: out/checksums/risk-bundle/
|
||||
@@ -20,7 +20,7 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
||||
dotnet-version: "10.0.100"
|
||||
|
||||
- name: Install syft (SBOM)
|
||||
uses: anchore/sbom-action/download-syft@v0
|
||||
|
||||
133
.gitea/workflows/scanner-analyzers.yml
Normal file
133
.gitea/workflows/scanner-analyzers.yml
Normal file
@@ -0,0 +1,133 @@
|
||||
name: Scanner Analyzers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.x'
|
||||
|
||||
jobs:
|
||||
discover-analyzers:
|
||||
name: Discover Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
analyzers: ${{ steps.find.outputs.analyzers }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Find analyzer projects
|
||||
id: find
|
||||
run: |
|
||||
ANALYZERS=$(find src/Scanner/__Libraries -name "StellaOps.Scanner.Analyzers.*.csproj" -exec dirname {} \; | xargs -I {} basename {} | sort -u | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo "analyzers=$ANALYZERS" >> $GITHUB_OUTPUT
|
||||
|
||||
build-analyzers:
|
||||
name: Build Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
needs: discover-analyzers
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
analyzer: ${{ fromJson(needs.discover-analyzers.outputs.analyzers) }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/Scanner/__Libraries/${{ matrix.analyzer }}/
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/Scanner/__Libraries/${{ matrix.analyzer }}/ --no-restore
|
||||
|
||||
test-lang-analyzers:
|
||||
name: Test Language Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-analyzers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Run Bun analyzer tests
|
||||
run: |
|
||||
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests" ]; then
|
||||
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/ --verbosity normal
|
||||
fi
|
||||
|
||||
- name: Run Node analyzer tests
|
||||
run: |
|
||||
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests" ]; then
|
||||
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/ --verbosity normal
|
||||
fi
|
||||
|
||||
fixture-validation:
|
||||
name: Validate Test Fixtures
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate fixture structure
|
||||
run: |
|
||||
find src/Scanner/__Tests -name "expected.json" | while read f; do
|
||||
echo "Validating $f..."
|
||||
if ! jq empty "$f" 2>/dev/null; then
|
||||
echo "Error: Invalid JSON in $f"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check fixture completeness
|
||||
run: |
|
||||
find src/Scanner/__Tests -type d -name "Fixtures" | while read fixtures_dir; do
|
||||
echo "Checking $fixtures_dir..."
|
||||
find "$fixtures_dir" -mindepth 1 -maxdepth 1 -type d | while read test_case; do
|
||||
if [ ! -f "$test_case/expected.json" ]; then
|
||||
echo "Warning: $test_case missing expected.json"
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
determinism-check:
|
||||
name: Verify Deterministic Output
|
||||
runs-on: ubuntu-latest
|
||||
needs: test-lang-analyzers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Run determinism tests
|
||||
run: |
|
||||
# Run scanner on same input twice, compare outputs
|
||||
if [ -d "tests/fixtures/determinism" ]; then
|
||||
dotnet test --filter "Category=Determinism" --verbosity normal
|
||||
fi
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
||||
dotnet-version: "10.0.100"
|
||||
|
||||
- name: Run determinism harness
|
||||
run: |
|
||||
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
- name: Setup .NET 10 RC
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Cache NuGet packages
|
||||
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
- name: Setup .NET 10 RC
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.2.25502.107
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Cache NuGet packages
|
||||
|
||||
449
.gitea/workflows/wine-csp-build.yml
Normal file
449
.gitea/workflows/wine-csp-build.yml
Normal file
@@ -0,0 +1,449 @@
|
||||
name: wine-csp-build
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
paths:
|
||||
- 'src/__Tools/WineCspService/**'
|
||||
- 'ops/wine-csp/**'
|
||||
- 'third_party/forks/AlexMAS.GostCryptography/**'
|
||||
- '.gitea/workflows/wine-csp-build.yml'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'src/__Tools/WineCspService/**'
|
||||
- 'ops/wine-csp/**'
|
||||
- 'third_party/forks/AlexMAS.GostCryptography/**'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
push:
|
||||
description: "Push to registry"
|
||||
required: false
|
||||
default: "false"
|
||||
version:
|
||||
description: "Version tag (e.g., 2025.10.0-edge)"
|
||||
required: false
|
||||
default: "2025.10.0-edge"
|
||||
skip_tests:
|
||||
description: "Skip integration tests"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
env:
|
||||
IMAGE_NAME: registry.stella-ops.org/stellaops/wine-csp
|
||||
DOCKERFILE: ops/wine-csp/Dockerfile
|
||||
# Wine CSP only supports linux/amd64 (Wine ARM64 has compatibility issues with Windows x64 apps)
|
||||
PLATFORMS: linux/amd64
|
||||
PYTHON_VERSION: "3.11"
|
||||
|
||||
jobs:
|
||||
# ===========================================================================
|
||||
# Job 1: Build Docker Image
|
||||
# ===========================================================================
|
||||
build:
|
||||
name: Build Wine CSP Image
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
outputs:
|
||||
image_tag: ${{ steps.version.outputs.tag }}
|
||||
image_digest: ${{ steps.build.outputs.digest }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: Set version tag
|
||||
id: version
|
||||
run: |
|
||||
if [[ -n "${{ github.event.inputs.version }}" ]]; then
|
||||
echo "tag=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT
|
||||
elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||
echo "tag=2025.10.0-edge" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=pr-${{ github.event.pull_request.number || github.sha }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=raw,value=${{ steps.version.outputs.tag }}
|
||||
type=sha,format=short
|
||||
|
||||
- name: Build image
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: ${{ env.DOCKERFILE }}
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Save image for testing
|
||||
run: |
|
||||
mkdir -p /tmp/images
|
||||
docker save "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" | gzip > /tmp/images/wine-csp.tar.gz
|
||||
|
||||
- name: Upload image artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images/wine-csp.tar.gz
|
||||
retention-days: 1
|
||||
|
||||
# ===========================================================================
|
||||
# Job 2: Integration Tests
|
||||
# ===========================================================================
|
||||
test:
|
||||
name: Integration Tests
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: ${{ github.event.inputs.skip_tests != 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download image artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install test dependencies
|
||||
run: |
|
||||
pip install -r ops/wine-csp/tests/requirements.txt
|
||||
|
||||
- name: Start Wine CSP container
|
||||
id: container
|
||||
run: |
|
||||
echo "Starting Wine CSP container..."
|
||||
docker run -d --name wine-csp-test \
|
||||
-e WINE_CSP_MODE=limited \
|
||||
-e WINE_CSP_LOG_LEVEL=Debug \
|
||||
-p 5099:5099 \
|
||||
"${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||
|
||||
echo "container_id=$(docker ps -q -f name=wine-csp-test)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Wait for service startup
|
||||
run: |
|
||||
echo "Waiting for Wine CSP service to be ready (up to 120s)..."
|
||||
for i in $(seq 1 24); do
|
||||
if curl -sf http://127.0.0.1:5099/health > /dev/null 2>&1; then
|
||||
echo "Service ready after $((i * 5))s"
|
||||
exit 0
|
||||
fi
|
||||
echo "Waiting... ($((i * 5))s elapsed)"
|
||||
sleep 5
|
||||
done
|
||||
echo "Service failed to start!"
|
||||
docker logs wine-csp-test
|
||||
exit 1
|
||||
|
||||
- name: Run integration tests (pytest)
|
||||
id: pytest
|
||||
run: |
|
||||
mkdir -p test-results
|
||||
export WINE_CSP_URL=http://127.0.0.1:5099
|
||||
|
||||
pytest ops/wine-csp/tests/test_wine_csp.py \
|
||||
-v \
|
||||
--tb=short \
|
||||
--junitxml=test-results/junit.xml \
|
||||
--timeout=60 \
|
||||
-x \
|
||||
2>&1 | tee test-results/pytest-output.txt
|
||||
|
||||
- name: Run shell integration tests
|
||||
if: always()
|
||||
run: |
|
||||
chmod +x ops/wine-csp/tests/run-tests.sh
|
||||
ops/wine-csp/tests/run-tests.sh \
|
||||
--url http://127.0.0.1:5099 \
|
||||
--ci \
|
||||
--verbose || true
|
||||
|
||||
- name: Collect container logs
|
||||
if: always()
|
||||
run: |
|
||||
docker logs wine-csp-test > test-results/container.log 2>&1 || true
|
||||
|
||||
- name: Stop container
|
||||
if: always()
|
||||
run: |
|
||||
docker stop wine-csp-test || true
|
||||
docker rm wine-csp-test || true
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: wine-csp-test-results
|
||||
path: test-results/
|
||||
|
||||
- name: Publish test results
|
||||
uses: mikepenz/action-junit-report@v4
|
||||
if: always()
|
||||
with:
|
||||
report_paths: 'test-results/junit.xml'
|
||||
check_name: 'Wine CSP Integration Tests'
|
||||
fail_on_failure: true
|
||||
|
||||
# ===========================================================================
|
||||
# Job 3: Security Scan
|
||||
# ===========================================================================
|
||||
security:
|
||||
name: Security Scan
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
permissions:
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download image artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||
|
||||
- name: Run Trivy vulnerability scanner
|
||||
uses: aquasecurity/trivy-action@master
|
||||
with:
|
||||
image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
ignore-unfixed: true
|
||||
|
||||
- name: Upload Trivy scan results
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
if: always()
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
|
||||
- name: Run Trivy for JSON report
|
||||
uses: aquasecurity/trivy-action@master
|
||||
with:
|
||||
image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||
format: 'json'
|
||||
output: 'trivy-results.json'
|
||||
severity: 'CRITICAL,HIGH,MEDIUM'
|
||||
|
||||
- name: Upload Trivy JSON report
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wine-csp-security-scan
|
||||
path: trivy-results.json
|
||||
|
||||
# ===========================================================================
|
||||
# Job 4: Generate SBOM
|
||||
# ===========================================================================
|
||||
sbom:
|
||||
name: Generate SBOM
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
|
||||
steps:
|
||||
- name: Download image artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||
|
||||
- name: Install syft
|
||||
uses: anchore/sbom-action/download-syft@v0
|
||||
|
||||
- name: Generate SBOM (SPDX)
|
||||
run: |
|
||||
mkdir -p out/sbom
|
||||
syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
|
||||
-o spdx-json=out/sbom/wine-csp.spdx.json
|
||||
|
||||
- name: Generate SBOM (CycloneDX)
|
||||
run: |
|
||||
syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
|
||||
-o cyclonedx-json=out/sbom/wine-csp.cdx.json
|
||||
|
||||
- name: Upload SBOM artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wine-csp-sbom-${{ needs.build.outputs.image_tag }}
|
||||
path: out/sbom/
|
||||
|
||||
# ===========================================================================
|
||||
# Job 5: Publish (only on main branch or manual trigger)
|
||||
# ===========================================================================
|
||||
publish:
|
||||
name: Publish Image
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, test, security]
|
||||
if: ${{ (github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main')) && needs.test.result == 'success' }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Download image artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
gunzip -c /tmp/images/wine-csp.tar.gz | docker load
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3.7.0
|
||||
|
||||
- name: Login to registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: registry.stella-ops.org
|
||||
username: ${{ secrets.REGISTRY_USER }}
|
||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||
|
||||
- name: Push to registry
|
||||
run: |
|
||||
docker push "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
|
||||
|
||||
# Also tag as latest if on main
|
||||
if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||
docker tag "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" "${{ env.IMAGE_NAME }}:latest"
|
||||
docker push "${{ env.IMAGE_NAME }}:latest"
|
||||
fi
|
||||
|
||||
- name: Sign image with cosign
|
||||
env:
|
||||
COSIGN_EXPERIMENTAL: "1"
|
||||
run: |
|
||||
cosign sign --yes "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" || echo "Signing skipped (no OIDC available)"
|
||||
|
||||
- name: Create release summary
|
||||
run: |
|
||||
echo "## Wine CSP Image Published" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Image:** \`${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**WARNING:** This image is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# ===========================================================================
|
||||
# Job 6: Air-Gap Bundle
|
||||
# ===========================================================================
|
||||
airgap:
|
||||
name: Air-Gap Bundle
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, test]
|
||||
if: ${{ needs.test.result == 'success' }}
|
||||
|
||||
steps:
|
||||
- name: Download image artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-image
|
||||
path: /tmp/images
|
||||
|
||||
- name: Create air-gap bundle
|
||||
run: |
|
||||
mkdir -p out/bundles
|
||||
|
||||
# Copy the image tarball
|
||||
cp /tmp/images/wine-csp.tar.gz out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz
|
||||
|
||||
# Generate bundle manifest
|
||||
cat > out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.manifest.json <<EOF
|
||||
{
|
||||
"name": "wine-csp",
|
||||
"version": "${{ needs.build.outputs.image_tag }}",
|
||||
"image": "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}",
|
||||
"platform": "linux/amd64",
|
||||
"sha256": "$(sha256sum out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz | cut -d' ' -f1)",
|
||||
"created": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||
"git_commit": "${{ github.sha }}",
|
||||
"git_ref": "${{ github.ref }}",
|
||||
"warning": "FOR TEST VECTOR GENERATION ONLY - NOT FOR PRODUCTION SIGNING"
|
||||
}
|
||||
EOF
|
||||
|
||||
# Create checksums file
|
||||
cd out/bundles
|
||||
sha256sum *.tar.gz *.json > SHA256SUMS
|
||||
|
||||
echo "Air-gap bundle contents:"
|
||||
ls -lh
|
||||
|
||||
- name: Upload air-gap bundle
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wine-csp-bundle-${{ needs.build.outputs.image_tag }}
|
||||
path: out/bundles/
|
||||
|
||||
# ===========================================================================
|
||||
# Job 7: Test Summary
|
||||
# ===========================================================================
|
||||
summary:
|
||||
name: Test Summary
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, test, security, sbom]
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: Download test results
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wine-csp-test-results
|
||||
path: test-results/
|
||||
continue-on-error: true
|
||||
|
||||
- name: Create summary
|
||||
run: |
|
||||
echo "## Wine CSP Build Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Stage | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Build | ${{ needs.build.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Tests | ${{ needs.test.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Security | ${{ needs.security.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| SBOM | ${{ needs.sbom.result }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Image Tag:** \`${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "---" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**SECURITY WARNING:** Wine CSP is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY
|
||||
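For local debugging of the pipeline above, the same build-and-test loop can be approximated outside CI. This is a minimal sketch, assuming Docker, Python 3.11, and the repository paths used by the workflow; the `wine-csp-local` tag is illustrative only.

```bash
# Build the image with the same Dockerfile the workflow uses.
docker build -f ops/wine-csp/Dockerfile -t wine-csp-local .

# Start the service in limited mode, mirroring the test job.
docker run -d --name wine-csp-test \
  -e WINE_CSP_MODE=limited \
  -e WINE_CSP_LOG_LEVEL=Debug \
  -p 5099:5099 \
  wine-csp-local

# Wait for the health endpoint, then run the pytest suite against it.
until curl -sf http://127.0.0.1:5099/health >/dev/null; do sleep 5; done
pip install -r ops/wine-csp/tests/requirements.txt
WINE_CSP_URL=http://127.0.0.1:5099 pytest ops/wine-csp/tests/test_wine_csp.py -v --timeout=60

# Clean up.
docker stop wine-csp-test && docker rm wine-csp-test
```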
3
.gitignore
vendored
@@ -64,3 +64,6 @@ coverage/
|
||||
local-nugets/
|
||||
local-nuget/
|
||||
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
||||
.nuget-cache/
|
||||
.nuget-packages2/
|
||||
.nuget-temp/
|
||||
@@ -1,23 +1,46 @@
|
||||
<Project>
|
||||
|
||||
<PropertyGroup>
|
||||
|
||||
<StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
|
||||
<StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
|
||||
<StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
|
||||
<StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
|
||||
<_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
|
||||
<_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
|
||||
<RestorePackagesPath Condition="'$(RestorePackagesPath)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot).nuget/packages'))</RestorePackagesPath>
|
||||
<RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
|
||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
|
||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
|
||||
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
|
||||
<NoWarn>$(NoWarn);NU1608;NU1605</NoWarn>
|
||||
<WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605</WarningsNotAsErrors>
|
||||
<RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605</RestoreNoWarn>
|
||||
<RestoreWarningsAsErrors></RestoreWarningsAsErrors>
|
||||
<RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
|
||||
<RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
|
||||
<RestoreFallbackFolders>clear</RestoreFallbackFolders>
|
||||
<RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes>
|
||||
<RestoreAdditionalProjectFallbackFolders>clear</RestoreAdditionalProjectFallbackFolders>
|
||||
<RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes>
|
||||
<RestoreAdditionalFallbackFolders>clear</RestoreAdditionalFallbackFolders>
|
||||
<RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes>
|
||||
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
|
||||
<DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
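As a sketch of how these MSBuild properties are meant to be consumed (assuming the repository builds with the .NET 10 SDK pinned elsewhere in this changeset), the CryptoPro code paths can be toggled per invocation instead of editing the file:

```bash
# Default build: STELLAOPS_CRYPTO_PRO is not defined.
dotnet build

# Opt-in build: sets StellaOpsEnableCryptoPro, which defines STELLAOPS_CRYPTO_PRO above.
dotnet build -p:StellaOpsEnableCryptoPro=true

# Restore uses the pinned RestoreConfigFile/RestoreSources automatically; the local feed
# path can still be overridden explicitly if the checkout layout differs.
dotnet restore -p:StellaOpsLocalNuGetSource="$PWD/local-nugets/"
```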
@@ -1 +1,4 @@
|
||||
/nowarn:CA2022
|
||||
/p:DisableWorkloadResolver=true
|
||||
/p:RestoreAdditionalProjectFallbackFolders=
|
||||
/p:RestoreFallbackFolders=
|
||||
|
||||
18
NuGet.config
@@ -2,18 +2,14 @@
|
||||
<configuration>
|
||||
<packageSources>
|
||||
<clear />
|
||||
<add key="local" value="local-nugets" />
|
||||
<add key="ablera-mirror" value="https://mirrors.ablera.dev/nuget/nuget-mirror/v3/index.json" />
|
||||
<add key="local-nugets" value="./local-nugets" />
|
||||
<add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
|
||||
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
|
||||
</packageSources>
|
||||
<config>
|
||||
<add key="globalPackagesFolder" value="local-nugets/packages" />
|
||||
<add key="globalPackagesFolder" value="./.nuget/packages" />
|
||||
</config>
|
||||
<packageSourceMapping>
|
||||
<packageSource key="local">
|
||||
<package pattern="*" />
|
||||
</packageSource>
|
||||
<packageSource key="ablera-mirror">
|
||||
<package pattern="*" />
|
||||
</packageSource>
|
||||
</packageSourceMapping>
|
||||
<fallbackPackageFolders>
|
||||
<clear />
|
||||
</fallbackPackageFolders>
|
||||
</configuration>
|
||||
|
||||
@@ -10,6 +10,7 @@ This directory contains deterministic deployment bundles for the core Stella Ops
|
||||
- `compose/docker-compose.telemetry.yaml` – optional OpenTelemetry collector overlay (mutual TLS, OTLP pipelines).
|
||||
- `compose/docker-compose.telemetry-storage.yaml` – optional Prometheus/Tempo/Loki stack for observability backends.
|
||||
- `helm/stellaops/` – multi-profile Helm chart with values files for dev/stage/airgap.
|
||||
- `helm/stellaops/INSTALL.md` – install/runbook for prod and airgap profiles with digest pins.
|
||||
- `telemetry/` – shared OpenTelemetry collector configuration and certificate artefacts (generated via tooling).
|
||||
- `tools/validate-profiles.sh` – helper that runs `docker compose config` and `helm lint/template` for every profile.
|
||||
|
||||
|
||||
@@ -13,7 +13,12 @@ These Compose bundles ship the minimum services required to exercise the scanner
|
||||
| `docker-compose.mirror.yaml` | Managed mirror topology for `*.stella-ops.org` distribution (Concelier + Excititor + CDN gateway). |
|
||||
| `docker-compose.telemetry.yaml` | Optional OpenTelemetry collector overlay (mutual TLS, OTLP ingest endpoints). |
|
||||
| `docker-compose.telemetry-storage.yaml` | Prometheus/Tempo/Loki storage overlay with multi-tenant defaults. |
|
||||
| `docker-compose.gpu.yaml` | Optional GPU overlay enabling NVIDIA devices for Advisory AI web/worker. Apply with `-f docker-compose.<env>.yaml -f docker-compose.gpu.yaml`. |
|
||||
| `env/*.env.example` | Seed `.env` files that document required secrets and ports per profile. |
|
||||
| `scripts/backup.sh` | Pauses workers and creates tar.gz of Mongo/MinIO/Redis volumes (deterministic snapshot). |
|
||||
| `scripts/reset.sh` | Stops the stack and removes Mongo/MinIO/Redis volumes after explicit confirmation. |
|
||||
| `scripts/quickstart.sh` | Helper to validate config and start dev stack; set `USE_MOCK=1` to include `docker-compose.mock.yaml` overlay. |
|
||||
| `docker-compose.mock.yaml` | Dev-only overlay with placeholder digests for missing services (orchestrator, policy-registry, packs, task-runner, VEX/Vuln stack). Use only with mock release manifest `deploy/releases/2025.09-mock-dev.yaml`. |
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -101,4 +106,29 @@ The Helm chart mirrors these settings under `services.advisory-ai-web` / `adviso
|
||||
2. Update image digests in the relevant Compose file(s).
|
||||
3. Re-run `docker compose config` to confirm the bundle is deterministic.
|
||||
|
||||
### Mock overlay for missing digests (dev only)
|
||||
|
||||
Until official digests land, you can exercise Compose packaging with mock placeholders:
|
||||
|
||||
```bash
|
||||
# assumes docker-compose.dev.yaml as the base profile
|
||||
USE_MOCK=1 ./scripts/quickstart.sh env/dev.env.example
|
||||
```
|
||||
|
||||
The overlay pins the missing services (orchestrator, policy-registry, packs-registry, task-runner, VEX/Vuln stack) to mock digests from `deploy/releases/2025.09-mock-dev.yaml` and starts their real entrypoints so integration flows can be exercised end-to-end. Replace the mock pins with production digests once releases publish; keep the mock overlay dev-only.
|
||||
|
||||
Keep digests synchronized between Compose, Helm, and the release manifest to preserve reproducibility guarantees. `deploy/tools/validate-profiles.sh` performs a quick audit.
|
||||
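A minimal audit pass, as referenced above, might look like the following; the `check-channel-alignment.py` flags mirror the Helm install guide later in this changeset, so treat the exact invocation as illustrative.

```bash
# Lint/template every Compose and Helm profile in the bundle.
deploy/tools/validate-profiles.sh

# Cross-check that Compose/Helm digests match the release manifest.
deploy/tools/check-channel-alignment.py \
  --manifest deploy/releases/2025.09-stable.yaml \
  --values deploy/helm/stellaops/values-prod.yaml
```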
|
||||
### GPU toggle for Advisory AI
|
||||
|
||||
GPU is disabled by default. To run inference on NVIDIA GPUs:
|
||||
|
||||
```bash
|
||||
docker compose \
|
||||
--env-file prod.env \
|
||||
-f docker-compose.prod.yaml \
|
||||
-f docker-compose.gpu.yaml \
|
||||
up -d
|
||||
```
|
||||
|
||||
The GPU overlay requests one GPU for `advisory-ai-worker` and `advisory-ai-web` and sets `ADVISORY_AI_INFERENCE_GPU=true`. Ensure the host has the NVIDIA container runtime and that the base compose file still sets the correct digests.
|
||||
|
||||
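To confirm the GPU overlay actually reached the containers, a quick check is sketched below; it assumes the stack is already up, the NVIDIA container runtime is installed, and `nvidia-smi` is available inside the image.

```bash
# Device visibility inside the worker container.
docker compose \
  --env-file prod.env \
  -f docker-compose.prod.yaml \
  -f docker-compose.gpu.yaml \
  exec advisory-ai-worker nvidia-smi

# The overlay should also have set the inference flag.
docker compose \
  --env-file prod.env \
  -f docker-compose.prod.yaml \
  -f docker-compose.gpu.yaml \
  exec advisory-ai-worker printenv ADVISORY_AI_INFERENCE_GPU
```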
191
deploy/compose/docker-compose.cas.yaml
Normal file
@@ -0,0 +1,191 @@
|
||||
# Content Addressable Storage (CAS) Infrastructure
|
||||
# Uses RustFS for S3-compatible immutable object storage
|
||||
# Aligned with best-in-class vulnerability scanner retention policies
|
||||
#
|
||||
# Usage:
|
||||
# docker compose -f docker-compose.cas.yaml up -d
|
||||
# docker compose -f docker-compose.cas.yaml -f docker-compose.dev.yaml up -d
|
||||
|
||||
x-release-labels: &release-labels
|
||||
com.stellaops.release.version: "2025.10.0-edge"
|
||||
com.stellaops.release.channel: "edge"
|
||||
com.stellaops.profile: "cas"
|
||||
|
||||
x-cas-config: &cas-config
|
||||
# Retention policies (aligned with Trivy/Grype/Anchore Enterprise)
|
||||
# - vulnerability-db: 7 days (matches Trivy default)
|
||||
# - sbom-artifacts: 365 days (audit compliance)
|
||||
# - scan-results: 90 days (SOC2/ISO27001 typical)
|
||||
# - evidence-bundles: indefinite (immutable, content-addressed)
|
||||
# - attestations: indefinite (in-toto/DSSE signed)
|
||||
CAS__RETENTION__VULNERABILITY_DB_DAYS: "7"
|
||||
CAS__RETENTION__SBOM_ARTIFACTS_DAYS: "365"
|
||||
CAS__RETENTION__SCAN_RESULTS_DAYS: "90"
|
||||
CAS__RETENTION__EVIDENCE_BUNDLES_DAYS: "0" # 0 = indefinite
|
||||
CAS__RETENTION__ATTESTATIONS_DAYS: "0" # 0 = indefinite
|
||||
CAS__RETENTION__TEMP_ARTIFACTS_DAYS: "1"
|
||||
|
||||
networks:
|
||||
cas:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
rustfs-cas-data:
|
||||
driver: local
|
||||
driver_opts:
|
||||
type: none
|
||||
o: bind
|
||||
device: ${CAS_DATA_PATH:-/var/lib/stellaops/cas}
|
||||
rustfs-evidence-data:
|
||||
driver: local
|
||||
driver_opts:
|
||||
type: none
|
||||
o: bind
|
||||
device: ${CAS_EVIDENCE_PATH:-/var/lib/stellaops/evidence}
|
||||
rustfs-attestation-data:
|
||||
driver: local
|
||||
driver_opts:
|
||||
type: none
|
||||
o: bind
|
||||
device: ${CAS_ATTESTATION_PATH:-/var/lib/stellaops/attestations}
|
||||
|
||||
services:
|
||||
# Primary CAS storage - runtime facts, signals, replay artifacts
|
||||
rustfs-cas:
|
||||
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||
RUSTFS__STORAGE__PATH: /data
|
||||
RUSTFS__STORAGE__DEDUP: "true"
|
||||
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||
RUSTFS__STORAGE__COMPRESSION_LEVEL: "${RUSTFS_COMPRESSION_LEVEL:-3}"
|
||||
# Bucket lifecycle (retention enforcement)
|
||||
RUSTFS__LIFECYCLE__ENABLED: "true"
|
||||
RUSTFS__LIFECYCLE__SCAN_INTERVAL_HOURS: "24"
|
||||
RUSTFS__LIFECYCLE__DEFAULT_RETENTION_DAYS: "90"
|
||||
# Access control
|
||||
RUSTFS__AUTH__ENABLED: "${RUSTFS_AUTH_ENABLED:-true}"
|
||||
RUSTFS__AUTH__API_KEY: "${RUSTFS_CAS_API_KEY:-cas-api-key-change-me}"
|
||||
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_CAS_READONLY_KEY:-cas-readonly-key-change-me}"
|
||||
# Service account configuration
|
||||
RUSTFS__ACCOUNTS__SCANNER__KEY: "${RUSTFS_SCANNER_KEY:-scanner-svc-key}"
|
||||
RUSTFS__ACCOUNTS__SCANNER__BUCKETS: "scanner-artifacts,surface-cache,runtime-facts"
|
||||
RUSTFS__ACCOUNTS__SCANNER__PERMISSIONS: "read,write"
|
||||
RUSTFS__ACCOUNTS__SIGNALS__KEY: "${RUSTFS_SIGNALS_KEY:-signals-svc-key}"
|
||||
RUSTFS__ACCOUNTS__SIGNALS__BUCKETS: "runtime-facts,signals-data,provenance-feed"
|
||||
RUSTFS__ACCOUNTS__SIGNALS__PERMISSIONS: "read,write"
|
||||
RUSTFS__ACCOUNTS__REPLAY__KEY: "${RUSTFS_REPLAY_KEY:-replay-svc-key}"
|
||||
RUSTFS__ACCOUNTS__REPLAY__BUCKETS: "replay-bundles,inputs-lock"
|
||||
RUSTFS__ACCOUNTS__REPLAY__PERMISSIONS: "read,write"
|
||||
RUSTFS__ACCOUNTS__READONLY__KEY: "${RUSTFS_READONLY_KEY:-readonly-svc-key}"
|
||||
RUSTFS__ACCOUNTS__READONLY__BUCKETS: "*"
|
||||
RUSTFS__ACCOUNTS__READONLY__PERMISSIONS: "read"
|
||||
<<: *cas-config
|
||||
volumes:
|
||||
- rustfs-cas-data:/data
|
||||
ports:
|
||||
- "${RUSTFS_CAS_PORT:-8180}:8080"
|
||||
networks:
|
||||
- cas
|
||||
labels: *release-labels
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# Evidence storage - Merkle roots, hash chains, evidence bundles (immutable)
|
||||
rustfs-evidence:
|
||||
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data", "--immutable"]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||
RUSTFS__STORAGE__PATH: /data
|
||||
RUSTFS__STORAGE__DEDUP: "true"
|
||||
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||
RUSTFS__STORAGE__IMMUTABLE: "true" # Write-once, never delete
|
||||
# Access control
|
||||
RUSTFS__AUTH__ENABLED: "true"
|
||||
RUSTFS__AUTH__API_KEY: "${RUSTFS_EVIDENCE_API_KEY:-evidence-api-key-change-me}"
|
||||
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_EVIDENCE_READONLY_KEY:-evidence-readonly-key-change-me}"
|
||||
# Service accounts
|
||||
RUSTFS__ACCOUNTS__LEDGER__KEY: "${RUSTFS_LEDGER_KEY:-ledger-svc-key}"
|
||||
RUSTFS__ACCOUNTS__LEDGER__BUCKETS: "evidence-bundles,merkle-roots,hash-chains"
|
||||
RUSTFS__ACCOUNTS__LEDGER__PERMISSIONS: "read,write"
|
||||
RUSTFS__ACCOUNTS__EXPORTER__KEY: "${RUSTFS_EXPORTER_KEY:-exporter-svc-key}"
|
||||
RUSTFS__ACCOUNTS__EXPORTER__BUCKETS: "evidence-bundles"
|
||||
RUSTFS__ACCOUNTS__EXPORTER__PERMISSIONS: "read"
|
||||
volumes:
|
||||
- rustfs-evidence-data:/data
|
||||
ports:
|
||||
- "${RUSTFS_EVIDENCE_PORT:-8181}:8080"
|
||||
networks:
|
||||
- cas
|
||||
labels: *release-labels
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# Attestation storage - DSSE envelopes, in-toto attestations (immutable)
|
||||
rustfs-attestation:
|
||||
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data", "--immutable"]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||
RUSTFS__STORAGE__PATH: /data
|
||||
RUSTFS__STORAGE__DEDUP: "true"
|
||||
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||
RUSTFS__STORAGE__IMMUTABLE: "true" # Write-once, never delete
|
||||
# Access control
|
||||
RUSTFS__AUTH__ENABLED: "true"
|
||||
RUSTFS__AUTH__API_KEY: "${RUSTFS_ATTESTATION_API_KEY:-attestation-api-key-change-me}"
|
||||
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_ATTESTATION_READONLY_KEY:-attestation-readonly-key-change-me}"
|
||||
# Service accounts
|
||||
RUSTFS__ACCOUNTS__ATTESTOR__KEY: "${RUSTFS_ATTESTOR_KEY:-attestor-svc-key}"
|
||||
RUSTFS__ACCOUNTS__ATTESTOR__BUCKETS: "attestations,dsse-envelopes,rekor-receipts"
|
||||
RUSTFS__ACCOUNTS__ATTESTOR__PERMISSIONS: "read,write"
|
||||
RUSTFS__ACCOUNTS__VERIFIER__KEY: "${RUSTFS_VERIFIER_KEY:-verifier-svc-key}"
|
||||
RUSTFS__ACCOUNTS__VERIFIER__BUCKETS: "attestations,dsse-envelopes,rekor-receipts"
|
||||
RUSTFS__ACCOUNTS__VERIFIER__PERMISSIONS: "read"
|
||||
volumes:
|
||||
- rustfs-attestation-data:/data
|
||||
ports:
|
||||
- "${RUSTFS_ATTESTATION_PORT:-8182}:8080"
|
||||
networks:
|
||||
- cas
|
||||
labels: *release-labels
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# Lifecycle manager - enforces retention policies
|
||||
cas-lifecycle:
|
||||
image: registry.stella-ops.org/stellaops/cas-lifecycle:2025.10.0-edge
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
rustfs-cas:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
LIFECYCLE__CAS__ENDPOINT: "http://rustfs-cas:8080"
|
||||
LIFECYCLE__CAS__API_KEY: "${RUSTFS_CAS_API_KEY:-cas-api-key-change-me}"
|
||||
LIFECYCLE__SCHEDULE__CRON: "${LIFECYCLE_CRON:-0 3 * * *}" # 3 AM daily
|
||||
LIFECYCLE__POLICIES__VULNERABILITY_DB: "7d"
|
||||
LIFECYCLE__POLICIES__SBOM_ARTIFACTS: "365d"
|
||||
LIFECYCLE__POLICIES__SCAN_RESULTS: "90d"
|
||||
LIFECYCLE__POLICIES__TEMP_ARTIFACTS: "1d"
|
||||
LIFECYCLE__TELEMETRY__ENABLED: "${LIFECYCLE_TELEMETRY:-true}"
|
||||
LIFECYCLE__TELEMETRY__OTLP_ENDPOINT: "${OTLP_ENDPOINT:-}"
|
||||
networks:
|
||||
- cas
|
||||
labels: *release-labels
|
||||
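As a usage sketch for the CAS profile above (run from `deploy/compose`; ports and health paths follow the defaults in this file and `env/cas.env.example`, so adjust if overridden):

```bash
# Prepare host paths and environment, then start the three stores plus the lifecycle manager.
cp env/cas.env.example .env
sudo mkdir -p /var/lib/stellaops/{cas,evidence,attestations}
docker compose -f docker-compose.cas.yaml up -d

# Each store exposes the same health endpoint on its mapped port.
curl -sf http://127.0.0.1:8180/health   # rustfs-cas
curl -sf http://127.0.0.1:8181/health   # rustfs-evidence (immutable)
curl -sf http://127.0.0.1:8182/health   # rustfs-attestation (immutable)
```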
@@ -17,6 +17,8 @@ volumes:
|
||||
advisory-ai-plans:
|
||||
advisory-ai-outputs:
|
||||
postgres-data:
|
||||
wine-csp-prefix:
|
||||
wine-csp-logs:
|
||||
|
||||
services:
|
||||
mongo:
|
||||
@@ -329,3 +331,42 @@ services:
|
||||
networks:
|
||||
- stellaops
|
||||
labels: *release-labels
|
||||
|
||||
# Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
|
||||
# WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
|
||||
wine-csp:
|
||||
image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.10.0-edge}
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: ops/wine-csp/Dockerfile
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
WINE_CSP_PORT: "${WINE_CSP_PORT:-5099}"
|
||||
WINE_CSP_MODE: "${WINE_CSP_MODE:-limited}"
|
||||
WINE_CSP_INSTALLER_PATH: "${WINE_CSP_INSTALLER_PATH:-/opt/cryptopro/csp-installer.msi}"
|
||||
WINE_CSP_LOG_LEVEL: "${WINE_CSP_LOG_LEVEL:-Information}"
|
||||
ASPNETCORE_ENVIRONMENT: "${ASPNETCORE_ENVIRONMENT:-Development}"
|
||||
volumes:
|
||||
- wine-csp-prefix:/home/winecsp/.wine
|
||||
- wine-csp-logs:/var/log/wine-csp
|
||||
# Mount customer-provided CSP installer (optional):
|
||||
# - /path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
|
||||
ports:
|
||||
- "${WINE_CSP_PORT:-5099}:5099"
|
||||
networks:
|
||||
- stellaops
|
||||
healthcheck:
|
||||
test: ["/usr/local/bin/healthcheck.sh"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
start_period: 90s
|
||||
retries: 3
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 2G
|
||||
labels:
|
||||
<<: *release-labels
|
||||
com.stellaops.component: "wine-csp"
|
||||
com.stellaops.security.production-signing: "false"
|
||||
com.stellaops.security.test-vectors-only: "true"
|
||||
|
||||
26
deploy/compose/docker-compose.gpu.yaml
Normal file
@@ -0,0 +1,26 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
advisory-ai-worker:
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
devices:
|
||||
- capabilities: [gpu]
|
||||
driver: nvidia
|
||||
count: 1
|
||||
environment:
|
||||
ADVISORY_AI_INFERENCE_GPU: "true"
|
||||
runtime: nvidia
|
||||
|
||||
advisory-ai-web:
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
devices:
|
||||
- capabilities: [gpu]
|
||||
driver: nvidia
|
||||
count: 1
|
||||
environment:
|
||||
ADVISORY_AI_INFERENCE_GPU: "true"
|
||||
runtime: nvidia
|
||||
85
deploy/compose/docker-compose.mock.yaml
Normal file
@@ -0,0 +1,85 @@
|
||||
x-release-labels: &release-labels
|
||||
com.stellaops.release.version: "2025.09.2-mock"
|
||||
com.stellaops.release.channel: "dev-mock"
|
||||
com.stellaops.profile: "mock-overlay"
|
||||
|
||||
services:
|
||||
orchestrator:
|
||||
image: registry.stella-ops.org/stellaops/orchestrator@sha256:97f12856ce870bafd3328bda86833bcccbf56d255941d804966b5557f6610119
|
||||
command: ["dotnet", "StellaOps.Orchestrator.WebService.dll"]
|
||||
depends_on:
|
||||
- mongo
|
||||
- nats
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
policy-registry:
|
||||
image: registry.stella-ops.org/stellaops/policy-registry@sha256:c6cad8055e9827ebcbebb6ad4d6866dce4b83a0a49b0a8a6500b736a5cb26fa7
|
||||
command: ["dotnet", "StellaOps.Policy.Engine.dll"]
|
||||
depends_on:
|
||||
- mongo
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
vex-lens:
|
||||
image: registry.stella-ops.org/stellaops/vex-lens@sha256:b44e63ecfeebc345a70c073c1ce5ace709c58be0ffaad0e2862758aeee3092fb
|
||||
command: ["dotnet", "StellaOps.VexLens.dll"]
|
||||
depends_on:
|
||||
- mongo
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
issuer-directory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory@sha256:67e8ef02c97d3156741e857756994888f30c373ace8e84886762edba9dc51914
|
||||
command: ["dotnet", "StellaOps.IssuerDirectory.Web.dll"]
|
||||
depends_on:
|
||||
- mongo
|
||||
- authority
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
findings-ledger:
|
||||
image: registry.stella-ops.org/stellaops/findings-ledger@sha256:71d4c361ba8b2f8b69d652597bc3f2efc8a64f93fab854ce25272a88506df49c
|
||||
command: ["dotnet", "StellaOps.Findings.Ledger.WebService.dll"]
|
||||
depends_on:
|
||||
- postgres
|
||||
- authority
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
vuln-explorer-api:
|
||||
image: registry.stella-ops.org/stellaops/vuln-explorer-api@sha256:7fc7e43a05cbeb0106ce7d4d634612e83de6fdc119aaab754a71c1d60b82841d
|
||||
command: ["dotnet", "StellaOps.VulnExplorer.Api.dll"]
|
||||
depends_on:
|
||||
- findings-ledger
|
||||
- authority
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
packs-registry:
|
||||
image: registry.stella-ops.org/stellaops/packs-registry@sha256:1f5e9416c4dc608594ad6fad87c24d72134427f899c192b494e22b268499c791
|
||||
command: ["dotnet", "StellaOps.PacksRegistry.dll"]
|
||||
depends_on:
|
||||
- mongo
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
task-runner:
|
||||
image: registry.stella-ops.org/stellaops/task-runner@sha256:eb5ad992b49a41554f41516be1a6afcfa6522faf2111c08ff2b3664ad2fc954b
|
||||
command: ["dotnet", "StellaOps.TaskRunner.WebService.dll"]
|
||||
depends_on:
|
||||
- packs-registry
|
||||
- postgres
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
|
||||
# Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
|
||||
# WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
|
||||
wine-csp:
|
||||
image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.09.2-mock}
|
||||
environment:
|
||||
WINE_CSP_PORT: "5099"
|
||||
WINE_CSP_MODE: "limited"
|
||||
WINE_CSP_LOG_LEVEL: "Debug"
|
||||
labels: *release-labels
|
||||
networks: [stellaops]
|
||||
118
deploy/compose/env/cas.env.example
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
# CAS (Content Addressable Storage) Environment Configuration
|
||||
# Copy to .env and customize for your deployment
|
||||
#
|
||||
# Aligned with best-in-class vulnerability scanner retention policies:
|
||||
# - Trivy: 7 days vulnerability DB
|
||||
# - Grype: 5 days DB, configurable
|
||||
# - Anchore Enterprise: 90-365 days typical
|
||||
# - Snyk Enterprise: 365 days
|
||||
|
||||
# =============================================================================
|
||||
# DATA PATHS (ensure directories exist with proper permissions)
|
||||
# =============================================================================
|
||||
CAS_DATA_PATH=/var/lib/stellaops/cas
|
||||
CAS_EVIDENCE_PATH=/var/lib/stellaops/evidence
|
||||
CAS_ATTESTATION_PATH=/var/lib/stellaops/attestations
|
||||
|
||||
# =============================================================================
|
||||
# RUSTFS CONFIGURATION
|
||||
# =============================================================================
|
||||
RUSTFS_LOG_LEVEL=info
|
||||
RUSTFS_COMPRESSION=zstd
|
||||
RUSTFS_COMPRESSION_LEVEL=3
|
||||
|
||||
# =============================================================================
|
||||
# PORTS
|
||||
# =============================================================================
|
||||
RUSTFS_CAS_PORT=8180
|
||||
RUSTFS_EVIDENCE_PORT=8181
|
||||
RUSTFS_ATTESTATION_PORT=8182
|
||||
|
||||
# =============================================================================
|
||||
# ACCESS CONTROL - API KEYS
|
||||
# IMPORTANT: Change these in production!
|
||||
# =============================================================================
|
||||
|
||||
# CAS Storage (mutable, lifecycle-managed)
|
||||
RUSTFS_CAS_API_KEY=cas-api-key-CHANGE-IN-PRODUCTION
|
||||
RUSTFS_CAS_READONLY_KEY=cas-readonly-key-CHANGE-IN-PRODUCTION
|
||||
|
||||
# Evidence Storage (immutable)
|
||||
RUSTFS_EVIDENCE_API_KEY=evidence-api-key-CHANGE-IN-PRODUCTION
|
||||
RUSTFS_EVIDENCE_READONLY_KEY=evidence-readonly-key-CHANGE-IN-PRODUCTION
|
||||
|
||||
# Attestation Storage (immutable)
|
||||
RUSTFS_ATTESTATION_API_KEY=attestation-api-key-CHANGE-IN-PRODUCTION
|
||||
RUSTFS_ATTESTATION_READONLY_KEY=attestation-readonly-key-CHANGE-IN-PRODUCTION
|
||||
|
||||
# =============================================================================
|
||||
# SERVICE ACCOUNT KEYS
|
||||
# Each service has its own key for fine-grained access control
|
||||
# IMPORTANT: Generate unique keys per environment!
|
||||
# =============================================================================
|
||||
|
||||
# Scanner service - access to scanner artifacts, surface cache, runtime facts
|
||||
RUSTFS_SCANNER_KEY=scanner-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: scanner-artifacts (rw), surface-cache (rw), runtime-facts (rw)
|
||||
|
||||
# Signals service - access to runtime facts, signals data, provenance feed
|
||||
RUSTFS_SIGNALS_KEY=signals-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: runtime-facts (rw), signals-data (rw), provenance-feed (rw)
|
||||
|
||||
# Replay service - access to replay bundles, inputs lock files
|
||||
RUSTFS_REPLAY_KEY=replay-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: replay-bundles (rw), inputs-lock (rw)
|
||||
|
||||
# Ledger service - access to evidence bundles, merkle roots, hash chains
|
||||
RUSTFS_LEDGER_KEY=ledger-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: evidence-bundles (rw), merkle-roots (rw), hash-chains (rw)
|
||||
|
||||
# Exporter service - read-only access to evidence bundles
|
||||
RUSTFS_EXPORTER_KEY=exporter-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: evidence-bundles (r)
|
||||
|
||||
# Attestor service - access to attestations, DSSE envelopes, Rekor receipts
|
||||
RUSTFS_ATTESTOR_KEY=attestor-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: attestations (rw), dsse-envelopes (rw), rekor-receipts (rw)
|
||||
|
||||
# Verifier service - read-only access to attestations
|
||||
RUSTFS_VERIFIER_KEY=verifier-svc-key-GENERATE-UNIQUE
|
||||
# Bucket access: attestations (r), dsse-envelopes (r), rekor-receipts (r)
|
||||
|
||||
# Global read-only key (for debugging/auditing)
|
||||
RUSTFS_READONLY_KEY=readonly-global-key-GENERATE-UNIQUE
|
||||
# Bucket access: * (r)
|
||||
|
||||
# =============================================================================
|
||||
# LIFECYCLE MANAGEMENT
|
||||
# =============================================================================
|
||||
# Cron schedule for retention policy enforcement (default: 3 AM daily)
|
||||
LIFECYCLE_CRON=0 3 * * *
|
||||
LIFECYCLE_TELEMETRY=true
|
||||
|
||||
# =============================================================================
|
||||
# RETENTION POLICIES (days, 0 = indefinite)
|
||||
# Aligned with enterprise vulnerability scanner best practices
|
||||
# =============================================================================
|
||||
# Vulnerability DB: 7 days (matches Trivy default, Grype uses 5)
|
||||
CAS_RETENTION_VULNERABILITY_DB_DAYS=7
|
||||
|
||||
# SBOM artifacts: 365 days (audit compliance - SOC2, ISO27001, FedRAMP)
|
||||
CAS_RETENTION_SBOM_ARTIFACTS_DAYS=365
|
||||
|
||||
# Scan results: 90 days (common compliance window)
|
||||
CAS_RETENTION_SCAN_RESULTS_DAYS=90
|
||||
|
||||
# Evidence bundles: indefinite (content-addressed, immutable, audit trail)
|
||||
CAS_RETENTION_EVIDENCE_BUNDLES_DAYS=0
|
||||
|
||||
# Attestations: indefinite (signed, immutable, verifiable)
|
||||
CAS_RETENTION_ATTESTATIONS_DAYS=0
|
||||
|
||||
# Temporary artifacts: 1 day (work-in-progress, intermediate files)
|
||||
CAS_RETENTION_TEMP_ARTIFACTS_DAYS=1
|
||||
|
||||
# =============================================================================
|
||||
# TELEMETRY (optional)
|
||||
# =============================================================================
|
||||
OTLP_ENDPOINT=
|
||||
12
deploy/compose/env/mock.env.example
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# Dev-only overlay env for docker-compose.mock.yaml
|
||||
# Use together with dev.env.example:
|
||||
# docker compose --env-file env/dev.env.example --env-file env/mock.env.example -f docker-compose.dev.yaml -f docker-compose.mock.yaml config
|
||||
|
||||
# Optional: override ports if you expose mock services
|
||||
ORCHESTRATOR_PORT=8450
|
||||
POLICY_REGISTRY_PORT=8451
|
||||
VEX_LENS_PORT=8452
|
||||
FINDINGS_LEDGER_PORT=8453
|
||||
VULN_EXPLORER_API_PORT=8454
|
||||
PACKS_REGISTRY_PORT=8455
|
||||
TASK_RUNNER_PORT=8456
|
||||
49
deploy/compose/env/wine-csp.env.example
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
# Wine CSP Service Environment Configuration
|
||||
# ===========================================================================
|
||||
#
|
||||
# WARNING: This service is for TEST VECTOR GENERATION ONLY.
|
||||
# It MUST NOT be used for production cryptographic signing operations.
|
||||
#
|
||||
# ===========================================================================
|
||||
|
||||
# Service port (default: 5099)
|
||||
WINE_CSP_PORT=5099
|
||||
|
||||
# Operation mode:
|
||||
# - limited: Works without CryptoPro CSP (basic GostCryptography only)
|
||||
# - full: Requires CryptoPro CSP installer to be mounted at WINE_CSP_INSTALLER_PATH
|
||||
WINE_CSP_MODE=limited
|
||||
|
||||
# Path to CryptoPro CSP installer MSI (customer-provided)
|
||||
# Mount your licensed CSP installer to /opt/cryptopro/csp-installer.msi
|
||||
WINE_CSP_INSTALLER_PATH=/opt/cryptopro/csp-installer.msi
|
||||
|
||||
# Logging level: Trace, Debug, Information, Warning, Error, Critical
|
||||
WINE_CSP_LOG_LEVEL=Information
|
||||
|
||||
# Image version tag
|
||||
WINE_CSP_VERSION=2025.10.0-edge
|
||||
|
||||
# ASP.NET Core environment (Development, Staging, Production)
|
||||
ASPNETCORE_ENVIRONMENT=Production
|
||||
|
||||
# ===========================================================================
|
||||
# Advanced Configuration (typically not changed)
|
||||
# ===========================================================================
|
||||
|
||||
# Wine debug output (set to "warn+all" for troubleshooting)
|
||||
# WINEDEBUG=-all
|
||||
|
||||
# Wine architecture (must be win64 for CryptoPro CSP)
|
||||
# WINEARCH=win64
|
||||
|
||||
# ===========================================================================
|
||||
# Volume Mounts (configure in docker-compose, not here)
|
||||
# ===========================================================================
|
||||
# - Wine prefix: /home/winecsp/.wine (persistent storage)
|
||||
# - CSP installer: /opt/cryptopro (read-only mount)
|
||||
# - Logs: /var/log/wine-csp (log output)
|
||||
#
|
||||
# Example mount for CSP installer:
|
||||
# volumes:
|
||||
# - /path/to/your/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
|
||||
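For full mode with a customer-provided installer, a hedged sketch of the equivalent standalone invocation follows; the MSI path is a placeholder, and the mount target matches `WINE_CSP_INSTALLER_PATH` above. This service remains test-vector-only regardless of mode.

```bash
# Run the service in full mode with a licensed CryptoPro installer mounted read-only.
# /path/to/csp-5.0.msi is a placeholder for the customer-provided MSI.
docker run -d --name wine-csp \
  -e WINE_CSP_MODE=full \
  -e WINE_CSP_INSTALLER_PATH=/opt/cryptopro/csp-installer.msi \
  -e WINE_CSP_LOG_LEVEL=Information \
  -v /path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro \
  -p 5099:5099 \
  registry.stella-ops.org/stellaops/wine-csp:2025.10.0-edge
```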
28
deploy/compose/scripts/backup.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "StellaOps Compose Backup"
|
||||
echo "This will create a tar.gz of Mongo, MinIO (object-store), and Redis data volumes."
|
||||
read -rp "Proceed? [y/N] " ans
|
||||
[[ ${ans:-N} =~ ^[Yy]$ ]] || { echo "Aborted."; exit 1; }
|
||||
|
||||
TS=$(date -u +%Y%m%dT%H%M%SZ)
|
||||
OUT_DIR=${BACKUP_DIR:-backups}
|
||||
mkdir -p "$OUT_DIR"
|
||||
|
||||
docker compose ps >/dev/null
|
||||
|
||||
echo "Pausing worker containers for consistency..."
|
||||
docker compose pause scanner-worker scheduler-worker taskrunner-worker || true
|
||||
|
||||
echo "Backing up volumes..."
|
||||
docker run --rm \
|
||||
-v stellaops-mongo:/data/db:ro \
|
||||
-v stellaops-minio:/data/minio:ro \
|
||||
-v stellaops-redis:/data/redis:ro \
|
||||
-v "$PWD/$OUT_DIR":/out \
|
||||
alpine sh -c "cd / && tar czf /out/stellaops-backup-$TS.tar.gz data"
|
||||
|
||||
docker compose unpause scanner-worker scheduler-worker taskrunner-worker || true
|
||||
|
||||
echo "Backup written to $OUT_DIR/stellaops-backup-$TS.tar.gz"
|
||||
25
deploy/compose/scripts/quickstart.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
COMPOSE_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
ENV_FILE="${1:-$COMPOSE_DIR/env/dev.env.example}"
|
||||
USE_MOCK="${USE_MOCK:-0}"
|
||||
|
||||
FILES=(-f "$COMPOSE_DIR/docker-compose.dev.yaml")
|
||||
ENV_FILES=(--env-file "$ENV_FILE")
|
||||
|
||||
if [[ "$USE_MOCK" == "1" ]]; then
|
||||
FILES+=(-f "$COMPOSE_DIR/docker-compose.mock.yaml")
|
||||
ENV_FILES+=(--env-file "$COMPOSE_DIR/env/mock.env.example")
|
||||
fi
|
||||
|
||||
echo "Validating compose config..."
|
||||
docker compose "${ENV_FILES[@]}" "${FILES[@]}" config > /tmp/compose-validated.yaml
|
||||
echo "Config written to /tmp/compose-validated.yaml"
|
||||
|
||||
echo "Starting stack..."
|
||||
docker compose "${ENV_FILES[@]}" "${FILES[@]}" up -d
|
||||
|
||||
echo "Stack started. To stop: docker compose ${ENV_FILES[*]} ${FILES[*]} down"
|
||||
15
deploy/compose/scripts/reset.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
echo "WARNING: This will stop the stack and wipe Mongo, MinIO, and Redis volumes."
|
||||
read -rp "Type 'RESET' to continue: " ans
|
||||
[[ ${ans:-} == "RESET" ]] || { echo "Aborted."; exit 1; }
|
||||
|
||||
docker compose down
|
||||
|
||||
for vol in stellaops-mongo stellaops-minio stellaops-redis; do
|
||||
echo "Removing volume $vol"
|
||||
docker volume rm "$vol" || true
|
||||
done
|
||||
|
||||
echo "Reset complete. Re-run compose with your env file to recreate volumes."
|
||||
18
deploy/downloads/manifest.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"version": "2025.09.2-mock",
|
||||
"generatedAt": "2025-12-06T00:00:00Z",
|
||||
"items": [
|
||||
{
|
||||
"name": "console-web",
|
||||
"type": "container",
|
||||
"image": "registry.stella-ops.org/stellaops/web-ui@sha256:3878c335df50ca958907849b09d43ce397900d32fc7a417c0bf76742e1217ba1",
|
||||
"channel": "dev-mock"
|
||||
},
|
||||
{
|
||||
"name": "console-bundle",
|
||||
"type": "archive",
|
||||
"url": "https://downloads.stella-ops.mock/console/2025.09.2-mock/console.tar.gz",
|
||||
"sha256": "12dd89e012b1262ac61188ac5b7721ddab80c4e2b6341251d03925eb49a48521"
|
||||
}
|
||||
]
|
||||
}
|
||||
64
deploy/helm/stellaops/INSTALL.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# StellaOps Helm Install Guide
|
||||
|
||||
This guide ships with the `stellaops` chart and provides deterministic install steps for **prod** and **airgap** profiles. All images are pinned by digest from `deploy/releases/<channel>.yaml`.
|
||||
|
||||
## Prerequisites
|
||||
- Helm ≥ 3.14 and kubectl configured for the target cluster.
|
||||
- Pull secrets for `registry.stella-ops.org` (or your mirrored registry in air-gapped mode).
|
||||
- TLS/ingress secrets created if you enable ingress in the values files.
|
||||
|
||||
## Channels and values
|
||||
- Prod/stable: `deploy/releases/2025.09-stable.yaml` + `values-prod.yaml`
|
||||
- Airgap: `deploy/releases/2025.09-airgap.yaml` + `values-airgap.yaml`
|
||||
- Mirror (optional): `values-mirror.yaml` overlays registry endpoints when using a private mirror.
|
||||
|
||||
## Quick install (prod)
|
||||
```bash
|
||||
export RELEASE_CHANNEL=2025.09-stable
|
||||
export NAMESPACE=stellaops
|
||||
|
||||
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||
--namespace "$NAMESPACE" --create-namespace \
|
||||
-f deploy/helm/stellaops/values-prod.yaml \
|
||||
--set global.release.channel=stable \
|
||||
--set global.release.version="2025.09.2" \
|
||||
--set global.release.manifestSha256="dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7"
|
||||
```
|
||||
|
||||
## Quick install (airgap)
|
||||
Assumes images are already loaded into your private registry and `values-airgap.yaml` points to that registry.
|
||||
```bash
|
||||
export NAMESPACE=stellaops
|
||||
|
||||
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||
--namespace "$NAMESPACE" --create-namespace \
|
||||
-f deploy/helm/stellaops/values-airgap.yaml \
|
||||
--set global.release.channel=airgap \
|
||||
--set global.release.version="2025.09.0-airgap" \
|
||||
--set global.release.manifestSha256="d422ae3ea01d5f27ea8b5fdc5b19667cb4e3e2c153a35cb761cb53a6ce4f6ba4"
|
||||
```
|
||||
|
||||
## Mirror overlay
|
||||
If using a mirrored registry, layer the mirror values:
|
||||
```bash
|
||||
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||
--namespace "$NAMESPACE" --create-namespace \
|
||||
-f deploy/helm/stellaops/values-prod.yaml \
|
||||
-f deploy/helm/stellaops/values-mirror.yaml \
|
||||
--set global.release.version="2025.09.2" \
|
||||
--set global.release.manifestSha256="dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7"
|
||||
```
|
||||
|
||||
## Validate chart and digests
|
||||
```bash
|
||||
deploy/tools/check-channel-alignment.py --manifest deploy/releases/$RELEASE_CHANNEL.yaml \
|
||||
--values deploy/helm/stellaops/values-prod.yaml
|
||||
|
||||
helm lint ./deploy/helm/stellaops
|
||||
helm template stellaops ./deploy/helm/stellaops -f deploy/helm/stellaops/values-prod.yaml >/tmp/stellaops.yaml
|
||||
```
|
||||
|
||||
## Notes
|
||||
- Surface.Env and Surface.Secrets defaults are defined in `values*.yaml`; adjust endpoints, cache roots, and providers before promotion.
|
||||
- Keep `global.release.*` in sync with the chosen release manifest; never deploy with empty version/channel/manifestSha256.
|
||||
- For offline clusters, run image preload and secret creation before `helm upgrade` to avoid pull failures.
|
||||
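A minimal preload sketch for air-gapped clusters is shown below; the image, `<digest>`, and `mirror.internal` registry host are placeholders, and the real digest pins come from the chosen release manifest.

```bash
# On a connected host: pull by digest and save to a tarball.
docker pull registry.stella-ops.org/stellaops/web-ui@sha256:<digest>
docker save registry.stella-ops.org/stellaops/web-ui@sha256:<digest> -o web-ui.tar

# Inside the air gap: load and push into the private mirror referenced by values-airgap.yaml.
docker load -i web-ui.tar
docker tag registry.stella-ops.org/stellaops/web-ui@sha256:<digest> \
  mirror.internal/stellaops/web-ui:2025.09.0-airgap
docker push mirror.internal/stellaops/web-ui:2025.09.0-airgap
```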
16
deploy/helm/stellaops/README-mock.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# Mock Overlay (Dev Only)
|
||||
|
||||
Purpose: let deployment tasks progress with placeholder digests until real releases land.
|
||||
|
||||
Use:
|
||||
```bash
|
||||
helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml
|
||||
```
|
||||
|
||||
Contents:
|
||||
- Mock deployments for orchestrator, policy-registry, packs-registry, task-runner, VEX Lens, issuer-directory, findings-ledger, vuln-explorer-api.
|
||||
- Image pins pulled from `deploy/releases/2025.09-mock-dev.yaml`.
|
||||
|
||||
Notes:
|
||||
- Annotated with `stellaops.dev/mock: "true"` to discourage production use.
|
||||
- Swap to real values once official digests publish; keep mock overlay gated behind `mock.enabled`.
|
||||
@@ -23,14 +23,26 @@ spec:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 8 }}
|
||||
{{- if $svc.podAnnotations }}
|
||||
annotations:
|
||||
{{ toYaml $svc.podAnnotations | nindent 8 }}
|
||||
{{- end }}
|
||||
annotations:
|
||||
stellaops.release/version: {{ $root.Values.global.release.version | quote }}
|
||||
stellaops.release/channel: {{ $root.Values.global.release.channel | quote }}
|
||||
spec:
|
||||
{{- if $svc.podSecurityContext }}
|
||||
securityContext:
|
||||
{{ toYaml $svc.podSecurityContext | nindent 6 }}
|
||||
{{- end }}
|
||||
containers:
|
||||
- name: {{ $name }}
|
||||
image: {{ $svc.image | quote }}
|
||||
imagePullPolicy: {{ default $root.Values.global.image.pullPolicy $svc.imagePullPolicy }}
|
||||
{{- if $svc.securityContext }}
|
||||
securityContext:
|
||||
{{ toYaml $svc.securityContext | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- if $svc.command }}
|
||||
command:
|
||||
{{- range $cmd := $svc.command }}
|
||||
@@ -85,6 +97,14 @@ spec:
|
||||
resources:
|
||||
{{ toYaml $svc.resources | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- if $svc.securityContext }}
securityContext:
{{ toYaml $svc.securityContext | nindent 12 }}
{{- end }}
{{- if $svc.livenessProbe }}
|
||||
livenessProbe:
|
||||
{{ toYaml $svc.livenessProbe | nindent 12 }}
|
||||
@@ -93,6 +113,15 @@ spec:
|
||||
readinessProbe:
|
||||
{{ toYaml $svc.readinessProbe | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- if $svc.prometheus }}
|
||||
{{- $pr := $svc.prometheus }}
|
||||
{{- if $pr.enabled }}
|
||||
{{- if not $svc.podAnnotations }}
|
||||
{{- $svc = merge $svc (dict "podAnnotations" (dict)) }}
|
||||
{{- end }}
|
||||
{{- $_ := set $svc "podAnnotations" (merge $svc.podAnnotations (dict "prometheus.io/scrape" "true" "prometheus.io/path" (default "/metrics" $pr.path) "prometheus.io/port" (toString (default 8080 $pr.port)) "prometheus.io/scheme" (default "http" $pr.scheme))) }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- if or $svc.volumeMounts $configMounts }}
|
||||
volumeMounts:
|
||||
{{- if $svc.volumeMounts }}
|
||||
@@ -148,10 +177,29 @@ spec:
|
||||
affinity:
|
||||
{{ toYaml $svc.affinity | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- if $svc.tolerations }}
tolerations:
|
||||
{{ toYaml $svc.tolerations | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- if $svc.pdb }}
|
||||
---
|
||||
apiVersion: policy/v1
|
||||
kind: PodDisruptionBudget
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" (dict "root" $root "name" $name) }}
|
||||
labels:
|
||||
{{- include "stellaops.labels" (dict "root" $root "name" $name "svc" $svc) | nindent 4 }}
|
||||
spec:
|
||||
{{- if $svc.pdb.minAvailable }}
|
||||
minAvailable: {{ $svc.pdb.minAvailable }}
|
||||
{{- end }}
|
||||
{{- if $svc.pdb.maxUnavailable }}
|
||||
maxUnavailable: {{ $svc.pdb.maxUnavailable }}
|
||||
{{- end }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 6 }}
|
||||
{{- end }}
|
||||
---
|
||||
{{- if $svc.service }}
|
||||
apiVersion: v1
|
||||
|
||||
28
deploy/helm/stellaops/templates/externalsecrets.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
{{- if and .Values.externalSecrets.enabled .Values.externalSecrets.secrets }}
|
||||
{{- range $secret := .Values.externalSecrets.secrets }}
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" $ }}-{{ $secret.name }}
|
||||
labels:
|
||||
{{- include "stellaops.labels" $ | nindent 4 }}
|
||||
spec:
|
||||
refreshInterval: {{ default "1h" $secret.refreshInterval }}
|
||||
secretStoreRef:
|
||||
name: {{ $secret.storeRef.name }}
|
||||
kind: {{ default "ClusterSecretStore" $secret.storeRef.kind }}
|
||||
target:
|
||||
name: {{ $secret.target.name | default (printf "%s-%s" (include "stellaops.fullname" $) $secret.name) }}
|
||||
creationPolicy: {{ default "Owner" $secret.target.creationPolicy }}
|
||||
data:
|
||||
{{- range $secret.data }}
|
||||
- secretKey: {{ .key }}
|
||||
remoteRef:
|
||||
key: {{ .remoteKey }}
|
||||
{{- if .property }}
|
||||
property: {{ .property }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
---
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
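A hypothetical values overlay exercising the ExternalSecret template above might look like the sketch below; the store name, remote keys, and secret names are placeholders, and only the keys the template actually reads are shown.

```bash
# Write a throwaway values overlay and render the resulting ExternalSecret.
cat > /tmp/externalsecrets-values.yaml <<'EOF'
externalSecrets:
  enabled: true
  secrets:
    - name: registry-creds
      refreshInterval: 1h
      storeRef:
        name: cluster-vault
        kind: ClusterSecretStore
      data:
        - key: REGISTRY_TOKEN
          remoteKey: stellaops/registry
          property: token
EOF

helm template stellaops ./deploy/helm/stellaops \
  -f deploy/helm/stellaops/values-prod.yaml \
  -f /tmp/externalsecrets-values.yaml | grep -A5 'kind: ExternalSecret'
```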
39
deploy/helm/stellaops/templates/hpa.yaml
Normal file
@@ -0,0 +1,39 @@
|
||||
{{- if and .Values.hpa.enabled .Values.services }}
|
||||
{{- range $name, $svc := .Values.services }}
|
||||
{{- if and $svc.hpa $svc.hpa.enabled }}
|
||||
apiVersion: autoscaling/v2
|
||||
kind: HorizontalPodAutoscaler
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" (dict "root" $ "name" $name) }}
|
||||
labels:
|
||||
{{- include "stellaops.labels" (dict "root" $ "name" $name "svc" $svc) | nindent 4 }}
|
||||
spec:
|
||||
scaleTargetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: {{ include "stellaops.fullname" (dict "root" $ "name" $name) }}
|
||||
minReplicas: {{ default $.Values.hpa.minReplicas $svc.hpa.minReplicas }}
|
||||
maxReplicas: {{ default $.Values.hpa.maxReplicas $svc.hpa.maxReplicas }}
|
||||
metrics:
|
||||
{{- $cpu := coalesce $svc.hpa.cpu.targetPercentage $.Values.hpa.cpu.targetPercentage -}}
|
||||
{{- if $cpu }}
|
||||
- type: Resource
|
||||
resource:
|
||||
name: cpu
|
||||
target:
|
||||
type: Utilization
|
||||
averageUtilization: {{ $cpu }}
|
||||
{{- end }}
|
||||
{{- $mem := coalesce $svc.hpa.memory.targetPercentage $.Values.hpa.memory.targetPercentage -}}
|
||||
{{- if $mem }}
|
||||
- type: Resource
|
||||
resource:
|
||||
name: memory
|
||||
target:
|
||||
type: Utilization
|
||||
averageUtilization: {{ $mem }}
|
||||
{{- end }}
|
||||
---
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
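A hedged values sketch for the autoscaler: the global `hpa` block supplies defaults and each service opts in under `services.<name>.hpa`. The `gateway` key and the numbers are illustrative; both `cpu` and `memory` keys are kept present per service because the template dereferences `hpa.cpu` and `hpa.memory` unconditionally.

```yaml
hpa:
  enabled: true            # global gate checked by the template
  minReplicas: 2
  maxReplicas: 6
  cpu:
    targetPercentage: 70
  memory:
    targetPercentage: 80
services:
  gateway:                 # illustrative service key
    hpa:
      enabled: true
      maxReplicas: 10      # overrides the global default
      cpu:
        targetPercentage: 60
      memory: {}           # falls back to the global memory target
```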
32
deploy/helm/stellaops/templates/ingress.yaml
Normal file
@@ -0,0 +1,32 @@
|
||||
{{- if and .Values.ingress.enabled .Values.ingress.hosts }}
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" . }}
|
||||
labels:
|
||||
{{- include "stellaops.labels" . | nindent 4 }}
|
||||
annotations:
|
||||
{{- range $k, $v := .Values.ingress.annotations }}
|
||||
{{ $k }}: {{ $v | quote }}
|
||||
{{- end }}
|
||||
spec:
|
||||
ingressClassName: {{ .Values.ingress.className | default "nginx" | quote }}
|
||||
tls:
|
||||
{{- range .Values.ingress.tls }}
|
||||
- hosts: {{ toYaml .hosts | nindent 6 }}
|
||||
secretName: {{ .secretName }}
|
||||
{{- end }}
|
||||
rules:
|
||||
{{- range .Values.ingress.hosts }}
|
||||
- host: {{ .host }}
|
||||
http:
|
||||
paths:
|
||||
- path: {{ .path | default "/" }}
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: {{ include "stellaops.fullname" $ }}-gateway
|
||||
port:
|
||||
number: {{ .servicePort | default 80 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
50
deploy/helm/stellaops/templates/migrations.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
{{- if and .Values.migrations.enabled .Values.migrations.jobs }}
|
||||
{{- range $job := .Values.migrations.jobs }}
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" $ }}-migration-{{ $job.name | trunc 30 | trimSuffix "-" }}
|
||||
labels:
|
||||
{{- include "stellaops.labels" $ | nindent 4 }}
|
||||
stellaops.io/component: migration
|
||||
stellaops.io/migration-name: {{ $job.name | quote }}
|
||||
spec:
|
||||
backoffLimit: {{ default 3 $job.backoffLimit }}
|
||||
ttlSecondsAfterFinished: {{ default 3600 $job.ttlSecondsAfterFinished }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "stellaops.selectorLabels" $ | nindent 8 }}
|
||||
stellaops.io/component: migration
|
||||
stellaops.io/migration-name: {{ $job.name | quote }}
|
||||
spec:
|
||||
restartPolicy: {{ default "Never" $job.restartPolicy }}
|
||||
serviceAccountName: {{ default "default" $job.serviceAccountName }}
|
||||
containers:
|
||||
- name: {{ $job.name | trunc 50 | trimSuffix "-" }}
|
||||
image: {{ $job.image | quote }}
|
||||
imagePullPolicy: {{ default "IfNotPresent" $job.imagePullPolicy }}
|
||||
command: {{- if $job.command }} {{ toJson $job.command }} {{- else }} null {{- end }}
|
||||
args: {{- if $job.args }} {{ toJson $job.args }} {{- else }} null {{- end }}
|
||||
env:
|
||||
{{- if $job.env }}
|
||||
{{- range $k, $v := $job.env }}
|
||||
- name: {{ $k }}
|
||||
value: {{ $v | quote }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
envFrom:
|
||||
{{- if $job.envFrom }}
|
||||
{{- toYaml $job.envFrom | nindent 12 }}
|
||||
{{- end }}
|
||||
resources:
|
||||
{{- if $job.resources }}
|
||||
{{- toYaml $job.resources | nindent 12 }}
|
||||
{{- else }}{}
|
||||
{{- end }}
|
||||
imagePullSecrets:
|
||||
{{- if $.Values.global.image.pullSecrets }}
|
||||
{{- toYaml $.Values.global.image.pullSecrets | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
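A hedged values sketch for one migration job; the job name, image reference, and assembly arguments are placeholders for illustration only.

```yaml
migrations:
  enabled: true
  jobs:
    - name: ledger-schema-upgrade       # illustrative job name
      image: registry.example.internal/stellaops/db-migrations:latest   # placeholder image
      imagePullPolicy: IfNotPresent
      command: ["dotnet"]
      args: ["Migrations.dll", "--up"]  # placeholder entrypoint arguments
      backoffLimit: 3
      ttlSecondsAfterFinished: 3600
      restartPolicy: Never
      serviceAccountName: default
      env:
        DB_HOST: postgres               # placeholder environment value
      resources:
        requests: { cpu: 100m, memory: 128Mi }
        limits: { cpu: 500m, memory: 256Mi }
```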
45
deploy/helm/stellaops/templates/networkpolicy.yaml
Normal file
@@ -0,0 +1,45 @@
|
||||
{{- if .Values.networkPolicy.enabled }}
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: NetworkPolicy
|
||||
metadata:
|
||||
name: {{ include "stellaops.fullname" . }}-default
|
||||
labels:
|
||||
{{- include "stellaops.labels" . | nindent 4 }}
|
||||
spec:
|
||||
podSelector:
|
||||
matchLabels:
|
||||
{{- include "stellaops.selectorLabelsRoot" . | nindent 6 }}
|
||||
policyTypes:
|
||||
- Ingress
|
||||
- Egress
|
||||
ingress:
|
||||
- from:
|
||||
{{- if .Values.networkPolicy.ingressNamespaces }}
|
||||
- namespaceSelector:
|
||||
matchLabels:
|
||||
{{- toYaml .Values.networkPolicy.ingressNamespaces | nindent 14 }}
|
||||
{{- end }}
|
||||
{{- if .Values.networkPolicy.ingressPods }}
|
||||
- podSelector:
|
||||
matchLabels:
|
||||
{{- toYaml .Values.networkPolicy.ingressPods | nindent 14 }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: {{ default 80 .Values.networkPolicy.ingressPort }}
|
||||
egress:
|
||||
- to:
|
||||
{{- if .Values.networkPolicy.egressNamespaces }}
|
||||
- namespaceSelector:
|
||||
matchLabels:
|
||||
{{- toYaml .Values.networkPolicy.egressNamespaces | nindent 14 }}
|
||||
{{- end }}
|
||||
{{- if .Values.networkPolicy.egressPods }}
|
||||
- podSelector:
|
||||
matchLabels:
|
||||
{{- toYaml .Values.networkPolicy.egressPods | nindent 14 }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: {{ default 443 .Values.networkPolicy.egressPort }}
|
||||
{{- end }}
|
||||
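A hedged values sketch for the default NetworkPolicy; the namespace and pod labels are illustrative. Note that the namespace and pod selectors render as separate peers in the `from:`/`to:` lists, so traffic matching either selector is allowed.

```yaml
networkPolicy:
  enabled: true
  ingressPort: 8443
  egressPort: 443
  ingressNamespaces:
    kubernetes.io/metadata.name: ingress-nginx   # illustrative namespace label
  ingressPods:
    app.kubernetes.io/name: ingress-nginx        # illustrative pod label
  egressNamespaces:
    kubernetes.io/metadata.name: stellaops
  egressPods: {}
```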
22
deploy/helm/stellaops/templates/orchestrator-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
{{- if .Values.mock.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: orchestrator-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: orchestrator-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: orchestrator-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: orchestrator
|
||||
image: "{{ .Values.mock.orchestrator.image }}"
|
||||
args: ["dotnet", "StellaOps.Orchestrator.WebService.dll"]
|
||||
{{- end }}
|
||||
44
deploy/helm/stellaops/templates/packs-mock.yaml
Normal file
@@ -0,0 +1,44 @@
|
||||
{{- if .Values.mock.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: packs-registry-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: packs-registry-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: packs-registry-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: packs-registry
|
||||
image: "{{ .Values.mock.packsRegistry.image }}"
|
||||
args: ["dotnet", "StellaOps.PacksRegistry.dll"]
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: task-runner-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: task-runner-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: task-runner-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: task-runner
|
||||
image: "{{ .Values.mock.taskRunner.image }}"
|
||||
args: ["dotnet", "StellaOps.TaskRunner.WebService.dll"]
|
||||
{{- end }}
|
||||
22
deploy/helm/stellaops/templates/policy-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
{{- if .Values.mock.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: policy-registry-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: policy-registry-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: policy-registry-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: policy-registry
|
||||
image: "{{ .Values.mock.policyRegistry.image }}"
|
||||
args: ["dotnet", "StellaOps.Policy.Engine.dll"]
|
||||
{{- end }}
|
||||
22
deploy/helm/stellaops/templates/vex-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
{{- if .Values.mock.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: vex-lens-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: vex-lens-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: vex-lens-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: vex-lens
|
||||
image: "{{ .Values.mock.vexLens.image }}"
|
||||
args: ["dotnet", "StellaOps.VexLens.dll"]
|
||||
{{- end }}
|
||||
44
deploy/helm/stellaops/templates/vuln-mock.yaml
Normal file
@@ -0,0 +1,44 @@
|
||||
{{- if .Values.mock.enabled }}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: findings-ledger-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: findings-ledger-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: findings-ledger-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: findings-ledger
|
||||
image: "{{ .Values.mock.findingsLedger.image }}"
|
||||
args: ["dotnet", "StellaOps.Findings.Ledger.WebService.dll"]
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: vuln-explorer-api-mock
|
||||
annotations:
|
||||
stellaops.dev/mock: "true"
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: vuln-explorer-api-mock
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: vuln-explorer-api-mock
|
||||
spec:
|
||||
containers:
|
||||
- name: vuln-explorer-api
|
||||
image: "{{ .Values.mock.vulnExplorerApi.image }}"
|
||||
args: ["dotnet", "StellaOps.VulnExplorer.Api.dll"]
|
||||
{{- end }}
|
||||
@@ -9,6 +9,45 @@ global:
|
||||
labels:
|
||||
stellaops.io/channel: airgap
|
||||
|
||||
migrations:
|
||||
enabled: false
|
||||
jobs: []
|
||||
|
||||
networkPolicy:
|
||||
enabled: true
|
||||
ingressPort: 8443
|
||||
egressPort: 443
|
||||
ingressNamespaces:
|
||||
kubernetes.io/metadata.name: stellaops
|
||||
egressNamespaces:
|
||||
kubernetes.io/metadata.name: stellaops
|
||||
|
||||
ingress:
|
||||
enabled: false
|
||||
className: nginx
|
||||
annotations: {}
|
||||
hosts: []
|
||||
tls: []
|
||||
|
||||
externalSecrets:
|
||||
enabled: false
|
||||
secrets: []
|
||||
|
||||
prometheus:
|
||||
enabled: true
|
||||
path: /metrics
|
||||
port: 8080
|
||||
scheme: http
|
||||
|
||||
hpa:
|
||||
enabled: false
|
||||
minReplicas: 1
|
||||
maxReplicas: 3
|
||||
cpu:
|
||||
targetPercentage: 70
|
||||
memory:
|
||||
targetPercentage: 80
|
||||
|
||||
configMaps:
|
||||
notify-config:
|
||||
data:
|
||||
|
||||
18
deploy/helm/stellaops/values-mock.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
mock:
|
||||
enabled: true
|
||||
orchestrator:
|
||||
image: registry.stella-ops.org/stellaops/orchestrator@sha256:97f12856ce870bafd3328bda86833bcccbf56d255941d804966b5557f6610119
|
||||
policyRegistry:
|
||||
image: registry.stella-ops.org/stellaops/policy-registry@sha256:c6cad8055e9827ebcbebb6ad4d6866dce4b83a0a49b0a8a6500b736a5cb26fa7
|
||||
packsRegistry:
|
||||
image: registry.stella-ops.org/stellaops/packs-registry@sha256:1f5e9416c4dc608594ad6fad87c24d72134427f899c192b494e22b268499c791
|
||||
taskRunner:
|
||||
image: registry.stella-ops.org/stellaops/task-runner@sha256:eb5ad992b49a41554f41516be1a6afcfa6522faf2111c08ff2b3664ad2fc954b
|
||||
vexLens:
|
||||
image: registry.stella-ops.org/stellaops/vex-lens@sha256:b44e63ecfeebc345a70c073c1ce5ace709c58be0ffaad0e2862758aeee3092fb
|
||||
issuerDirectory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory@sha256:67e8ef02c97d3156741e857756994888f30c373ace8e84886762edba9dc51914
|
||||
findingsLedger:
|
||||
image: registry.stella-ops.org/stellaops/findings-ledger@sha256:71d4c361ba8b2f8b69d652597bc3f2efc8a64f93fab854ce25272a88506df49c
|
||||
vulnExplorerApi:
|
||||
image: registry.stella-ops.org/stellaops/vuln-explorer-api@sha256:7fc7e43a05cbeb0106ce7d4d634612e83de6fdc119aaab754a71c1d60b82841d
|
||||
@@ -10,6 +10,66 @@ global:
|
||||
stellaops.io/channel: stable
|
||||
stellaops.io/profile: prod
|
||||
|
||||
# Migration jobs for controlled rollouts (disabled by default)
|
||||
migrations:
|
||||
enabled: false
|
||||
jobs: []
|
||||
|
||||
networkPolicy:
|
||||
enabled: true
|
||||
ingressPort: 8443
|
||||
egressPort: 443
|
||||
ingressNamespaces:
|
||||
kubernetes.io/metadata.name: stellaops
|
||||
egressNamespaces:
|
||||
kubernetes.io/metadata.name: stellaops
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
className: nginx
|
||||
annotations:
|
||||
nginx.ingress.kubernetes.io/proxy-body-size: "50m"
|
||||
nginx.ingress.kubernetes.io/ssl-redirect: "true"
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-prod"
|
||||
hosts:
|
||||
- host: gateway.prod.stella-ops.org
|
||||
path: /
|
||||
servicePort: 80
|
||||
tls:
|
||||
- secretName: stellaops-prod-tls
|
||||
hosts:
|
||||
- gateway.prod.stella-ops.org
|
||||
|
||||
externalSecrets:
|
||||
enabled: true
|
||||
secrets:
|
||||
- name: core-secrets
|
||||
storeRef:
|
||||
name: stellaops-secret-store
|
||||
kind: ClusterSecretStore
|
||||
target:
|
||||
name: stellaops-prod-core
|
||||
data:
|
||||
- key: STELLAOPS_AUTHORITY__JWT__SIGNINGKEY
|
||||
remoteKey: prod/authority/jwt-signing-key
|
||||
- key: STELLAOPS_SECRETS_ENCRYPTION_KEY
|
||||
remoteKey: prod/core/secrets-encryption-key
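# Rendered sketch (assuming the release fullname resolves to "stellaops"): the entries
# above become an ExternalSecret named "stellaops-core-secrets" whose target Secret is
# "stellaops-prod-core", with each key/remoteKey pair mapped to data[].secretKey/remoteRef.key.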
|
||||
|
||||
prometheus:
|
||||
enabled: true
|
||||
path: /metrics
|
||||
port: 8080
|
||||
scheme: http
|
||||
|
||||
hpa:
|
||||
enabled: true
|
||||
minReplicas: 2
|
||||
maxReplicas: 6
|
||||
cpu:
|
||||
targetPercentage: 70
|
||||
memory:
|
||||
targetPercentage: 75
|
||||
|
||||
configMaps:
|
||||
notify-config:
|
||||
data:
|
||||
|
||||
@@ -8,6 +8,45 @@ global:
|
||||
pullPolicy: IfNotPresent
|
||||
labels: {}
|
||||
|
||||
migrations:
|
||||
enabled: false
|
||||
jobs: []
|
||||
|
||||
networkPolicy:
|
||||
enabled: false
|
||||
ingressPort: 80
|
||||
egressPort: 443
|
||||
ingressNamespaces: {}
|
||||
ingressPods: {}
|
||||
egressNamespaces: {}
|
||||
egressPods: {}
|
||||
|
||||
ingress:
|
||||
enabled: false
|
||||
className: nginx
|
||||
annotations: {}
|
||||
hosts: []
|
||||
tls: []
|
||||
|
||||
externalSecrets:
|
||||
enabled: false
|
||||
secrets: []
|
||||
|
||||
prometheus:
|
||||
enabled: false
|
||||
path: /metrics
|
||||
port: 8080
|
||||
scheme: http
|
||||
|
||||
hpa:
|
||||
enabled: false
|
||||
minReplicas: 1
|
||||
maxReplicas: 3
|
||||
cpu:
|
||||
targetPercentage: 75
|
||||
memory:
|
||||
targetPercentage: null
|
||||
|
||||
# Surface.Env configuration for Scanner/Zastava components
|
||||
# See docs/modules/scanner/design/surface-env.md for details
|
||||
surface:
|
||||
@@ -224,3 +263,22 @@ services:
|
||||
volumeClaims:
|
||||
- name: advisory-ai-data
|
||||
claimName: stellaops-advisory-ai-data
|
||||
|
||||
mock:
|
||||
enabled: false
|
||||
orchestrator:
|
||||
image: registry.stella-ops.org/stellaops/orchestrator@sha256:97f12856ce870bafd3328bda86833bcccbf56d255941d804966b5557f6610119
|
||||
policyRegistry:
|
||||
image: registry.stella-ops.org/stellaops/policy-registry@sha256:c6cad8055e9827ebcbebb6ad4d6866dce4b83a0a49b0a8a6500b736a5cb26fa7
|
||||
packsRegistry:
|
||||
image: registry.stella-ops.org/stellaops/packs-registry@sha256:1f5e9416c4dc608594ad6fad87c24d72134427f899c192b494e22b268499c791
|
||||
taskRunner:
|
||||
image: registry.stella-ops.org/stellaops/task-runner@sha256:eb5ad992b49a41554f41516be1a6afcfa6522faf2111c08ff2b3664ad2fc954b
|
||||
vexLens:
|
||||
image: registry.stella-ops.org/stellaops/vex-lens@sha256:b44e63ecfeebc345a70c073c1ce5ace709c58be0ffaad0e2862758aeee3092fb
|
||||
issuerDirectory:
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory@sha256:67e8ef02c97d3156741e857756994888f30c373ace8e84886762edba9dc51914
|
||||
findingsLedger:
|
||||
image: registry.stella-ops.org/stellaops/findings-ledger@sha256:71d4c361ba8b2f8b69d652597bc3f2efc8a64f93fab854ce25272a88506df49c
|
||||
vulnExplorerApi:
|
||||
image: registry.stella-ops.org/stellaops/vuln-explorer-api@sha256:7fc7e43a05cbeb0106ce7d4d634612e83de6fdc119aaab754a71c1d60b82841d
|
||||
|
||||
49
deploy/releases/2025.09-mock-dev.yaml
Normal file
@@ -0,0 +1,49 @@
|
||||
release:
|
||||
version: 2025.09.2
|
||||
channel: stable
|
||||
date: '2025-09-20T00:00:00Z'
|
||||
calendar: '2025.09'
|
||||
components:
|
||||
- name: authority
|
||||
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
||||
- name: signer
|
||||
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
||||
- name: attestor
|
||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||
- name: scanner-web
|
||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
||||
- name: scanner-worker
|
||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
|
||||
- name: concelier
|
||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||
- name: excititor
|
||||
image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
|
||||
- name: advisory-ai-web
|
||||
image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2
|
||||
- name: advisory-ai-worker
|
||||
image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2
|
||||
- name: web-ui
|
||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
|
||||
- name: orchestrator
|
||||
image: registry.stella-ops.org/stellaops/orchestrator@sha256:97f12856ce870bafd3328bda86833bcccbf56d255941d804966b5557f6610119
|
||||
- name: policy-registry
|
||||
image: registry.stella-ops.org/stellaops/policy-registry@sha256:c6cad8055e9827ebcbebb6ad4d6866dce4b83a0a49b0a8a6500b736a5cb26fa7
|
||||
- name: vex-lens
|
||||
image: registry.stella-ops.org/stellaops/vex-lens@sha256:b44e63ecfeebc345a70c073c1ce5ace709c58be0ffaad0e2862758aeee3092fb
|
||||
- name: issuer-directory
|
||||
image: registry.stella-ops.org/stellaops/issuer-directory@sha256:67e8ef02c97d3156741e857756994888f30c373ace8e84886762edba9dc51914
|
||||
- name: findings-ledger
|
||||
image: registry.stella-ops.org/stellaops/findings-ledger@sha256:71d4c361ba8b2f8b69d652597bc3f2efc8a64f93fab854ce25272a88506df49c
|
||||
- name: vuln-explorer-api
|
||||
image: registry.stella-ops.org/stellaops/vuln-explorer-api@sha256:7fc7e43a05cbeb0106ce7d4d634612e83de6fdc119aaab754a71c1d60b82841d
|
||||
- name: packs-registry
|
||||
image: registry.stella-ops.org/stellaops/packs-registry@sha256:1f5e9416c4dc608594ad6fad87c24d72134427f899c192b494e22b268499c791
|
||||
- name: task-runner
|
||||
image: registry.stella-ops.org/stellaops/task-runner@sha256:eb5ad992b49a41554f41516be1a6afcfa6522faf2111c08ff2b3664ad2fc954b
|
||||
infrastructure:
|
||||
mongo:
|
||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
||||
minio:
|
||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
||||
checksums:
|
||||
releaseManifestSha256: dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7
|
||||
164
deploy/telemetry/alerts/export-center-alerts.yaml
Normal file
@@ -0,0 +1,164 @@
|
||||
# ExportCenter Alert Rules
|
||||
# SLO Burn-rate alerts for export service reliability
|
||||
|
||||
groups:
|
||||
- name: export-center-slo
|
||||
interval: 30s
|
||||
rules:
|
||||
# SLO: 99.5% success rate target
|
||||
# Error budget: 0.5% (432 errors per day at 86400 requests/day)
|
||||
|
||||
# Fast burn - 2% budget consumption in 1 hour (critical)
|
||||
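# Burn-rate sketch: with a 30-day SLO window, 1 h is 1/720 of the window, so spending
# 2% of the error budget in 1 h means burning at 0.02 * 720 = 14.4x the sustainable
# rate; the 6 h warning below uses 0.05 * (720 / 6) = 6x for 5% of the budget.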
- alert: ExportCenterHighErrorBurnRate
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_failed_total[1h]))
|
||||
/
|
||||
sum(rate(export_runs_total[1h]))
|
||||
) > (14.4 * 0.005)
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
slo: availability
|
||||
annotations:
|
||||
summary: "ExportCenter high error burn rate"
|
||||
description: "Error rate is {{ $value | humanizePercentage }} over the last hour, consuming error budget at 14.4x the sustainable rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-error-rate"
|
||||
|
||||
# Slow burn - 5% budget consumption in 6 hours (warning)
|
||||
- alert: ExportCenterElevatedErrorBurnRate
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_failed_total[6h]))
|
||||
/
|
||||
sum(rate(export_runs_total[6h]))
|
||||
) > (6 * 0.005)
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
slo: availability
|
||||
annotations:
|
||||
summary: "ExportCenter elevated error burn rate"
|
||||
description: "Error rate is {{ $value | humanizePercentage }} over the last 6 hours, consuming error budget at 6x the sustainable rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/elevated-error-rate"
|
||||
|
||||
- name: export-center-latency
|
||||
interval: 30s
|
||||
rules:
|
||||
# SLO: 95% of exports complete within 120s
|
||||
# Fast burn - p95 latency exceeding threshold
|
||||
- alert: ExportCenterHighLatency
|
||||
expr: |
|
||||
histogram_quantile(0.95,
|
||||
sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
|
||||
) > 120
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
slo: latency
|
||||
annotations:
|
||||
summary: "ExportCenter high latency"
|
||||
description: "95th percentile export duration is {{ $value | humanizeDuration }}, exceeding 120s SLO target."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-latency"
|
||||
|
||||
# Critical latency - p99 exceeding 5 minutes
|
||||
- alert: ExportCenterCriticalLatency
|
||||
expr: |
|
||||
histogram_quantile(0.99,
|
||||
sum(rate(export_run_duration_seconds_bucket[5m])) by (le)
|
||||
) > 300
|
||||
for: 2m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
slo: latency
|
||||
annotations:
|
||||
summary: "ExportCenter critical latency"
|
||||
description: "99th percentile export duration is {{ $value | humanizeDuration }}, indicating severe performance degradation."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/critical-latency"
|
||||
|
||||
- name: export-center-capacity
|
||||
interval: 60s
|
||||
rules:
|
||||
# Queue buildup warning
|
||||
- alert: ExportCenterHighConcurrency
|
||||
expr: sum(export_runs_in_progress) > 50
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter high concurrency"
|
||||
description: "{{ $value }} exports currently in progress. Consider scaling or investigating slow exports."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/high-concurrency"
|
||||
|
||||
# Stuck exports - exports running longer than 30 minutes
|
||||
- alert: ExportCenterStuckExports
|
||||
expr: |
|
||||
histogram_quantile(0.99,
|
||||
sum(rate(export_run_duration_seconds_bucket{status!="completed"}[1h])) by (le)
|
||||
) > 1800
|
||||
for: 10m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter potentially stuck exports"
|
||||
description: "Some exports may be stuck - 99th percentile duration for incomplete exports exceeds 30 minutes."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/stuck-exports"
|
||||
|
||||
- name: export-center-errors
|
||||
interval: 30s
|
||||
rules:
|
||||
# Specific error code spike
|
||||
- alert: ExportCenterErrorCodeSpike
|
||||
expr: |
|
||||
sum by (error_code) (
|
||||
rate(export_runs_failed_total[5m])
|
||||
) > 0.1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter error code spike: {{ $labels.error_code }}"
|
||||
description: "Error code {{ $labels.error_code }} is occurring at {{ $value | humanize }}/s rate."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/error-codes"
|
||||
|
||||
# No successful exports in 15 minutes (when there is traffic)
|
||||
- alert: ExportCenterNoSuccessfulExports
|
||||
expr: |
|
||||
(
|
||||
sum(rate(export_runs_total[15m])) > 0
|
||||
)
|
||||
and
|
||||
(
|
||||
sum(rate(export_runs_success_total[15m])) == 0
|
||||
)
|
||||
for: 10m
|
||||
labels:
|
||||
severity: critical
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "ExportCenter no successful exports"
|
||||
description: "No exports have completed successfully in the last 15 minutes despite ongoing attempts."
|
||||
runbook_url: "https://docs.stellaops.io/runbooks/export-center/no-successful-exports"
|
||||
|
||||
- name: export-center-deprecation
|
||||
interval: 5m
|
||||
rules:
|
||||
# Deprecated endpoint usage
|
||||
- alert: ExportCenterDeprecatedEndpointUsage
|
||||
expr: |
|
||||
sum(rate(export_center_deprecated_endpoint_access_total[1h])) > 0
|
||||
for: 1h
|
||||
labels:
|
||||
severity: info
|
||||
service: export-center
|
||||
annotations:
|
||||
summary: "Deprecated export endpoints still in use"
|
||||
description: "Legacy /exports endpoints are still being accessed at {{ $value | humanize }}/s. Migration to v1 API recommended."
|
||||
runbook_url: "https://docs.stellaops.io/api/export-center/migration"
|
||||
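If these rule groups are loaded through the Prometheus Operator rather than a plain `rule_files` entry, a wrapper along the following lines would be needed; the manifest name, namespace, and `release` label are assumptions and must match the operator's rule selector.

```yaml
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
  name: export-center-alerts        # illustrative name
  namespace: stellaops              # placeholder namespace
  labels:
    release: prometheus             # must match the operator's ruleSelector
spec:
  groups: []                        # paste the groups from export-center-alerts.yaml here
```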
638
deploy/telemetry/dashboards/export-center.json
Normal file
@@ -0,0 +1,638 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": { "type": "grafana", "uid": "-- Grafana --" },
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"description": "ExportCenter service observability dashboard",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": null,
|
||||
"links": [],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 0 },
|
||||
"id": 1,
|
||||
"panels": [],
|
||||
"title": "Export Runs Overview",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 0, "y": 1 },
|
||||
"id": 2,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Total Runs",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Total Export Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 4, "y": 1 },
|
||||
"id": 3,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Successful",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Successful Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null },
|
||||
{ "color": "yellow", "value": 1 },
|
||||
{ "color": "red", "value": 5 }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 8, "y": 1 },
|
||||
"id": 4,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Failed",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Failed Runs",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "red", "value": null },
|
||||
{ "color": "yellow", "value": 95 },
|
||||
{ "color": "green", "value": 99 }
|
||||
]
|
||||
},
|
||||
"unit": "percent"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 12, "y": 1 },
|
||||
"id": 5,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "100 * sum(increase(export_runs_success_total{tenant=~\"$tenant\"}[$__range])) / sum(increase(export_runs_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "Success Rate",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Success Rate",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "thresholds" },
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null }
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 4, "w": 4, "x": 16, "y": 1 },
|
||||
"id": 6,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "10.0.0",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(export_runs_in_progress{tenant=~\"$tenant\"})",
|
||||
"legendFormat": "In Progress",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Runs In Progress",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 5 },
|
||||
"id": 7,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (export_type) (rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
|
||||
"legendFormat": "{{export_type}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Export Runs by Type (rate/5m)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "s"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 5 },
|
||||
"id": 8,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max", "p95"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.50, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p50",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.95, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p95",
|
||||
"range": true,
|
||||
"refId": "B"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.99, sum by (le) (rate(export_run_duration_seconds_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "p99",
|
||||
"range": true,
|
||||
"refId": "C"
|
||||
}
|
||||
],
|
||||
"title": "Export Run Duration (latency percentiles)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 13 },
|
||||
"id": 9,
|
||||
"panels": [],
|
||||
"title": "Artifacts & Bundle Sizes",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "bars",
|
||||
"fillOpacity": 50,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "normal" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 0, "y": 14 },
|
||||
"id": 10,
|
||||
"options": {
|
||||
"legend": { "calcs": ["sum"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (artifact_type) (increase(export_artifacts_total{tenant=~\"$tenant\"}[1h]))",
|
||||
"legendFormat": "{{artifact_type}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Artifacts Exported by Type (per hour)",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "off" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [{ "color": "green", "value": null }]
|
||||
},
|
||||
"unit": "bytes"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 12, "x": 12, "y": 14 },
|
||||
"id": 11,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.50, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "{{export_type}} p50",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.95, sum by (le, export_type) (rate(export_bundle_size_bytes_bucket{tenant=~\"$tenant\"}[5m])))",
|
||||
"legendFormat": "{{export_type}} p95",
|
||||
"range": true,
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"title": "Bundle Size Distribution by Type",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": { "h": 1, "w": 24, "x": 0, "y": 22 },
|
||||
"id": 12,
|
||||
"panels": [],
|
||||
"title": "Error Analysis",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false }
|
||||
},
|
||||
"mappings": [],
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 8, "x": 0, "y": 23 },
|
||||
"id": 13,
|
||||
"options": {
|
||||
"legend": { "displayMode": "table", "placement": "right", "showLegend": true },
|
||||
"pieType": "pie",
|
||||
"reduceOptions": { "calcs": ["lastNotNull"], "fields": "", "values": false },
|
||||
"tooltip": { "mode": "single", "sort": "none" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (error_code) (increase(export_runs_failed_total{tenant=~\"$tenant\"}[$__range]))",
|
||||
"legendFormat": "{{error_code}}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Failures by Error Code",
|
||||
"type": "piechart"
|
||||
},
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": { "mode": "palette-classic" },
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 0,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": { "legend": false, "tooltip": false, "viz": false },
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 2,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": { "type": "linear" },
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": { "group": "A", "mode": "none" },
|
||||
"thresholdsStyle": { "mode": "line" }
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{ "color": "green", "value": null },
|
||||
{ "color": "red", "value": 0.01 }
|
||||
]
|
||||
},
|
||||
"unit": "percentunit"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": { "h": 8, "w": 16, "x": 8, "y": 23 },
|
||||
"id": 14,
|
||||
"options": {
|
||||
"legend": { "calcs": ["mean", "max"], "displayMode": "table", "placement": "bottom", "showLegend": true },
|
||||
"tooltip": { "mode": "multi", "sort": "desc" }
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"editorMode": "code",
|
||||
"expr": "sum(rate(export_runs_failed_total{tenant=~\"$tenant\"}[5m])) / sum(rate(export_runs_total{tenant=~\"$tenant\"}[5m]))",
|
||||
"legendFormat": "Error Rate",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Error Rate (5m window)",
|
||||
"type": "timeseries"
|
||||
}
|
||||
],
|
||||
"refresh": "30s",
|
||||
"schemaVersion": 38,
|
||||
"style": "dark",
|
||||
"tags": ["export-center", "stellaops"],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"current": {},
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "datasource",
|
||||
"options": [],
|
||||
"query": "prometheus",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"type": "datasource"
|
||||
},
|
||||
{
|
||||
"allValue": ".*",
|
||||
"current": {},
|
||||
"datasource": { "type": "prometheus", "uid": "${datasource}" },
|
||||
"definition": "label_values(export_runs_total, tenant)",
|
||||
"hide": 0,
|
||||
"includeAll": true,
|
||||
"multi": true,
|
||||
"name": "tenant",
|
||||
"options": [],
|
||||
"query": { "query": "label_values(export_runs_total, tenant)", "refId": "StandardVariableQuery" },
|
||||
"refresh": 2,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 1,
|
||||
"type": "query"
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": { "from": "now-6h", "to": "now" },
|
||||
"timepicker": {},
|
||||
"timezone": "utc",
|
||||
"title": "ExportCenter Service",
|
||||
"uid": "export-center-overview",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
}
|
||||
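One way to load this dashboard is Grafana file-based provisioning; a minimal provider sketch follows, with the provider name and mount path as assumptions.

```yaml
apiVersion: 1
providers:
  - name: stellaops-dashboards                       # illustrative provider name
    orgId: 1
    folder: StellaOps
    type: file
    disableDeletion: true
    updateIntervalSeconds: 60
    options:
      path: /var/lib/grafana/dashboards/stellaops    # placeholder path containing export-center.json
```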
@@ -646,6 +646,39 @@ Persisted documents capture the canonical envelope (`payload` field), tenant/nod
|
||||
|
||||
---
|
||||
|
||||
### 2.9 CVSS Receipts (Policy Gateway)
|
||||
|
||||
Policy Gateway proxies the Policy Engine CVSS v4 receipt APIs. Scopes: `policy.run` for create/amend, `findings.read` for read/history/policies.
|
||||
|
||||
| Method | Path | Scope | Purpose |
|
||||
|--------|------|-------|---------|
|
||||
| `POST` | `/api/cvss/receipts` | `policy.run` | Create a receipt from `vulnerabilityId`, `policy` (CvssPolicy JSON), `baseMetrics`, optional `threatMetrics`/`environmentalMetrics`/`supplementalMetrics`, optional `evidence[]`, and optional `signingKey` (DSSE). |
|
||||
| `GET` | `/api/cvss/receipts/{id}` | `findings.read` | Fetch receipt with scores (`baseScore`, `threatScore`, `environmentalScore`, `fullScore`, `effectiveScore/type`), `vectorString`, `policyRef`, `inputHash`, `attestationRefs`, `evidence`, `history`. |
|
||||
| `PUT` | `/api/cvss/receipts/{id}/amend` | `policy.run` | Append history entry (`field`, `newValue`, `reason`, `referenceUri`, optional `signingKey`, `actor`); re-signs when a key is provided. |
|
||||
| `GET` | `/api/cvss/receipts/{id}/history` | `findings.read` | Return chronological amendments. |
|
||||
| `GET` | `/api/cvss/policies` | `findings.read` | List active CvssPolicy documents (id/version/hash/effective window). |
|
||||
|
||||
**Create example**
|
||||
|
||||
```
|
||||
POST /api/cvss/receipts
|
||||
Authorization: Bearer <token>
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"vulnerabilityId": "CVE-2025-1234",
|
||||
"policy": { "policyId": "default", "version": "1.0.0", "name": "Default CVSS", "effectiveFrom": "2025-12-01T00:00:00Z", "hash": "sha256:..." },
|
||||
"baseMetrics": { "av": "Network", "ac": "Low", "at": "None", "pr": "None", "ui": "None", "vc": "High", "vi": "High", "va": "High", "sc": "High", "si": "High", "sa": "High" },
|
||||
"environmentalMetrics": { "cr": "High", "ir": "High", "ar": "Medium" },
|
||||
"createdBy": "cli",
|
||||
"signingKey": { "keyId": "cvss-dev", "store": "local" }
|
||||
}
|
||||
```
|
||||
|
||||
Responses include `receiptId`, `vectorString`, `scores`, `severity`, `policyRef`, `inputHash`, optional `attestationRefs`, `evidence[]`, and `history[]` (empty on create). History endpoint returns ordered entries with `field`, `previousValue`, `newValue`, `reason`, `actor`, `referenceUri`, and timestamp.
|
||||
|
||||
---
|
||||
|
||||
## 3 StellaOps CLI (`stellaops-cli`)
|
||||
|
||||
The new CLI is built on **System.CommandLine 2.0.0‑beta5** and mirrors the Concelier backend REST API.
|
||||
@@ -672,6 +705,7 @@ See `docs/dev/32_AUTH_CLIENT_GUIDE.md` for recommended profiles (online vs. air-
|
||||
| `stellaops-cli scanner download` | Fetch and install scanner container | `--channel <stable\|beta\|nightly>` (default `stable`)<br>`--output <path>`<br>`--overwrite`<br>`--no-install` | Saves artefact under `ScannerCacheDirectory`, verifies digest/signature, and executes `docker load` unless `--no-install` is supplied. |
|
||||
| `stellaops-cli scan run` | Execute scanner container against a directory (auto-upload) | `--target <directory>` (required)<br>`--runner <docker\|dotnet\|self>` (default from config)<br>`--entry <image-or-entrypoint>`<br>`[scanner-args...]` | Runs the scanner, writes results into `ResultsDirectory`, emits a structured `scan-run-*.json` metadata file, and automatically uploads the artefact when the exit code is `0`. |
|
||||
| `stellaops-cli scan upload` | Re-upload existing scan artefact | `--file <path>` | Useful for retries when automatic upload fails or when operating offline. |
|
||||
| `stellaops-cli cvss <score\|show\|history\|export>` | Create/read/export CVSS v4 receipts against Policy Gateway | `score --vuln <id> --policy-file <path> --vector <cvss4> [--json]`<br>`show <receiptId> [--json]`<br>`history <receiptId> [--json]`<br>`export <receiptId> --format json --out <file>` | Uses `/api/cvss/receipts` + `/history` + `/policies`; `score`/`amend` require `policy.run` scope, read/export require `findings.read`. CLI validates vectors locally with `CvssV4Engine` and preserves deterministic `inputHash`/DSSE refs in output. |
|
||||
| `stellaops-cli ruby inspect` | Offline Ruby workspace inspection (Gemfile / lock + runtime signals) | `--root <directory>` (default current directory)<br>`--format <table\|json>` (default `table`) | Runs the bundled `RubyLanguageAnalyzer`, renders Observation summary (bundler/runtime/capabilities) plus Package/Version/Group/Source/Lockfile/Runtime columns, or emits JSON `{ packages: [...], observation: {...} }`. Exit codes: `0` success, `64` invalid format, `70` unexpected failure, `71` missing directory. |
|
||||
| `stellaops-cli ruby resolve` | Fetch Ruby package inventory for a completed scan | `--image <registry-ref>` *or* `--scan-id <id>` (one required)<br>`--format <table\|json>` (default `table`) | Calls `GetRubyPackagesAsync` (`GET /api/scans/{scanId}/ruby-packages`) to download the canonical `RubyPackageInventory`. Table output mirrors `inspect` with groups/platform/runtime usage; JSON now returns `{ scanId, imageDigest, generatedAt, groups: [...] }`. Exit codes: `0` success, `64` invalid args, `70` backend failure, `0` with warning when inventory hasn’t been persisted yet. |
|
||||
| `stellaops-cli db fetch` | Trigger connector jobs | `--source <id>` (e.g. `redhat`, `osv`)<br>`--stage <fetch\|parse\|map>` (default `fetch`)<br>`--mode <resume\|init\|cursor>` | Translates to `POST /jobs/source:{source}:{stage}` with `trigger=cli` |
|
||||
|
||||
@@ -1,8 +1,25 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Concelier – Link-Not-Merge Policy APIs
|
||||
version: "0.1.0"
|
||||
description: Fact-only advisory/linkset retrieval for Policy Engine consumers.
|
||||
version: "1.0.0"
|
||||
description: |
|
||||
Fact-only advisory/linkset retrieval for Policy Engine consumers.
|
||||
|
||||
## Philosophy
|
||||
Link-Not-Merge (LNM) provides raw advisory data with full provenance:
|
||||
- **Link**: Observations from multiple sources are linked via shared identifiers.
|
||||
- **Not Merge**: Conflicting data is preserved rather than collapsed.
|
||||
- **Surface, Don't Resolve**: Conflicts are clearly marked for consumers.
|
||||
|
||||
## Authentication
|
||||
All endpoints require the `X-Stella-Tenant` header for multi-tenant isolation.
|
||||
|
||||
## Pagination
|
||||
List endpoints support cursor-based pagination with `page` and `pageSize` parameters.
|
||||
Maximum page size is 200 items.
|
||||
|
||||
## Documentation
|
||||
See `/docs/modules/concelier/api/` for detailed examples and conflict resolution strategies.
|
||||
servers:
|
||||
- url: /
|
||||
description: Relative base path (API Gateway rewrites in production).
|
||||
@@ -44,6 +61,65 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PagedLinksets'
|
||||
examples:
|
||||
single-linkset:
|
||||
summary: Single linkset result
|
||||
value:
|
||||
items:
|
||||
- advisoryId: "CVE-2021-23337"
|
||||
source: "nvd"
|
||||
purl: ["pkg:npm/lodash@4.17.20"]
|
||||
cpe: ["cpe:2.3:a:lodash:lodash:4.17.20:*:*:*:*:node.js:*:*"]
|
||||
summary: "Lodash Command Injection vulnerability"
|
||||
publishedAt: "2021-02-15T13:15:00Z"
|
||||
modifiedAt: "2024-08-04T19:16:00Z"
|
||||
severity: "high"
|
||||
provenance:
|
||||
ingestedAt: "2025-11-20T10:30:00Z"
|
||||
connectorId: "nvd-osv-connector"
|
||||
evidenceHash: "sha256:a1b2c3d4e5f6"
|
||||
conflicts: []
|
||||
cached: false
|
||||
page: 1
|
||||
pageSize: 50
|
||||
total: 1
|
||||
with-conflicts:
|
||||
summary: Linkset with severity conflict
|
||||
value:
|
||||
items:
|
||||
- advisoryId: "CVE-2024-1234"
|
||||
source: "aggregated"
|
||||
purl: ["pkg:npm/example@1.0.0"]
|
||||
cpe: []
|
||||
severity: "high"
|
||||
provenance:
|
||||
ingestedAt: "2025-11-20T10:30:00Z"
|
||||
connectorId: "multi-source"
|
||||
conflicts:
|
||||
- field: "severity"
|
||||
reason: "severity-mismatch"
|
||||
observedValue: "critical"
|
||||
observedAt: "2025-11-18T08:00:00Z"
|
||||
evidenceHash: "sha256:conflict-hash"
|
||||
cached: false
|
||||
page: 1
|
||||
pageSize: 50
|
||||
total: 1
|
||||
"400":
|
||||
description: Invalid request parameters
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
example:
|
||||
type: "https://stellaops.io/errors/validation-failed"
|
||||
title: "Validation Failed"
|
||||
status: 400
|
||||
detail: "The 'pageSize' parameter exceeds the maximum allowed value."
|
||||
error:
|
||||
code: "ERR_PAGE_SIZE_EXCEEDED"
|
||||
message: "Page size must be between 1 and 200."
|
||||
target: "pageSize"
|
||||
/v1/lnm/linksets/{advisoryId}:
|
||||
get:
|
||||
summary: Get linkset by advisory ID
|
||||
@@ -275,3 +351,63 @@ components:
|
||||
event: { type: string }
|
||||
at: { type: string, format: date-time }
|
||||
evidenceHash: { type: string }
|
||||
ErrorEnvelope:
|
||||
type: object
|
||||
description: RFC 7807 Problem Details with StellaOps extensions
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
format: uri
|
||||
description: URI identifying the problem type
|
||||
title:
|
||||
type: string
|
||||
description: Short, human-readable summary
|
||||
status:
|
||||
type: integer
|
||||
description: HTTP status code
|
||||
detail:
|
||||
type: string
|
||||
description: Specific explanation of the problem
|
||||
instance:
|
||||
type: string
|
||||
format: uri
|
||||
description: URI of the specific occurrence
|
||||
traceId:
|
||||
type: string
|
||||
description: Distributed trace identifier
|
||||
error:
|
||||
$ref: '#/components/schemas/ErrorDetail'
|
||||
ErrorDetail:
|
||||
type: object
|
||||
description: Machine-readable error information
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
description: Machine-readable error code (e.g., ERR_VALIDATION_FAILED)
|
||||
message:
|
||||
type: string
|
||||
description: Human-readable error message
|
||||
target:
|
||||
type: string
|
||||
description: Field or resource that caused the error
|
||||
metadata:
|
||||
type: object
|
||||
additionalProperties: true
|
||||
description: Additional contextual data
|
||||
innerErrors:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ValidationError'
|
||||
description: Nested validation errors
|
||||
ValidationError:
|
||||
type: object
|
||||
properties:
|
||||
field:
|
||||
type: string
|
||||
description: Field path (e.g., "data.severity")
|
||||
code:
|
||||
type: string
|
||||
description: Error code for this field
|
||||
message:
|
||||
type: string
|
||||
description: Human-readable message
|
||||
|
||||
20
docs/api/console/exception-schema.md
Normal file
@@ -0,0 +1,20 @@
# Console Exceptions API Schema (draft placeholder)

**Status:** TODO · awaiting Policy Guild + Platform Events

## Scope
- `/exceptions` CRUD/workflow (create, propose, approve, revoke, list, history) proxied by Web gateway.
- Audit logging, pagination, notification hooks, rate limits, RBAC scopes.

## Needed from owners
- JSON schema for exception entity and workflow transitions; validation rules.
- Required scopes/roles; audit fields; pagination/sorting defaults; max durations/guardrails.
- Notification hook contract (`exception.*` events) and rate-limit policy.
- Sample payloads for each state and error cases.

## Draft sample (placeholder)
- See `docs/api/console/samples/exception-schema-sample.json` for a skeleton payload covering `pending_review` state.
- Replace with authoritative samples once schema is published.

## TODO
- Replace with ratified schema + samples; log hash/date; link from Web I/II sprint logs.
37
docs/api/console/samples/console-download-manifest.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"version": "2025-12-07",
|
||||
"exportId": "console-export::tenant-default::2025-12-07::0009",
|
||||
"tenantId": "tenant-default",
|
||||
"generatedAt": "2025-12-07T10:15:00Z",
|
||||
"items": [
|
||||
{
|
||||
"type": "vuln",
|
||||
"id": "CVE-2024-12345",
|
||||
"format": "json",
|
||||
"url": "https://downloads.local/exports/0009/vuln/CVE-2024-12345.json?sig=abc",
|
||||
"sha256": "f1c5a94d5e7e0b12f8a6c3b9e2f3d1017c6b9c1c822f4d2d5fa0c3e46f0e9a10",
|
||||
"size": 18432
|
||||
},
|
||||
{
|
||||
"type": "vex",
|
||||
"id": "vex:tenant-default:jwt-auth:5d1a",
|
||||
"format": "ndjson",
|
||||
"url": "https://downloads.local/exports/0009/vex/vex-tenant-default-jwt-auth-5d1a.ndjson?sig=def",
|
||||
"sha256": "3a2d0edc2bfa4c5c9e1a7f96b0b5e6de378c1f9baf2d6f2a7e9c5d4b3f0c1a2e",
|
||||
"size": 9216
|
||||
},
|
||||
{
|
||||
"type": "bundle",
|
||||
"id": "console-export::tenant-default::2025-12-07::0009",
|
||||
"format": "tar.gz",
|
||||
"url": "https://downloads.local/exports/0009/bundle.tar.gz?sig=ghi",
|
||||
"sha256": "12ae34f51c2b4c6d7e8f90ab1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e6f7081920a",
|
||||
"size": 48732102
|
||||
}
|
||||
],
|
||||
"checksums": {
|
||||
"manifest": "sha256:8bbf3cc1f8c7d6e5a4b3c2d1e0f9a8b7c6d5e4f3a2b1c0dffeeddccbbaa99887",
|
||||
"bundle": "sha256:12ae34f51c2b4c6d7e8f90ab1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e6f7081920a"
|
||||
},
|
||||
"expiresAt": "2025-12-14T10:15:00Z"
|
||||
}
|
||||
14
docs/api/console/samples/console-export-events.ndjson
Normal file
@@ -0,0 +1,14 @@
event: started
data: {"exportId":"console-export::tenant-default::2025-12-06::0007","status":"running","percent":0}

event: progress
data: {"exportId":"console-export::tenant-default::2025-12-06::0007","percent":25,"itemsCompleted":125,"itemsTotal":500}

event: asset_ready
data: {"exportId":"console-export::tenant-default::2025-12-06::0007","type":"advisory","id":"CVE-2024-12345","url":"https://exports.local/...","sha256":"cafe0001..."}

event: progress
data: {"exportId":"console-export::tenant-default::2025-12-06::0007","percent":75,"itemsCompleted":375,"itemsTotal":500}

event: completed
data: {"exportId":"console-export::tenant-default::2025-12-06::0007","status":"succeeded","manifestUrl":"https://exports.local/.../manifest.json"}
53
docs/api/console/samples/console-export-manifest.json
Normal file
@@ -0,0 +1,53 @@
|
||||
{
|
||||
"version": "2025-12-06",
|
||||
"exportId": "console-export::tenant-default::2025-12-06::0007",
|
||||
"tenantId": "tenant-default",
|
||||
"generatedAt": "2025-12-06T12:11:05Z",
|
||||
"expiresAt": "2025-12-13T12:11:05Z",
|
||||
"items": [
|
||||
{
|
||||
"type": "advisory",
|
||||
"id": "CVE-2024-12345",
|
||||
"format": "json",
|
||||
"url": "https://exports.local/tenant-default/0007/CVE-2024-12345.json?sig=...",
|
||||
"sha256": "sha256:cafe0001...",
|
||||
"size": 18432
|
||||
},
|
||||
{
|
||||
"type": "vex",
|
||||
"id": "vex:tenant-default:jwt-auth:5d1a",
|
||||
"format": "ndjson",
|
||||
"url": "https://exports.local/tenant-default/0007/vex-jwt-auth.ndjson?sig=...",
|
||||
"sha256": "sha256:cafe0002...",
|
||||
"size": 9216
|
||||
},
|
||||
{
|
||||
"type": "policy",
|
||||
"id": "policy://tenant-default/runtime-hardening",
|
||||
"format": "json",
|
||||
"url": "https://exports.local/tenant-default/0007/policy-runtime-hardening.json?sig=...",
|
||||
"sha256": "sha256:cafe0003...",
|
||||
"size": 16384
|
||||
},
|
||||
{
|
||||
"type": "scan",
|
||||
"id": "scan::tenant-default::auth-api::2025-11-07",
|
||||
"format": "ndjson",
|
||||
"url": "https://exports.local/tenant-default/0007/scan-auth-api.ndjson?sig=...",
|
||||
"sha256": "sha256:cafe0004...",
|
||||
"size": 32768
|
||||
},
|
||||
{
|
||||
"type": "bundle",
|
||||
"id": "console-export::tenant-default::2025-12-06::0007",
|
||||
"format": "tar.gz",
|
||||
"url": "https://exports.local/tenant-default/0007/bundle.tar.gz?sig=...",
|
||||
"sha256": "sha256:deadbeefcafefeed00000000000000000000000000000000000000000000000",
|
||||
"size": 48732102
|
||||
}
|
||||
],
|
||||
"checksums": {
|
||||
"manifest": "sha256:c0ffee00000000000000000000000000000000000000000000000000000000",
|
||||
"bundle": "sha256:deadbeef000000000000000000000000000000000000000000000000000000"
|
||||
}
|
||||
}
|
||||
16
docs/api/console/samples/console-export-request.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"scope": {
|
||||
"tenantId": "tenant-default",
|
||||
"projectId": "sre-prod"
|
||||
},
|
||||
"sources": [
|
||||
{ "type": "advisory", "ids": ["CVE-2024-12345", "CVE-2024-23456"] },
|
||||
{ "type": "vex", "ids": ["vex:tenant-default:jwt-auth:5d1a"] },
|
||||
{ "type": "policy", "ids": ["policy://tenant-default/runtime-hardening"] },
|
||||
{ "type": "scan", "ids": ["scan::tenant-default::auth-api::2025-11-07"] }
|
||||
],
|
||||
"formats": ["json", "ndjson", "csv"],
|
||||
"attestations": { "include": true, "sigstoreBundle": true },
|
||||
"notify": { "webhooks": ["https://hooks.local/export"], "email": ["secops@example.com"] },
|
||||
"priority": "normal"
|
||||
}
|
||||
24
docs/api/console/samples/console-export-status.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"exportId": "console-export::tenant-default::2025-12-06::0007",
|
||||
"status": "running",
|
||||
"estimateSeconds": 420,
|
||||
"retryAfter": 15,
|
||||
"createdAt": "2025-12-06T12:10:00Z",
|
||||
"updatedAt": "2025-12-06T12:11:05Z",
|
||||
"outputs": [
|
||||
{
|
||||
"type": "manifest",
|
||||
"format": "json",
|
||||
"url": "https://exports.local/tenant-default/0007/manifest.json?sig=...",
|
||||
"sha256": "c0ffee...",
|
||||
"expiresAt": "2025-12-06T13:10:00Z"
|
||||
}
|
||||
],
|
||||
"progress": {
|
||||
"percent": 42,
|
||||
"itemsCompleted": 210,
|
||||
"itemsTotal": 500,
|
||||
"assetsReady": 12
|
||||
},
|
||||
"errors": []
|
||||
}
|
||||
37
docs/api/console/samples/exception-schema-sample.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"exceptionId": "exc::tenant-default::2025-12-06::00012",
|
||||
"tenantId": "tenant-default",
|
||||
"title": "Risk accepted for log4j on batch nodes",
|
||||
"state": "pending_review",
|
||||
"type": "advisory",
|
||||
"scope": {
|
||||
"level": "asset",
|
||||
"assetIds": ["batch-node-17", "batch-node-18"],
|
||||
"advisoryIds": ["CVE-2021-44228"],
|
||||
"components": ["pkg:maven/org.apache.logging.log4j/log4j-core@2.14.0"]
|
||||
},
|
||||
"justification": {
|
||||
"template": "compensating_control",
|
||||
"details": "Ingress disabled; nodes isolated; patch planned 2025-12-20"
|
||||
},
|
||||
"timebox": {
|
||||
"start": "2025-12-06T00:00:00Z",
|
||||
"end": "2025-12-31T00:00:00Z",
|
||||
"maxRenewals": 1
|
||||
},
|
||||
"audit": {
|
||||
"createdBy": "alice@example.com",
|
||||
"createdAt": "2025-12-06T11:12:13Z",
|
||||
"modifiedAt": "2025-12-06T11:12:13Z"
|
||||
},
|
||||
"links": {
|
||||
"history": "/console/exceptions/exc::tenant-default::2025-12-06::00012/history",
|
||||
"attachments": [
|
||||
{
|
||||
"name": "risk-assessment.pdf",
|
||||
"url": "https://console.local/files/risk-assessment.pdf?sig=...",
|
||||
"sha256": "cafe..."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
58
docs/api/console/search-downloads.md
Normal file
@@ -0,0 +1,58 @@
# Console Search & Downloads · Draft v0.2
|
||||
|
||||
Scope: unblock WEB-CONSOLE-23-004/005 by defining deterministic ranking, caching rules, and the download manifest structure (including signed metadata option) for console search and offline bundle downloads. Final guild sign-off still required.
|
||||
|
||||
## 1) Deterministic search ranking
|
||||
- Primary sort: `severity (desc)` → `exploitScore (desc)` → `reachability (reachable > unknown > unreachable)` → `policyBadge (fail > warn > pass > waived)` → `vexState (under_investigation > fixed > not_affected > unknown)` → `findingId (asc)`.
|
||||
- Secondary tie-breakers (when above fields absent): `advisoryId (asc)` then `product (asc)`.
|
||||
- All pages are pre-sorted server-side; clients MUST NOT re-order.
|
||||
|
||||
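
As an illustration of the ordering above, a minimal TypeScript comparator. The `Finding` field names and the severity scale (`critical > high > medium > low > info`) are assumptions for the sketch, not ratified contract fields.

```ts
// Sketch of the documented ranking; adjust field names once the schema is ratified.
type Finding = {
  findingId: string;
  severity: "critical" | "high" | "medium" | "low" | "info";
  exploitScore: number;
  reachability: "reachable" | "unknown" | "unreachable";
  policyBadge: "fail" | "warn" | "pass" | "waived";
  vexState: "under_investigation" | "fixed" | "not_affected" | "unknown";
  advisoryId?: string;
  product?: string;
};

const severityRank = { critical: 4, high: 3, medium: 2, low: 1, info: 0 };
const reachabilityRank = { reachable: 2, unknown: 1, unreachable: 0 };
const policyBadgeRank = { fail: 3, warn: 2, pass: 1, waived: 0 };
const vexStateRank = { under_investigation: 3, fixed: 2, not_affected: 1, unknown: 0 };

export function compareFindings(a: Finding, b: Finding): number {
  return (
    severityRank[b.severity] - severityRank[a.severity] ||                 // severity desc
    b.exploitScore - a.exploitScore ||                                     // exploitScore desc
    reachabilityRank[b.reachability] - reachabilityRank[a.reachability] ||
    policyBadgeRank[b.policyBadge] - policyBadgeRank[a.policyBadge] ||
    vexStateRank[b.vexState] - vexStateRank[a.vexState] ||
    a.findingId.localeCompare(b.findingId) ||                              // findingId asc
    (a.advisoryId ?? "").localeCompare(b.advisoryId ?? "") ||              // secondary tie-breakers
    (a.product ?? "").localeCompare(b.product ?? "")
  );
}
```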
## 2) Caching + freshness
- Response headers: `Cache-Control: public, max-age=300, stale-while-revalidate=60, stale-if-error=300`.
- `ETag` is a stable SHA-256 over the sorted payload; clients send `If-None-Match` for revalidation.
- `Last-Modified` reflects the newest `updatedAt` in the result set.
- Retry/backoff guidance: honor `Retry-After` when present; default client backoff `1s,2s,4s,8s` capped at 30s (see the fetch sketch after this list).
- Deterministic page cursors: opaque base64url, signed; include `sortKeys` and `tenant` to avoid cross-tenant reuse.
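
A minimal revalidation/backoff sketch for these rules, assuming a JSON search endpoint behind the gateway; the auth headers are simplified (the DPoP proof header is omitted) and the URL is a placeholder.

```ts
// Conditional GET with If-None-Match plus the documented 1s/2s/4s/8s backoff capped at 30s.
const BACKOFF_MS = [1_000, 2_000, 4_000, 8_000];

export async function searchWithRevalidation(
  url: string,
  token: string,
  tenant: string,
  cached?: { etag: string; body: unknown },
): Promise<{ etag: string; body: unknown }> {
  for (let attempt = 0; ; attempt++) {
    const res = await fetch(url, {
      headers: {
        Authorization: `DPoP ${token}`,
        "X-StellaOps-Tenant": tenant,
        ...(cached ? { "If-None-Match": cached.etag } : {}),
      },
    });

    if (res.status === 304 && cached) return cached;   // payload unchanged, reuse cache
    if (res.ok) {
      return { etag: res.headers.get("ETag") ?? "", body: await res.json() };
    }

    // Back off on throttling or server errors, honouring Retry-After when present.
    if ((res.status === 429 || res.status >= 500) && attempt < BACKOFF_MS.length) {
      const retryAfter = Number(res.headers.get("Retry-After"));
      const waitMs = Number.isFinite(retryAfter) && retryAfter > 0
        ? Math.min(retryAfter * 1_000, 30_000)
        : BACKOFF_MS[attempt];
      await new Promise((resolve) => setTimeout(resolve, waitMs));
      continue;
    }
    throw new Error(`search failed: HTTP ${res.status}`);
  }
}
```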
## 3) Download manifest (for `/console/downloads` and export outputs)
Top-level:
```jsonc
{
  "version": "2025-12-07",
  "exportId": "console-export::tenant-default::2025-12-07::0009",
  "tenantId": "tenant-default",
  "generatedAt": "2025-12-07T10:15:00Z",
  "items": [
    {
      "type": "vuln", // advisory|vex|policy|scan|chart|bundle
      "id": "CVE-2024-12345",
      "format": "json",
      "url": "https://downloads.local/exports/0009/vuln/CVE-2024-12345.json?sig=...",
      "sha256": "f1c5…",
      "size": 18432
    }
  ],
  "checksums": {
    "manifest": "sha256:8bbf…",
    "bundle": "sha256:12ae…" // optional when a tar/zip bundle is produced
  },
  "expiresAt": "2025-12-14T10:15:00Z"
}
```

### 3.1 Signed metadata
- Optional DSSE envelope for `checksums.manifest`, using `sha256` digest and `application/json` payload type `stellaops.console.manifest`.
- Envelope is attached as `manifest.dsse` or provided via `Link: <...>; rel="alternate"; type="application/dsse+json"`.
- Signers: Authority-issued short-lived key scoped to `console:export` (a digest-check sketch follows this list).
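
A hedged sketch of checking the envelope's payload digest against `checksums.manifest`. It assumes the envelope uses the standard DSSE JSON shape (`payloadType`, base64 `payload`, `signatures[]`); verifying the signature against the Authority-issued key is deliberately out of scope here.

```ts
import { createHash } from "node:crypto";

interface DsseEnvelope {
  payloadType: string;                                // expected: "stellaops.console.manifest"
  payload: string;                                    // base64-encoded manifest bytes
  signatures: Array<{ keyid?: string; sig: string }>;
}

// Returns true when the decoded payload hashes to the manifest checksum ("sha256:<hex>").
export function manifestDigestMatches(envelope: DsseEnvelope, manifestChecksum: string): boolean {
  const expected = manifestChecksum.replace(/^sha256:/, "");
  const payloadBytes = Buffer.from(envelope.payload, "base64");
  const actual = createHash("sha256").update(payloadBytes).digest("hex");
  return envelope.payloadType === "stellaops.console.manifest" && actual === expected;
}
```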
### 3.2 Error handling
- Known error codes: `ERR_CONSOLE_DOWNLOAD_INVALID_CURSOR`, `ERR_CONSOLE_DOWNLOAD_EXPIRED`, `ERR_CONSOLE_DOWNLOAD_RATE_LIMIT`, `ERR_CONSOLE_DOWNLOAD_UNAVAILABLE`.
- On error, respond with deterministic JSON body including `requestId` and `retryAfterSeconds` when applicable.

## 4) Sample manifest
- `docs/api/console/samples/console-download-manifest.json` illustrates the exact shape above.

## 5) Open items for guild sign-off
- Final TTL values for `max-age` and `stale-*`.
- Whether DSSE envelope is mandatory for sealed tenants.
- Maximum bundle size / item count caps (proposal: 1000 items, 500 MiB compressed per export).
@@ -309,3 +309,102 @@ data: {
- `docs/api/console/samples/vex-statement-sse.ndjson` – contains 5 chronological SSE events for screenshot reproduction.

> Until backend implementations ship, use the examples above to unblock DOCS-AIAI-31-004; replace them with live captures once the gateway endpoints are available in staging.

## Exports (draft contract v0.4 for sign-off)

### Routes
- `POST /console/exports` — start an evidence bundle export job.
- `GET /console/exports/{exportId}` — fetch job status, manifest link, and download locations.
- `GET /console/exports/{exportId}/events` — SSE stream of job progress (optional).

### Security / headers
- `Authorization: DPoP <token>`
- `DPoP: <proof>`
- `X-StellaOps-Tenant: <tenantId>`
- `Idempotency-Key: <uuid>` (recommended for POST)
- `Accept: application/json` (status) or `text/event-stream` (events)
- Required scopes: `console:read` AND `console:export` (proposal).

### Request body (POST)
```jsonc
{
  "scope": { "tenantId": "t1", "projectId": "p1" },
  "sources": [
    { "type": "advisory", "ids": ["CVE-2024-12345"] },
    { "type": "vex", "ids": ["vex:tenant-default:jwt-auth:5d1a"] }
  ],
  "formats": ["json", "ndjson", "csv"],
  "attestations": { "include": true, "sigstoreBundle": true, "dsse": true },
  "notify": { "webhooks": ["https://hooks.local/export"], "email": ["secops@example.com"] },
  "priority": "normal"
}
```

### Response: 202 Accepted
- `exportId`, `status: queued|running|succeeded|failed|expired`
- `estimateSeconds`, `retryAfter` (seconds)
- `links`: `{ status: url, events?: url }` (a client polling sketch follows this list)
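
A client sketch for this flow, assuming the 202 body carries the fields listed above, that `links.status` is a gateway-relative path, and that polling every `retryAfter` seconds is acceptable; token/proof handling is simplified.

```ts
interface ExportAccepted {
  exportId: string;
  status: "queued" | "running" | "succeeded" | "failed" | "expired";
  retryAfter?: number;
  links: { status: string; events?: string };
}

export async function startAndAwaitExport(
  baseUrl: string,
  token: string,
  tenant: string,
  requestBody: unknown,
): Promise<unknown> {
  const headers = {
    Authorization: `DPoP ${token}`,
    "X-StellaOps-Tenant": tenant,
    "Content-Type": "application/json",
    "Idempotency-Key": crypto.randomUUID(),   // makes the POST safe to retry
  };

  const started = await fetch(`${baseUrl}/console/exports`, {
    method: "POST",
    headers,
    body: JSON.stringify(requestBody),
  });
  if (started.status !== 202) throw new Error(`export not accepted: HTTP ${started.status}`);
  const accepted = (await started.json()) as ExportAccepted;

  // Poll the status link until the job settles, waiting retryAfter seconds between polls.
  for (;;) {
    await new Promise((resolve) => setTimeout(resolve, (accepted.retryAfter ?? 5) * 1_000));
    const res = await fetch(`${baseUrl}${accepted.links.status}`, { headers });
    const state = (await res.json()) as { status: ExportAccepted["status"] };
    if (state.status === "succeeded") return state;
    if (state.status === "failed" || state.status === "expired") {
      throw new Error(`export ${accepted.exportId} ended with status ${state.status}`);
    }
  }
}
```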
### Response: GET status
```jsonc
{
  "exportId": "console-export::tenant-default::2025-12-06::0007",
  "status": "running",
  "estimateSeconds": 420,
  "outputs": [
    {
      "type": "manifest",
      "format": "json",
      "url": "https://exports.local/tenant-default/0007/manifest.json?sig=...",
      "sha256": "sha256:c0ffee...",
      "dsseUrl": "https://exports.local/tenant-default/0007/manifest.dsse?sig=...",
      "expiresAt": "2025-12-06T13:10:00Z"
    }
  ],
  "progress": { "percent": 42, "itemsCompleted": 210, "itemsTotal": 500, "assetsReady": 12 },
  "errors": []
}
```

### Response: SSE events
- `started`: `{ exportId, status }`
- `progress`: `{ exportId, percent, itemsCompleted, itemsTotal }`
- `asset_ready`: `{ exportId, type, id, url, sha256, format }`
- `completed`: `{ exportId, status: "succeeded", manifestUrl, manifestDsseUrl? }`
- `failed`: `{ exportId, status: "failed", code, message, retryAfterSeconds? }` (a listener sketch follows this list)
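
A browser-side listener sketch for these event names. `EventSource` cannot set the DPoP/tenant headers, so this assumes the gateway accepts some alternative credential for the SSE URL (for example a short-lived signed link); adjust once that is settled.

```ts
export function watchExport(eventsUrl: string, onDone: (manifestUrl: string) => void): EventSource {
  const source = new EventSource(eventsUrl);

  source.addEventListener("progress", (e) => {
    const { percent, itemsCompleted, itemsTotal } = JSON.parse((e as MessageEvent).data);
    console.log(`export ${percent}% (${itemsCompleted}/${itemsTotal})`);
  });

  source.addEventListener("asset_ready", (e) => {
    const asset = JSON.parse((e as MessageEvent).data);
    console.log(`asset ready: ${asset.type}/${asset.id}`);
  });

  source.addEventListener("completed", (e) => {
    const { manifestUrl } = JSON.parse((e as MessageEvent).data);
    source.close();
    onDone(manifestUrl);
  });

  source.addEventListener("failed", (e) => {
    const { code, message } = JSON.parse((e as MessageEvent).data);
    source.close();
    console.error(`export failed: ${code} ${message}`);
  });

  return source;
}
```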
### Manifest shape (downloaded via outputs)
- Ordering: sort items by `(type asc, id asc, format asc, url asc)`.
- `version`: string (date), `exportId`, `tenantId`, `generatedAt`, `expiresAt`
- `items[]`: `{ type: advisory|vex|policy|scan|chart|bundle, id, format, url, sha256, size }`
- `checksums`: `{ manifest: "sha256:<digest>", bundle?: "sha256:<digest>" }`
- Optional DSSE envelope for manifest: `manifest.dsse` (payload type `stellaops.console.manifest`). A typed sketch of this shape follows.
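
As a reading aid, the bullets above expressed as TypeScript types; field names mirror the draft contract and are not yet a ratified schema.

```ts
export type ManifestItemType = "advisory" | "vex" | "policy" | "scan" | "chart" | "bundle";

export interface ExportManifestItem {
  type: ManifestItemType;
  id: string;
  format: string;       // e.g. "json", "ndjson", "csv", "tar.gz"
  url: string;          // signed download URL
  sha256: string;       // "sha256:<digest>", per the checksum bullets above
  size: number;         // bytes
}

export interface ExportManifest {
  version: string;      // date string, e.g. "2025-12-06"
  exportId: string;
  tenantId: string;
  generatedAt: string;  // ISO-8601 UTC
  expiresAt: string;    // ISO-8601 UTC
  items: ExportManifestItem[];                      // pre-sorted by (type, id, format, url)
  checksums: { manifest: string; bundle?: string };
}
```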
### Limits (proposed)
- Max request body 256 KiB; max sources 50; max outputs 1000 assets/export.
- Max bundle size 500 MiB compressed.
- Default job timeout 30 minutes; idle SSE timeout 60s; backoff via `Retry-After`.

### Determinism, caching, retry
- Responses set `Cache-Control: public, max-age=300, stale-while-revalidate=60, stale-if-error=300`.
- `ETag` is SHA-256 over sorted payload; clients send `If-None-Match`.
- Respect `Retry-After`; client backoff `1s,2s,4s,8s` capped at 30s.
- Cursors (if introduced later) MUST be opaque, base64url, signed with tenant + sortKeys.

### Error codes (proposal)
- `ERR_CONSOLE_EXPORT_INVALID_SOURCE`
- `ERR_CONSOLE_EXPORT_TOO_LARGE`
- `ERR_CONSOLE_EXPORT_RATE_LIMIT`
- `ERR_CONSOLE_EXPORT_UNAVAILABLE`
- `ERR_CONSOLE_EXPORT_EXPIRED`

### Samples
- Request: `docs/api/console/samples/console-export-request.json`
- Status: `docs/api/console/samples/console-export-status.json`
- Manifest: `docs/api/console/samples/console-export-manifest.json`
- Events: `docs/api/console/samples/console-export-events.ndjson`

### Open items (needs guild sign-off)
- Final scopes list (`console:export` vs broader `console:*`).
- Final limits and error codes; checksum manifest format; attestation options.
- Caching/tie-break rules for downstream `/console/search` and `/console/downloads`.
106
docs/api/gateway/export-center.md
Normal file
@@ -0,0 +1,106 @@
# Export Center Gateway Contract (draft v0.9)

Scope: proxy Export Center APIs through the Web gateway with tenant scoping, deterministic responses, sealed-mode readiness, and offline-friendly signed URL handling.

## Security / headers
- `Authorization: DPoP <token>`, `DPoP: <proof>`
- `X-StellaOps-Tenant: <tenantId>` (required)
- `X-StellaOps-Project: <projectId>` (optional)
- `Idempotency-Key: <uuid>` (recommended for POST)
- `Accept: application/json` (or `text/event-stream` for SSE)
- Scopes (proposal): `export:read` for GET, `export:write` for POST.

## Endpoints
- `GET /export-center/profiles` — list export profiles (tenant-scoped).
- `POST /export-center/runs` — start an export run.
- `GET /export-center/runs/{runId}` — run status + outputs.
- `GET /export-center/runs/{runId}/events` — SSE progress stream.
- `GET /export-center/distributions/{id}` — signed URLs for OCI/object storage distribution.

## POST /export-center/runs (request)
```jsonc
{
  "profileId": "export-profile::tenant-default::daily-vex",
  "targets": ["vex", "advisory", "policy"],
  "formats": ["json", "ndjson"],
  "distribution": {
    "type": "oci",
    "ref": "registry.local/exports/daily",
    "signing": { "enabled": true, "keyRef": "k8s://secrets/eks/oci-signer" }
  },
  "retentionDays": 30,
  "encryption": { "enabled": true, "kmsKey": "kms://tenant-default/key1" },
  "priority": "normal"
}
```

## 202 Accepted
```jsonc
{
  "runId": "export-run::tenant-default::2025-12-06::0003",
  "status": "queued",
  "estimateSeconds": 420,
  "links": {
    "status": "/export-center/runs/export-run::tenant-default::2025-12-06::0003",
    "events": "/export-center/runs/export-run::tenant-default::2025-12-06::0003/events"
  },
  "retryAfter": 5
}
```

## GET /export-center/runs/{runId}
```jsonc
{
  "runId": "export-run::tenant-default::2025-12-06::0003",
  "status": "running",
  "profileId": "export-profile::tenant-default::daily-vex",
  "startedAt": "2025-12-06T10:00:00Z",
  "outputs": [
    {
      "type": "manifest",
      "format": "json",
      "url": "https://exports.local/tenant-default/0003/manifest.json?sig=...",
      "sha256": "sha256:c0ffee...",
      "dsseUrl": "https://exports.local/tenant-default/0003/manifest.dsse?sig=...",
      "expiresAt": "2025-12-06T16:00:00Z"
    }
  ],
  "progress": { "percent": 35, "itemsCompleted": 70, "itemsTotal": 200 },
  "errors": []
}
```

## SSE events
- `started`: `{ runId, status }`
- `progress`: `{ runId, percent, itemsCompleted, itemsTotal }`
- `artifact_ready`: `{ runId, type, id, url, sha256, format }`
- `completed`: `{ runId, status: "succeeded", manifestUrl, manifestDsseUrl? }`
- `failed`: `{ runId, status: "failed", code, message, retryAfterSeconds? }`

## Distributions
- `GET /export-center/distributions/{id}` returns signed URLs, expiry, checksum, and optional DSSE envelope reference.
- Response headers: `Cache-Control: private, max-age=60, stale-if-error=300`; `ETag` over sorted payload.
- Signed URL rels: `self`, `alternate` (DSSE), `bundle` when tar/zip produced. A checksum-verification sketch for downloads follows.
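
A download-and-verify sketch for the signed URLs above: it assumes the URL returns the artifact bytes directly and that the expected digest is the `sha256:<hex>` value from the run/distribution response.

```ts
import { createHash } from "node:crypto";

export async function downloadAndVerify(signedUrl: string, expectedSha256: string): Promise<Uint8Array> {
  const res = await fetch(signedUrl);
  if (!res.ok) throw new Error(`download failed: HTTP ${res.status}`);

  const bytes = new Uint8Array(await res.arrayBuffer());
  const actual = createHash("sha256").update(bytes).digest("hex");
  const expected = expectedSha256.replace(/^sha256:/, "");

  if (actual !== expected) {
    throw new Error(`checksum mismatch: expected ${expected}, got ${actual}`);
  }
  return bytes;
}
```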
## Determinism & limits
|
||||
- Max request body 256 KiB; max targets 50; max outputs 1000 assets/export; max bundle size 500 MiB compressed.
|
||||
- Default job timeout 60 minutes; idle SSE timeout 60s; client backoff `1s,2s,4s,8s` capped at 30s; honor `Retry-After`.
|
||||
- Ordering: manifest items sorted `(type asc, id asc, format asc, url asc)`.
|
||||
- Timestamps: ISO-8601 UTC; stable SHA-256 hashes only.
|
||||
|
||||
## Error codes (proposal)
|
||||
- `ERR_EXPORT_PROFILE_NOT_FOUND`
|
||||
- `ERR_EXPORT_REQUEST_INVALID`
|
||||
- `ERR_EXPORT_TOO_LARGE`
|
||||
- `ERR_EXPORT_RATE_LIMIT`
|
||||
- `ERR_EXPORT_DISTRIBUTION_FAILED`
|
||||
- `ERR_EXPORT_EXPIRED`
|
||||
|
||||
## Samples
|
||||
- Run request/response: see blocks above.
|
||||
- Status/manifest/events: reuse Console manifest sample (`docs/api/console/samples/console-export-manifest.json`) until Export Center publishes dedicated samples.
|
||||
|
||||
## Outstanding for sign-off
|
||||
- Final scope/limit numbers (targets, bundle cap, timeouts).
|
||||
- Whether DSSE is mandatory for sealed tenants.
|
||||
- Distribution signing rules (key source, validity duration) and retention defaults.
|
||||
42
docs/api/graph/overlay-schema.md
Normal file
@@ -0,0 +1,42 @@
# Graph Overlay & Cache Schema (draft placeholder)
|
||||
|
||||
**Status:** Draft v0.2 · owner-proposed
|
||||
|
||||
## Scope
|
||||
- Overlay/cache schema for graph tiles used by Web gateway and UI overlays.
|
||||
- Validation rules for bbox/zoom/path; pagination tokens; deterministic ordering.
|
||||
- Error codes and sampling/telemetry fields.
|
||||
|
||||
## Schema (draft)
|
||||
```jsonc
|
||||
{
|
||||
"version": "2025-12-06",
|
||||
"tenantId": "tenant-default",
|
||||
"tile": {
|
||||
"id": "graph-tile::asset::<hash>::z8/x12/y5",
|
||||
"bbox": { "minX": -122.41, "minY": 37.77, "maxX": -122.38, "maxY": 37.79 },
|
||||
"zoom": 8,
|
||||
"etag": "c0ffee-etag"
|
||||
},
|
||||
"nodes": [ { "id": "asset:...", "kind": "asset|component|vuln", "label": "", "severity": "high|medium|low|info", "reachability": "reachable|unreachable|unknown", "attributes": {} } ],
|
||||
"edges": [ { "id": "edge-1", "source": "nodeId", "target": "nodeId", "type": "depends_on|contains|evidence", "weight": 0.0 } ],
|
||||
"overlays": {
|
||||
"policy": [ { "nodeId": "nodeId", "badge": "pass|warn|fail|waived", "policyId": "", "verdictAt": "2025-12-05T09:00:00Z" } ],
|
||||
"vex": [ { "nodeId": "nodeId", "state": "not_affected|fixed|under_investigation|affected", "statementId": "", "lastUpdated": "2025-12-05T09:10:00Z" } ],
|
||||
"aoc": [ { "nodeId": "nodeId", "status": "pass|fail|warn", "lastVerified": "2025-12-05T10:11:12Z" } ]
|
||||
},
|
||||
"telemetry": { "generationMs": 0, "cache": "hit|miss", "samples": 0 }
|
||||
}
|
||||
```
|
||||
|
||||
## Constraints (proposal)
|
||||
- Max nodes per tile: 2,000; max edges: 4,000.
|
||||
- Zoom range: 0–12; tiles must include bbox and etag.
|
||||
- Arrays must be pre-sorted: nodes by `id`, edges by `id`, overlays by `nodeId` then `policyId|statementId`.
|
||||
|
||||
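
A small sketch of validating the pre-sorted-arrays constraint; the secondary overlay key (`policyId` falling back to `statementId`) follows the bullet above and is an assumption, not a ratified rule.

```ts
type Keyed = { id: string };
type OverlayEntry = { nodeId: string; policyId?: string; statementId?: string };

// True when every element is >= its predecessor under the given key.
function isSorted<T>(items: T[], key: (item: T) => string): boolean {
  return items.every((item, i) => i === 0 || key(items[i - 1]) <= key(item));
}

export function tileOrderingValid(tile: {
  nodes: Keyed[];
  edges: Keyed[];
  overlays: Record<string, OverlayEntry[]>;
}): boolean {
  const overlayKey = (o: OverlayEntry) => `${o.nodeId}\u0000${o.policyId ?? o.statementId ?? ""}`;
  return (
    isSorted(tile.nodes, (n) => n.id) &&
    isSorted(tile.edges, (e) => e.id) &&
    Object.values(tile.overlays).every((entries) => isSorted(entries, overlayKey))
  );
}
```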
## Samples
- `docs/api/graph/samples/overlay-sample.json`

## Outstanding
- Confirm max sizes, allowed edge types, and etag hashing rule.
- Provide validation error example and rate-limit headers for gateway responses.
75
docs/api/graph/samples/overlay-sample.json
Normal file
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"version": "2025-12-06",
|
||||
"tenantId": "tenant-default",
|
||||
"tile": {
|
||||
"id": "graph-tile::asset::sha256:abc123::z8/x12/y5",
|
||||
"bbox": {
|
||||
"minX": -122.41,
|
||||
"minY": 37.77,
|
||||
"maxX": -122.38,
|
||||
"maxY": 37.79
|
||||
},
|
||||
"zoom": 8,
|
||||
"etag": "c0ffee-overlay-etag"
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"id": "asset:registry.local/library/app@sha256:abc123",
|
||||
"kind": "asset",
|
||||
"label": "app:1.2.3",
|
||||
"severity": "high",
|
||||
"reachability": "reachable",
|
||||
"aoc": { "summary": "pass", "lastVerified": "2025-12-05T10:11:12Z" },
|
||||
"attributes": {
|
||||
"purl": "pkg:docker/app@sha256:abc123",
|
||||
"componentCount": 42
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "component:pkg:npm/jsonwebtoken@9.0.2",
|
||||
"kind": "component",
|
||||
"label": "jsonwebtoken@9.0.2",
|
||||
"severity": "high",
|
||||
"reachability": "reachable"
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"id": "edge-1",
|
||||
"source": "asset:registry.local/library/app@sha256:abc123",
|
||||
"target": "component:pkg:npm/jsonwebtoken@9.0.2",
|
||||
"type": "depends_on",
|
||||
"weight": 0.87
|
||||
}
|
||||
],
|
||||
"overlays": {
|
||||
"policy": [
|
||||
{
|
||||
"nodeId": "component:pkg:npm/jsonwebtoken@9.0.2",
|
||||
"badge": "fail",
|
||||
"policyId": "policy://tenant-default/runtime-hardening",
|
||||
"verdictAt": "2025-12-05T09:00:00Z"
|
||||
}
|
||||
],
|
||||
"vex": [
|
||||
{
|
||||
"nodeId": "component:pkg:npm/jsonwebtoken@9.0.2",
|
||||
"state": "under_investigation",
|
||||
"statementId": "vex:tenant-default:jwt:2025-12-05",
|
||||
"lastUpdated": "2025-12-05T09:10:00Z"
|
||||
}
|
||||
],
|
||||
"aoc": [
|
||||
{
|
||||
"nodeId": "asset:registry.local/library/app@sha256:abc123",
|
||||
"status": "pass",
|
||||
"lastVerified": "2025-12-05T10:11:12Z"
|
||||
}
|
||||
]
|
||||
},
|
||||
"telemetry": {
|
||||
"generationMs": 120,
|
||||
"cache": "hit",
|
||||
"samples": 3
|
||||
}
|
||||
}
|
||||
66
docs/api/signals/reachability-contract.md
Normal file
@@ -0,0 +1,66 @@
# Signals Reachability API Contract (draft placeholder)

**Status:** Draft v0.2 · owner-proposed

## Scope
- `/signals/callgraphs`, `/signals/facts`, reachability scoring overlays feeding UI/Web.
- Deterministic fixtures for SIG-26 chain (columns/badges, call paths, timelines, overlays, coverage).

## Endpoints
- `GET /signals/callgraphs` — returns call paths contributing to reachability.
- `GET /signals/facts` — returns reachability/coverage facts.

Common headers: `Authorization: DPoP <token>`, `DPoP: <proof>`, `X-StellaOps-Tenant`, optional `If-None-Match`.
Pagination: cursor via `pageToken`; default 50, max 200.
ETag: required on responses; clients must send `If-None-Match` for cache validation. A paginated-fetch sketch follows.
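
A hedged client sketch for cursor pagination over `/signals/facts`; the response field names follow the draft blocks below, while the `pageToken` query-parameter name and the simplified auth headers (no DPoP proof) are assumptions.

```ts
export async function fetchAllFacts(baseUrl: string, token: string, tenant: string): Promise<unknown[]> {
  const facts: unknown[] = [];
  let pageToken: string | null = null;

  do {
    const url = new URL(`${baseUrl}/signals/facts`);
    if (pageToken) url.searchParams.set("pageToken", pageToken);

    const res = await fetch(url, {
      headers: { Authorization: `DPoP ${token}`, "X-StellaOps-Tenant": tenant },
    });
    if (!res.ok) throw new Error(`facts fetch failed: HTTP ${res.status}`);

    const page = (await res.json()) as {
      facts: unknown[];
      pagination: { nextPageToken: string | null };
    };
    facts.push(...page.facts);
    pageToken = page.pagination.nextPageToken;   // stop once the cursor is exhausted
  } while (pageToken);

  return facts;
}
```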
### Callgraphs response (draft)
```jsonc
{
  "tenantId": "tenant-default",
  "assetId": "registry.local/library/app@sha256:abc123",
  "paths": [
    {
      "id": "path-1",
      "source": "api-gateway",
      "target": "jwt-auth-service",
      "hops": [
        { "service": "api-gateway", "endpoint": "/login", "timestamp": "2025-12-05T10:00:00Z" },
        { "service": "jwt-auth-service", "endpoint": "/verify", "timestamp": "2025-12-05T10:00:01Z" }
      ],
      "evidence": { "traceId": "trace-abc", "spanCount": 2, "score": 0.92 }
    }
  ],
  "pagination": { "nextPageToken": null },
  "etag": "sig-callgraphs-etag"
}
```

### Facts response (draft)
```jsonc
{
  "tenantId": "tenant-default",
  "facts": [
    {
      "id": "fact-1",
      "type": "reachability",
      "assetId": "registry.local/library/app@sha256:abc123",
      "component": "pkg:npm/jsonwebtoken@9.0.2",
      "status": "reachable",
      "confidence": 0.88,
      "observedAt": "2025-12-05T10:10:00Z",
      "signalsVersion": "signals-2025.310.1"
    }
  ],
  "pagination": { "nextPageToken": "..." },
  "etag": "sig-facts-etag"
}
```

### Samples
- Callgraphs: `docs/api/signals/samples/callgraph-sample.json`
- Facts: `docs/api/signals/samples/facts-sample.json`

### Outstanding
- Finalize score model, accepted `type` values, and max page size.
- Provide OpenAPI/JSON schema and error codes.
23
docs/api/signals/samples/callgraph-sample.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"tenantId": "tenant-default",
|
||||
"assetId": "registry.local/library/app@sha256:abc123",
|
||||
"paths": [
|
||||
{
|
||||
"id": "path-1",
|
||||
"source": "api-gateway",
|
||||
"target": "jwt-auth-service",
|
||||
"hops": [
|
||||
{ "service": "api-gateway", "endpoint": "/login", "timestamp": "2025-12-05T10:00:00Z" },
|
||||
{ "service": "jwt-auth-service", "endpoint": "/verify", "timestamp": "2025-12-05T10:00:01Z" }
|
||||
],
|
||||
"evidence": {
|
||||
"traceId": "trace-abc",
|
||||
"spanCount": 2,
|
||||
"score": 0.92
|
||||
}
|
||||
}
|
||||
],
|
||||
"pagination": {
|
||||
"nextPageToken": null
|
||||
}
|
||||
}
|
||||
26
docs/api/signals/samples/facts-sample.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"tenantId": "tenant-default",
|
||||
"facts": [
|
||||
{
|
||||
"id": "fact-1",
|
||||
"type": "reachability",
|
||||
"assetId": "registry.local/library/app@sha256:abc123",
|
||||
"component": "pkg:npm/jsonwebtoken@9.0.2",
|
||||
"status": "reachable",
|
||||
"confidence": 0.88,
|
||||
"observedAt": "2025-12-05T10:10:00Z",
|
||||
"signalsVersion": "signals-2025.310.1"
|
||||
},
|
||||
{
|
||||
"id": "fact-2",
|
||||
"type": "coverage",
|
||||
"assetId": "registry.local/library/app@sha256:abc123",
|
||||
"metric": "sensors_present",
|
||||
"value": 0.94,
|
||||
"observedAt": "2025-12-05T10:11:00Z"
|
||||
}
|
||||
],
|
||||
"pagination": {
|
||||
"nextPageToken": "eyJmYWN0SWQiOiJmYWN0LTIifQ"
|
||||
}
|
||||
}
|
||||
886
docs/api/taskrunner-openapi.yaml
Normal file
@@ -0,0 +1,886 @@
|
||||
# OpenAPI 3.1 specification for StellaOps TaskRunner WebService
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps TaskRunner API
|
||||
version: 0.1.0-draft
|
||||
description: |
|
||||
Contract for TaskRunner service covering pack runs, simulations, logs, artifacts, and approvals.
|
||||
Uses the platform error envelope and tenant header `X-StellaOps-Tenant`.
|
||||
|
||||
## Streaming Endpoints
|
||||
The `/runs/{runId}/logs` endpoint returns logs in NDJSON (Newline Delimited JSON) format
|
||||
for efficient streaming. Each line is a complete JSON object.
|
||||
|
||||
## Control Flow Steps
|
||||
TaskPacks support the following step kinds:
|
||||
- **run**: Execute an action using a builtin or custom executor
|
||||
- **parallel**: Execute child steps concurrently with optional maxParallel limit
|
||||
- **map**: Iterate over items and execute a template step for each
|
||||
- **loop**: Iterate with items expression, range, or static list
|
||||
- **conditional**: Branch based on condition expressions
|
||||
- **gate.approval**: Require manual approval before proceeding
|
||||
- **gate.policy**: Evaluate policy and optionally require override approval
|
||||
servers:
|
||||
- url: https://taskrunner.stellaops.example.com
|
||||
description: Production
|
||||
- url: https://taskrunner.dev.stellaops.example.com
|
||||
description: Development
|
||||
security:
|
||||
- oauth2: [taskrunner.viewer]
|
||||
- oauth2: [taskrunner.operator]
|
||||
- oauth2: [taskrunner.admin]
|
||||
|
||||
paths:
|
||||
/v1/task-runner/simulations:
|
||||
post:
|
||||
summary: Simulate a task pack
|
||||
description: |
|
||||
Validates a task pack manifest, creates an execution plan, and simulates the run
|
||||
without actually executing any steps. Returns the simulation result showing which
|
||||
steps would execute, which are skipped, and which require approvals.
|
||||
operationId: simulateTaskPack
|
||||
tags: [Simulations]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SimulationRequest'
|
||||
examples:
|
||||
basic-simulation:
|
||||
summary: Basic simulation request
|
||||
value:
|
||||
manifest: |
|
||||
apiVersion: stellaops.io/pack.v1
|
||||
kind: TaskPack
|
||||
metadata:
|
||||
name: scan-deploy
|
||||
version: 1.0.0
|
||||
spec:
|
||||
inputs:
|
||||
- name: target
|
||||
type: string
|
||||
required: true
|
||||
sandbox:
|
||||
mode: sealed
|
||||
egressAllowlist: []
|
||||
cpuLimitMillicores: 100
|
||||
memoryLimitMiB: 128
|
||||
quotaSeconds: 60
|
||||
slo:
|
||||
runP95Seconds: 300
|
||||
approvalP95Seconds: 900
|
||||
maxQueueDepth: 100
|
||||
steps:
|
||||
- id: scan
|
||||
run:
|
||||
uses: builtin:scanner
|
||||
with:
|
||||
target: "{{ inputs.target }}"
|
||||
inputs:
|
||||
target: "registry.example.com/app:v1.2.3"
|
||||
responses:
|
||||
'200':
|
||||
description: Simulation completed
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SimulationResponse'
|
||||
examples:
|
||||
simulation-result:
|
||||
value:
|
||||
planHash: "sha256:a1b2c3d4e5f6..."
|
||||
failurePolicy:
|
||||
maxAttempts: 1
|
||||
backoffSeconds: 0
|
||||
continueOnError: false
|
||||
steps:
|
||||
- id: scan
|
||||
templateId: scan
|
||||
kind: Run
|
||||
enabled: true
|
||||
status: Pending
|
||||
uses: "builtin:scanner"
|
||||
children: []
|
||||
outputs: []
|
||||
hasPendingApprovals: false
|
||||
'400':
|
||||
description: Invalid manifest or inputs
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PlanErrorResponse'
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs:
|
||||
post:
|
||||
summary: Create a pack run
|
||||
description: |
|
||||
Creates a new pack run from a task pack manifest. The run is scheduled for execution
|
||||
and will proceed through its steps. If approval gates are present, the run will pause
|
||||
at those gates until approvals are granted.
|
||||
operationId: createPackRun
|
||||
tags: [Runs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CreateRunRequest'
|
||||
examples:
|
||||
create-run:
|
||||
summary: Create a new run
|
||||
value:
|
||||
runId: "run-20251206-001"
|
||||
manifest: |
|
||||
apiVersion: stellaops.io/pack.v1
|
||||
kind: TaskPack
|
||||
metadata:
|
||||
name: deploy-app
|
||||
version: 2.0.0
|
||||
spec:
|
||||
sandbox:
|
||||
mode: sealed
|
||||
egressAllowlist: []
|
||||
cpuLimitMillicores: 200
|
||||
memoryLimitMiB: 256
|
||||
quotaSeconds: 120
|
||||
slo:
|
||||
runP95Seconds: 600
|
||||
approvalP95Seconds: 1800
|
||||
maxQueueDepth: 50
|
||||
approvals:
|
||||
- id: security-review
|
||||
grants: [packs.approve]
|
||||
steps:
|
||||
- id: build
|
||||
run:
|
||||
uses: builtin:build
|
||||
- id: approval
|
||||
gate:
|
||||
approval:
|
||||
id: security-review
|
||||
message: "Security review required before deploy"
|
||||
- id: deploy
|
||||
run:
|
||||
uses: builtin:deploy
|
||||
tenantId: "tenant-prod"
|
||||
responses:
|
||||
'201':
|
||||
description: Run created
|
||||
headers:
|
||||
Location:
|
||||
description: URL of the created run
|
||||
schema:
|
||||
type: string
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RunStateResponse'
|
||||
'400':
|
||||
description: Invalid manifest or inputs
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/PlanErrorResponse'
|
||||
'409':
|
||||
description: Run ID already exists
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs/{runId}:
|
||||
get:
|
||||
summary: Get run state
|
||||
description: |
|
||||
Returns the current state of a pack run, including status of all steps,
|
||||
failure policy, and timing information.
|
||||
operationId: getRunState
|
||||
tags: [Runs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
- $ref: '#/components/parameters/RunId'
|
||||
responses:
|
||||
'200':
|
||||
description: Run state
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RunStateResponse'
|
||||
examples:
|
||||
running:
|
||||
summary: Run in progress
|
||||
value:
|
||||
runId: "run-20251206-001"
|
||||
planHash: "sha256:a1b2c3d4..."
|
||||
failurePolicy:
|
||||
maxAttempts: 2
|
||||
backoffSeconds: 30
|
||||
continueOnError: false
|
||||
createdAt: "2025-12-06T10:00:00Z"
|
||||
updatedAt: "2025-12-06T10:05:00Z"
|
||||
steps:
|
||||
- stepId: build
|
||||
kind: Run
|
||||
enabled: true
|
||||
continueOnError: false
|
||||
status: Succeeded
|
||||
attempts: 1
|
||||
lastTransitionAt: "2025-12-06T10:02:00Z"
|
||||
- stepId: approval
|
||||
kind: GateApproval
|
||||
enabled: true
|
||||
continueOnError: false
|
||||
approvalId: security-review
|
||||
gateMessage: "Security review required before deploy"
|
||||
status: Pending
|
||||
attempts: 0
|
||||
statusReason: "awaiting-approval"
|
||||
- stepId: deploy
|
||||
kind: Run
|
||||
enabled: true
|
||||
continueOnError: false
|
||||
status: Pending
|
||||
attempts: 0
|
||||
'404':
|
||||
description: Run not found
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs/{runId}/logs:
|
||||
get:
|
||||
summary: Stream run logs
|
||||
description: |
|
||||
Returns run logs as a stream of NDJSON (Newline Delimited JSON) entries.
|
||||
Each line is a complete JSON object representing a log entry with timestamp,
|
||||
level, event type, message, and optional metadata.
|
||||
|
||||
**Content-Type**: `application/x-ndjson`
|
||||
operationId: streamRunLogs
|
||||
tags: [Logs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
- $ref: '#/components/parameters/RunId'
|
||||
responses:
|
||||
'200':
|
||||
description: Log stream
|
||||
content:
|
||||
application/x-ndjson:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RunLogEntry'
|
||||
examples:
|
||||
log-stream:
|
||||
summary: Sample NDJSON log stream
|
||||
value: |
|
||||
{"timestamp":"2025-12-06T10:00:00Z","level":"info","eventType":"run.created","message":"Run created via API.","metadata":{"planHash":"sha256:a1b2c3d4...","requestedAt":"2025-12-06T10:00:00Z"}}
|
||||
{"timestamp":"2025-12-06T10:00:01Z","level":"info","eventType":"step.started","message":"Starting step: build","stepId":"build"}
|
||||
{"timestamp":"2025-12-06T10:02:00Z","level":"info","eventType":"step.completed","message":"Step completed: build","stepId":"build","metadata":{"duration":"119s"}}
|
||||
{"timestamp":"2025-12-06T10:02:01Z","level":"warn","eventType":"gate.awaiting","message":"Awaiting approval: security-review","stepId":"approval"}
|
||||
'404':
|
||||
description: Run not found
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs/{runId}/artifacts:
|
||||
get:
|
||||
summary: List run artifacts
|
||||
description: |
|
||||
Returns a list of artifacts captured during the run, including file outputs,
|
||||
evidence bundles, and expression-evaluated results.
|
||||
operationId: listRunArtifacts
|
||||
tags: [Artifacts]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
- $ref: '#/components/parameters/RunId'
|
||||
responses:
|
||||
'200':
|
||||
description: Artifact list
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/RunArtifact'
|
||||
examples:
|
||||
artifacts:
|
||||
value:
|
||||
- name: scan-report
|
||||
type: file
|
||||
sourcePath: "/output/scan-report.json"
|
||||
storedPath: "runs/run-20251206-001/artifacts/scan-report.json"
|
||||
status: captured
|
||||
capturedAt: "2025-12-06T10:02:00Z"
|
||||
- name: evidence-bundle
|
||||
type: object
|
||||
status: captured
|
||||
capturedAt: "2025-12-06T10:02:00Z"
|
||||
expressionJson: '{"sha256":"abc123...","attestations":[...]}'
|
||||
'404':
|
||||
description: Run not found
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs/{runId}/approvals/{approvalId}:
|
||||
post:
|
||||
summary: Apply approval decision
|
||||
description: |
|
||||
Applies an approval decision (approved, rejected, or expired) to a pending
|
||||
approval gate. The planHash must match to prevent approving a stale plan.
|
||||
|
||||
If approved, the run will resume execution. If rejected, the run will fail
|
||||
at the gate step.
|
||||
operationId: applyApprovalDecision
|
||||
tags: [Approvals]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
- $ref: '#/components/parameters/RunId'
|
||||
- $ref: '#/components/parameters/ApprovalId'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ApprovalDecisionRequest'
|
||||
examples:
|
||||
approve:
|
||||
summary: Approve the gate
|
||||
value:
|
||||
decision: approved
|
||||
planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
|
||||
actorId: "user:alice@example.com"
|
||||
summary: "Reviewed and approved for production deployment"
|
||||
reject:
|
||||
summary: Reject the gate
|
||||
value:
|
||||
decision: rejected
|
||||
planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
|
||||
actorId: "user:bob@example.com"
|
||||
summary: "Security scan found critical vulnerabilities"
|
||||
responses:
|
||||
'200':
|
||||
description: Decision applied
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ApprovalDecisionResponse'
|
||||
examples:
|
||||
approved:
|
||||
value:
|
||||
status: approved
|
||||
resumed: true
|
||||
'400':
|
||||
description: Invalid decision or planHash format
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
'404':
|
||||
description: Run or approval not found
|
||||
'409':
|
||||
description: Plan hash mismatch
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/v1/task-runner/runs/{runId}/cancel:
|
||||
post:
|
||||
summary: Cancel a run
|
||||
description: |
|
||||
Requests cancellation of a run. Remaining pending steps will be marked as
|
||||
skipped. Steps that have already succeeded or been skipped are not affected.
|
||||
operationId: cancelRun
|
||||
tags: [Runs]
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/Tenant'
|
||||
- $ref: '#/components/parameters/RunId'
|
||||
responses:
|
||||
'202':
|
||||
description: Cancellation accepted
|
||||
headers:
|
||||
Location:
|
||||
description: URL of the run
|
||||
schema:
|
||||
type: string
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
status:
|
||||
type: string
|
||||
enum: [cancelled]
|
||||
'404':
|
||||
description: Run not found
|
||||
default:
|
||||
$ref: '#/components/responses/Error'
|
||||
|
||||
/.well-known/openapi:
|
||||
get:
|
||||
summary: Get OpenAPI metadata
|
||||
description: |
|
||||
Returns metadata about the OpenAPI specification including the spec URL,
|
||||
ETag for caching, and a signature for verification.
|
||||
operationId: getOpenApiMetadata
|
||||
tags: [Metadata]
|
||||
responses:
|
||||
'200':
|
||||
description: OpenAPI metadata
|
||||
headers:
|
||||
ETag:
|
||||
description: Spec version ETag
|
||||
schema:
|
||||
type: string
|
||||
X-Signature:
|
||||
description: Spec signature for verification
|
||||
schema:
|
||||
type: string
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/OpenApiMetadata'
|
||||
examples:
|
||||
metadata:
|
||||
value:
|
||||
specUrl: "/openapi"
|
||||
version: "0.1.0-draft"
|
||||
buildVersion: "20251206.1"
|
||||
etag: '"abc123"'
|
||||
signature: "sha256:def456..."
|
||||
|
||||
components:
|
||||
securitySchemes:
|
||||
oauth2:
|
||||
type: oauth2
|
||||
flows:
|
||||
clientCredentials:
|
||||
tokenUrl: https://auth.stellaops.example.com/oauth/token
|
||||
scopes:
|
||||
taskrunner.viewer: Read-only access to runs and logs
|
||||
taskrunner.operator: Create runs and apply approvals
|
||||
taskrunner.admin: Full administrative access
|
||||
|
||||
parameters:
|
||||
Tenant:
|
||||
name: X-StellaOps-Tenant
|
||||
in: header
|
||||
required: false
|
||||
description: Tenant slug (optional for single-tenant deployments)
|
||||
schema:
|
||||
type: string
|
||||
RunId:
|
||||
name: runId
|
||||
in: path
|
||||
required: true
|
||||
description: Unique run identifier
|
||||
schema:
|
||||
type: string
|
||||
pattern: '^[a-zA-Z0-9_-]+$'
|
||||
ApprovalId:
|
||||
name: approvalId
|
||||
in: path
|
||||
required: true
|
||||
description: Approval gate identifier (from task pack approvals section)
|
||||
schema:
|
||||
type: string
|
||||
|
||||
responses:
|
||||
Error:
|
||||
description: Standard error envelope
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorEnvelope'
|
||||
examples:
|
||||
internal-error:
|
||||
value:
|
||||
error:
|
||||
code: internal_error
|
||||
message: "An unexpected error occurred"
|
||||
traceId: "f62f3c2b9c8e4c53"
|
||||
|
||||
schemas:
|
||||
ErrorEnvelope:
|
||||
type: object
|
||||
required: [error]
|
||||
properties:
|
||||
error:
|
||||
type: object
|
||||
required: [code, message]
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
description: Machine-readable error code
|
||||
message:
|
||||
type: string
|
||||
description: Human-readable error message
|
||||
traceId:
|
||||
type: string
|
||||
description: Trace ID for debugging
|
||||
|
||||
SimulationRequest:
|
||||
type: object
|
||||
required: [manifest]
|
||||
properties:
|
||||
manifest:
|
||||
type: string
|
||||
description: Task pack manifest in YAML format
|
||||
inputs:
|
||||
type: object
|
||||
additionalProperties: true
|
||||
description: Input values to provide to the task pack
|
||||
|
||||
SimulationResponse:
|
||||
type: object
|
||||
required: [planHash, failurePolicy, steps, outputs, hasPendingApprovals]
|
||||
properties:
|
||||
planHash:
|
||||
type: string
|
||||
description: SHA-256 hash of the execution plan
|
||||
pattern: '^sha256:[a-f0-9]{64}$'
|
||||
failurePolicy:
|
||||
$ref: '#/components/schemas/FailurePolicy'
|
||||
steps:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/SimulationStep'
|
||||
outputs:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/SimulationOutput'
|
||||
hasPendingApprovals:
|
||||
type: boolean
|
||||
description: Whether the plan contains approval gates
|
||||
|
||||
SimulationStep:
|
||||
type: object
|
||||
required: [id, templateId, kind, enabled, status, children]
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
templateId:
|
||||
type: string
|
||||
kind:
|
||||
type: string
|
||||
enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
|
||||
enabled:
|
||||
type: boolean
|
||||
status:
|
||||
type: string
|
||||
enum: [Pending, Skipped, RequiresApproval, RequiresPolicy, WillIterate, WillBranch]
|
||||
statusReason:
|
||||
type: string
|
||||
uses:
|
||||
type: string
|
||||
description: Executor reference for run steps
|
||||
approvalId:
|
||||
type: string
|
||||
gateMessage:
|
||||
type: string
|
||||
maxParallel:
|
||||
type: integer
|
||||
continueOnError:
|
||||
type: boolean
|
||||
children:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/SimulationStep'
|
||||
loopInfo:
|
||||
$ref: '#/components/schemas/LoopInfo'
|
||||
conditionalInfo:
|
||||
$ref: '#/components/schemas/ConditionalInfo'
|
||||
policyInfo:
|
||||
$ref: '#/components/schemas/PolicyInfo'
|
||||
|
||||
LoopInfo:
|
||||
type: object
|
||||
description: Loop step simulation details
|
||||
properties:
|
||||
itemsExpression:
|
||||
type: string
|
||||
iterator:
|
||||
type: string
|
||||
index:
|
||||
type: string
|
||||
maxIterations:
|
||||
type: integer
|
||||
aggregationMode:
|
||||
type: string
|
||||
enum: [collect, merge, last, first, none]
|
||||
|
||||
ConditionalInfo:
|
||||
type: object
|
||||
description: Conditional step simulation details
|
||||
properties:
|
||||
branches:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
condition:
|
||||
type: string
|
||||
stepCount:
|
||||
type: integer
|
||||
elseStepCount:
|
||||
type: integer
|
||||
outputUnion:
|
||||
type: boolean
|
||||
|
||||
PolicyInfo:
|
||||
type: object
|
||||
description: Policy gate simulation details
|
||||
properties:
|
||||
policyId:
|
||||
type: string
|
||||
policyVersion:
|
||||
type: string
|
||||
failureAction:
|
||||
type: string
|
||||
enum: [abort, warn, requestOverride, branch]
|
||||
retryCount:
|
||||
type: integer
|
||||
|
||||
SimulationOutput:
|
||||
type: object
|
||||
required: [name, type, requiresRuntimeValue]
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
requiresRuntimeValue:
|
||||
type: boolean
|
||||
pathExpression:
|
||||
type: string
|
||||
valueExpression:
|
||||
type: string
|
||||
|
||||
CreateRunRequest:
|
||||
type: object
|
||||
required: [manifest]
|
||||
properties:
|
||||
runId:
|
||||
type: string
|
||||
description: Optional custom run ID (auto-generated if not provided)
|
||||
manifest:
|
||||
type: string
|
||||
description: Task pack manifest in YAML format
|
||||
inputs:
|
||||
type: object
|
||||
additionalProperties: true
|
||||
description: Input values to provide to the task pack
|
||||
tenantId:
|
||||
type: string
|
||||
description: Tenant identifier
|
||||
|
||||
RunStateResponse:
|
||||
type: object
|
||||
required: [runId, planHash, failurePolicy, createdAt, updatedAt, steps]
|
||||
properties:
|
||||
runId:
|
||||
type: string
|
||||
planHash:
|
||||
type: string
|
||||
pattern: '^sha256:[a-f0-9]{64}$'
|
||||
failurePolicy:
|
||||
$ref: '#/components/schemas/FailurePolicy'
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
updatedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
steps:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/RunStateStep'
|
||||
|
||||
RunStateStep:
|
||||
type: object
|
||||
required: [stepId, kind, enabled, continueOnError, status, attempts]
|
||||
properties:
|
||||
stepId:
|
||||
type: string
|
||||
kind:
|
||||
type: string
|
||||
enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
|
||||
enabled:
|
||||
type: boolean
|
||||
continueOnError:
|
||||
type: boolean
|
||||
maxParallel:
|
||||
type: integer
|
||||
approvalId:
|
||||
type: string
|
||||
gateMessage:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum: [Pending, Running, Succeeded, Failed, Skipped]
|
||||
attempts:
|
||||
type: integer
|
||||
lastTransitionAt:
|
||||
type: string
|
||||
format: date-time
|
||||
nextAttemptAt:
|
||||
type: string
|
||||
format: date-time
|
||||
statusReason:
|
||||
type: string
|
||||
|
||||
FailurePolicy:
|
||||
type: object
|
||||
required: [maxAttempts, backoffSeconds, continueOnError]
|
||||
properties:
|
||||
maxAttempts:
|
||||
type: integer
|
||||
minimum: 1
|
||||
backoffSeconds:
|
||||
type: integer
|
||||
minimum: 0
|
||||
continueOnError:
|
||||
type: boolean
|
||||
|
||||
RunLogEntry:
|
||||
type: object
|
||||
required: [timestamp, level, eventType, message]
|
||||
description: |
|
||||
Log entry returned in NDJSON stream. Each entry is a single JSON object
|
||||
followed by a newline character.
|
||||
properties:
|
||||
timestamp:
|
||||
type: string
|
||||
format: date-time
|
||||
level:
|
||||
type: string
|
||||
enum: [debug, info, warn, error]
|
||||
eventType:
|
||||
type: string
|
||||
description: |
|
||||
Event type identifier, e.g.:
|
||||
- run.created, run.started, run.completed, run.failed, run.cancelled
|
||||
- step.started, step.completed, step.failed, step.skipped
|
||||
- gate.awaiting, gate.approved, gate.rejected
|
||||
- run.schedule-failed, run.cancel-requested
|
||||
message:
|
||||
type: string
|
||||
stepId:
|
||||
type: string
|
||||
metadata:
|
||||
type: object
|
||||
additionalProperties:
|
||||
type: string
|
||||
|
||||
RunArtifact:
|
||||
type: object
|
||||
required: [name, type, status]
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
enum: [file, object]
|
||||
sourcePath:
|
||||
type: string
|
||||
storedPath:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum: [pending, captured, failed]
|
||||
notes:
|
||||
type: string
|
||||
capturedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
expressionJson:
|
||||
type: string
|
||||
description: JSON string of evaluated expression result for object outputs
|
||||
|
||||
ApprovalDecisionRequest:
|
||||
type: object
|
||||
required: [decision, planHash]
|
||||
properties:
|
||||
decision:
|
||||
type: string
|
||||
enum: [approved, rejected, expired]
|
||||
planHash:
|
||||
type: string
|
||||
pattern: '^sha256:[a-f0-9]{64}$'
|
||||
description: Plan hash to verify against (must match current run plan)
|
||||
actorId:
|
||||
type: string
|
||||
description: Identifier of the approver (e.g., user:alice@example.com)
|
||||
summary:
|
||||
type: string
|
||||
description: Optional comment explaining the decision
|
||||
|
||||
ApprovalDecisionResponse:
|
||||
type: object
|
||||
required: [status, resumed]
|
||||
properties:
|
||||
status:
|
||||
type: string
|
||||
enum: [approved, rejected, expired]
|
||||
resumed:
|
||||
type: boolean
|
||||
description: Whether the run was resumed (true for approved decisions)
|
||||
|
||||
PlanErrorResponse:
|
||||
type: object
|
||||
required: [errors]
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
required: [path, message]
|
||||
properties:
|
||||
path:
|
||||
type: string
|
||||
description: JSON path to the error location
|
||||
message:
|
||||
type: string
|
||||
|
||||
OpenApiMetadata:
|
||||
type: object
|
||||
required: [specUrl, version, etag]
|
||||
properties:
|
||||
specUrl:
|
||||
type: string
|
||||
description: URL to fetch the full OpenAPI spec
|
||||
version:
|
||||
type: string
|
||||
description: API version
|
||||
buildVersion:
|
||||
type: string
|
||||
description: Build version identifier
|
||||
etag:
|
||||
type: string
|
||||
description: ETag for caching
|
||||
signature:
|
||||
type: string
|
||||
description: Signature for spec verification
|
||||
|
||||
tags:
|
||||
- name: Simulations
|
||||
description: Task pack simulation without execution
|
||||
- name: Runs
|
||||
description: Pack run lifecycle management
|
||||
- name: Logs
|
||||
description: Run log streaming
|
||||
- name: Artifacts
|
||||
description: Run artifact management
|
||||
- name: Approvals
|
||||
description: Approval gate decisions
|
||||
- name: Metadata
|
||||
description: Service metadata and discovery
|
||||
11
docs/api/vex-consensus-sample.ndjson
Normal file
@@ -0,0 +1,11 @@
event: started
data: {"tenantId":"tenant-default","streamId":"vex-consensus::2025-12-06","status":"running"}

event: consensus_update
data: {"statementId":"vex:tenant-default:jwt-auth:5d1a","state":"under_investigation","justification":"reachable path confirmed","validFrom":"2025-12-06T10:00:00Z","validUntil":"2025-12-20T00:00:00Z","sources":["signals","policy"],"etag":"vex-etag-123"}

event: consensus_update
data: {"statementId":"vex:tenant-default:openssl:7b2c","state":"not_affected","justification":"no call-path and patched","validFrom":"2025-12-05T00:00:00Z","validUntil":"2026-01-01T00:00:00Z","sources":["sbom","scanner"],"etag":"vex-etag-456"}

event: completed
data: {"streamId":"vex-consensus::2025-12-06","status":"succeeded"}
25 docs/api/vex-consensus.md (Normal file)
@@ -0,0 +1,25 @@
# VEX Consensus Stream Contract (draft placeholder)

**Status:** Draft v0.2 · owner-proposed

## Scope
- `/vex/consensus` streaming APIs via Web gateway with tenant RBAC/ABAC, caching, and telemetry.

## Endpoint
- `GET /vex/consensus/stream` — SSE stream of consensus VEX statements per tenant.

Headers: `Authorization: DPoP <token>`, `DPoP: <proof>`, `X-StellaOps-Tenant`, optional `If-None-Match`.
Scopes (proposal): `vex:read` and `vex:consensus`.

Events (draft)
- `started`: `{ tenantId, streamId, status }`
- `consensus_update`: `{ statementId, state, justification, validFrom, validUntil, sources[], etag }`
- `heartbeat`: `{ streamId, ts }`
- `completed`: `{ streamId, status }`
- `failed`: `{ streamId, code, message }`

Rate limits: heartbeats every 30s; idle timeout 90s; backoff via `Retry-After` header on reconnect.

Samples: `docs/api/vex-consensus-sample.ndjson`

Outstanding: finalize scopes, error codes, cache/etag semantics, and add pagination/replay guidance.
|
||||
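For reference, a minimal consumer sketch for this draft stream. The gateway base URL, token/proof acquisition, and the crude line handling are assumptions; only the header names and the endpoint path come from this draft.

```csharp
using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;

public static class VexConsensusStreamClient
{
    public static async Task ReadAsync(
        string gatewayBaseUrl, string dpopToken, string dpopProof, string tenant, CancellationToken ct)
    {
        using var client = new HttpClient
        {
            BaseAddress = new Uri(gatewayBaseUrl),
            Timeout = Timeout.InfiniteTimeSpan // long-lived SSE connection
        };

        using var request = new HttpRequestMessage(HttpMethod.Get, "/vex/consensus/stream");
        request.Headers.Authorization = new AuthenticationHeaderValue("DPoP", dpopToken);
        request.Headers.Add("DPoP", dpopProof);
        request.Headers.Add("X-StellaOps-Tenant", tenant);

        using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, ct);
        response.EnsureSuccessStatusCode();

        using var reader = new StreamReader(await response.Content.ReadAsStreamAsync());
        while (!ct.IsCancellationRequested)
        {
            var line = await reader.ReadLineAsync();
            if (line is null) break;                                  // stream closed by the gateway
            if (line.StartsWith("event:") || line.StartsWith("data:"))
                Console.WriteLine(line);                              // hand off to a real SSE parser here
        }
    }
}
```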
1050 docs/api/vexlens-openapi.yaml (Normal file)
File diff suppressed because it is too large
182 docs/assets/vuln-explorer/console/CAPTURES.md (Normal file)
@@ -0,0 +1,182 @@
|
||||
# Console Asset Captures for Vuln Explorer Documentation
|
||||
|
||||
> **Status:** Ready for capture
|
||||
> **Last Updated:** 2025-12-06
|
||||
> **Owner:** Console Guild
|
||||
> **Hash Manifest:** See SHA256SUMS after capture
|
||||
|
||||
## Capture Instructions
|
||||
|
||||
Run the console app locally and capture each screen:
|
||||
|
||||
```bash
|
||||
# Start the dev environment
|
||||
docker compose -f deploy/compose/docker-compose.dev.yaml up -d
|
||||
|
||||
# Access console at https://localhost:8443
|
||||
# Log in with dev credentials
|
||||
# Navigate to each section below and capture
|
||||
```
|
||||
|
||||
## Required Captures
|
||||
|
||||
### 1. Dashboard Overview
|
||||
|
||||
**File:** `dashboard-overview.png`
|
||||
**Description:** Main dashboard showing vulnerability counts, risk scores, and recent activity.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The dashboard provides:
|
||||
- Total vulnerability count by severity (Critical, High, Medium, Low)
|
||||
- Risk score trend over time
|
||||
- Top affected components
|
||||
- Recent scan activity
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 2. Vulnerability Explorer List
|
||||
|
||||
**File:** `vuln-explorer-list.png`
|
||||
**Description:** Vulnerability list view with filters and sorting.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The vulnerability list shows:
|
||||
- CVE ID, severity, CVSS score
|
||||
- Affected package and version
|
||||
- Fix availability status
|
||||
- VEX status (affected, not_affected, fixed, under_investigation)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 3. Vulnerability Detail View
|
||||
|
||||
**File:** `vuln-detail.png`
|
||||
**Description:** Single vulnerability detail page with full context.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The detail view includes:
|
||||
- Full vulnerability description
|
||||
- CVSS vector breakdown
|
||||
- Affected components
|
||||
- Reachability analysis
|
||||
- VEX statements
|
||||
- Remediation guidance
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 4. Findings Ledger Timeline
|
||||
|
||||
**File:** `findings-timeline.png`
|
||||
**Description:** Timeline view of vulnerability findings and state changes.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The timeline shows:
|
||||
- Finding discovery events
|
||||
- Status transitions
|
||||
- Evidence snapshots
|
||||
- Attestation links
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 5. Risk Score Panel
|
||||
|
||||
**File:** `risk-score-panel.png`
|
||||
**Description:** Risk score breakdown with contributing factors.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The risk panel displays:
|
||||
- Overall risk score (0-100)
|
||||
- Factor breakdown (severity, exploitability, asset criticality)
|
||||
- Score history
|
||||
- Policy compliance status
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 6. VEX Consensus View
|
||||
|
||||
**File:** `vex-consensus.png`
|
||||
**Description:** VEX consensus display showing multiple issuer statements.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The VEX consensus view shows:
|
||||
- Aggregated status from multiple issuers
|
||||
- Issuer trust levels
|
||||
- Statement timestamps
|
||||
- Rationale summaries
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 7. Policy Studio Editor
|
||||
|
||||
**File:** `policy-studio-editor.png`
|
||||
**Description:** Policy Studio with Monaco editor and rule builder.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The Policy Studio includes:
|
||||
- Monaco editor with StellaOps DSL highlighting
|
||||
- Rule builder sidebar
|
||||
- Simulation panel
|
||||
- Lint/compile feedback
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### 8. Air-Gap Status Panel
|
||||
|
||||
**File:** `airgap-status.png`
|
||||
**Description:** Air-gap mode status and bundle information.
|
||||
|
||||
```markdown
|
||||

|
||||
|
||||
The air-gap panel shows:
|
||||
- Sealed mode status
|
||||
- Last advisory update timestamp
|
||||
- Bundle version
|
||||
- Time anchor validity
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## After Capture
|
||||
|
||||
1. Place captured images in this directory
|
||||
2. Generate hashes:
|
||||
```bash
|
||||
sha256sum *.png > SHA256SUMS
|
||||
```
|
||||
3. Update `docs/assets/vuln-explorer/SHA256SUMS` with new entries
|
||||
4. Mark DOCS-CONSOLE-OBS-52-001 as DONE in sprint file
|
||||
|
||||
## Sample SHA256SUMS Entry
|
||||
|
||||
```
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 dashboard-overview.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 vuln-explorer-list.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 vuln-detail.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 findings-timeline.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 risk-score-panel.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 vex-consensus.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 policy-studio-editor.png
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 airgap-status.png
|
||||
```
|
||||
369 docs/contracts/authority-crypto-provider.md (Normal file)
@@ -0,0 +1,369 @@
|
||||
# Authority Crypto Provider Contract
|
||||
|
||||
> **Status:** APPROVED
|
||||
> **Version:** 1.0.0
|
||||
> **Last Updated:** 2025-12-06
|
||||
> **Owner:** Authority Core Guild
|
||||
> **Unblocks:** AUTH-CRYPTO-90-001, SEC-CRYPTO-90-014, SCANNER-CRYPTO-90-001, ATTESTOR-CRYPTO-90-001
|
||||
|
||||
## Overview
|
||||
|
||||
This contract defines the Authority signing provider interface for StellaOps, enabling pluggable cryptographic backends including:
|
||||
- **Software keys** (default) — ECDSA P-256/P-384, RSA, EdDSA
|
||||
- **HSM integration** — PKCS#11, Cloud KMS (AWS, GCP, Azure)
|
||||
- **Regional compliance** — CryptoPro GOST (R1), SM2/SM3 (CN), eIDAS (EU), FIPS 140-2
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────────────────┐
|
||||
│ Authority Crypto Provider │
|
||||
├─────────────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────────────────────────────────────────────────────────────────┐│
|
||||
│ │ ISigningProvider Interface ││
|
||||
│ │ ││
|
||||
│ │ + Sign(data: byte[], keyId: string) → SignatureResult ││
|
||||
│ │ + Verify(data: byte[], signature: byte[], keyId: string) → bool ││
|
||||
│ │ + GetPublicKey(keyId: string) → PublicKeyInfo ││
|
||||
│ │ + ListKeys(filter: KeyFilter) → KeyInfo[] ││
|
||||
│ │ + CreateKey(spec: KeySpec) → KeyInfo ││
|
||||
│ │ + RotateKey(keyId: string) → KeyInfo ││
|
||||
│ │ + ExportJWKS(keyIds: string[]) → JWKS ││
|
||||
│ └─────────────────────────────────────────────────────────────────────────┘│
|
||||
│ │ │
|
||||
│ ┌────────────────────┼────────────────────┐ │
|
||||
│ ▼ ▼ ▼ │
|
||||
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
|
||||
│ │ Software │ │ PKCS#11 │ │ Cloud KMS │ │
|
||||
│ │ Provider │ │ Provider │ │ Provider │ │
|
||||
│ │ │ │ │ │ │ │
|
||||
│ │ • File keys │ │ • HSM │ │ • AWS KMS │ │
|
||||
│ │ • Memory │ │ • SmartCard │ │ • GCP KMS │ │
|
||||
│ │ • Vault │ │ • CryptoPro │ │ • Azure KV │ │
|
||||
│ └──────────────┘ └──────────────┘ └──────────────┘ │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## 1. ISigningProvider Interface
|
||||
|
||||
### 1.1 Core Methods
|
||||
|
||||
```csharp
|
||||
/// <summary>
|
||||
/// Pluggable cryptographic signing provider for Authority service.
|
||||
/// </summary>
|
||||
public interface ISigningProvider
|
||||
{
|
||||
/// <summary>Provider identifier (e.g., "software", "pkcs11", "aws-kms")</summary>
|
||||
string ProviderId { get; }
|
||||
|
||||
/// <summary>Supported algorithms by this provider</summary>
|
||||
IReadOnlyList<string> SupportedAlgorithms { get; }
|
||||
|
||||
/// <summary>Sign data with the specified key</summary>
|
||||
Task<SignatureResult> SignAsync(
|
||||
byte[] data,
|
||||
string keyId,
|
||||
SigningOptions? options = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Verify a signature</summary>
|
||||
Task<bool> VerifyAsync(
|
||||
byte[] data,
|
||||
byte[] signature,
|
||||
string keyId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Get public key information</summary>
|
||||
Task<PublicKeyInfo> GetPublicKeyAsync(
|
||||
string keyId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>List available keys</summary>
|
||||
Task<IReadOnlyList<KeyInfo>> ListKeysAsync(
|
||||
KeyFilter? filter = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Create a new key pair</summary>
|
||||
Task<KeyInfo> CreateKeyAsync(
|
||||
KeySpec spec,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Rotate a key (create new version)</summary>
|
||||
Task<KeyInfo> RotateKeyAsync(
|
||||
string keyId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Export keys as JWKS for distributed verification</summary>
|
||||
Task<JsonWebKeySet> ExportJwksAsync(
|
||||
IEnumerable<string>? keyIds = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>Import a public key for verification</summary>
|
||||
Task<KeyInfo> ImportPublicKeyAsync(
|
||||
byte[] keyData,
|
||||
string format,
|
||||
KeyMetadata? metadata = null,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
```
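A minimal sketch of how a caller might exercise the interface above. The key purpose, algorithm choice, and error handling are illustrative assumptions; the method signatures come from this contract.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

public static class SigningExample
{
    public static async Task<SignatureResult> SignPayloadAsync(
        ISigningProvider provider, byte[] payload, CancellationToken ct)
    {
        // Create a signing key for attestation purposes (in practice, keys are usually pre-provisioned).
        var key = await provider.CreateKeyAsync(
            new KeySpec(Algorithm: "ES256", Purpose: "attestation"), ct);

        // Sign the payload and immediately verify the result as a sanity check.
        var result = await provider.SignAsync(payload, key.KeyId, ct: ct);
        var ok = await provider.VerifyAsync(payload, result.Signature, key.KeyId, ct);
        if (!ok)
        {
            throw new InvalidOperationException($"Self-verification failed for key {key.KeyId}");
        }

        return result;
    }
}
```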
|
||||
|
||||
### 1.2 Supporting Types
|
||||
|
||||
```csharp
|
||||
public record SignatureResult(
|
||||
byte[] Signature,
|
||||
string Algorithm,
|
||||
string KeyId,
|
||||
string? KeyVersion,
|
||||
DateTimeOffset Timestamp);
|
||||
|
||||
public record SigningOptions(
|
||||
string? Algorithm = null,
|
||||
bool IncludeTimestamp = true,
|
||||
string? Nonce = null);
|
||||
|
||||
public record PublicKeyInfo(
|
||||
string KeyId,
|
||||
string Algorithm,
|
||||
byte[] PublicKey,
|
||||
string Format, // "PEM", "DER", "JWK"
|
||||
string? Fingerprint,
|
||||
DateTimeOffset? ExpiresAt);
|
||||
|
||||
public record KeyInfo(
|
||||
string KeyId,
|
||||
string Algorithm,
|
||||
KeyState State,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset? ExpiresAt,
|
||||
string? CurrentVersion,
|
||||
IReadOnlyDictionary<string, string>? Metadata);
|
||||
|
||||
public enum KeyState
|
||||
{
|
||||
Active,
|
||||
Disabled,
|
||||
PendingDeletion,
|
||||
Deleted
|
||||
}
|
||||
|
||||
public record KeySpec(
|
||||
string Algorithm,
|
||||
int? KeySize = null,
|
||||
string? Purpose = null, // "signing", "attestation", "authority"
|
||||
IReadOnlyDictionary<string, string>? Metadata = null,
|
||||
DateTimeOffset? ExpiresAt = null);
|
||||
|
||||
public record KeyFilter(
|
||||
string? Purpose = null,
|
||||
KeyState? State = null,
|
||||
string? Algorithm = null);
|
||||
```
|
||||
|
||||
## 2. Supported Algorithms
|
||||
|
||||
### 2.1 Algorithm Registry
|
||||
|
||||
| Algorithm | OID | Key Size | Compliance | Provider Support |
|
||||
|-----------|-----|----------|------------|------------------|
|
||||
| **ES256** | 1.2.840.10045.4.3.2 | P-256 | FIPS, eIDAS | All |
|
||||
| **ES384** | 1.2.840.10045.4.3.3 | P-384 | FIPS, eIDAS | All |
|
||||
| **RS256** | 1.2.840.113549.1.1.11 | 2048+ | FIPS, eIDAS | All |
|
||||
| **RS384** | 1.2.840.113549.1.1.12 | 2048+ | FIPS, eIDAS | All |
|
||||
| **EdDSA** | 1.3.101.112 | Ed25519 | — | Software, some HSM |
|
||||
| **PS256** | 1.2.840.113549.1.1.10 | 2048+ | FIPS | All |
|
||||
| **GOST R 34.10-2012** | 1.2.643.7.1.1.1.1 | 256/512 | R1 | PKCS#11 (CryptoPro) |
|
||||
| **SM2** | 1.2.156.10197.1.301 | 256 | CN | PKCS#11 |
|
||||
|
||||
### 2.2 Default Configuration
|
||||
|
||||
```yaml
|
||||
# etc/authority.yaml
|
||||
crypto:
|
||||
provider: software # or: pkcs11, aws-kms, gcp-kms, azure-keyvault
|
||||
|
||||
software:
|
||||
keys_path: /var/lib/stellaops/keys
|
||||
default_algorithm: ES256
|
||||
|
||||
pkcs11:
|
||||
library_path: /usr/lib/libpkcs11.so
|
||||
slot_id: 0
|
||||
pin_env: AUTHORITY_HSM_PIN
|
||||
# For CryptoPro:
|
||||
# library_path: /opt/cprocsp/lib/amd64/libcapi20.so
|
||||
|
||||
aws_kms:
|
||||
region: us-east-1
|
||||
key_alias_prefix: stellaops/
|
||||
|
||||
azure_keyvault:
|
||||
vault_url: https://stellaops.vault.azure.net/
|
||||
|
||||
gcp_kms:
|
||||
project: stellaops-prod
|
||||
location: global
|
||||
key_ring: attestation-keys
|
||||
|
||||
# Regional compliance overrides
|
||||
compliance:
|
||||
ru:
|
||||
provider: pkcs11
|
||||
algorithms: [GOST-R-34.10-2012-256, GOST-R-34.10-2012-512]
|
||||
library_path: /opt/cprocsp/lib/amd64/libcapi20.so
|
||||
cn:
|
||||
provider: pkcs11
|
||||
algorithms: [SM2]
|
||||
```
|
||||
|
||||
## 3. JWKS Export Requirements
|
||||
|
||||
### 3.1 JWKS Endpoint
|
||||
|
||||
The Authority service MUST expose a JWKS endpoint for distributed verification:
|
||||
|
||||
```
|
||||
GET /.well-known/jwks.json
|
||||
```
|
||||
|
||||
Response format:
|
||||
|
||||
```json
|
||||
{
|
||||
"keys": [
|
||||
{
|
||||
"kty": "EC",
|
||||
"crv": "P-256",
|
||||
"x": "base64url-encoded-x",
|
||||
"y": "base64url-encoded-y",
|
||||
"kid": "attestation-key-001",
|
||||
"alg": "ES256",
|
||||
"use": "sig",
|
||||
"key_ops": ["verify"],
|
||||
"x5t#S256": "sha256-fingerprint"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 3.2 Key Rotation
|
||||
|
||||
When keys are rotated:
|
||||
1. New key becomes `Active`, old key becomes `Disabled` (verification-only)
|
||||
2. JWKS includes both keys during transition period
|
||||
3. Old key removed after `rotation_grace_period` (default: 7 days)
|
||||
4. All consuming services refresh JWKS on schedule or via webhook
|
||||
|
||||
### 3.3 Key Discovery Flow
|
||||
|
||||
```
|
||||
┌──────────┐ ┌──────────┐ ┌──────────┐
|
||||
│ Scanner │ │ Authority │ │ Attestor │
|
||||
└────┬─────┘ └────┬─────┘ └────┬─────┘
|
||||
│ │ │
|
||||
│ GET /jwks.json│ │
|
||||
│───────────────>│ │
|
||||
│<───────────────│ │
|
||||
│ JWKS │ │
|
||||
│ │ │
|
||||
│ Sign(SBOM) │ │
|
||||
│───────────────>│ │
|
||||
│<───────────────│ │
|
||||
│ Signature │ │
|
||||
│ │ │
|
||||
│ │ GET /jwks.json │
|
||||
│ │<────────────────│
|
||||
│ │────────────────>│
|
||||
│ │ JWKS │
|
||||
│ │ │
|
||||
│ │ Verify(SBOM) │
|
||||
│ │<────────────────│
|
||||
│ │ ✓ Valid │
|
||||
```
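A minimal verification sketch for the consumer side of this flow, assuming an ES256 key and the JWKS shape shown in section 3.1. How the JWKS is fetched and cached is an assumption; only the JWK fields are taken from this contract.

```csharp
using System;
using System.Security.Cryptography;
using System.Text.Json;

public static class JwksVerifier
{
    public static bool VerifyEs256(string jwksJson, string kid, byte[] data, byte[] signature)
    {
        using var doc = JsonDocument.Parse(jwksJson);
        foreach (var key in doc.RootElement.GetProperty("keys").EnumerateArray())
        {
            if (key.GetProperty("kid").GetString() != kid)
            {
                continue;
            }

            // Rebuild the P-256 public key from the JWK "x"/"y" coordinates.
            var parameters = new ECParameters
            {
                Curve = ECCurve.NamedCurves.nistP256,
                Q = new ECPoint
                {
                    X = FromBase64Url(key.GetProperty("x").GetString()!),
                    Y = FromBase64Url(key.GetProperty("y").GetString()!),
                },
            };

            using var ecdsa = ECDsa.Create(parameters);
            // ES256 = ECDSA over P-256 with SHA-256.
            return ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256);
        }

        return false; // kid not present in the JWKS
    }

    private static byte[] FromBase64Url(string value)
    {
        var s = value.Replace('-', '+').Replace('_', '/');
        return Convert.FromBase64String(s.PadRight(s.Length + (4 - s.Length % 4) % 4, '='));
    }
}
```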
|
||||
|
||||
## 4. Provider Registration
|
||||
|
||||
### 4.1 Service Registration
|
||||
|
||||
```csharp
|
||||
// Program.cs
|
||||
services.AddAuthoritySigningProvider(options =>
|
||||
{
|
||||
options.Provider = configuration["Crypto:Provider"];
|
||||
options.Configuration = configuration.GetSection("Crypto");
|
||||
});
|
||||
|
||||
// Extension method
|
||||
public static IServiceCollection AddAuthoritySigningProvider(
|
||||
this IServiceCollection services,
|
||||
Action<CryptoProviderOptions> configure)
|
||||
{
|
||||
var options = new CryptoProviderOptions();
|
||||
configure(options);
|
||||
|
||||
return options.Provider switch
|
||||
{
|
||||
"software" => services.AddSingleton<ISigningProvider, SoftwareSigningProvider>(),
|
||||
"pkcs11" => services.AddSingleton<ISigningProvider, Pkcs11SigningProvider>(),
|
||||
"aws-kms" => services.AddSingleton<ISigningProvider, AwsKmsSigningProvider>(),
|
||||
"gcp-kms" => services.AddSingleton<ISigningProvider, GcpKmsSigningProvider>(),
|
||||
"azure-keyvault" => services.AddSingleton<ISigningProvider, AzureKeyVaultSigningProvider>(),
|
||||
_ => throw new ArgumentException($"Unknown provider: {options.Provider}")
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 4.2 Regional Provider Registry
|
||||
|
||||
For multi-region deployments with compliance requirements:
|
||||
|
||||
```yaml
|
||||
# Regional key registry
|
||||
key_registry:
|
||||
attestation-sbom:
|
||||
default:
|
||||
key_id: "stellaops/attestation-sbom-001"
|
||||
algorithm: ES256
|
||||
provider: aws-kms
|
||||
ru:
|
||||
key_id: "ru/attestation-sbom-gost"
|
||||
algorithm: GOST-R-34.10-2012-256
|
||||
provider: pkcs11
|
||||
cn:
|
||||
key_id: "cn/attestation-sbom-sm2"
|
||||
algorithm: SM2
|
||||
provider: pkcs11
|
||||
```
|
||||
|
||||
## 5. Error Codes
|
||||
|
||||
| Code | Name | Description |
|
||||
|------|------|-------------|
|
||||
| `CRYPTO_001` | `KEY_NOT_FOUND` | Requested key does not exist |
|
||||
| `CRYPTO_002` | `KEY_DISABLED` | Key is disabled and cannot sign |
|
||||
| `CRYPTO_003` | `ALGORITHM_UNSUPPORTED` | Algorithm not supported by provider |
|
||||
| `CRYPTO_004` | `HSM_UNAVAILABLE` | HSM/PKCS#11 device not available |
|
||||
| `CRYPTO_005` | `SIGNATURE_FAILED` | Signing operation failed |
|
||||
| `CRYPTO_006` | `VERIFICATION_FAILED` | Signature verification failed |
|
||||
| `CRYPTO_007` | `KEY_EXPIRED` | Key has expired |
|
||||
| `CRYPTO_008` | `COMPLIANCE_VIOLATION` | Algorithm not allowed by compliance profile |
|
||||
|
||||
## 6. Tasks Unblocked
|
||||
|
||||
This contract unblocks:
|
||||
|
||||
| Task ID | Description | Status |
|
||||
|---------|-------------|--------|
|
||||
| AUTH-CRYPTO-90-001 | Authority signing provider contract | ✅ UNBLOCKED |
|
||||
| SEC-CRYPTO-90-014 | Security Guild crypto integration | ✅ UNBLOCKED |
|
||||
| SCANNER-CRYPTO-90-001 | Scanner SBOM signing | ✅ UNBLOCKED |
|
||||
| ATTESTOR-CRYPTO-90-001 | Attestor DSSE signing | ✅ UNBLOCKED |
|
||||
|
||||
## 7. Changelog
|
||||
|
||||
| Date | Version | Change |
|
||||
|------|---------|--------|
|
||||
| 2025-12-06 | 1.0.0 | Initial contract with interface, algorithms, JWKS, regional support |
|
||||
72 docs/contracts/authority-routing-decision.md (Normal file)
@@ -0,0 +1,72 @@
|
||||
# Authority Routing Decision
|
||||
|
||||
**Decision ID:** DECISION-AUTH-001
|
||||
**Status:** DEFAULT-APPROVED
|
||||
**Effective Date:** 2025-12-06
|
||||
**48h Window Started:** 2025-12-06T00:00:00Z
|
||||
|
||||
## Decision
|
||||
|
||||
Authority claim routing uses **RBAC-standard routing** patterns aligned with existing `docs/security/scopes-and-roles.md`.
|
||||
|
||||
## Rationale
|
||||
|
||||
1. RBAC patterns are well-established and auditable
|
||||
2. Consistent with Authority module implementation
|
||||
3. Supports multi-tenancy requirements
|
||||
4. Compatible with external IdP integration (OIDC, SAML)
|
||||
|
||||
## Routing Matrix
|
||||
|
||||
| Claim | Source | Routing | Scope |
|
||||
|-------|--------|---------|-------|
|
||||
| `tenant_id` | Token/Session | Per-request | All endpoints |
|
||||
| `project_id` | Token/Header | Per-request | Project-scoped |
|
||||
| `user_id` | Token | Per-request | User-scoped |
|
||||
| `role` | Token claims | Authorization | Role-based access |
|
||||
| `scope` | Token claims | Authorization | Fine-grained access |
|
||||
|
||||
## Claim Priority
|
||||
|
||||
When claims conflict:
|
||||
1. Explicit header overrides token claim (if authorized)
|
||||
2. Token claim is authoritative for identity
|
||||
3. Session context provides defaults
|
||||
|
||||
## Implementation Pattern
|
||||
|
||||
```csharp
|
||||
// Authority claim resolution. Header overrides apply only when the caller is
// authorized to act across tenants; otherwise the token claim is authoritative.
public class ClaimResolver : IClaimResolver
{
    public AuthorityContext Resolve(HttpContext context)
    {
        var tenantId = context.Request.Headers.TryGetValue("X-Tenant-Id", out var tenantHeader)
            ? tenantHeader.ToString()
            : context.User.FindFirst("tenant_id")?.Value;

        var projectId = context.Request.Headers.TryGetValue("X-Project-Id", out var projectHeader)
            ? projectHeader.ToString()
            : context.User.FindFirst("project_id")?.Value;

        return new AuthorityContext(tenantId, projectId);
    }
}
|
||||
```
|
||||
|
||||
## Impact
|
||||
|
||||
- Tasks unblocked: ~5
|
||||
- Sprint files affected: SPRINT_0303
|
||||
|
||||
## Reversibility
|
||||
|
||||
To change routing patterns:
|
||||
1. Update `docs/security/scopes-and-roles.md`
|
||||
2. Get Authority Guild + Security Guild sign-off
|
||||
3. Update `AuthorityClaimsProvider` implementations
|
||||
4. Migration path for existing integrations
|
||||
|
||||
## References
|
||||
|
||||
- [Scopes and Roles](../security/scopes-and-roles.md)
|
||||
- [Auth Scopes](../security/auth-scopes.md)
|
||||
- [Tenancy Overview](../security/tenancy-overview.md)
|
||||
157 docs/contracts/cas-infrastructure.md (Normal file)
@@ -0,0 +1,157 @@
|
||||
# CAS (Content Addressable Storage) Infrastructure Contract
|
||||
|
||||
> **Status:** APPROVED
|
||||
> **Version:** 1.0.0
|
||||
> **Last Updated:** 2025-12-06
|
||||
> **Owner:** Platform Storage Guild
|
||||
|
||||
## Overview
|
||||
|
||||
This contract defines the Content Addressable Storage (CAS) infrastructure for StellaOps, using RustFS as the S3-compatible storage backend. The design provides:
|
||||
|
||||
- **Content-addressed storage** — Objects addressed by SHA-256 hash (see the key-derivation sketch after this list)
|
||||
- **Immutable evidence storage** — Write-once, never-delete for audit trails
|
||||
- **Lifecycle management** — Automated retention policy enforcement
|
||||
- **Service account isolation** — Fine-grained access control per service
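As a concrete illustration of the content-addressing rule above, the sketch below derives a CAS object key from a blob. The `sha256:<hex>` key format follows this contract; any bucket or prefix layout around it is an assumption.

```csharp
using System;
using System.Security.Cryptography;

public static class CasAddressing
{
    // Derive the content-addressed key for a blob: "sha256:" + lowercase hex digest.
    public static string ComputeKey(byte[] content)
    {
        var digest = SHA256.HashData(content);
        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
    }
}

// Example: a 0-byte object hashes to
// sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
```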
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────────────────┐
|
||||
│ CAS Infrastructure │
|
||||
├─────────────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │
|
||||
│ │ rustfs-cas │ │ rustfs-evidence │ │rustfs-attestation│ │
|
||||
│ │ (mutable) │ │ (immutable) │ │ (immutable) │ │
|
||||
│ │ │ │ │ │ │ │
|
||||
│ │ • scanner- │ │ • evidence- │ │ • attestations │ │
|
||||
│ │ artifacts │ │ bundles │ │ • dsse-envelopes│ │
|
||||
│ │ • surface-cache │ │ • merkle-roots │ │ • rekor-receipts│ │
|
||||
│ │ • runtime-facts │ │ • hash-chains │ │ │ │
|
||||
│ │ • signals-data │ │ │ │ │ │
|
||||
│ │ • provenance- │ │ │ │ │ │
|
||||
│ │ feed │ │ │ │ │ │
|
||||
│ │ • replay- │ │ │ │ │ │
|
||||
│ │ bundles │ │ │ │ │ │
|
||||
│ └────────┬────────┘ └────────┬────────┘ └────────┬────────┘ │
|
||||
│ │ │ │ │
|
||||
│ └────────────────────┼────────────────────┘ │
|
||||
│ │ │
|
||||
│ ┌───────────┴───────────┐ │
|
||||
│ │ cas-lifecycle │ │
|
||||
│ │ (retention manager) │ │
|
||||
│ └───────────────────────┘ │
|
||||
└─────────────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Retention Policies
|
||||
|
||||
Aligned with best-in-class on-premise vulnerability scanners:
|
||||
|
||||
| Data Type | Retention | Rationale | Scanner Comparison |
|
||||
|-----------|-----------|-----------|-------------------|
|
||||
| Vulnerability DB | 7 days | Fresh advisories required | Trivy: 7d, Grype: 5d |
|
||||
| SBOM artifacts | 365 days | Audit compliance (SOC2, ISO27001) | Anchore: 365d |
|
||||
| Scan results | 90 days | Common compliance window | Snyk: 90d enterprise |
|
||||
| Evidence bundles | Indefinite | Immutable audit trail | N/A (StellaOps unique) |
|
||||
| Attestations | Indefinite | Signed, verifiable | N/A (StellaOps unique) |
|
||||
| Temp artifacts | 1 day | Work-in-progress cleanup | Standard practice |
|
||||
|
||||
## Access Control Matrix
|
||||
|
||||
### Service Accounts
|
||||
|
||||
| Service | Buckets | Permissions | Purpose |
|
||||
|---------|---------|-------------|---------|
|
||||
| `scanner` | scanner-artifacts, surface-cache, runtime-facts | read, write | Scan job artifacts, cache |
|
||||
| `signals` | runtime-facts, signals-data, provenance-feed | read, write | Runtime signal ingestion |
|
||||
| `replay` | replay-bundles, inputs-lock | read, write | Deterministic replay |
|
||||
| `ledger` | evidence-bundles, merkle-roots, hash-chains | read, write | Evidence ledger writes |
|
||||
| `exporter` | evidence-bundles | read | Export center reads |
|
||||
| `attestor` | attestations, dsse-envelopes, rekor-receipts | read, write | Attestation storage |
|
||||
| `verifier` | attestations, dsse-envelopes, rekor-receipts | read | Verification reads |
|
||||
| `readonly` | * | read | Global audit access |
|
||||
|
||||
### Bucket Classification
|
||||
|
||||
| Bucket | Storage Type | Lifecycle | Access Pattern |
|
||||
|--------|--------------|-----------|----------------|
|
||||
| scanner-artifacts | rustfs-cas | 90 days | Write-heavy |
|
||||
| surface-cache | rustfs-cas | 7 days | Read-heavy, cache |
|
||||
| runtime-facts | rustfs-cas | 90 days | Write-heavy |
|
||||
| signals-data | rustfs-cas | 90 days | Write-heavy |
|
||||
| provenance-feed | rustfs-cas | 90 days | Append-only |
|
||||
| replay-bundles | rustfs-cas | 365 days | Read-heavy |
|
||||
| inputs-lock | rustfs-cas | 365 days | Write-once |
|
||||
| evidence-bundles | rustfs-evidence | Indefinite | Write-once |
|
||||
| merkle-roots | rustfs-evidence | Indefinite | Append-only |
|
||||
| hash-chains | rustfs-evidence | Indefinite | Append-only |
|
||||
| attestations | rustfs-attestation | Indefinite | Write-once |
|
||||
| dsse-envelopes | rustfs-attestation | Indefinite | Write-once |
|
||||
| rekor-receipts | rustfs-attestation | Indefinite | Write-once |
|
||||
|
||||
## Docker Compose Integration
|
||||
|
||||
```yaml
|
||||
# Use with existing compose files
|
||||
docker compose -f docker-compose.cas.yaml -f docker-compose.dev.yaml up -d
|
||||
|
||||
# Standalone CAS
|
||||
docker compose -f docker-compose.cas.yaml up -d
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
See `deploy/compose/env/cas.env.example` for full configuration.
|
||||
|
||||
Key variables:
|
||||
- `RUSTFS_*_API_KEY` — Admin API keys (CHANGE IN PRODUCTION)
|
||||
- `RUSTFS_*_KEY` — Service account keys (GENERATE UNIQUE)
|
||||
- `CAS_*_PATH` — Data directory paths
|
||||
- `CAS_RETENTION_*_DAYS` — Retention policy overrides
|
||||
|
||||
## Endpoints
|
||||
|
||||
| Service | Port | Path | Purpose |
|
||||
|---------|------|------|---------|
|
||||
| rustfs-cas | 8180 | /api/v1 | Mutable CAS storage |
|
||||
| rustfs-evidence | 8181 | /api/v1 | Immutable evidence |
|
||||
| rustfs-attestation | 8182 | /api/v1 | Immutable attestations |
|
||||
|
||||
## Health Checks
|
||||
|
||||
All RustFS instances expose `/health` endpoint:
|
||||
|
||||
```bash
|
||||
curl http://localhost:8180/health # CAS
|
||||
curl http://localhost:8181/health # Evidence
|
||||
curl http://localhost:8182/health # Attestations
|
||||
```
|
||||
|
||||
## Migration from MinIO
|
||||
|
||||
For existing deployments using MinIO:
|
||||
|
||||
1. Deploy CAS infrastructure alongside MinIO
|
||||
2. Configure scanner/signals services with `RUSTFS_*` endpoints
|
||||
3. Migrate data using `stella cas migrate --source minio --target rustfs`
|
||||
4. Verify data integrity with `stella cas verify --bucket <name>`
|
||||
5. Update service configurations to use RustFS
|
||||
6. Decommission MinIO after validation
|
||||
|
||||
## Tasks Unblocked
|
||||
|
||||
This contract unblocks the CAS approval gate (PREP-SIGNALS-24-002):
|
||||
|
||||
- **24-002:** Surface cache availability → UNBLOCKED
|
||||
- **24-003:** Runtime facts ingestion → UNBLOCKED
|
||||
- **24-004:** Authority scopes → UNBLOCKED
|
||||
- **24-005:** Scoring outputs → UNBLOCKED
|
||||
- **GRAPH-INDEX-28-007 through 28-010** → UNBLOCKED
|
||||
|
||||
## Changelog
|
||||
|
||||
| Date | Version | Change |
|
||||
|------|---------|--------|
|
||||
| 2025-12-06 | 1.0.0 | Initial contract with RustFS, retention policies, access controls |
|
||||
56 docs/contracts/dossier-sequencing-decision.md (Normal file)
@@ -0,0 +1,56 @@
|
||||
# Dossier Sequencing Decision
|
||||
|
||||
**Decision ID:** DECISION-DOCS-001
|
||||
**Status:** DEFAULT-APPROVED
|
||||
**Effective Date:** 2025-12-06
|
||||
**48h Window Started:** 2025-12-06T00:00:00Z
|
||||
|
||||
## Decision
|
||||
|
||||
Module dossiers (Md.II through Md.X) are **sequenced after Md.I completion**, following the dependency chain in `docs/implplan/SPRINT_0300_*.md` files.
|
||||
|
||||
## Rationale
|
||||
|
||||
1. Md.I establishes baseline architecture documentation structure
|
||||
2. Subsequent modules depend on patterns defined in Md.I
|
||||
3. Sequential ordering prevents documentation conflicts
|
||||
4. Allows parallel work within each dossier batch
|
||||
|
||||
## Sequencing Order
|
||||
|
||||
| Phase | Dossiers | Dependencies | Sprint |
|
||||
|-------|----------|--------------|--------|
|
||||
| Md.I | Concelier, Scanner, Authority | None | 0300 |
|
||||
| Md.II | Attestor, Signer, Evidence | Md.I complete | 0301 |
|
||||
| Md.III | VEX Lens, Excititor | Md.II complete | 0302 |
|
||||
| Md.IV | Policy, Risk | Md.II complete | 0303 |
|
||||
| Md.V | Scheduler, TaskRunner | Md.IV complete | 0304 |
|
||||
| Md.VI | Notify, Telemetry | Md.V complete | 0305 |
|
||||
| Md.VII | CLI, Web | Md.VI complete | 0306 |
|
||||
| Md.VIII | AirGap, Mirror | Md.VII complete | 0307 |
|
||||
| Md.IX | Zastava, Signals | Md.VIII complete | 0308 |
|
||||
| Md.X | Integration, E2E | All above | 0309 |
|
||||
|
||||
## Parallelism Rules
|
||||
|
||||
Within each phase, dossiers MAY be worked in parallel if:
|
||||
1. No cross-dependencies within the phase
|
||||
2. Shared components are stable
|
||||
3. Different owners/guilds assigned
|
||||
|
||||
## Impact
|
||||
|
||||
- Tasks unblocked: ~10
|
||||
- Sprint files affected: SPRINT_0300, SPRINT_0301, SPRINT_0302
|
||||
|
||||
## Reversibility
|
||||
|
||||
To change sequencing:
|
||||
1. Propose new order in `docs/process/dossier-sequencing.md`
|
||||
2. Get Docs Guild sign-off
|
||||
3. Update all affected SPRINT_03xx files
|
||||
|
||||
## References
|
||||
|
||||
- [SPRINT_0300 Documentation](../implplan/SPRINT_0300_0001_0001_documentation_i.md)
|
||||
- [Module Dossier Template](../modules/template/)
|
||||
263 docs/contracts/rate-limit-design.md (Normal file)
@@ -0,0 +1,263 @@
|
||||
# Rate Limit Design Contract
|
||||
|
||||
**Contract ID:** CONTRACT-RATE-LIMIT-001
|
||||
**Status:** APPROVED
|
||||
**Effective Date:** 2025-12-07
|
||||
**Owners:** Platform Reliability Guild, Gateway Guild
|
||||
|
||||
## Overview
|
||||
|
||||
This contract defines the rate limiting design for StellaOps API endpoints, ensuring fair resource allocation, protection against abuse, and consistent client experience across all services.
|
||||
|
||||
## Rate Limiting Strategy
|
||||
|
||||
### Tiered Rate Limits
|
||||
|
||||
| Tier | Requests/Minute | Requests/Hour | Burst Limit | Typical Use Case |
|
||||
|------|-----------------|---------------|-------------|------------------|
|
||||
| **Free** | 60 | 1,000 | 10 | Evaluation, small projects |
|
||||
| **Standard** | 300 | 10,000 | 50 | Production workloads |
|
||||
| **Enterprise** | 1,000 | 50,000 | 200 | Large-scale deployments |
|
||||
| **Unlimited** | No limit | No limit | No limit | Internal services, VIP |
|
||||
|
||||
### Per-Endpoint Rate Limits
|
||||
|
||||
Some endpoints have additional rate limits based on resource intensity:
|
||||
|
||||
| Endpoint Category | Rate Limit | Rationale |
|
||||
|-------------------|------------|-----------|
|
||||
| `/api/risk/simulation/*` | 30/min | CPU-intensive simulation |
|
||||
| `/api/risk/simulation/studio/*` | 10/min | Full breakdown analysis |
|
||||
| `/system/airgap/seal` | 5/hour | Critical state change |
|
||||
| `/policy/decisions` | 100/min | Lightweight evaluation |
|
||||
| `/api/policy/packs/*/bundle` | 10/min | Bundle compilation |
|
||||
| Export endpoints | 20/min | I/O-intensive operations |
|
||||
|
||||
## Implementation
|
||||
|
||||
### Algorithm
|
||||
|
||||
Use **Token Bucket** algorithm with the following configuration:
|
||||
|
||||
```yaml
|
||||
rate_limit:
|
||||
algorithm: token_bucket
|
||||
bucket_size: ${BURST_LIMIT}
|
||||
refill_rate: ${REQUESTS_PER_MINUTE} / 60
|
||||
refill_interval: 1s
|
||||
```
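For reference, a minimal in-process token bucket matching these parameters (capacity = burst limit, refill = requests-per-minute divided by 60 each second). This is an illustrative sketch, not the gateway implementation.

```csharp
using System;

public sealed class TokenBucket
{
    private readonly int _capacity;
    private readonly double _refillPerSecond;
    private readonly object _gate = new();
    private double _tokens;
    private DateTime _lastRefill = DateTime.UtcNow;

    public TokenBucket(int burstLimit, int requestsPerMinute)
    {
        _capacity = burstLimit;
        _refillPerSecond = requestsPerMinute / 60.0;
        _tokens = burstLimit;
    }

    public bool TryTake()
    {
        lock (_gate)
        {
            // Refill proportionally to elapsed time, capped at the bucket size.
            var now = DateTime.UtcNow;
            _tokens = Math.Min(_capacity, _tokens + (now - _lastRefill).TotalSeconds * _refillPerSecond);
            _lastRefill = now;

            if (_tokens < 1)
            {
                return false; // caller should respond 429 with Retry-After
            }

            _tokens -= 1;
            return true;
        }
    }
}
```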
|
||||
|
||||
### Rate Limit Headers
|
||||
|
||||
All responses include the standard rate limit headers below; `Retry-After` is only present when the request has been throttled (for example on `429` responses):
|
||||
|
||||
```http
|
||||
X-RateLimit-Limit: 300
|
||||
X-RateLimit-Remaining: 295
|
||||
X-RateLimit-Reset: 1701936000
|
||||
X-RateLimit-Policy: standard
|
||||
Retry-After: 30
|
||||
```
|
||||
|
||||
### Rate Limit Response
|
||||
|
||||
When the rate limit is exceeded, return `429 Too Many Requests` with an RFC 7807 problem body (the body values below are illustrative):

```http
HTTP/1.1 429 Too Many Requests
Content-Type: application/problem+json
Retry-After: 30

{
  "type": "about:blank",
  "title": "Too Many Requests",
  "status": 429,
  "detail": "Rate limit exceeded for this tenant; retry after 30 seconds."
}
```
|
||||
|
||||
## Rate Limit Keys
|
||||
|
||||
### Primary Key: Tenant ID + Client ID
|
||||
|
||||
```
|
||||
rate_limit_key = "${tenant_id}:${client_id}"
|
||||
```
|
||||
|
||||
### Fallback Keys
|
||||
|
||||
1. Authenticated: `tenant:${tenant_id}:user:${user_id}`
|
||||
2. API Key: `apikey:${api_key_hash}`
|
||||
3. Anonymous: `ip:${client_ip}`
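Putting the primary key and the fallbacks above together, a hypothetical key builder might look like the following. The `RequestIdentity` shape is an assumption for the sketch; the key formats come from this contract.

```csharp
public sealed record RequestIdentity(
    string? TenantId, string? ClientId, string? UserId, string? ApiKeyHash, string ClientIp);

public static class RateLimitKeys
{
    public static string Build(RequestIdentity id)
    {
        if (id.TenantId is not null && id.ClientId is not null)
            return $"{id.TenantId}:{id.ClientId}";            // primary key

        if (id.TenantId is not null && id.UserId is not null)
            return $"tenant:{id.TenantId}:user:{id.UserId}";  // authenticated fallback

        if (id.ApiKeyHash is not null)
            return $"apikey:{id.ApiKeyHash}";                 // API-key fallback

        return $"ip:{id.ClientIp}";                           // anonymous fallback
    }
}
```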
|
||||
|
||||
## Exemptions
|
||||
|
||||
### Exempt Endpoints
|
||||
|
||||
The following endpoints are exempt from rate limiting:
|
||||
|
||||
- `GET /health`
|
||||
- `GET /ready`
|
||||
- `GET /metrics`
|
||||
- `GET /.well-known/*`
|
||||
|
||||
### Exempt Clients
|
||||
|
||||
- Internal service mesh traffic (mTLS authenticated)
|
||||
- Localhost connections in development mode
|
||||
- Clients with `unlimited` tier
|
||||
|
||||
## Quota Management
|
||||
|
||||
### Tenant Quota Tracking
|
||||
|
||||
```yaml
|
||||
quota:
|
||||
tracking:
|
||||
storage: redis
|
||||
key_prefix: "stellaops:quota:"
|
||||
ttl: 3600 # 1 hour rolling window
|
||||
|
||||
dimensions:
|
||||
- tenant_id
|
||||
- endpoint_category
|
||||
- time_bucket
|
||||
```
|
||||
|
||||
### Quota Alerts
|
||||
|
||||
| Threshold | Action |
|
||||
|-----------|--------|
|
||||
| 80% consumed | Emit `quota.warning` event |
|
||||
| 95% consumed | Emit `quota.critical` event |
|
||||
| 100% consumed | Block requests, emit `quota.exceeded` event |
|
||||
|
||||
## Configuration
|
||||
|
||||
### Gateway Configuration
|
||||
|
||||
```yaml
|
||||
# gateway/rate-limits.yaml
|
||||
rateLimiting:
|
||||
enabled: true
|
||||
defaultTier: standard
|
||||
|
||||
tiers:
|
||||
free:
|
||||
requestsPerMinute: 60
|
||||
requestsPerHour: 1000
|
||||
burstLimit: 10
|
||||
standard:
|
||||
requestsPerMinute: 300
|
||||
requestsPerHour: 10000
|
||||
burstLimit: 50
|
||||
enterprise:
|
||||
requestsPerMinute: 1000
|
||||
requestsPerHour: 50000
|
||||
burstLimit: 200
|
||||
|
||||
endpoints:
|
||||
- pattern: "/api/risk/simulation/*"
|
||||
limit: 30
|
||||
window: 60s
|
||||
- pattern: "/api/risk/simulation/studio/*"
|
||||
limit: 10
|
||||
window: 60s
|
||||
- pattern: "/system/airgap/seal"
|
||||
limit: 5
|
||||
window: 3600s
|
||||
```
|
||||
|
||||
### Policy Engine Configuration
|
||||
|
||||
```csharp
|
||||
// PolicyEngineRateLimitOptions.cs
|
||||
public static class PolicyEngineRateLimitOptions
|
||||
{
|
||||
public const string PolicyName = "PolicyEngineRateLimit";
|
||||
|
||||
public static void Configure(RateLimiterOptions options)
|
||||
{
|
||||
options.AddTokenBucketLimiter(PolicyName, opt =>
|
||||
{
|
||||
opt.TokenLimit = 50;
|
||||
opt.QueueLimit = 10;
|
||||
opt.ReplenishmentPeriod = TimeSpan.FromSeconds(10);
|
||||
opt.TokensPerPeriod = 5;
|
||||
opt.AutoReplenishment = true;
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Metrics
|
||||
|
||||
| Metric | Type | Labels |
|
||||
|--------|------|--------|
|
||||
| `stellaops_rate_limit_requests_total` | Counter | tier, endpoint, status |
|
||||
| `stellaops_rate_limit_exceeded_total` | Counter | tier, endpoint |
|
||||
| `stellaops_rate_limit_remaining` | Gauge | tenant_id, tier |
|
||||
| `stellaops_rate_limit_queue_size` | Gauge | endpoint |
|
||||
|
||||
### Alerts
|
||||
|
||||
```yaml
|
||||
# prometheus/rules/rate-limiting.yaml
|
||||
groups:
|
||||
- name: rate_limiting
|
||||
rules:
|
||||
- alert: HighRateLimitExceeded
|
||||
expr: rate(stellaops_rate_limit_exceeded_total[5m]) > 10
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
annotations:
|
||||
summary: "High rate of rate limit exceeded events"
|
||||
```
|
||||
|
||||
## Integration with Web UI
|
||||
|
||||
### Client SDK Configuration
|
||||
|
||||
```typescript
|
||||
// stellaops-sdk/rate-limit-handler.ts
|
||||
interface RateLimitConfig {
|
||||
retryOnRateLimit: boolean;
|
||||
maxRetries: number;
|
||||
backoffMultiplier: number;
|
||||
maxBackoffSeconds: number;
|
||||
}
|
||||
|
||||
const defaultConfig: RateLimitConfig = {
|
||||
retryOnRateLimit: true,
|
||||
maxRetries: 3,
|
||||
backoffMultiplier: 2,
|
||||
maxBackoffSeconds: 60
|
||||
};
|
||||
```
|
||||
|
||||
### UI Rate Limit Display
|
||||
|
||||
The Web UI displays rate limit status in the console header with:
|
||||
- Current remaining requests
|
||||
- Time until reset
|
||||
- Visual indicator when approaching limit (< 20% remaining)
|
||||
|
||||
## Changelog
|
||||
|
||||
| Date | Version | Change |
|
||||
|------|---------|--------|
|
||||
| 2025-12-07 | 1.0.0 | Initial contract definition |
|
||||
|
||||
## References
|
||||
|
||||
- [API Governance Baseline](./api-governance-baseline.md)
|
||||
- [Web Gateway Architecture](../modules/gateway/architecture.md)
|
||||
- [Policy Engine Rate Limiting](../modules/policy/design/rate-limiting.md)
|
||||
67 docs/contracts/redaction-defaults-decision.md (Normal file)
@@ -0,0 +1,67 @@
|
||||
# Redaction Defaults Decision
|
||||
|
||||
**Decision ID:** DECISION-SECURITY-001
|
||||
**Status:** DEFAULT-APPROVED
|
||||
**Effective Date:** 2025-12-06
|
||||
**48h Window Started:** 2025-12-06T00:00:00Z
|
||||
|
||||
## Decision
|
||||
|
||||
Notification and export pipelines use **restrictive redaction defaults** that redact PII, secrets, and cryptographic keys.
|
||||
|
||||
## Rationale
|
||||
|
||||
1. Security-first approach minimizes data exposure risk
|
||||
2. Users can opt-in to less restrictive settings via configuration
|
||||
3. Aligns with GDPR and data minimization principles
|
||||
4. Consistent with existing Evidence Locker redaction patterns
|
||||
|
||||
## Default Redaction Rules
|
||||
|
||||
### Always Redacted (HIGH)
|
||||
- Private keys (RSA, ECDSA, Ed25519)
|
||||
- API keys and tokens
|
||||
- Passwords and secrets
|
||||
- Database connection strings
|
||||
- JWT tokens
|
||||
|
||||
### Redacted by Default (MEDIUM) - Opt-out available
|
||||
- Email addresses
|
||||
- IP addresses (external)
|
||||
- File paths containing usernames
|
||||
- Environment variable values (not names)
|
||||
|
||||
### Not Redacted (LOW)
|
||||
- Package names and versions
|
||||
- CVE identifiers
|
||||
- Severity scores
|
||||
- Public key fingerprints
|
||||
|
||||
## Configuration
|
||||
|
||||
```yaml
|
||||
# etc/notify.yaml
|
||||
redaction:
|
||||
level: restrictive # Options: permissive, standard, restrictive
|
||||
custom_patterns:
|
||||
- pattern: "INTERNAL_.*"
|
||||
action: redact
|
||||
```
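A sketch of how a notification pipeline might apply these defaults plus `custom_patterns`. The built-in regexes and the `[REDACTED]` replacement token are assumptions; only the restrictive-by-default behaviour and the custom-pattern concept come from this decision.

```csharp
using System.Collections.Generic;
using System.Text.RegularExpressions;

public static class Redactor
{
    // Illustrative built-in patterns for the "always redacted" categories above.
    private static readonly Regex[] DefaultPatterns =
    {
        new(@"-----BEGIN [A-Z ]*PRIVATE KEY-----[\s\S]+?-----END [A-Z ]*PRIVATE KEY-----"),
        new(@"(?i)\b(api[_-]?key|token|password|secret)\b\s*[:=]\s*\S+"),
        new(@"\beyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\b"), // JWT-shaped strings
    };

    public static string Apply(string payload, IEnumerable<string> customPatterns)
    {
        foreach (var pattern in DefaultPatterns)
        {
            payload = pattern.Replace(payload, "[REDACTED]");
        }

        foreach (var custom in customPatterns) // e.g. "INTERNAL_.*" from etc/notify.yaml
        {
            payload = Regex.Replace(payload, custom, "[REDACTED]");
        }

        return payload;
    }
}
```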
|
||||
|
||||
## Impact
|
||||
|
||||
- Tasks unblocked: ~5
|
||||
- Sprint files affected: SPRINT_0170, SPRINT_0171
|
||||
|
||||
## Reversibility
|
||||
|
||||
To change redaction defaults:
|
||||
1. Update `docs/security/redaction-and-privacy.md`
|
||||
2. Get Security Guild sign-off
|
||||
3. Update configuration schemas
|
||||
4. Ensure backward compatibility
|
||||
|
||||
## References
|
||||
|
||||
- [Redaction and Privacy](../security/redaction-and-privacy.md)
|
||||
- [SPRINT_0170 Notifications](../implplan/SPRINT_0170_0001_0001_notifications_telemetry.md)
|
||||
425 docs/contracts/sealed-install-enforcement.md (Normal file)
@@ -0,0 +1,425 @@
|
||||
# Sealed Install Enforcement Contract
|
||||
|
||||
> **Status:** APPROVED
|
||||
> **Version:** 1.0.0
|
||||
> **Last Updated:** 2025-12-06
|
||||
> **Owner:** AirGap Controller Guild
|
||||
> **Unblocks:** TASKRUN-AIRGAP-57-001, TASKRUN-AIRGAP-58-001
|
||||
|
||||
## Overview
|
||||
|
||||
This contract defines the sealed install enforcement semantics for StellaOps air-gapped deployments. When a pack or task declares `sealed_install: true`, the Task Runner MUST refuse to execute if the environment is not properly sealed.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────────────────┐
|
||||
│ Sealed Install Enforcement Flow │
|
||||
├─────────────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
|
||||
│ │ Task Pack │ │ Task Runner │ │ AirGap │ │
|
||||
│ │ │────>│ │────>│ Controller │ │
|
||||
│ │ sealed_ │ │ Enforcement │ │ │ │
|
||||
│ │ install:true │ │ Check │ │ /status │ │
|
||||
│ └──────────────┘ └──────────────┘ └──────────────┘ │
|
||||
│ │ │ │
|
||||
│ ▼ ▼ │
|
||||
│ ┌──────────────────────────────────┐ │
|
||||
│ │ Decision Matrix │ │
|
||||
│ │ │ │
|
||||
│ │ Pack: sealed Env: sealed │ │
|
||||
│ │ ────────────── ──────────── │ │
|
||||
│ │ true true → RUN │ │
|
||||
│ │ true false → DENY │ │
|
||||
│ │ false true → RUN │ │
|
||||
│ │ false false → RUN │ │
|
||||
│ └──────────────────────────────────┘ │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## 1. Pack Declaration
|
||||
|
||||
### 1.1 Sealed Install Flag
|
||||
|
||||
Packs declare their sealed requirement in the pack manifest:
|
||||
|
||||
```json
|
||||
{
|
||||
"pack_id": "compliance-scan-airgap",
|
||||
"version": "1.0.0",
|
||||
"name": "Air-Gap Compliance Scanner",
|
||||
"sealed_install": true,
|
||||
"sealed_requirements": {
|
||||
"min_bundle_version": "2025.10.0",
|
||||
"max_advisory_staleness_hours": 168,
|
||||
"require_time_anchor": true,
|
||||
"allowed_offline_duration_hours": 720
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 1.2 Sealed Requirements Schema
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"sealed_install": {
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"description": "If true, pack MUST run in sealed environment"
|
||||
},
|
||||
"sealed_requirements": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"min_bundle_version": {
|
||||
"type": "string",
|
||||
"description": "Minimum air-gap bundle version"
|
||||
},
|
||||
"max_advisory_staleness_hours": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"default": 168,
|
||||
"description": "Maximum age of advisory data in hours"
|
||||
},
|
||||
"require_time_anchor": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"description": "Require valid time anchor"
|
||||
},
|
||||
"allowed_offline_duration_hours": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"default": 720,
|
||||
"description": "Maximum allowed offline duration"
|
||||
},
|
||||
"require_signature_verification": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"description": "Require bundle signature verification"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 2. Environment Detection
|
||||
|
||||
### 2.1 Sealed Mode Status API
|
||||
|
||||
The Task Runner queries the AirGap Controller to determine sealed status:
|
||||
|
||||
```
|
||||
GET /api/v1/airgap/status
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"sealed": true,
|
||||
"mode": "sealed",
|
||||
"sealed_at": "2025-12-01T00:00:00Z",
|
||||
"sealed_by": "ops-admin@company.com",
|
||||
"bundle_version": "2025.10.0",
|
||||
"bundle_digest": "sha256:abc123...",
|
||||
"last_advisory_update": "2025-12-01T00:00:00Z",
|
||||
"advisory_staleness_hours": 120,
|
||||
"time_anchor": {
|
||||
"timestamp": "2025-12-01T00:00:00Z",
|
||||
"signature": "base64...",
|
||||
"valid": true,
|
||||
"expires_at": "2025-12-31T00:00:00Z"
|
||||
},
|
||||
"egress_blocked": true,
|
||||
"network_policy": "deny-all"
|
||||
}
|
||||
```
|
||||
|
||||
### 2.2 Detection Heuristics
|
||||
|
||||
If the AirGap Controller is unavailable, the Task Runner uses fallback heuristics:
|
||||
|
||||
| Heuristic | Weight | Indicates |
|
||||
|-----------|--------|-----------|
|
||||
| No external DNS resolution | High | Sealed |
|
||||
| Blocked ports 80, 443 | High | Sealed |
|
||||
| AIRGAP_MODE=sealed env var | High | Sealed |
|
||||
| /etc/stellaops/sealed file exists | Medium | Sealed |
|
||||
| No internet connectivity | Medium | Sealed |
|
||||
| Local-only registry configured | Low | Sealed |
|
||||
|
||||
A combined heuristic score of **0.7** or higher is required before the environment is treated as sealed.
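A sketch of the weighted scoring described above. The numeric mapping of High/Medium/Low weights and the probe implementations are assumptions; only the signals and the 0.7 threshold come from this contract.

```csharp
using System;
using System.IO;

public static class SealedHeuristics
{
    public static bool LooksSealed()
    {
        double score = 0, total = 0;
        void Check(bool signal, double weight) { total += weight; if (signal) score += weight; }

        Check(!CanResolveExternalDns(), 1.0);                                        // High
        Check(PortsBlocked(80, 443), 1.0);                                           // High
        Check(Environment.GetEnvironmentVariable("AIRGAP_MODE") == "sealed", 1.0);   // High
        Check(File.Exists("/etc/stellaops/sealed"), 0.6);                            // Medium
        Check(!HasInternetConnectivity(), 0.6);                                      // Medium
        Check(LocalOnlyRegistryConfigured(), 0.3);                                   // Low

        return total > 0 && score / total >= 0.7; // threshold from this contract
    }

    // Placeholder probes standing in for whatever detection the Task Runner ships with.
    private static bool CanResolveExternalDns() => false;
    private static bool PortsBlocked(params int[] ports) => true;
    private static bool HasInternetConnectivity() => false;
    private static bool LocalOnlyRegistryConfigured() => true;
}
```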
|
||||
|
||||
## 3. Enforcement Logic
|
||||
|
||||
### 3.1 Pre-Execution Check
|
||||
|
||||
```csharp
|
||||
public sealed class SealedInstallEnforcer
|
||||
{
|
||||
public async Task<EnforcementResult> EnforceAsync(
|
||||
TaskPack pack,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
// If pack doesn't require sealed install, allow
|
||||
if (!pack.SealedInstall)
|
||||
{
|
||||
return EnforcementResult.Allowed("Pack does not require sealed install");
|
||||
}
|
||||
|
||||
// Get environment sealed status
|
||||
var status = await _airgapController.GetStatusAsync(ct);
|
||||
|
||||
// Core check: environment must be sealed
|
||||
if (!status.Sealed)
|
||||
{
|
||||
return EnforcementResult.Denied(
|
||||
"SEALED_INSTALL_VIOLATION",
|
||||
"Pack requires sealed environment but environment is not sealed",
|
||||
new SealedInstallViolation
|
||||
{
|
||||
PackId = pack.PackId,
|
||||
RequiredSealed = true,
|
||||
ActualSealed = false,
|
||||
Recommendation = "Activate sealed mode with: stella airgap seal"
|
||||
});
|
||||
}
|
||||
|
||||
// Check sealed requirements
|
||||
if (pack.SealedRequirements != null)
|
||||
{
|
||||
var violations = ValidateRequirements(pack.SealedRequirements, status);
|
||||
if (violations.Any())
|
||||
{
|
||||
return EnforcementResult.Denied(
|
||||
"SEALED_REQUIREMENTS_VIOLATION",
|
||||
"Sealed requirements not met",
|
||||
violations);
|
||||
}
|
||||
}
|
||||
|
||||
return EnforcementResult.Allowed("Sealed install requirements satisfied");
|
||||
}
|
||||
|
||||
private List<RequirementViolation> ValidateRequirements(
|
||||
SealedRequirements requirements,
|
||||
SealedModeStatus status)
|
||||
{
|
||||
var violations = new List<RequirementViolation>();
|
||||
|
||||
// Bundle version check
|
||||
if (requirements.MinBundleVersion != null)
|
||||
{
|
||||
if (Version.Parse(status.BundleVersion) < Version.Parse(requirements.MinBundleVersion))
|
||||
{
|
||||
violations.Add(new RequirementViolation
|
||||
{
|
||||
Requirement = "min_bundle_version",
|
||||
Expected = requirements.MinBundleVersion,
|
||||
Actual = status.BundleVersion,
|
||||
Message = $"Bundle version {status.BundleVersion} < required {requirements.MinBundleVersion}"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Advisory staleness check
|
||||
if (status.AdvisoryStalenessHours > requirements.MaxAdvisoryStalenessHours)
|
||||
{
|
||||
violations.Add(new RequirementViolation
|
||||
{
|
||||
Requirement = "max_advisory_staleness_hours",
|
||||
Expected = requirements.MaxAdvisoryStalenessHours.ToString(),
|
||||
Actual = status.AdvisoryStalenessHours.ToString(),
|
||||
Message = $"Advisory data is {status.AdvisoryStalenessHours}h old, max allowed is {requirements.MaxAdvisoryStalenessHours}h"
|
||||
});
|
||||
}
|
||||
|
||||
// Time anchor check
|
||||
if (requirements.RequireTimeAnchor && (status.TimeAnchor == null || !status.TimeAnchor.Valid))
|
||||
{
|
||||
violations.Add(new RequirementViolation
|
||||
{
|
||||
Requirement = "require_time_anchor",
|
||||
Expected = "valid time anchor",
|
||||
Actual = status.TimeAnchor?.Valid.ToString() ?? "missing",
|
||||
Message = "Valid time anchor required but not present"
|
||||
});
|
||||
}
|
||||
|
||||
return violations;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3.2 Decision Matrix
|
||||
|
||||
| Pack `sealed_install` | Environment Sealed | Bundle Valid | Advisories Fresh | Result |
|
||||
|-----------------------|-------------------|--------------|------------------|--------|
|
||||
| `true` | `true` | `true` | `true` | ✅ RUN |
|
||||
| `true` | `true` | `true` | `false` | ⚠️ WARN + RUN (if within grace) |
|
||||
| `true` | `true` | `false` | * | ❌ DENY |
|
||||
| `true` | `false` | * | * | ❌ DENY |
|
||||
| `false` | `true` | * | * | ✅ RUN |
|
||||
| `false` | `false` | * | * | ✅ RUN |
|
||||
|
||||
### 3.3 Grace Period Handling
|
||||
|
||||
For advisory staleness, a grace period can be configured:
|
||||
|
||||
```yaml
|
||||
# etc/taskrunner.yaml
|
||||
enforcement:
|
||||
sealed_install:
|
||||
staleness_grace_period_hours: 24
|
||||
staleness_warning_threshold_hours: 120
|
||||
deny_on_staleness: true # or false for warn-only
|
||||
```
|
||||
|
||||
## 4. Refusal Semantics
|
||||
|
||||
### 4.1 Error Response
|
||||
|
||||
When enforcement denies execution:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "SEALED_INSTALL_VIOLATION",
|
||||
"message": "Pack requires sealed environment but environment is not sealed",
|
||||
"details": {
|
||||
"pack_id": "compliance-scan-airgap",
|
||||
"pack_version": "1.0.0",
|
||||
"sealed_install_required": true,
|
||||
"environment_sealed": false,
|
||||
"violations": [],
|
||||
"recommendation": "Activate sealed mode with: stella airgap seal"
|
||||
}
|
||||
},
|
||||
"status": "rejected",
|
||||
"rejected_at": "2025-12-06T10:00:00Z"
|
||||
}
|
||||
```
|
||||
|
||||
### 4.2 CLI Exit Codes
|
||||
|
||||
| Code | Name | Description |
|
||||
|------|------|-------------|
|
||||
| 40 | `SEALED_INSTALL_VIOLATION` | Pack requires sealed but environment is not |
|
||||
| 41 | `BUNDLE_VERSION_VIOLATION` | Bundle version below minimum |
|
||||
| 42 | `ADVISORY_STALENESS_VIOLATION` | Advisory data too stale |
|
||||
| 43 | `TIME_ANCHOR_VIOLATION` | Time anchor missing or invalid |
|
||||
| 44 | `SIGNATURE_VERIFICATION_VIOLATION` | Bundle signature verification failed |
|
||||
|
||||
### 4.3 Audit Logging
|
||||
|
||||
All enforcement decisions are logged:
|
||||
|
||||
```json
|
||||
{
|
||||
"event_type": "sealed_install_enforcement",
|
||||
"timestamp": "2025-12-06T10:00:00Z",
|
||||
"pack_id": "compliance-scan-airgap",
|
||||
"pack_version": "1.0.0",
|
||||
"decision": "denied",
|
||||
"reason": "SEALED_INSTALL_VIOLATION",
|
||||
"environment": {
|
||||
"sealed": false,
|
||||
"bundle_version": null,
|
||||
"advisory_staleness_hours": null
|
||||
},
|
||||
"user": "task-runner-service",
|
||||
"tenant_id": "550e8400-e29b-41d4-a716-446655440000"
|
||||
}
|
||||
```
|
||||
|
||||
## 5. Integration Points
|
||||
|
||||
### 5.1 Task Runner Integration
|
||||
|
||||
```csharp
|
||||
// In TaskRunner execution pipeline
|
||||
public async Task<TaskResult> ExecuteAsync(TaskPack pack, TaskContext context)
|
||||
{
|
||||
// Pre-execution enforcement
|
||||
var enforcement = await _sealedInstallEnforcer.EnforceAsync(pack);
|
||||
if (!enforcement.Allowed)
|
||||
{
|
||||
await _auditLogger.LogEnforcementDenialAsync(pack, enforcement);
|
||||
return TaskResult.Rejected(enforcement);
|
||||
}
|
||||
|
||||
// Continue with execution
|
||||
return await _executor.ExecuteAsync(pack, context);
|
||||
}
|
||||
```
|
||||
|
||||
### 5.2 CLI Integration
|
||||
|
||||
```bash
|
||||
# Check sealed status before running pack
|
||||
$ stella pack run compliance-scan-airgap
|
||||
|
||||
Error: Sealed install violation
|
||||
Pack 'compliance-scan-airgap' requires a sealed environment.
|
||||
|
||||
Current environment:
|
||||
Sealed: false
|
||||
|
||||
To resolve:
|
||||
1. Import an air-gap bundle: stella airgap import <bundle.tar.gz>
|
||||
2. Activate sealed mode: stella airgap seal
|
||||
3. Verify status: stella airgap status
|
||||
|
||||
Exit code: 40
|
||||
```
|
||||
|
||||
## 6. Configuration
|
||||
|
||||
### 6.1 Task Runner Configuration
|
||||
|
||||
```yaml
|
||||
# etc/taskrunner.yaml
|
||||
enforcement:
|
||||
sealed_install:
|
||||
enabled: true
|
||||
|
||||
# Staleness handling
|
||||
staleness_grace_period_hours: 24
|
||||
staleness_warning_threshold_hours: 120
|
||||
deny_on_staleness: true
|
||||
|
||||
# Fallback detection
|
||||
use_heuristic_detection: true
|
||||
heuristic_threshold: 0.7
|
||||
|
||||
# Logging
|
||||
log_all_decisions: true
|
||||
audit_retention_days: 365
|
||||
```
|
||||
|
||||
### 6.2 Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `AIRGAP_MODE` | Force sealed mode detection | — |
|
||||
| `AIRGAP_CONTROLLER_URL` | AirGap controller endpoint | `http://localhost:8080` |
|
||||
| `SEALED_INSTALL_BYPASS` | Bypass enforcement (dev only) | `false` |
|
||||
|
||||
## 7. Tasks Unblocked
|
||||
|
||||
This contract unblocks:
|
||||
|
||||
| Task ID | Description | Status |
|
||||
|---------|-------------|--------|
|
||||
| TASKRUN-AIRGAP-57-001 | Sealed install enforcement contract | ✅ UNBLOCKED |
|
||||
| TASKRUN-AIRGAP-58-001 | Sealed install CLI integration | ✅ UNBLOCKED |
|
||||
|
||||
## 8. Changelog
|
||||
|
||||
| Date | Version | Change |
|
||||
|------|---------|--------|
|
||||
| 2025-12-06 | 1.0.0 | Initial contract with enforcement logic, decision matrix, CLI integration |
|
||||
467 docs/contracts/web-gateway-tenant-rbac.md (Normal file)
@@ -0,0 +1,467 @@
|
||||
# Web Gateway Tenant RBAC Contract

**Contract ID:** CONTRACT-GATEWAY-RBAC-001
**Status:** APPROVED
**Effective Date:** 2025-12-07
**Owners:** Gateway Guild, Authority Guild, Web UI Guild

## Overview

This contract defines the tenant isolation and role-based access control (RBAC) model for the StellaOps Web Gateway, ensuring consistent authorization across all API endpoints and UI components.

## Tenant Model

### Tenant Hierarchy

```
Organization (Org)
├── Tenant A
│   ├── Project 1
│   │   └── Resources...
│   └── Project 2
│       └── Resources...
└── Tenant B
    └── Project 3
        └── Resources...
```

### Tenant Identification

Tenants are identified through:

1. **JWT Claims:** `tenant_id` or `stellaops:tenant` claim
2. **Header:** `X-Tenant-Id` header (for service-to-service)
3. **Path Parameter:** `/tenants/{tenantId}/...` routes

### Tenant Resolution Priority

```
1. Path parameter (explicit)
2. JWT claim (authenticated user context)
3. X-Tenant-Id header (service-to-service)
4. Default tenant (configuration fallback)
```

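As an illustration, the sketch below walks this priority order in the shape of the `ResolveTenantId` helper that the middleware in the Gateway Implementation section calls. The claim lookups follow the claim names in this contract, but the exact fallback wiring and the configuration-backed default are assumptions, not the shipped implementation.

```csharp
// Illustrative sketch only: route value, claim, and header names follow this contract; everything else is assumed.
private static string? ResolveTenantId(HttpContext context)
{
    // 1. Path parameter (explicit) — e.g. /tenants/{tenantId}/...
    if (context.Request.RouteValues.TryGetValue("tenantId", out var fromPath) && fromPath is string pathTenant)
    {
        return pathTenant;
    }

    // 2. JWT claim (authenticated user context)
    var claimTenant = context.User.FindFirst("stellaops:tenant")?.Value
                      ?? context.User.FindFirst("tenant_id")?.Value;
    if (!string.IsNullOrEmpty(claimTenant))
    {
        return claimTenant;
    }

    // 3. X-Tenant-Id header (service-to-service)
    if (context.Request.Headers.TryGetValue("X-Tenant-Id", out var fromHeader) && !string.IsNullOrEmpty(fromHeader))
    {
        return fromHeader.ToString();
    }

    // 4. Default tenant (configuration fallback) — placeholder; in practice this would come from configuration.
    return null;
}
```
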
## Role Definitions

### Built-in Roles

| Role | Description | Scope |
|------|-------------|-------|
| `org:admin` | Organization administrator | Org-wide |
| `org:reader` | Organization read-only access | Org-wide |
| `tenant:admin` | Tenant administrator | Single tenant |
| `tenant:operator` | Can modify resources within tenant | Single tenant |
| `tenant:viewer` | Read-only access to tenant | Single tenant |
| `project:admin` | Project administrator | Single project |
| `project:contributor` | Can modify project resources | Single project |
| `project:viewer` | Read-only project access | Single project |
| `policy:admin` | Policy management | Tenant-wide |
| `scanner:operator` | Scanner operations | Tenant-wide |
| `airgap:admin` | Air-gap operations | Tenant-wide |

### Role Hierarchy

```
org:admin
├── org:reader
├── tenant:admin
│   ├── tenant:operator
│   │   └── tenant:viewer
│   ├── policy:admin
│   ├── scanner:operator
│   └── airgap:admin
└── project:admin
    ├── project:contributor
    └── project:viewer
```

## Scopes

### OAuth 2.0 Scopes

| Scope | Description | Required Role |
|-------|-------------|---------------|
| `policy:read` | Read policies and profiles | `tenant:viewer` |
| `policy:edit` | Create/modify policies | `policy:admin` |
| `policy:activate` | Activate policies | `policy:admin` |
| `scanner:read` | View scan results | `tenant:viewer` |
| `scanner:execute` | Execute scans | `scanner:operator` |
| `airgap:seal` | Seal/unseal environment | `airgap:admin` |
| `airgap:status:read` | Read sealed mode status | `tenant:viewer` |
| `airgap:verify` | Verify bundles | `tenant:operator` |
| `export:read` | Read exports | `tenant:viewer` |
| `export:create` | Create exports | `tenant:operator` |
| `admin:users` | Manage users | `tenant:admin` |
| `admin:settings` | Manage settings | `tenant:admin` |

### Scope Inheritance

Child scopes are automatically granted when the parent scope is present:

```yaml
scope_inheritance:
  "policy:edit": ["policy:read"]
  "policy:activate": ["policy:read", "policy:edit"]
  "scanner:execute": ["scanner:read"]
  "export:create": ["export:read"]
  "admin:users": ["admin:settings"]
```

## Resource Authorization

### Resource Types

| Resource Type | Tenant Scoped | Project Scoped | Description |
|--------------|---------------|----------------|-------------|
| `risk_profile` | Yes | No | Risk scoring profiles |
| `policy_pack` | Yes | No | Policy bundles |
| `scan_result` | Yes | Yes | Scan outputs |
| `export` | Yes | Yes | Export jobs |
| `finding` | Yes | Yes | Vulnerability findings |
| `vex_document` | Yes | Yes | VEX statements |
| `sealed_mode` | Yes | No | Air-gap state |
| `user` | Yes | No | Tenant users |
| `project` | Yes | No | Projects |

### Authorization Rules

```yaml
# authorization-rules.yaml
rules:
  - resource: risk_profile
    actions:
      read:
        required_scopes: [policy:read]
        tenant_isolation: strict
      create:
        required_scopes: [policy:edit]
        tenant_isolation: strict
      update:
        required_scopes: [policy:edit]
        tenant_isolation: strict
      activate:
        required_scopes: [policy:activate]
        tenant_isolation: strict
      delete:
        required_scopes: [policy:edit]
        tenant_isolation: strict
        require_role: policy:admin

  - resource: scan_result
    actions:
      read:
        required_scopes: [scanner:read]
        tenant_isolation: strict
        project_isolation: optional
      create:
        required_scopes: [scanner:execute]
        tenant_isolation: strict
      delete:
        required_scopes: [scanner:execute]
        tenant_isolation: strict
        require_role: scanner:operator

  - resource: sealed_mode
    actions:
      read:
        required_scopes: [airgap:status:read]
        tenant_isolation: strict
      seal:
        required_scopes: [airgap:seal]
        tenant_isolation: strict
        require_role: airgap:admin
        audit: required
      unseal:
        required_scopes: [airgap:seal]
        tenant_isolation: strict
        require_role: airgap:admin
        audit: required
```

## Tenant Isolation

### Strict Isolation

All data access is tenant-scoped by default:

```sql
-- Example: All queries include tenant filter
SELECT * FROM findings
WHERE tenant_id = @current_tenant_id
  AND deleted_at IS NULL;
```

### Cross-Tenant Access

Cross-tenant access is prohibited, with three exceptions:

1. **Organization admins** can access all tenants in their org
2. **Internal services** with explicit `cross_tenant` scope
3. **Aggregation endpoints** with `org:reader` role

### Isolation Enforcement Points

| Layer | Enforcement |
|-------|-------------|
| Gateway | Validates tenant claim, injects X-Tenant-Id |
| Service | Applies tenant filter to all queries |
| Database | Row-level security (RLS) policies (see the sketch below) |
| Cache | Tenant-prefixed cache keys |

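As one way to realize the database layer of the table above, the sketch below shows a PostgreSQL row-level-security policy keyed on a per-session tenant setting. The table, column, and `app.current_tenant_id` setting names are assumptions for illustration; the contract only requires that RLS enforce the tenant filter.

```sql
-- Illustrative RLS sketch; object names (findings, tenant_id, app.current_tenant_id) are assumptions.
ALTER TABLE findings ENABLE ROW LEVEL SECURITY;

CREATE POLICY findings_tenant_isolation ON findings
    USING (tenant_id = current_setting('app.current_tenant_id')::uuid);

-- The service layer would set the tenant on the connection before querying:
-- SET app.current_tenant_id = '<tenant-uuid>';
```
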
## JWT Claims

### Required Claims

```json
{
  "sub": "user-uuid",
  "aud": ["stellaops-api"],
  "iss": "https://auth.stellaops.io",
  "exp": 1701936000,
  "iat": 1701932400,
  "stellaops:tenant": "tenant-uuid",
  "stellaops:org": "org-uuid",
  "stellaops:roles": ["tenant:operator", "policy:admin"],
  "scope": "policy:read policy:edit scanner:read"
}
```

### Custom Claims

| Claim | Type | Description |
|-------|------|-------------|
| `stellaops:tenant` | string | Current tenant UUID |
| `stellaops:org` | string | Organization UUID |
| `stellaops:roles` | string[] | Assigned roles |
| `stellaops:projects` | string[] | Accessible projects |
| `stellaops:tier` | string | Rate limit tier |

## Gateway Implementation

### Authorization Middleware

```csharp
// AuthorizationMiddleware.cs
public class TenantAuthorizationMiddleware
{
    public async Task InvokeAsync(HttpContext context, RequestDelegate next)
    {
        // 1. Extract tenant from JWT/header/path
        var tenantId = ResolveTenantId(context);

        // 2. Validate tenant access
        if (!await ValidateTenantAccess(context.User, tenantId))
        {
            context.Response.StatusCode = 403;
            return;
        }

        // 3. Set tenant context for downstream
        context.Items["TenantId"] = tenantId;
        context.Request.Headers["X-Tenant-Id"] = tenantId;

        await next(context);
    }
}
```

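The middleware above leaves `ValidateTenantAccess` undefined. A minimal sketch is given below, assuming the claim names from the JWT Claims section and the cross-tenant exceptions listed under Tenant Isolation; the helper shape and the purely token-based check are assumptions, not the shipped implementation.

```csharp
// Illustrative sketch only; claim names follow this contract, the access rules mirror the Tenant Isolation section.
private static Task<bool> ValidateTenantAccess(ClaimsPrincipal user, string? tenantId)
{
    if (string.IsNullOrEmpty(tenantId))
    {
        return Task.FromResult(false);
    }

    // Same-tenant access: the resolved tenant matches the caller's tenant claim.
    var userTenant = user.FindFirst("stellaops:tenant")?.Value;
    if (string.Equals(userTenant, tenantId, StringComparison.OrdinalIgnoreCase))
    {
        return Task.FromResult(true);
    }

    // Cross-tenant exceptions: org admins and callers holding an explicit cross_tenant scope.
    var isOrgAdmin = user.FindAll("stellaops:roles").Any(c => c.Value == "org:admin");
    var scopes = user.FindFirst("scope")?.Value?.Split(' ') ?? Array.Empty<string>();

    return Task.FromResult(isOrgAdmin || scopes.Contains("cross_tenant"));
}
```

In practice this check would likely also consult the Authority service rather than rely purely on token contents; the sketch only encodes the rules stated in this contract.
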
### Scope Authorization

```csharp
// ScopeAuthorization.cs
public static class ScopeAuthorization
{
    public static IResult? RequireScope(HttpContext context, string requiredScope)
    {
        var scopes = context.User.FindFirst("scope")?.Value?.Split(' ') ?? [];

        if (!scopes.Contains(requiredScope) && !HasInheritedScope(scopes, requiredScope))
        {
            return Results.Problem(
                title: "Forbidden",
                detail: $"Missing required scope: {requiredScope}",
                statusCode: 403);
        }

        return null; // Access granted
    }
}
```

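`HasInheritedScope` is referenced above but not defined in this contract. One possible reading, driven directly by the `scope_inheritance` map from the Scopes section, is sketched below; the dictionary wiring and method shape are assumptions.

```csharp
// Illustrative sketch: grants a required scope when any held scope lists it as an inherited (child) scope.
private static readonly Dictionary<string, string[]> ScopeInheritance = new()
{
    ["policy:edit"] = new[] { "policy:read" },
    ["policy:activate"] = new[] { "policy:read", "policy:edit" },
    ["scanner:execute"] = new[] { "scanner:read" },
    ["export:create"] = new[] { "export:read" },
    ["admin:users"] = new[] { "admin:settings" },
};

private static bool HasInheritedScope(IEnumerable<string> heldScopes, string requiredScope) =>
    heldScopes.Any(held =>
        ScopeInheritance.TryGetValue(held, out var children) && children.Contains(requiredScope));
```

With this reading, a token carrying only `policy:activate` would pass `RequireScope` checks for `policy:activate`, `policy:edit`, and `policy:read`.
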
## Web UI Integration

### Route Guards

```typescript
// route-guards.ts
export const TenantGuard: CanActivateFn = (route, state) => {
  const auth = inject(AuthService);
  const requiredRoles = route.data['roles'] as string[];

  if (!auth.hasAnyRole(requiredRoles)) {
    return inject(Router).createUrlTree(['/unauthorized']);
  }

  return true;
};

// Usage in routes
{
  path: 'policy/studio',
  component: PolicyStudioComponent,
  canActivate: [TenantGuard],
  data: { roles: ['policy:admin', 'tenant:admin'] }
}
```

### Scope-Based UI Elements

```typescript
// rbac.directive.ts
@Directive({ selector: '[requireScope]' })
export class RequireScopeDirective {
  // Structural directive: the host template and container are injected so the element can be shown or removed.
  constructor(
    private readonly templateRef: TemplateRef<unknown>,
    private readonly viewContainer: ViewContainerRef,
    private readonly auth: AuthService,
  ) {}

  @Input() set requireScope(scope: string) {
    this.updateVisibility(scope);
  }

  private updateVisibility(scope: string): void {
    const hasScope = this.auth.hasScope(scope);
    this.viewContainer.clear();
    if (hasScope) {
      this.viewContainer.createEmbeddedView(this.templateRef);
    }
  }
}

// Usage in templates
<button *requireScope="'policy:activate'">Activate Policy</button>
```

## Audit Trail

### Audited Operations

All write operations are logged with the following structure:

```json
{
  "timestamp": "2025-12-07T10:30:00Z",
  "actor": {
    "userId": "user-uuid",
    "tenantId": "tenant-uuid",
    "roles": ["policy:admin"],
    "ipAddress": "192.168.1.100"
  },
  "action": "policy.activate",
  "resource": {
    "type": "policy_pack",
    "id": "pack-123",
    "version": 5
  },
  "outcome": "success",
  "details": {
    "previousStatus": "approved",
    "newStatus": "active"
  }
}
```

### Sensitive Operations

These operations require enhanced audit logging:

- `sealed_mode.seal` / `sealed_mode.unseal`
- `policy.activate`
- `export.create` (with PII)
- `user.role.assign`
- `tenant.settings.modify`

## Configuration

### Gateway RBAC Configuration

```yaml
# gateway/rbac.yaml
rbac:
  enabled: true
  strictTenantIsolation: true
  allowCrossTenantForOrgAdmin: true

  defaultRole: tenant:viewer
  defaultScopes:
    - policy:read
    - scanner:read

  roleBindings:
    "tenant:admin":
      scopes:
        - policy:read
        - policy:edit
        - policy:activate
        - scanner:read
        - scanner:execute
        - airgap:status:read
        - export:read
        - export:create
        - admin:users
        - admin:settings

    "policy:admin":
      scopes:
        - policy:read
        - policy:edit
        - policy:activate
```

## Error Responses

### 401 Unauthorized

```json
{
  "type": "https://stellaops.org/problems/unauthorized",
  "title": "Unauthorized",
  "status": 401,
  "detail": "Authentication required."
}
```

### 403 Forbidden

```json
{
  "type": "https://stellaops.org/problems/forbidden",
  "title": "Forbidden",
  "status": 403,
  "detail": "You do not have permission to access this resource.",
  "requiredScope": "policy:activate",
  "currentScopes": ["policy:read"]
}
```

### 404 Not Found (Tenant Isolation)

```json
{
  "type": "https://stellaops.org/problems/not-found",
  "title": "Not Found",
  "status": 404,
  "detail": "Resource not found."
}
```

Note: 404 is returned instead of 403 for resources in other tenants to prevent enumeration attacks.

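A hedged sketch of how a service endpoint might honour this rule follows; the `IFindingStore` abstraction, its `GetAsync` call, and the endpoint shape are assumptions, and the only point illustrated is that a cross-tenant hit is indistinguishable from a missing resource.

```csharp
// Illustrative sketch: a resource that exists but belongs to another tenant is reported as 404, not 403.
app.MapGet("/tenants/{tenantId}/findings/{findingId}",
    async (string tenantId, string findingId, IFindingStore store) =>
{
    var finding = await store.GetAsync(findingId);

    if (finding is null || finding.TenantId != tenantId)
    {
        return Results.NotFound(); // same response whether the finding is absent or lives in another tenant
    }

    return Results.Ok(finding);
});
```
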
## Changelog

| Date | Version | Change |
|------|---------|--------|
| 2025-12-07 | 1.0.0 | Initial contract definition |

## References

- [Auth Scopes Documentation](../security/auth-scopes.md)
- [RBAC Documentation](../security/scopes-and-roles.md)
- [Tenancy Overview](../security/tenancy-overview.md)
- [Rate Limit Design](./rate-limit-design.md)

8 docs/db/reports/assets/vuln-parity-20251211/README.md Normal file
@@ -0,0 +1,8 @@
This folder holds frozen inputs for the 2025-12-11 Vulnerability parity run (Mongo vs Postgres).

Drop files here and record their SHA256 in the parity report tables:
- sboms/: SBOM samples
- advisories/: advisory export subset (10k) if used
- hashes.sha256: manifest of all files (one way to generate it is sketched below)

Do not modify contents once hashes are recorded.

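As a convenience, a manifest can be produced with standard tooling along these lines; the exact flags are a suggestion, not a requirement of this process:

```bash
# From inside this folder: hash every file except the manifest itself, in a stable order.
find . -type f ! -name hashes.sha256 -print0 | sort -z | xargs -0 sha256sum > hashes.sha256

# Later, verify that nothing changed after the hashes were recorded.
sha256sum -c hashes.sha256
```
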
Some files were not shown because too many files have changed in this diff.