Compare commits
67 Commits
feature/do
...
108d1c64b3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
108d1c64b3 | ||
|
|
bc0762e97d | ||
|
|
3d01bf9edc | ||
|
|
68bc53a07b | ||
|
|
4b124fb056 | ||
|
|
7c24ed96ee | ||
|
|
11597679ed | ||
|
|
e3f28a21ab | ||
|
|
a403979177 | ||
|
|
b8641b1959 | ||
|
|
98e6b76584 | ||
|
|
862bb6ed80 | ||
|
|
bd2529502e | ||
|
|
965cbf9574 | ||
|
|
af30fc322f | ||
|
|
e53a282fbe | ||
|
|
d907729778 | ||
|
|
8a72779c16 | ||
|
|
e0f6efecce | ||
|
|
98934170ca | ||
|
|
69651212ec | ||
|
|
53889d85e7 | ||
|
|
0de92144d2 | ||
|
|
9bd6a73926 | ||
|
|
4042fc2184 | ||
|
|
dd0067ea0b | ||
|
|
f6c22854a4 | ||
|
|
05597616d6 | ||
|
|
a6f1406509 | ||
|
|
0a8f8c14af | ||
|
|
7efee7dd41 | ||
|
|
952ba77924 | ||
|
|
23e463e346 | ||
|
|
849a70f9d1 | ||
|
|
868f8e0bb6 | ||
|
|
84c42ca2d8 | ||
|
|
efd6850c38 | ||
|
|
2b892ad1b2 | ||
|
|
e16d2b5224 | ||
|
|
5e514532df | ||
|
|
2141196496 | ||
|
|
bca02ec295 | ||
|
|
8cabdce3b6 | ||
|
|
6145d89468 | ||
|
|
ee317d3f61 | ||
|
|
4cc8bdb460 | ||
|
|
95ff83e0f0 | ||
|
|
3954615e81 | ||
|
|
8948b1a3e2 | ||
|
|
5cfcf0723a | ||
|
|
ba733b9f69 | ||
|
|
79d562ea5d | ||
|
|
a7cd10020a | ||
|
|
b978ae399f | ||
|
|
570746b7d9 | ||
|
|
8318b26370 | ||
|
|
1f76650b7e | ||
|
|
37304cf819 | ||
|
|
6beb9d7c4e | ||
|
|
be8c623e04 | ||
|
|
dd4bb50076 | ||
|
|
bf6ab6ba6f | ||
|
|
02849cc955 | ||
|
|
2eaf0f699b | ||
|
|
6c1177a6ce | ||
|
|
582a88e8f8 | ||
|
|
f0662dd45f |
@@ -1,8 +1,31 @@
|
|||||||
{
|
{
|
||||||
"permissions": {
|
"permissions": {
|
||||||
"allow": [
|
"allow": [
|
||||||
|
"Bash(dotnet --list-sdks:*)",
|
||||||
|
"Bash(winget install:*)",
|
||||||
|
"Bash(dotnet restore:*)",
|
||||||
|
"Bash(dotnet nuget:*)",
|
||||||
|
"Bash(csc -parse:*)",
|
||||||
|
"Bash(grep:*)",
|
||||||
|
"Bash(dotnet build:*)",
|
||||||
|
"Bash(cat:*)",
|
||||||
|
"Bash(copy:*)",
|
||||||
|
"Bash(dotnet test:*)",
|
||||||
|
"Bash(dir:*)",
|
||||||
|
"Bash(Select-Object -ExpandProperty FullName)",
|
||||||
|
"Bash(echo:*)",
|
||||||
|
"Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
|
||||||
"Bash(wc:*)",
|
"Bash(wc:*)",
|
||||||
"Bash(sort:*)"
|
"Bash(find:*)",
|
||||||
|
"WebFetch(domain:docs.gradle.org)",
|
||||||
|
"WebSearch",
|
||||||
|
"Bash(dotnet msbuild:*)",
|
||||||
|
"Bash(test:*)",
|
||||||
|
"Bash(taskkill:*)",
|
||||||
|
"Bash(timeout /t)",
|
||||||
|
"Bash(dotnet clean:*)",
|
||||||
|
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
|
||||||
|
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")"
|
||||||
],
|
],
|
||||||
"deny": [],
|
"deny": [],
|
||||||
"ask": []
|
"ask": []
|
||||||
|
|||||||
23
.dockerignore
Normal file
23
.dockerignore
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitea
|
||||||
|
.venv
|
||||||
|
bin
|
||||||
|
obj
|
||||||
|
**/bin
|
||||||
|
**/obj
|
||||||
|
local-nugets
|
||||||
|
.nuget
|
||||||
|
**/node_modules
|
||||||
|
**/dist
|
||||||
|
**/coverage
|
||||||
|
**/*.user
|
||||||
|
**/*.suo
|
||||||
|
**/*.cache
|
||||||
|
**/.vscode
|
||||||
|
**/.idea
|
||||||
|
**/.DS_Store
|
||||||
|
**/TestResults
|
||||||
|
**/out
|
||||||
|
**/packages
|
||||||
|
/tmp
|
||||||
@@ -24,7 +24,7 @@ jobs:
|
|||||||
aoc-guard:
|
aoc-guard:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
@@ -72,7 +72,7 @@ jobs:
|
|||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
if: github.event_name != 'schedule'
|
if: github.event_name != 'schedule'
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
|
AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
128
.gitea/workflows/artifact-signing.yml
Normal file
128
.gitea/workflows/artifact-signing.yml
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
name: Artifact Signing
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
artifact_path:
|
||||||
|
description: 'Path to artifact to sign'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
|
||||||
|
env:
|
||||||
|
COSIGN_VERSION: 'v2.2.0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
sign-containers:
|
||||||
|
name: Sign Container Images
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
id-token: write
|
||||||
|
packages: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Log in to registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Sign images (keyless)
|
||||||
|
if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
env:
|
||||||
|
COSIGN_EXPERIMENTAL: "1"
|
||||||
|
run: |
|
||||||
|
IMAGES=(
|
||||||
|
"ghcr.io/${{ github.repository }}/concelier"
|
||||||
|
"ghcr.io/${{ github.repository }}/scanner"
|
||||||
|
"ghcr.io/${{ github.repository }}/authority"
|
||||||
|
)
|
||||||
|
for img in "${IMAGES[@]}"; do
|
||||||
|
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||||
|
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||||
|
cosign sign --yes "${img}:${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Sign images (with key)
|
||||||
|
if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
run: |
|
||||||
|
echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
|
||||||
|
IMAGES=(
|
||||||
|
"ghcr.io/${{ github.repository }}/concelier"
|
||||||
|
"ghcr.io/${{ github.repository }}/scanner"
|
||||||
|
"ghcr.io/${{ github.repository }}/authority"
|
||||||
|
)
|
||||||
|
for img in "${IMAGES[@]}"; do
|
||||||
|
if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
|
||||||
|
echo "Signing ${img}:${{ github.ref_name }}..."
|
||||||
|
cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
rm -f /tmp/cosign.key
|
||||||
|
|
||||||
|
sign-sbom:
|
||||||
|
name: Sign SBOM Artifacts
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Generate and sign SBOM
|
||||||
|
run: |
|
||||||
|
# Generate SBOM using syft
|
||||||
|
if command -v syft &> /dev/null; then
|
||||||
|
syft . -o cyclonedx-json > sbom.cdx.json
|
||||||
|
cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
|
||||||
|
else
|
||||||
|
echo "syft not installed, skipping SBOM generation"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload signed artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: signed-sbom
|
||||||
|
path: |
|
||||||
|
sbom.cdx.json
|
||||||
|
sbom.cdx.json.sig
|
||||||
|
if-no-files-found: ignore
|
||||||
|
|
||||||
|
verify-signatures:
|
||||||
|
name: Verify Existing Signatures
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: ${{ env.COSIGN_VERSION }}
|
||||||
|
|
||||||
|
- name: Verify DSSE envelopes
|
||||||
|
run: |
|
||||||
|
find . -name "*.dsse" -o -name "*.dsse.json" | while read f; do
|
||||||
|
echo "Checking $f..."
|
||||||
|
# Basic JSON validation
|
||||||
|
if ! jq empty "$f" 2>/dev/null; then
|
||||||
|
echo "Warning: Invalid JSON in $f"
|
||||||
|
fi
|
||||||
|
done
|
||||||
@@ -37,7 +37,7 @@ on:
|
|||||||
type: boolean
|
type: boolean
|
||||||
|
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
BUILD_CONFIGURATION: Release
|
BUILD_CONFIGURATION: Release
|
||||||
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
|
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
|
||||||
RUNNER_TOOL_CACHE: /toolcache
|
RUNNER_TOOL_CACHE: /toolcache
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ jobs:
|
|||||||
- name: Setup .NET
|
- name: Setup .NET
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
- name: Install syft (SBOM)
|
- name: Install syft (SBOM)
|
||||||
uses: anchore/sbom-action/download-syft@v0
|
uses: anchore/sbom-action/download-syft@v0
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ jobs:
|
|||||||
- name: Setup .NET
|
- name: Setup .NET
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
- name: Chaos smoke
|
- name: Chaos smoke
|
||||||
if: ${{ github.event.inputs.chaos == 'true' }}
|
if: ${{ github.event.inputs.chaos == 'true' }}
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 preview
|
- name: Setup .NET 10 preview
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: '10.0.100-rc.2.25502.107'
|
dotnet-version: '10.0.100'
|
||||||
|
|
||||||
- name: Restore Concelier solution
|
- name: Restore Concelier solution
|
||||||
run: dotnet restore src/Concelier/StellaOps.Concelier.sln
|
run: dotnet restore src/Concelier/StellaOps.Concelier.sln
|
||||||
|
|||||||
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
32
.gitea/workflows/concelier-store-aoc-19-005.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: Concelier STORE-AOC-19-005 Dataset
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-dataset:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: sudo apt-get update && sudo apt-get install -y zstd
|
||||||
|
|
||||||
|
- name: Build dataset tarball
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
|
||||||
|
scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Validate dataset
|
||||||
|
run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"
|
||||||
|
|
||||||
|
- name: Upload dataset artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: concelier-store-aoc-19-005-dataset
|
||||||
|
path: |
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
|
||||||
|
${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
|
||||||
@@ -1,86 +1,58 @@
|
|||||||
name: Console CI
|
name: console-ci
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
workflow_dispatch:
|
||||||
branches: [ main ]
|
|
||||||
paths:
|
|
||||||
- 'src/UI/**'
|
|
||||||
- '.gitea/workflows/console-ci.yml'
|
|
||||||
- 'docs/modules/devops/console-ci-contract.md'
|
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [ main, develop ]
|
|
||||||
paths:
|
paths:
|
||||||
- 'src/UI/**'
|
- 'src/Web/**'
|
||||||
- '.gitea/workflows/console-ci.yml'
|
- '.gitea/workflows/console-ci.yml'
|
||||||
- 'docs/modules/devops/console-ci-contract.md'
|
- 'ops/devops/console/**'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
console-ci:
|
lint-test-build:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-latest
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
working-directory: src/Web
|
||||||
env:
|
env:
|
||||||
PNPM_HOME: ~/.pnpm
|
PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
|
||||||
PLAYWRIGHT_BROWSERS_PATH: ./.playwright
|
CI: true
|
||||||
SOURCE_DATE_EPOCH: ${{ github.run_id }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Task Pack offline bundle fixtures
|
- name: Setup Node
|
||||||
run: python3 scripts/packs/run-fixtures-check.sh
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Set up Node.js 20
|
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '20'
|
node-version: '20'
|
||||||
|
cache: npm
|
||||||
|
cache-dependency-path: src/Web/package-lock.json
|
||||||
|
|
||||||
- name: Enable pnpm
|
- name: Install deps (offline-friendly)
|
||||||
|
run: npm ci --prefer-offline --no-audit --progress=false
|
||||||
|
|
||||||
|
- name: Lint
|
||||||
|
run: npm run lint -- --no-progress
|
||||||
|
|
||||||
|
- name: Console export specs (targeted)
|
||||||
|
run: bash ./scripts/ci-console-exports.sh
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: npm run build -- --configuration=production --progress=false
|
||||||
|
|
||||||
|
- name: Collect artifacts
|
||||||
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
corepack enable
|
mkdir -p ../artifacts
|
||||||
corepack prepare pnpm@9 --activate
|
cp -r dist ../artifacts/dist || true
|
||||||
|
cp -r coverage ../artifacts/coverage || true
|
||||||
- name: Cache pnpm store & node_modules
|
find . -maxdepth 3 -type f -name "*.xml" -o -name "*.trx" -o -name "*.json" -path "*test*" -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
~/.pnpm-store
|
|
||||||
node_modules
|
|
||||||
./.pnpm-store
|
|
||||||
./.playwright
|
|
||||||
key: console-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
|
||||||
|
|
||||||
- name: Install dependencies (offline-first)
|
|
||||||
env:
|
|
||||||
PNPM_FETCH_RETRIES: 0
|
|
||||||
PNPM_OFFLINE: 1
|
|
||||||
run: |
|
|
||||||
pnpm install --frozen-lockfile || PNPM_OFFLINE=0 pnpm install --frozen-lockfile --prefer-offline
|
|
||||||
|
|
||||||
- name: Lint / Types
|
|
||||||
run: pnpm lint && pnpm format:check && pnpm typecheck
|
|
||||||
|
|
||||||
- name: Unit tests
|
|
||||||
run: pnpm test -- --runInBand --reporter=junit --outputFile=.artifacts/junit.xml
|
|
||||||
|
|
||||||
- name: Storybook a11y
|
|
||||||
run: |
|
|
||||||
pnpm storybook:build
|
|
||||||
pnpm storybook:a11y --ci --output .artifacts/storybook-a11y.json
|
|
||||||
|
|
||||||
- name: Playwright smoke
|
|
||||||
run: pnpm playwright test --config=playwright.config.ci.ts --reporter=list,junit=.artifacts/playwright.xml
|
|
||||||
|
|
||||||
- name: Lighthouse (CI budgets)
|
|
||||||
run: |
|
|
||||||
pnpm serve --port 4173 &
|
|
||||||
pnpm lhci autorun --config=lighthouserc.ci.js --upload.target=filesystem --upload.outputDir=.artifacts/lhci
|
|
||||||
|
|
||||||
- name: SBOM
|
|
||||||
run: pnpm exec syft packages dir:dist --output=spdx-json=.artifacts/console.spdx.json
|
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
|
if: always()
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: console-ci-artifacts
|
name: console-ci-${{ github.run_id }}
|
||||||
path: .artifacts
|
path: artifacts
|
||||||
|
retention-days: 14
|
||||||
|
|||||||
32
.gitea/workflows/console-runner-image.yml
Normal file
32
.gitea/workflows/console-runner-image.yml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
name: console-runner-image
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- 'ops/devops/console/**'
|
||||||
|
- '.gitea/workflows/console-runner-image.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-runner-image:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Build runner image tarball (baked caches)
|
||||||
|
env:
|
||||||
|
RUN_ID: ${{ github.run_id }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
|
||||||
|
ops/devops/console/build-runner-image-ci.sh
|
||||||
|
|
||||||
|
- name: Upload runner image artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: console-runner-image-${{ github.run_id }}
|
||||||
|
path: ops/devops/artifacts/console-runner/
|
||||||
|
retention-days: 14
|
||||||
55
.gitea/workflows/cryptopro-linux-csp.yml
Normal file
55
.gitea/workflows/cryptopro-linux-csp.yml
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
name: cryptopro-linux-csp
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'ops/cryptopro/linux-csp-service/**'
|
||||||
|
- 'opt/cryptopro/downloads/**'
|
||||||
|
- '.gitea/workflows/cryptopro-linux-csp.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'ops/cryptopro/linux-csp-service/**'
|
||||||
|
- 'opt/cryptopro/downloads/**'
|
||||||
|
- '.gitea/workflows/cryptopro-linux-csp.yml'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE_NAME: cryptopro-linux-csp
|
||||||
|
DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Build image (accept EULA explicitly)
|
||||||
|
run: |
|
||||||
|
docker build -t $IMAGE_NAME \
|
||||||
|
--build-arg CRYPTOPRO_ACCEPT_EULA=1 \
|
||||||
|
-f $DOCKERFILE .
|
||||||
|
|
||||||
|
- name: Run container
|
||||||
|
run: |
|
||||||
|
docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
|
||||||
|
for i in {1..20}; do
|
||||||
|
if curl -sf http://127.0.0.1:18080/health >/dev/null; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
sleep 3
|
||||||
|
done
|
||||||
|
echo "Service failed to start" && exit 1
|
||||||
|
|
||||||
|
- name: Test endpoints
|
||||||
|
run: |
|
||||||
|
curl -sf http://127.0.0.1:18080/health
|
||||||
|
curl -sf http://127.0.0.1:18080/license || true
|
||||||
|
curl -sf -X POST http://127.0.0.1:18080/hash \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"data_b64":"SGVsbG8="}'
|
||||||
|
|
||||||
|
- name: Stop container
|
||||||
|
if: always()
|
||||||
|
run: docker rm -f $IMAGE_NAME || true
|
||||||
@@ -25,7 +25,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 (preview)
|
- name: Setup .NET 10 (preview)
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
|
|
||||||
- name: Build CryptoPro plugin
|
- name: Build CryptoPro plugin
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
@@ -47,7 +47,7 @@ jobs:
|
|||||||
- name: Setup .NET SDK
|
- name: Setup .NET SDK
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: '10.0.100-rc.2.25502.107'
|
dotnet-version: '10.0.100'
|
||||||
|
|
||||||
- name: Link check
|
- name: Link check
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ jobs:
|
|||||||
export-ci:
|
export-ci:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
MINIO_ACCESS_KEY: exportci
|
MINIO_ACCESS_KEY: exportci
|
||||||
MINIO_SECRET_KEY: exportci123
|
MINIO_SECRET_KEY: exportci123
|
||||||
BUCKET: export-ci
|
BUCKET: export-ci
|
||||||
|
|||||||
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
325
.gitea/workflows/findings-ledger-ci.yml
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
# .gitea/workflows/findings-ledger-ci.yml
|
||||||
|
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)
|
||||||
|
|
||||||
|
name: Findings Ledger CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Findings/**'
|
||||||
|
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||||
|
- 'deploy/releases/2025.09-stable.yaml'
|
||||||
|
- 'deploy/releases/2025.09-airgap.yaml'
|
||||||
|
- 'deploy/downloads/manifest.json'
|
||||||
|
- 'ops/devops/release/check_release_manifest.py'
|
||||||
|
pull_request:
|
||||||
|
branches: [main, develop]
|
||||||
|
paths:
|
||||||
|
- 'src/Findings/**'
|
||||||
|
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
POSTGRES_IMAGE: postgres:16-alpine
|
||||||
|
BUILD_CONFIGURATION: Release
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-test:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: |
|
||||||
|
dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
|
||||||
|
dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
|
||||||
|
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||||
|
/p:ContinuousIntegrationBuild=true
|
||||||
|
|
||||||
|
- name: Run unit tests
|
||||||
|
run: |
|
||||||
|
mkdir -p $TEST_RESULTS_DIR
|
||||||
|
dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
|
||||||
|
-c ${{ env.BUILD_CONFIGURATION }} \
|
||||||
|
--logger "trx;LogFileName=ledger-tests.trx" \
|
||||||
|
--results-directory $TEST_RESULTS_DIR
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: ledger-test-results
|
||||||
|
path: ${{ env.TEST_RESULTS_DIR }}
|
||||||
|
|
||||||
|
migration-validation:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
env:
|
||||||
|
POSTGRES_USER: ledgertest
|
||||||
|
POSTGRES_PASSWORD: ledgertest
|
||||||
|
POSTGRES_DB: ledger_test
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
env:
|
||||||
|
PGHOST: localhost
|
||||||
|
PGPORT: 5432
|
||||||
|
PGUSER: ledgertest
|
||||||
|
PGPASSWORD: ledgertest
|
||||||
|
PGDATABASE: ledger_test
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Install PostgreSQL client
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y postgresql-client
|
||||||
|
|
||||||
|
- name: Wait for PostgreSQL
|
||||||
|
run: |
|
||||||
|
until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
|
||||||
|
echo "Waiting for PostgreSQL..."
|
||||||
|
sleep 2
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Apply prerequisite migrations (001-006)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
|
||||||
|
for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
|
||||||
|
003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
|
||||||
|
005_risk_fields.sql 006_orchestrator_airgap.sql; do
|
||||||
|
if [ -f "$MIGRATION_DIR/$migration" ]; then
|
||||||
|
echo "Applying migration: $migration"
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Apply RLS migration (007_enable_rls.sql)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Applying RLS migration..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||||
|
|
||||||
|
- name: Validate RLS configuration
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Validating RLS is enabled on all protected tables..."
|
||||||
|
|
||||||
|
# Check RLS enabled
|
||||||
|
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||||
|
WHERE n.nspname = 'public'
|
||||||
|
AND c.relrowsecurity = true
|
||||||
|
AND c.relname IN (
|
||||||
|
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||||
|
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||||
|
'orchestrator_exports', 'airgap_imports'
|
||||||
|
);
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$TABLES_WITH_RLS" -ne 8 ]; then
|
||||||
|
echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ All 8 tables have RLS enabled"
|
||||||
|
|
||||||
|
# Check policies exist
|
||||||
|
POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(DISTINCT tablename)
|
||||||
|
FROM pg_policies
|
||||||
|
WHERE schemaname = 'public'
|
||||||
|
AND policyname LIKE '%_tenant_isolation';
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$POLICIES" -ne 8 ]; then
|
||||||
|
echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ All 8 tenant isolation policies created"
|
||||||
|
|
||||||
|
# Check tenant function exists
|
||||||
|
FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_proc p
|
||||||
|
JOIN pg_namespace n ON p.pronamespace = n.oid
|
||||||
|
WHERE p.proname = 'require_current_tenant'
|
||||||
|
AND n.nspname = 'findings_ledger_app';
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$FUNC_EXISTS" -ne 1 ]; then
|
||||||
|
echo "::error::Tenant function 'require_current_tenant' not found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== RLS Migration Validation PASSED ==="
|
||||||
|
|
||||||
|
- name: Test rollback migration
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Testing rollback migration..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql
|
||||||
|
|
||||||
|
# Verify RLS is disabled
|
||||||
|
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||||
|
WHERE n.nspname = 'public'
|
||||||
|
AND c.relrowsecurity = true
|
||||||
|
AND c.relname IN (
|
||||||
|
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||||
|
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||||
|
'orchestrator_exports', 'airgap_imports'
|
||||||
|
);
|
||||||
|
")
|
||||||
|
|
||||||
|
if [ "$TABLES_WITH_RLS" -ne 0 ]; then
|
||||||
|
echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✓ Rollback successful - RLS disabled on all tables"
|
||||||
|
- name: Validate release manifests (production)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
python ops/devops/release/check_release_manifest.py
|
||||||
|
|
||||||
|
- name: Re-apply RLS migration (idempotency check)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "Re-applying RLS migration to verify idempotency..."
|
||||||
|
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||||
|
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||||
|
echo "✓ Migration is idempotent"
|
||||||
|
|
||||||
|
generate-manifest:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: [build-test, migration-validation]
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Generate migration manifest
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
|
||||||
|
ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
|
||||||
|
MANIFEST_DIR="out/findings-ledger/migrations"
|
||||||
|
mkdir -p "$MANIFEST_DIR"
|
||||||
|
|
||||||
|
# Compute SHA256 hashes
|
||||||
|
MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
|
||||||
|
ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
|
||||||
|
CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
|
||||||
|
cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
|
||||||
|
{
|
||||||
|
"\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
|
||||||
|
"schemaVersion": "1.0.0",
|
||||||
|
"migrationId": "007_enable_rls",
|
||||||
|
"module": "findings-ledger",
|
||||||
|
"version": "2025.12.0",
|
||||||
|
"createdAt": "$CREATED_AT",
|
||||||
|
"description": "Enable Row-Level Security for Findings Ledger tenant isolation",
|
||||||
|
"taskId": "LEDGER-TEN-48-001-DEV",
|
||||||
|
"contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
|
||||||
|
"database": {
|
||||||
|
"engine": "postgresql",
|
||||||
|
"minVersion": "16.0"
|
||||||
|
},
|
||||||
|
"files": {
|
||||||
|
"apply": {
|
||||||
|
"path": "007_enable_rls.sql",
|
||||||
|
"sha256": "$MIGRATION_SHA"
|
||||||
|
},
|
||||||
|
"rollback": {
|
||||||
|
"path": "007_enable_rls_rollback.sql",
|
||||||
|
"sha256": "$ROLLBACK_SHA"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"affects": {
|
||||||
|
"tables": [
|
||||||
|
"ledger_events",
|
||||||
|
"ledger_merkle_roots",
|
||||||
|
"findings_projection",
|
||||||
|
"finding_history",
|
||||||
|
"triage_actions",
|
||||||
|
"ledger_attestations",
|
||||||
|
"orchestrator_exports",
|
||||||
|
"airgap_imports"
|
||||||
|
],
|
||||||
|
"schemas": ["public", "findings_ledger_app"],
|
||||||
|
"roles": ["findings_ledger_admin"]
|
||||||
|
},
|
||||||
|
"prerequisites": [
|
||||||
|
"006_orchestrator_airgap"
|
||||||
|
],
|
||||||
|
"validation": {
|
||||||
|
"type": "rls-check",
|
||||||
|
"expectedTables": 8,
|
||||||
|
"expectedPolicies": 8,
|
||||||
|
"tenantFunction": "findings_ledger_app.require_current_tenant"
|
||||||
|
},
|
||||||
|
"offlineKit": {
|
||||||
|
"includedInBundle": true,
|
||||||
|
"requiresManualApply": true,
|
||||||
|
"applyOrder": 7
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||||
|
cat "$MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||||
|
|
||||||
|
- name: Copy migration files for offline-kit
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
|
||||||
|
mkdir -p "$OFFLINE_DIR"
|
||||||
|
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
|
||||||
|
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
|
||||||
|
cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
|
||||||
|
echo "Offline-kit migration files prepared"
|
||||||
|
ls -la "$OFFLINE_DIR"
|
||||||
|
|
||||||
|
- name: Upload migration artefacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: findings-ledger-migrations
|
||||||
|
path: out/findings-ledger/
|
||||||
|
if-no-files-found: error
|
||||||
68
.gitea/workflows/icscisa-kisa-refresh.yml
Normal file
68
.gitea/workflows/icscisa-kisa-refresh.yml
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
name: ICS/KISA Feed Refresh
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 2 * * MON'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
live_fetch:
|
||||||
|
description: 'Attempt live RSS fetch (fallback to samples on failure)'
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
offline_snapshot:
|
||||||
|
description: 'Force offline samples only (no network)'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
refresh:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
env:
|
||||||
|
ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
|
||||||
|
KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
|
||||||
|
FEED_GATEWAY_HOST: concelier-webservice
|
||||||
|
FEED_GATEWAY_SCHEME: http
|
||||||
|
LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
|
||||||
|
OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set run metadata
|
||||||
|
id: meta
|
||||||
|
run: |
|
||||||
|
RUN_DATE=$(date -u +%Y%m%d)
|
||||||
|
RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
|
||||||
|
echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
|
||||||
|
echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
|
||||||
|
echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
|
||||||
|
echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Run ICS/KISA refresh
|
||||||
|
run: |
|
||||||
|
python scripts/feeds/run_icscisa_kisa_refresh.py \
|
||||||
|
--out-dir out/feeds/icscisa-kisa \
|
||||||
|
--run-date "${{ steps.meta.outputs.run_date }}" \
|
||||||
|
--run-id "${{ steps.meta.outputs.run_id }}"
|
||||||
|
|
||||||
|
- name: Show fetch log
|
||||||
|
run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log
|
||||||
|
|
||||||
|
- name: Upload refresh artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
|
||||||
|
path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 21
|
||||||
@@ -21,7 +21,7 @@ jobs:
|
|||||||
lnm-backfill:
|
lnm-backfill:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ jobs:
|
|||||||
vex-backfill:
|
vex-backfill:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
|
|||||||
125
.gitea/workflows/manifest-integrity.yml
Normal file
125
.gitea/workflows/manifest-integrity.yml
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
name: Manifest Integrity
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'docs/**/*.schema.json'
|
||||||
|
- 'docs/contracts/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- 'scripts/packs/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'docs/**/*.schema.json'
|
||||||
|
- 'docs/contracts/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- 'scripts/packs/**'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-schemas:
|
||||||
|
name: Validate Schema Integrity
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: npm install -g ajv-cli ajv-formats
|
||||||
|
|
||||||
|
- name: Validate JSON schemas
|
||||||
|
run: |
|
||||||
|
EXIT_CODE=0
|
||||||
|
for schema in docs/schemas/*.schema.json; do
|
||||||
|
echo "Validating $schema..."
|
||||||
|
if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
|
||||||
|
echo "Error: $schema is invalid"
|
||||||
|
EXIT_CODE=1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
exit $EXIT_CODE
|
||||||
|
|
||||||
|
validate-contracts:
|
||||||
|
name: Validate Contract Documents
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check contract structure
|
||||||
|
run: |
|
||||||
|
for contract in docs/contracts/*.md; do
|
||||||
|
echo "Checking $contract..."
|
||||||
|
# Verify required sections exist
|
||||||
|
if ! grep -q "^## " "$contract"; then
|
||||||
|
echo "Warning: $contract missing section headers"
|
||||||
|
fi
|
||||||
|
# Check for decision ID
|
||||||
|
if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
|
||||||
|
echo "Warning: $contract missing decision ID format"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
validate-pack-fixtures:
|
||||||
|
name: Validate Pack Fixtures
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.12'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pip install jsonschema
|
||||||
|
|
||||||
|
- name: Run fixture validation
|
||||||
|
run: |
|
||||||
|
if [ -f scripts/packs/run-fixtures-check.sh ]; then
|
||||||
|
chmod +x scripts/packs/run-fixtures-check.sh
|
||||||
|
./scripts/packs/run-fixtures-check.sh
|
||||||
|
fi
|
||||||
|
|
||||||
|
checksum-audit:
|
||||||
|
name: Audit SHA256SUMS Files
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate checksums
|
||||||
|
run: |
|
||||||
|
find . -name "SHA256SUMS" -type f | while read f; do
|
||||||
|
dir=$(dirname "$f")
|
||||||
|
echo "Validating checksums in $dir..."
|
||||||
|
cd "$dir"
|
||||||
|
# Check if all referenced files exist
|
||||||
|
while read hash file; do
|
||||||
|
if [ ! -f "$file" ]; then
|
||||||
|
echo "Warning: $file referenced in SHA256SUMS but not found"
|
||||||
|
fi
|
||||||
|
done < SHA256SUMS
|
||||||
|
cd - > /dev/null
|
||||||
|
done
|
||||||
|
|
||||||
|
merkle-consistency:
|
||||||
|
name: Verify Merkle Roots
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check DSSE Merkle roots
|
||||||
|
run: |
|
||||||
|
find . -name "*.dsse.json" -type f | while read f; do
|
||||||
|
echo "Checking Merkle root in $f..."
|
||||||
|
# Extract and validate Merkle root if present
|
||||||
|
if jq -e '.payload' "$f" > /dev/null 2>&1; then
|
||||||
|
PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
|
||||||
|
if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
|
||||||
|
MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
|
||||||
|
echo " Merkle root: $MERKLE"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
@@ -18,10 +18,18 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Fallback to dev signing key when secret is absent (non-prod only)
|
||||||
|
run: |
|
||||||
|
if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
|
||||||
|
echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
|
||||||
|
echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
|
||||||
|
echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Setup .NET
|
- name: Setup .NET
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
include-prerelease: true
|
include-prerelease: true
|
||||||
|
|
||||||
- name: Task Pack offline bundle fixtures
|
- name: Task Pack offline bundle fixtures
|
||||||
@@ -38,6 +46,16 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz
|
scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz
|
||||||
|
|
||||||
|
- name: Prepare Export Center handoff (metadata + optional schedule)
|
||||||
|
run: |
|
||||||
|
scripts/mirror/export-center-wire.sh
|
||||||
|
env:
|
||||||
|
EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
|
||||||
|
EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
|
||||||
|
EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
|
||||||
|
EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
|
||||||
|
EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}
|
||||||
|
|
||||||
- name: Upload signed artifacts
|
- name: Upload signed artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
@@ -49,5 +67,8 @@ jobs:
|
|||||||
out/mirror/thin/tuf/
|
out/mirror/thin/tuf/
|
||||||
out/mirror/thin/oci/
|
out/mirror/thin/oci/
|
||||||
out/mirror/thin/milestone.json
|
out/mirror/thin/milestone.json
|
||||||
|
out/mirror/thin/export-center/export-center-handoff.json
|
||||||
|
out/mirror/thin/export-center/export-center-targets.json
|
||||||
|
out/mirror/thin/export-center/schedule-response.json
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 14
|
retention-days: 14
|
||||||
|
|||||||
44
.gitea/workflows/mock-dev-release.yml
Normal file
44
.gitea/workflows/mock-dev-release.yml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
name: mock-dev-release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- deploy/releases/2025.09-mock-dev.yaml
|
||||||
|
- deploy/downloads/manifest.json
|
||||||
|
- ops/devops/mock-release/**
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
package-mock-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Package mock dev artefacts
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
mkdir -p out/mock-release
|
||||||
|
cp deploy/releases/2025.09-mock-dev.yaml out/mock-release/
|
||||||
|
cp deploy/downloads/manifest.json out/mock-release/
|
||||||
|
tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
|
||||||
|
|
||||||
|
- name: Compose config (dev + mock overlay)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
ops/devops/mock-release/config_check.sh
|
||||||
|
|
||||||
|
- name: Helm template (mock overlay)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
|
||||||
|
ls -lh /tmp/helm-mock.yaml
|
||||||
|
|
||||||
|
- name: Upload mock release bundle
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: mock-dev-release
|
||||||
|
path: |
|
||||||
|
out/mock-release/mock-dev-release.tgz
|
||||||
|
/tmp/compose-mock-config.yaml
|
||||||
|
/tmp/helm-mock.yaml
|
||||||
102
.gitea/workflows/notify-smoke-test.yml
Normal file
102
.gitea/workflows/notify-smoke-test.yml
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
name: Notify Smoke Test
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Notify/**'
|
||||||
|
- 'src/Notifier/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Notify/**'
|
||||||
|
- 'src/Notifier/**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
unit-tests:
|
||||||
|
name: Notify Unit Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/Notify/
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Notify/ --no-restore
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
run: dotnet test src/Notify/ --no-build --verbosity normal
|
||||||
|
|
||||||
|
notifier-tests:
|
||||||
|
name: Notifier Service Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore dependencies
|
||||||
|
run: dotnet restore src/Notifier/
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Notifier/ --no-restore
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
run: dotnet test src/Notifier/ --no-build --verbosity normal
|
||||||
|
|
||||||
|
smoke-test:
|
||||||
|
name: Notification Smoke Test
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: [unit-tests, notifier-tests]
|
||||||
|
services:
|
||||||
|
mongodb:
|
||||||
|
image: mongo:7.0
|
||||||
|
ports:
|
||||||
|
- 27017:27017
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Build Notifier
|
||||||
|
run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/
|
||||||
|
|
||||||
|
- name: Start service
|
||||||
|
run: |
|
||||||
|
dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
|
||||||
|
sleep 10
|
||||||
|
|
||||||
|
- name: Health check
|
||||||
|
run: |
|
||||||
|
for i in {1..30}; do
|
||||||
|
if curl -s http://localhost:5000/health > /dev/null; then
|
||||||
|
echo "Service is healthy"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "Service failed to start"
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
- name: Test notification endpoint
|
||||||
|
run: |
|
||||||
|
# Test dry-run notification
|
||||||
|
curl -X POST http://localhost:5000/api/v1/notifications/test \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
|
||||||
|
|| echo "Warning: Notification test endpoint not available"
|
||||||
@@ -35,7 +35,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 RC
|
- name: Setup .NET 10 RC
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
include-prerelease: true
|
include-prerelease: true
|
||||||
|
|
||||||
- name: Cache NuGet packages
|
- name: Cache NuGet packages
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 RC
|
- name: Setup .NET 10 RC
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
include-prerelease: true
|
include-prerelease: true
|
||||||
|
|
||||||
- name: Install Cosign
|
- name: Install Cosign
|
||||||
|
|||||||
19
.gitea/workflows/release-manifest-verify.yml
Normal file
19
.gitea/workflows/release-manifest-verify.yml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
name: release-manifest-verify
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- deploy/releases/2025.09-stable.yaml
|
||||||
|
- deploy/releases/2025.09-airgap.yaml
|
||||||
|
- deploy/downloads/manifest.json
|
||||||
|
- ops/devops/release/check_release_manifest.py
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
verify:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Validate release & downloads manifests
|
||||||
|
run: |
|
||||||
|
python ops/devops/release/check_release_manifest.py
|
||||||
120
.gitea/workflows/release-validation.yml
Normal file
120
.gitea/workflows/release-validation.yml
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
name: Release Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- 'v*'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'deploy/**'
|
||||||
|
- 'scripts/release/**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
REGISTRY: ghcr.io
|
||||||
|
IMAGE_PREFIX: stellaops
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
validate-manifests:
|
||||||
|
name: Validate Release Manifests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate Helm charts
|
||||||
|
run: |
|
||||||
|
helm lint deploy/helm/stellaops
|
||||||
|
helm template stellaops deploy/helm/stellaops --dry-run
|
||||||
|
|
||||||
|
- name: Validate Kubernetes manifests
|
||||||
|
run: |
|
||||||
|
for f in deploy/k8s/*.yaml; do
|
||||||
|
kubectl apply --dry-run=client -f "$f" || exit 1
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Check required images exist
|
||||||
|
run: |
|
||||||
|
REQUIRED_IMAGES=(
|
||||||
|
"concelier"
|
||||||
|
"scanner"
|
||||||
|
"authority"
|
||||||
|
"signer"
|
||||||
|
"attestor"
|
||||||
|
"excititor"
|
||||||
|
"policy"
|
||||||
|
"scheduler"
|
||||||
|
"notify"
|
||||||
|
)
|
||||||
|
for img in "${REQUIRED_IMAGES[@]}"; do
|
||||||
|
echo "Checking $img..."
|
||||||
|
# Validate Dockerfile exists
|
||||||
|
if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "deploy/docker/${img}/Dockerfile" ]; then
|
||||||
|
echo "Warning: Dockerfile not found for $img"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
validate-checksums:
|
||||||
|
name: Validate Artifact Checksums
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Verify SHA256SUMS files
|
||||||
|
run: |
|
||||||
|
find . -name "SHA256SUMS" -type f | while read f; do
|
||||||
|
dir=$(dirname "$f")
|
||||||
|
echo "Validating $f..."
|
||||||
|
cd "$dir"
|
||||||
|
if ! sha256sum -c SHA256SUMS --quiet 2>/dev/null; then
|
||||||
|
echo "Warning: Checksum mismatch in $dir"
|
||||||
|
fi
|
||||||
|
cd - > /dev/null
|
||||||
|
done
|
||||||
|
|
||||||
|
validate-schemas:
|
||||||
|
name: Validate Schema Integrity
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
- name: Install ajv-cli
|
||||||
|
run: npm install -g ajv-cli ajv-formats
|
||||||
|
|
||||||
|
- name: Validate JSON schemas
|
||||||
|
run: |
|
||||||
|
for schema in docs/schemas/*.schema.json; do
|
||||||
|
echo "Validating $schema..."
|
||||||
|
ajv compile -s "$schema" --spec=draft2020 || echo "Warning: $schema validation issue"
|
||||||
|
done
|
||||||
|
|
||||||
|
release-notes:
|
||||||
|
name: Generate Release Notes
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: startsWith(github.ref, 'refs/tags/v')
|
||||||
|
needs: [validate-manifests, validate-checksums, validate-schemas]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Generate changelog
|
||||||
|
run: |
|
||||||
|
PREV_TAG=$(git describe --abbrev=0 --tags HEAD^ 2>/dev/null || echo "")
|
||||||
|
if [ -n "$PREV_TAG" ]; then
|
||||||
|
echo "## Changes since $PREV_TAG" > RELEASE_NOTES.md
|
||||||
|
git log --pretty=format:"- %s (%h)" "$PREV_TAG"..HEAD >> RELEASE_NOTES.md
|
||||||
|
else
|
||||||
|
echo "## Initial Release" > RELEASE_NOTES.md
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload release notes
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-notes
|
||||||
|
path: RELEASE_NOTES.md
|
||||||
@@ -36,7 +36,7 @@ jobs:
|
|||||||
build-release:
|
build-release:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
env:
|
env:
|
||||||
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
|
DOTNET_VERSION: '10.0.100'
|
||||||
REGISTRY: registry.stella-ops.org
|
REGISTRY: registry.stella-ops.org
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
|
|||||||
198
.gitea/workflows/risk-bundle-ci.yml
Normal file
198
.gitea/workflows/risk-bundle-ci.yml
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
name: Risk Bundle CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||||
|
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||||
|
- 'ops/devops/risk-bundle/**'
|
||||||
|
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||||
|
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main, develop ]
|
||||||
|
paths:
|
||||||
|
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||||
|
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||||
|
- 'ops/devops/risk-bundle/**'
|
||||||
|
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||||
|
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
include_osv:
|
||||||
|
description: 'Include OSV providers (larger bundle)'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
publish_checksums:
|
||||||
|
description: 'Publish checksums to artifact store'
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
risk-bundle-build:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||||
|
run: scripts/enable-openssl11-shim.sh
|
||||||
|
|
||||||
|
- name: Set up .NET SDK
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true
|
||||||
|
|
||||||
|
- name: Test RiskBundle unit tests
|
||||||
|
run: |
|
||||||
|
mkdir -p $ARTIFACT_DIR
|
||||||
|
dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
|
||||||
|
-c Release \
|
||||||
|
--filter "FullyQualifiedName~RiskBundle" \
|
||||||
|
--logger "trx;LogFileName=risk-bundle-tests.trx" \
|
||||||
|
--results-directory $ARTIFACT_DIR
|
||||||
|
|
||||||
|
- name: Build risk bundle (fixtures)
|
||||||
|
run: |
|
||||||
|
mkdir -p $BUNDLE_OUTPUT
|
||||||
|
ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only
|
||||||
|
|
||||||
|
- name: Verify bundle integrity
|
||||||
|
run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"
|
||||||
|
|
||||||
|
- name: Generate checksums
|
||||||
|
run: |
|
||||||
|
cd $BUNDLE_OUTPUT
|
||||||
|
sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
|
||||||
|
sha256sum manifest.json > manifest.json.sha256
|
||||||
|
cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
|
||||||
|
echo "Bundle checksums:"
|
||||||
|
cat checksums.txt
|
||||||
|
|
||||||
|
- name: Upload risk bundle artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: risk-bundle-artifacts
|
||||||
|
path: |
|
||||||
|
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
|
||||||
|
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
|
||||||
|
${{ env.BUNDLE_OUTPUT }}/manifest.json
|
||||||
|
${{ env.BUNDLE_OUTPUT }}/checksums.txt
|
||||||
|
${{ env.ARTIFACT_DIR }}/*.trx
|
||||||
|
|
||||||
|
- name: Upload test results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: risk-bundle-test-results
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}/*.trx
|
||||||
|
|
||||||
|
risk-bundle-offline-kit:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: risk-bundle-build
|
||||||
|
env:
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download risk bundle artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: risk-bundle-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
|
|
||||||
|
- name: Package for offline kit
|
||||||
|
run: |
|
||||||
|
mkdir -p $OFFLINE_KIT_DIR/risk-bundles
|
||||||
|
cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
|
||||||
|
cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
|
||||||
|
cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
|
||||||
|
cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/
|
||||||
|
|
||||||
|
# Create offline kit manifest entry
|
||||||
|
cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
|
||||||
|
{
|
||||||
|
"component": "risk-bundle",
|
||||||
|
"version": "$(date -u +%Y%m%d-%H%M%S)",
|
||||||
|
"files": [
|
||||||
|
{"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
|
||||||
|
{"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
|
||||||
|
],
|
||||||
|
"verification": {
|
||||||
|
"checksums": "checksums.txt",
|
||||||
|
"signature": "risk-bundle.tar.gz.sig"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Verify offline kit structure
|
||||||
|
run: |
|
||||||
|
echo "Offline kit structure:"
|
||||||
|
find $OFFLINE_KIT_DIR -type f
|
||||||
|
echo ""
|
||||||
|
echo "Checksum verification:"
|
||||||
|
cd $OFFLINE_KIT_DIR/risk-bundles
|
||||||
|
sha256sum -c checksums.txt
|
||||||
|
|
||||||
|
- name: Upload offline kit
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: risk-bundle-offline-kit
|
||||||
|
path: ${{ env.OFFLINE_KIT_DIR }}
|
||||||
|
|
||||||
|
publish-checksums:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: risk-bundle-build
|
||||||
|
if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
|
||||||
|
env:
|
||||||
|
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Download risk bundle artifacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: risk-bundle-artifacts
|
||||||
|
path: ${{ env.ARTIFACT_DIR }}
|
||||||
|
|
||||||
|
- name: Publish checksums
|
||||||
|
run: |
|
||||||
|
echo "Publishing checksums for risk bundle..."
|
||||||
|
CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
|
||||||
|
mkdir -p $CHECKSUM_DIR
|
||||||
|
cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
|
||||||
|
cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/
|
||||||
|
|
||||||
|
# Create latest symlink manifest
|
||||||
|
cat > out/checksums/risk-bundle/latest.json <<EOF
|
||||||
|
{
|
||||||
|
"date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
|
"path": "$(date -u +%Y-%m-%d)/checksums.txt",
|
||||||
|
"manifest": "$(date -u +%Y-%m-%d)/manifest.json"
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo "Checksums published to $CHECKSUM_DIR"
|
||||||
|
cat $CHECKSUM_DIR/checksums.txt
|
||||||
|
|
||||||
|
- name: Upload published checksums
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: risk-bundle-published-checksums
|
||||||
|
path: out/checksums/risk-bundle/
|
||||||
@@ -20,7 +20,7 @@ jobs:
|
|||||||
- name: Setup .NET
|
- name: Setup .NET
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
- name: Install syft (SBOM)
|
- name: Install syft (SBOM)
|
||||||
uses: anchore/sbom-action/download-syft@v0
|
uses: anchore/sbom-action/download-syft@v0
|
||||||
|
|||||||
133
.gitea/workflows/scanner-analyzers.yml
Normal file
133
.gitea/workflows/scanner-analyzers.yml
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
name: Scanner Analyzers
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||||
|
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||||
|
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.x'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
discover-analyzers:
|
||||||
|
name: Discover Analyzers
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
analyzers: ${{ steps.find.outputs.analyzers }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Find analyzer projects
|
||||||
|
id: find
|
||||||
|
run: |
|
||||||
|
ANALYZERS=$(find src/Scanner/__Libraries -name "StellaOps.Scanner.Analyzers.*.csproj" -exec dirname {} \; | xargs -I {} basename {} | sort -u | jq -R -s -c 'split("\n")[:-1]')
|
||||||
|
echo "analyzers=$ANALYZERS" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build-analyzers:
|
||||||
|
name: Build Analyzers
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: discover-analyzers
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
analyzer: ${{ fromJson(needs.discover-analyzers.outputs.analyzers) }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/Scanner/__Libraries/${{ matrix.analyzer }}/
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Scanner/__Libraries/${{ matrix.analyzer }}/ --no-restore
|
||||||
|
|
||||||
|
test-lang-analyzers:
|
||||||
|
name: Test Language Analyzers
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: build-analyzers
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Setup Bun
|
||||||
|
uses: oven-sh/setup-bun@v1
|
||||||
|
with:
|
||||||
|
bun-version: latest
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
|
||||||
|
- name: Run Bun analyzer tests
|
||||||
|
run: |
|
||||||
|
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests" ]; then
|
||||||
|
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/ --verbosity normal
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Run Node analyzer tests
|
||||||
|
run: |
|
||||||
|
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests" ]; then
|
||||||
|
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/ --verbosity normal
|
||||||
|
fi
|
||||||
|
|
||||||
|
fixture-validation:
|
||||||
|
name: Validate Test Fixtures
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Validate fixture structure
|
||||||
|
run: |
|
||||||
|
find src/Scanner/__Tests -name "expected.json" | while read f; do
|
||||||
|
echo "Validating $f..."
|
||||||
|
if ! jq empty "$f" 2>/dev/null; then
|
||||||
|
echo "Error: Invalid JSON in $f"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Check fixture completeness
|
||||||
|
run: |
|
||||||
|
find src/Scanner/__Tests -type d -name "Fixtures" | while read fixtures_dir; do
|
||||||
|
echo "Checking $fixtures_dir..."
|
||||||
|
find "$fixtures_dir" -mindepth 1 -maxdepth 1 -type d | while read test_case; do
|
||||||
|
if [ ! -f "$test_case/expected.json" ]; then
|
||||||
|
echo "Warning: $test_case missing expected.json"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
done
|
||||||
|
|
||||||
|
determinism-check:
|
||||||
|
name: Verify Deterministic Output
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: test-lang-analyzers
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
|
||||||
|
- name: Run determinism tests
|
||||||
|
run: |
|
||||||
|
# Run scanner on same input twice, compare outputs
|
||||||
|
if [ -d "tests/fixtures/determinism" ]; then
|
||||||
|
dotnet test --filter "Category=Determinism" --verbosity normal
|
||||||
|
fi
|
||||||
@@ -15,7 +15,7 @@ jobs:
|
|||||||
- name: Setup .NET
|
- name: Setup .NET
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: "10.0.100-rc.2.25502.107"
|
dotnet-version: "10.0.100"
|
||||||
|
|
||||||
- name: Run determinism harness
|
- name: Run determinism harness
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 RC
|
- name: Setup .NET 10 RC
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
include-prerelease: true
|
include-prerelease: true
|
||||||
|
|
||||||
- name: Cache NuGet packages
|
- name: Cache NuGet packages
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ jobs:
|
|||||||
- name: Setup .NET 10 RC
|
- name: Setup .NET 10 RC
|
||||||
uses: actions/setup-dotnet@v4
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 10.0.100-rc.2.25502.107
|
dotnet-version: 10.0.100
|
||||||
include-prerelease: true
|
include-prerelease: true
|
||||||
|
|
||||||
- name: Cache NuGet packages
|
- name: Cache NuGet packages
|
||||||
|
|||||||
@@ -28,6 +28,8 @@ jobs:
|
|||||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-01' }}
|
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-01' }}
|
||||||
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||||
|
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
|
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -42,6 +44,16 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
cosign-release: 'v2.2.4'
|
cosign-release: 'v2.2.4'
|
||||||
|
|
||||||
|
- name: Check signing key configured
|
||||||
|
run: |
|
||||||
|
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||||
|
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||||
|
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Verify artifacts exist
|
- name: Verify artifacts exist
|
||||||
run: |
|
run: |
|
||||||
cd docs/modules/signals
|
cd docs/modules/signals
|
||||||
@@ -90,9 +102,9 @@ jobs:
|
|||||||
retention-days: 90
|
retention-days: 90
|
||||||
|
|
||||||
- name: Push to Evidence Locker
|
- name: Push to Evidence Locker
|
||||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||||
env:
|
env:
|
||||||
TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
|
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||||
run: |
|
run: |
|
||||||
tar -cf /tmp/signals-dsse.tar -C "$OUT_DIR" .
|
tar -cf /tmp/signals-dsse.tar -C "$OUT_DIR" .
|
||||||
@@ -102,7 +114,7 @@ jobs:
|
|||||||
echo "Pushed to Evidence Locker"
|
echo "Pushed to Evidence Locker"
|
||||||
|
|
||||||
- name: Evidence Locker skip notice
|
- name: Evidence Locker skip notice
|
||||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||||
run: |
|
run: |
|
||||||
echo "::notice::Evidence Locker push skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
|
echo "::notice::Evidence Locker push skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
|
||||||
echo "Artifacts available as workflow artifact for manual ingestion"
|
echo "Artifacts available as workflow artifact for manual ingestion"
|
||||||
|
|||||||
@@ -2,6 +2,14 @@ name: signals-evidence-locker
|
|||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
|
out_dir:
|
||||||
|
description: "Output directory containing signed artifacts"
|
||||||
|
required: false
|
||||||
|
default: "evidence-locker/signals/2025-12-05"
|
||||||
|
allow_dev_key:
|
||||||
|
description: "Allow dev key fallback (1=yes, 0=no)"
|
||||||
|
required: false
|
||||||
|
default: "0"
|
||||||
retention_target:
|
retention_target:
|
||||||
description: "Retention days target"
|
description: "Retention days target"
|
||||||
required: false
|
required: false
|
||||||
@@ -12,7 +20,12 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
env:
|
env:
|
||||||
MODULE_ROOT: docs/modules/signals
|
MODULE_ROOT: docs/modules/signals
|
||||||
OUT_DIR: evidence-locker/signals/2025-12-05
|
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-05' }}
|
||||||
|
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||||
|
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -20,6 +33,31 @@ jobs:
|
|||||||
- name: Task Pack offline bundle fixtures
|
- name: Task Pack offline bundle fixtures
|
||||||
run: python3 scripts/packs/run-fixtures-check.sh
|
run: python3 scripts/packs/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: 'v2.2.4'
|
||||||
|
|
||||||
|
- name: Check signing key configured
|
||||||
|
run: |
|
||||||
|
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||||
|
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||||
|
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Verify artifacts exist
|
||||||
|
run: |
|
||||||
|
cd "$MODULE_ROOT"
|
||||||
|
sha256sum -c SHA256SUMS
|
||||||
|
|
||||||
|
- name: Sign signals artifacts
|
||||||
|
run: |
|
||||||
|
chmod +x tools/cosign/sign-signals.sh
|
||||||
|
OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
|
||||||
|
|
||||||
- name: Build deterministic signals evidence tar
|
- name: Build deterministic signals evidence tar
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -52,16 +90,17 @@ jobs:
|
|||||||
/tmp/signals-evidence.tar.sha256
|
/tmp/signals-evidence.tar.sha256
|
||||||
|
|
||||||
- name: Push to Evidence Locker
|
- name: Push to Evidence Locker
|
||||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||||
env:
|
env:
|
||||||
TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
|
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||||
run: |
|
run: |
|
||||||
curl -f -X PUT "$URL/signals/2025-12-05/signals-evidence.tar" \
|
upload_path="${OUT_DIR#evidence-locker/}"
|
||||||
|
curl -f -X PUT "$URL/${upload_path}/signals-evidence.tar" \
|
||||||
-H "Authorization: Bearer $TOKEN" \
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
--data-binary @/tmp/signals-evidence.tar
|
--data-binary @/tmp/signals-evidence.tar
|
||||||
|
|
||||||
- name: Skip push (missing secret or URL)
|
- name: Skip push (missing secret or URL)
|
||||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||||
run: |
|
run: |
|
||||||
echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
|
echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
|
||||||
|
|||||||
127
.gitea/workflows/signals-reachability.yml
Normal file
127
.gitea/workflows/signals-reachability.yml
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
name: Signals Reachability Scoring & Events
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
allow_dev_key:
|
||||||
|
description: "Allow dev signing key fallback (1=yes, 0=no)"
|
||||||
|
required: false
|
||||||
|
default: "0"
|
||||||
|
evidence_out_dir:
|
||||||
|
description: "Evidence output dir for signing/upload"
|
||||||
|
required: false
|
||||||
|
default: "evidence-locker/signals/2025-12-05"
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/Signals/**'
|
||||||
|
- 'scripts/signals/reachability-smoke.sh'
|
||||||
|
- '.gitea/workflows/signals-reachability.yml'
|
||||||
|
- 'tools/cosign/sign-signals.sh'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
reachability-smoke:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
env:
|
||||||
|
DOTNET_NOLOGO: 1
|
||||||
|
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||||
|
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||||
|
TZ: UTC
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 scripts/packs/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Setup .NET 10 RC
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.100
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/Signals/StellaOps.Signals.sln --configfile nuget.config
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: dotnet build src/Signals/StellaOps.Signals.sln -c Release --no-restore
|
||||||
|
|
||||||
|
- name: Reachability scoring + cache/events smoke
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/signals/reachability-smoke.sh
|
||||||
|
scripts/signals/reachability-smoke.sh
|
||||||
|
|
||||||
|
sign-and-upload:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
needs: reachability-smoke
|
||||||
|
env:
|
||||||
|
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||||
|
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||||
|
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||||
|
OUT_DIR: ${{ github.event.inputs.evidence_out_dir || 'evidence-locker/signals/2025-12-05' }}
|
||||||
|
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
|
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Task Pack offline bundle fixtures
|
||||||
|
run: python3 scripts/packs/run-fixtures-check.sh
|
||||||
|
|
||||||
|
- name: Install cosign
|
||||||
|
uses: sigstore/cosign-installer@v3
|
||||||
|
with:
|
||||||
|
cosign-release: 'v2.2.4'
|
||||||
|
|
||||||
|
- name: Check signing key configured
|
||||||
|
run: |
|
||||||
|
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||||
|
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||||
|
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Verify artifacts exist
|
||||||
|
run: |
|
||||||
|
cd docs/modules/signals
|
||||||
|
sha256sum -c SHA256SUMS
|
||||||
|
|
||||||
|
- name: Sign signals artifacts
|
||||||
|
run: |
|
||||||
|
chmod +x tools/cosign/sign-signals.sh
|
||||||
|
OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
|
||||||
|
|
||||||
|
- name: Upload signed artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: signals-evidence-${{ github.run_number }}
|
||||||
|
path: |
|
||||||
|
${{ env.OUT_DIR }}/*.sigstore.json
|
||||||
|
${{ env.OUT_DIR }}/*.dsse
|
||||||
|
${{ env.OUT_DIR }}/SHA256SUMS
|
||||||
|
if-no-files-found: error
|
||||||
|
retention-days: 30
|
||||||
|
|
||||||
|
- name: Push to Evidence Locker
|
||||||
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||||
|
env:
|
||||||
|
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||||
|
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||||
|
run: |
|
||||||
|
tar -cf /tmp/signals-evidence.tar -C "$OUT_DIR" .
|
||||||
|
sha256sum /tmp/signals-evidence.tar
|
||||||
|
curl -f -X PUT "$URL/signals/$(date -u +%Y-%m-%d)/signals-evidence.tar" \
|
||||||
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
|
--data-binary @/tmp/signals-evidence.tar
|
||||||
|
echo "Uploaded to Evidence Locker"
|
||||||
|
|
||||||
|
- name: Evidence Locker skip notice
|
||||||
|
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||||
|
run: |
|
||||||
|
echo "::notice::Evidence Locker upload skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
|
||||||
33
.gitea/workflows/sm-remote-ci.yml
Normal file
33
.gitea/workflows/sm-remote-ci.yml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
name: sm-remote-ci
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- "src/SmRemote/**"
|
||||||
|
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
|
||||||
|
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
|
||||||
|
- "ops/sm-remote/**"
|
||||||
|
- ".gitea/workflows/sm-remote-ci.yml"
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "src/SmRemote/**"
|
||||||
|
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
|
||||||
|
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
|
||||||
|
- "ops/sm-remote/**"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.x
|
||||||
|
- name: Restore
|
||||||
|
run: dotnet restore src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj
|
||||||
|
- name: Test
|
||||||
|
run: dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj --no-build --verbosity normal
|
||||||
|
- name: Publish service
|
||||||
|
run: dotnet publish src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj -c Release -o out/sm-remote
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -64,3 +64,6 @@ coverage/
|
|||||||
local-nugets/
|
local-nugets/
|
||||||
local-nuget/
|
local-nuget/
|
||||||
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
|
||||||
|
.nuget-cache/
|
||||||
|
.nuget-packages2/
|
||||||
|
.nuget-temp/
|
||||||
@@ -1,23 +1,46 @@
|
|||||||
<Project>
|
<Project>
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
|
|
||||||
<StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
|
<StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
|
||||||
<StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
|
<StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
|
||||||
<StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
|
<StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
|
||||||
<StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
|
<StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
|
||||||
<_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
|
<_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
|
||||||
<_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
|
<_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
|
||||||
|
<RestorePackagesPath Condition="'$(RestorePackagesPath)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot).nuget/packages'))</RestorePackagesPath>
|
||||||
|
<RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
|
||||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
|
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
|
||||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
|
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
|
||||||
|
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
|
<StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
|
||||||
|
<NoWarn>$(NoWarn);NU1608;NU1605</NoWarn>
|
||||||
|
<WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605</WarningsNotAsErrors>
|
||||||
|
<RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605</RestoreNoWarn>
|
||||||
|
<RestoreWarningsAsErrors></RestoreWarningsAsErrors>
|
||||||
|
<RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
|
||||||
|
<RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
|
||||||
|
<RestoreFallbackFolders>clear</RestoreFallbackFolders>
|
||||||
|
<RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes>
|
||||||
|
<RestoreAdditionalProjectFallbackFolders>clear</RestoreAdditionalProjectFallbackFolders>
|
||||||
|
<RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes>
|
||||||
|
<RestoreAdditionalFallbackFolders>clear</RestoreAdditionalFallbackFolders>
|
||||||
|
<RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes>
|
||||||
|
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
|
<PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
|
||||||
<DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
|
<DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
|
<ItemGroup>
|
||||||
|
<PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||||
|
<PackageReference Update="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||||
|
<PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
|
||||||
|
<PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
|
||||||
|
<PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
|
||||||
|
</ItemGroup>
|
||||||
|
|
||||||
</Project>
|
</Project>
|
||||||
|
|||||||
@@ -1 +1,4 @@
|
|||||||
/nowarn:CA2022
|
/nowarn:CA2022
|
||||||
|
/p:DisableWorkloadResolver=true
|
||||||
|
/p:RestoreAdditionalProjectFallbackFolders=
|
||||||
|
/p:RestoreFallbackFolders=
|
||||||
|
|||||||
18
NuGet.config
18
NuGet.config
@@ -2,18 +2,14 @@
|
|||||||
<configuration>
|
<configuration>
|
||||||
<packageSources>
|
<packageSources>
|
||||||
<clear />
|
<clear />
|
||||||
<add key="local" value="local-nugets" />
|
<add key="local-nugets" value="./local-nugets" />
|
||||||
<add key="ablera-mirror" value="https://mirrors.ablera.dev/nuget/nuget-mirror/v3/index.json" />
|
<add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
|
||||||
|
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
|
||||||
</packageSources>
|
</packageSources>
|
||||||
<config>
|
<config>
|
||||||
<add key="globalPackagesFolder" value="local-nugets/packages" />
|
<add key="globalPackagesFolder" value="./.nuget/packages" />
|
||||||
</config>
|
</config>
|
||||||
<packageSourceMapping>
|
<fallbackPackageFolders>
|
||||||
<packageSource key="local">
|
<clear />
|
||||||
<package pattern="*" />
|
</fallbackPackageFolders>
|
||||||
</packageSource>
|
|
||||||
<packageSource key="ablera-mirror">
|
|
||||||
<package pattern="*" />
|
|
||||||
</packageSource>
|
|
||||||
</packageSourceMapping>
|
|
||||||
</configuration>
|
</configuration>
|
||||||
|
|||||||
1
bench/reachability-benchmark/.gitignore
vendored
Normal file
1
bench/reachability-benchmark/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
.jdk/
|
||||||
@@ -20,6 +20,7 @@
|
|||||||
## Working Agreements
|
## Working Agreements
|
||||||
- Determinism: pin toolchains; set `SOURCE_DATE_EPOCH`; sort file lists; stable JSON/YAML ordering; fixed seeds for any sampling.
|
- Determinism: pin toolchains; set `SOURCE_DATE_EPOCH`; sort file lists; stable JSON/YAML ordering; fixed seeds for any sampling.
|
||||||
- Offline posture: no network at build/test time; vendored toolchains; registry pulls are forbidden—use cached/bundled images.
|
- Offline posture: no network at build/test time; vendored toolchains; registry pulls are forbidden—use cached/bundled images.
|
||||||
|
- Java builds: use vendored Temurin 21 via `tools/java/ensure_jdk.sh` when `JAVA_HOME`/`javac` are absent; keep `.jdk/` out of VCS and use `build_all.py --skip-lang` when a toolchain is missing.
|
||||||
- Licensing: all benchmark content Apache-2.0; include LICENSE in repo root; third-party cases must have compatible licenses and attributions.
|
- Licensing: all benchmark content Apache-2.0; include LICENSE in repo root; third-party cases must have compatible licenses and attributions.
|
||||||
- Evidence: each case must include oracle tests/coverage proving reachability label; store truth and submissions under `benchmark/truth/` and `benchmark/submissions/` with JSON Schema.
|
- Evidence: each case must include oracle tests/coverage proving reachability label; store truth and submissions under `benchmark/truth/` and `benchmark/submissions/` with JSON Schema.
|
||||||
- Security: no secrets; scrub URLs/tokens; deterministic CI artifacts only.
|
- Security: no secrets; scrub URLs/tokens; deterministic CI artifacts only.
|
||||||
|
|||||||
@@ -8,38 +8,42 @@ Deterministic, reproducible benchmark for reachability analysis tools.
|
|||||||
- Enable fair scoring via the `rb-score` CLI and published schemas.
|
- Enable fair scoring via the `rb-score` CLI and published schemas.
|
||||||
|
|
||||||
## Layout
|
## Layout
|
||||||
- `cases/<lang>/<project>/` — benchmark cases with deterministic Dockerfiles, pinned deps, oracle tests.
|
- `cases/<lang>/<project>/` ƒ?" benchmark cases with deterministic Dockerfiles, pinned deps, oracle tests.
|
||||||
- `schemas/` — JSON/YAML schemas for cases, entrypoints, truth, submissions.
|
- `schemas/` ƒ?" JSON/YAML schemas for cases, entrypoints, truth, submissions.
|
||||||
- `benchmark/truth/` — ground-truth labels (hidden/internal split optional).
|
- `benchmark/truth/` ƒ?" ground-truth labels (hidden/internal split optional).
|
||||||
- `benchmark/submissions/` — sample submissions and format reference.
|
- `benchmark/submissions/` ƒ?" sample submissions and format reference.
|
||||||
- `tools/scorer/` — `rb-score` CLI and tests.
|
- `tools/scorer/` ƒ?" `rb-score` CLI and tests.
|
||||||
- `tools/build/` — `build_all.py` (run all cases) and `validate_builds.py` (run twice and compare hashes).
|
- `tools/build/` ƒ?" `build_all.py` (run all cases) and `validate_builds.py` (run twice and compare hashes).
|
||||||
- `baselines/` — reference runners (Semgrep, CodeQL, Stella) with normalized outputs.
|
- `baselines/` ƒ?" reference runners (Semgrep, CodeQL, Stella) with normalized outputs.
|
||||||
- `ci/` — deterministic CI workflows and scripts.
|
- `ci/` ƒ?" deterministic CI workflows and scripts.
|
||||||
- `website/` — static site (leaderboard/docs/downloads).
|
- `website/` ƒ?" static site (leaderboard/docs/downloads).
|
||||||
|
|
||||||
Sample cases added (JS track):
|
Sample cases added (JS track):
|
||||||
- `cases/js/unsafe-eval` (reachable sink) → `benchmark/truth/js-unsafe-eval.json`.
|
- `cases/js/unsafe-eval` (reachable sink) ƒ+' `benchmark/truth/js-unsafe-eval.json`.
|
||||||
- `cases/js/guarded-eval` (unreachable by default) → `benchmark/truth/js-guarded-eval.json`.
|
- `cases/js/guarded-eval` (unreachable by default) ƒ+' `benchmark/truth/js-guarded-eval.json`.
|
||||||
- `cases/js/express-eval` (admin eval reachable) → `benchmark/truth/js-express-eval.json`.
|
- `cases/js/express-eval` (admin eval reachable) ƒ+' `benchmark/truth/js-express-eval.json`.
|
||||||
- `cases/js/express-guarded` (admin eval gated by env) → `benchmark/truth/js-express-guarded.json`.
|
- `cases/js/express-guarded` (admin eval gated by env) ƒ+' `benchmark/truth/js-express-guarded.json`.
|
||||||
- `cases/js/fastify-template` (template rendering reachable) → `benchmark/truth/js-fastify-template.json`.
|
- `cases/js/fastify-template` (template rendering reachable) ƒ+' `benchmark/truth/js-fastify-template.json`.
|
||||||
|
|
||||||
Sample cases added (Python track):
|
Sample cases added (Python track):
|
||||||
- `cases/py/unsafe-exec` (reachable eval) → `benchmark/truth/py-unsafe-exec.json`.
|
- `cases/py/unsafe-exec` (reachable eval) ƒ+' `benchmark/truth/py-unsafe-exec.json`.
|
||||||
- `cases/py/guarded-exec` (unreachable when FEATURE_ENABLE != 1) → `benchmark/truth/py-guarded-exec.json`.
|
- `cases/py/guarded-exec` (unreachable when FEATURE_ENABLE != 1) ƒ+' `benchmark/truth/py-guarded-exec.json`.
|
||||||
- `cases/py/flask-template` (template rendering reachable) → `benchmark/truth/py-flask-template.json`.
|
- `cases/py/flask-template` (template rendering reachable) ƒ+' `benchmark/truth/py-flask-template.json`.
|
||||||
- `cases/py/fastapi-guarded` (unreachable unless ALLOW_EXEC=true) → `benchmark/truth/py-fastapi-guarded.json`.
|
- `cases/py/fastapi-guarded` (unreachable unless ALLOW_EXEC=true) ƒ+' `benchmark/truth/py-fastapi-guarded.json`.
|
||||||
- `cases/py/django-ssti` (template rendering reachable, autoescape off) → `benchmark/truth/py-django-ssti.json`.
|
- `cases/py/django-ssti` (template rendering reachable, autoescape off) ƒ+' `benchmark/truth/py-django-ssti.json`.
|
||||||
|
|
||||||
Sample cases added (Java track):
|
Sample cases added (Java track):
|
||||||
- `cases/java/spring-deserialize` (reachable Java deserialization) → `benchmark/truth/java-spring-deserialize.json`.
|
- `cases/java/spring-deserialize` (reachable Java deserialization) ƒ+' `benchmark/truth/java-spring-deserialize.json`.
|
||||||
- `cases/java/spring-guarded` (deserialization unreachable unless ALLOW_DESER=true) → `benchmark/truth/java-spring-guarded.json`.
|
- `cases/java/spring-guarded` (deserialization unreachable unless ALLOW_DESER=true) ƒ+' `benchmark/truth/java-spring-guarded.json`.
|
||||||
|
- `cases/java/micronaut-deserialize` (reachable Micronaut-style deserialization) ƒ+' `benchmark/truth/java-micronaut-deserialize.json`.
|
||||||
|
- `cases/java/micronaut-guarded` (unreachable unless ALLOW_MN_DESER=true) ƒ+' `benchmark/truth/java-micronaut-guarded.json`.
|
||||||
|
- `cases/java/spring-reflection` (reflection sink reachable via Class.forName) ƒ+' `benchmark/truth/java-spring-reflection.json`.
|
||||||
|
|
||||||
## Determinism & Offline Rules
|
## Determinism & Offline Rules
|
||||||
- No network during build/test; pin images/deps; set `SOURCE_DATE_EPOCH`.
|
- No network during build/test; pin images/deps; set `SOURCE_DATE_EPOCH`.
|
||||||
- Sort file lists; stable JSON/YAML emitters; fixed RNG seeds.
|
- Sort file lists; stable JSON/YAML emitters; fixed RNG seeds.
|
||||||
- All scripts must succeed on a clean machine with cached toolchain tarballs only.
|
- All scripts must succeed on a clean machine with cached toolchain tarballs only.
|
||||||
|
- Java builds auto-use vendored Temurin 21 via `tools/java/ensure_jdk.sh` when `JAVA_HOME`/`javac` are absent.
|
||||||
|
|
||||||
## Licensing
|
## Licensing
|
||||||
- Apache-2.0 for all benchmark assets. Third-party snippets must be license-compatible and attributed.
|
- Apache-2.0 for all benchmark assets. Third-party snippets must be license-compatible and attributed.
|
||||||
@@ -50,8 +54,10 @@ Sample cases added (Java track):
|
|||||||
python tools/validate.py all schemas/examples
|
python tools/validate.py all schemas/examples
|
||||||
|
|
||||||
# score a submission (coming in task 513-008)
|
# score a submission (coming in task 513-008)
|
||||||
cd tools/scorer
|
./tools/scorer/rb-score --cases cases --truth benchmark/truth --submission benchmark/submissions/sample.json
|
||||||
./rb-score --cases ../cases --truth ../benchmark/truth --submission ../benchmark/submissions/sample.json
|
|
||||||
|
# deterministic case builds (skip a language when a toolchain is unavailable)
|
||||||
|
python tools/build/build_all.py --cases cases --skip-lang js
|
||||||
```
|
```
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|||||||
@@ -1,11 +1,16 @@
|
|||||||
# Reachability Benchmark Changelog
|
# Reachability Benchmark Changelog
|
||||||
|
|
||||||
## 1.0.1 · 2025-12-03
|
## 1.0.2 Aú 2025-12-05
|
||||||
|
- Unblocked Java track with vendored Temurin 21 (`tools/java/ensure_jdk.sh`) and deterministic build artifacts (coverage + traces).
|
||||||
|
- Added three more Java cases (`micronaut-deserialize`, `micronaut-guarded`, `spring-reflection`) to reach 5/5 required cases.
|
||||||
|
- `tools/build/build_all.py` now supports `--skip-lang` and runs under WSL-aware bash; CI builds Java cases by default.
|
||||||
|
|
||||||
|
## 1.0.1 Aú 2025-12-03
|
||||||
- Added manifest schema + sample manifest with hashes, SBOM/attestation entries, and sandbox/redaction metadata.
|
- Added manifest schema + sample manifest with hashes, SBOM/attestation entries, and sandbox/redaction metadata.
|
||||||
- Added coverage/trace schemas and extended validator to cover them.
|
- Added coverage/trace schemas and extended validator to cover them.
|
||||||
- Introduced `tools/verify_manifest.py` and deterministic offline kit packaging script.
|
- Introduced `tools/verify_manifest.py` and deterministic offline kit packaging script.
|
||||||
- Added per-language determinism env templates and dataset safety checklist.
|
- Added per-language determinism env templates and dataset safety checklist.
|
||||||
- Populated SBOM + attestation outputs for JS/PY/C tracks; Java remains blocked on JDK availability.
|
- Populated SBOM + attestation outputs for JS/PY/C tracks.
|
||||||
|
|
||||||
## 1.0.0 · 2025-12-01
|
## 1.0.0 Aú 2025-12-01
|
||||||
- Initial public dataset, scorer, baselines, and website.
|
- Initial public dataset, scorer, baselines, and website.
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ Version: 1.0.1 · Date: 2025-12-03
|
|||||||
- [x] Published schemas/validators: truth/submission/coverage/trace + manifest schemas; validated via `tools/validate.py` and `tools/verify_manifest.py`.
|
- [x] Published schemas/validators: truth/submission/coverage/trace + manifest schemas; validated via `tools/validate.py` and `tools/verify_manifest.py`.
|
||||||
- [x] Evidence bundles: coverage + traces + attestation + sbom recorded per case (sample manifest).
|
- [x] Evidence bundles: coverage + traces + attestation + sbom recorded per case (sample manifest).
|
||||||
- [x] Binary case recipe: `cases/**/build/build.sh` pinned `SOURCE_DATE_EPOCH` and env templates under `benchmark/templates/determinism/`.
|
- [x] Binary case recipe: `cases/**/build/build.sh` pinned `SOURCE_DATE_EPOCH` and env templates under `benchmark/templates/determinism/`.
|
||||||
- [x] Determinism CI: `ci/run-ci.sh` + `tools/verify_manifest.py` run twice to compare hashes; Java track still blocked on JDK availability.
|
- [x] Determinism CI: `ci/run-ci.sh` + `tools/verify_manifest.py` run twice to compare hashes; Java track uses vendored Temurin 21 via `tools/java/ensure_jdk.sh`.
|
||||||
- [x] Signed baselines: baseline submissions may include DSSE path in manifest (not required for sample kit); rulepack hashes recorded separately.
|
- [x] Signed baselines: baseline submissions may include DSSE path in manifest (not required for sample kit); rulepack hashes recorded separately.
|
||||||
- [x] Submission policy: CLA/DSSE optional in sample; production kits require DSSE envelope recorded in `signatures`.
|
- [x] Submission policy: CLA/DSSE optional in sample; production kits require DSSE envelope recorded in `signatures`.
|
||||||
- [x] Semantic versioning & changelog: see `benchmark/CHANGELOG.md`; manifest `version` mirrors dataset release.
|
- [x] Semantic versioning & changelog: see `benchmark/CHANGELOG.md`; manifest `version` mirrors dataset release.
|
||||||
|
|||||||
@@ -1,92 +1,203 @@
|
|||||||
{
|
{
|
||||||
"schemaVersion": "1.0.0",
|
"artifacts": {
|
||||||
"kitId": "reachability-benchmark:public-v1",
|
"baselineSubmissions": [],
|
||||||
"version": "1.0.1",
|
"scorer": {
|
||||||
|
"path": "tools/scorer/rb_score.py",
|
||||||
|
"sha256": "32d4f69f5d1d4b87902d6c4f020efde703487d526bf7d42b4438cb2499813f7f"
|
||||||
|
},
|
||||||
|
"submissionSchema": {
|
||||||
|
"path": "schemas/submission.schema.json",
|
||||||
|
"sha256": "de5bebb2dbcd085d7896f47a16b9d3837a65fb7f816dcf7e587967d5848c50a7"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cases": [
|
||||||
|
{
|
||||||
|
"hashes": {
|
||||||
|
"attestation": {
|
||||||
|
"path": "cases/js/unsafe-eval/outputs/attestation.json",
|
||||||
|
"sha256": "be3b0971d805f68730a1c4c0f7a4c3c40dfc7a73099a5524c68759fcc1729d7c"
|
||||||
|
},
|
||||||
|
"binary": {
|
||||||
|
"path": "cases/js/unsafe-eval/outputs/binary.tar.gz",
|
||||||
|
"sha256": "72da19f28c2c36b6666afcc304514b387de20a5de881d5341067481e8418e23e"
|
||||||
|
},
|
||||||
|
"case": {
|
||||||
|
"path": "cases/js/unsafe-eval/case.yaml",
|
||||||
|
"sha256": "a858ff509fda65d69df476e870d9646c6a84744010c812f3d23a88576f20cb6b"
|
||||||
|
},
|
||||||
|
"coverage": {
|
||||||
|
"path": "cases/js/unsafe-eval/outputs/coverage.json",
|
||||||
|
"sha256": "c2cf5af508d33f6ecdc7c0f10200a02a4c0ddeb8e1fc08b55d9bd4a2d6cb926b"
|
||||||
|
},
|
||||||
|
"entrypoints": {
|
||||||
|
"path": "cases/js/unsafe-eval/entrypoints.yaml",
|
||||||
|
"sha256": "77829e728d34c9dc5f56c04784c97f619830ad43bd8410acb3d7134f372a49b3"
|
||||||
|
},
|
||||||
|
"sbom": {
|
||||||
|
"path": "cases/js/unsafe-eval/outputs/sbom.cdx.json",
|
||||||
|
"sha256": "c00ee1e12b1b6a6237e42174b2fe1393bcf575f6605205a2b84366e867b36d5f"
|
||||||
|
},
|
||||||
|
"source": {
|
||||||
|
"path": "cases/js/unsafe-eval",
|
||||||
|
"sha256": "69b0d1cbae1e2c9ddc0f4dba8c6db507e1d3a1c5ea0a0a545c6f3e785529c91c"
|
||||||
|
},
|
||||||
|
"traces": {
|
||||||
|
"path": "cases/js/unsafe-eval/outputs/traces/traces.json",
|
||||||
|
"sha256": "6e63c78e091cc9d06acdc5966dd9e54593ca6b0b97f502928de278b3f80adbd8"
|
||||||
|
},
|
||||||
|
"truth": {
|
||||||
|
"path": "benchmark/truth/js-unsafe-eval.json",
|
||||||
|
"sha256": "ab42f28ed229eb657ffcb36c3a99287436e1822a4c7d395a94de784457a08f62"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"id": "js-unsafe-eval:001",
|
||||||
|
"language": "js",
|
||||||
|
"redaction": {
|
||||||
|
"pii": false,
|
||||||
|
"policy": "benchmark-default/v1"
|
||||||
|
},
|
||||||
|
"sandbox": {
|
||||||
|
"network": "loopback",
|
||||||
|
"privileges": "rootless"
|
||||||
|
},
|
||||||
|
"size": "small",
|
||||||
|
"truth": {
|
||||||
|
"confidence": "high",
|
||||||
|
"label": "reachable",
|
||||||
|
"rationale": "Unit test hits eval sink via POST /api/exec"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"hashes": {
|
||||||
|
"attestation": {
|
||||||
|
"path": "cases/py/fastapi-guarded/outputs/attestation.json",
|
||||||
|
"sha256": "257aa5408a5c6ffe0e193a75a2a54597f8c6f61babfe8aaf26bd47340c3086c3"
|
||||||
|
},
|
||||||
|
"binary": {
|
||||||
|
"path": "cases/py/fastapi-guarded/outputs/binary.tar.gz",
|
||||||
|
"sha256": "ca964fef352dc535b63d35b8f8846cc051e10e54cfd8aceef7566f3c94178b76"
|
||||||
|
},
|
||||||
|
"case": {
|
||||||
|
"path": "cases/py/fastapi-guarded/case.yaml",
|
||||||
|
"sha256": "0add8a5f487ebd21ee20ab88b7c6436fe8471f0a54ab8da0e08c8416aa181346"
|
||||||
|
},
|
||||||
|
"coverage": {
|
||||||
|
"path": "cases/py/fastapi-guarded/outputs/coverage.json",
|
||||||
|
"sha256": "07b1f6dccaa02bd4e1c3e2771064fa3c6e06d02843a724151721ea694762c750"
|
||||||
|
},
|
||||||
|
"entrypoints": {
|
||||||
|
"path": "cases/py/fastapi-guarded/entrypoints.yaml",
|
||||||
|
"sha256": "47c9dd15bf7c5bb8641893a92791d3f7675ed6adba17b251f609335400d29d41"
|
||||||
|
},
|
||||||
|
"sbom": {
|
||||||
|
"path": "cases/py/fastapi-guarded/outputs/sbom.cdx.json",
|
||||||
|
"sha256": "13999d8f3d4c9bdb70ea54ad1de613be3f893d79bdd1a53f7c9401e6add88cf0"
|
||||||
|
},
|
||||||
|
"source": {
|
||||||
|
"path": "cases/py/fastapi-guarded",
|
||||||
|
"sha256": "0869cab10767ac7e7b33c9bbd634f811d98ce5cdeb244769f1a81949438460fb"
|
||||||
|
},
|
||||||
|
"traces": {
|
||||||
|
"path": "cases/py/fastapi-guarded/outputs/traces/traces.json",
|
||||||
|
"sha256": "4633748b8b428b45e3702f2f8f5b3f4270728078e26bce1e08900ed1d5bb3046"
|
||||||
|
},
|
||||||
|
"truth": {
|
||||||
|
"path": "benchmark/truth/py-fastapi-guarded.json",
|
||||||
|
"sha256": "f8c62abeb00006621feeb010d0e47d248918dffd6d6e20e0f47d74e1b3642760"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"id": "py-fastapi-guarded:104",
|
||||||
|
"language": "py",
|
||||||
|
"redaction": {
|
||||||
|
"pii": false,
|
||||||
|
"policy": "benchmark-default/v1"
|
||||||
|
},
|
||||||
|
"sandbox": {
|
||||||
|
"network": "loopback",
|
||||||
|
"privileges": "rootless"
|
||||||
|
},
|
||||||
|
"size": "small",
|
||||||
|
"truth": {
|
||||||
|
"confidence": "high",
|
||||||
|
"label": "unreachable",
|
||||||
|
"rationale": "Feature flag ALLOW_EXEC must be true before sink executes"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"hashes": {
|
||||||
|
"attestation": {
|
||||||
|
"path": "cases/c/unsafe-system/outputs/attestation.json",
|
||||||
|
"sha256": "c3755088182359a45492170fa8a57d826b605176333d109f4f113bc7ccf85f97"
|
||||||
|
},
|
||||||
|
"binary": {
|
||||||
|
"path": "cases/c/unsafe-system/outputs/binary.tar.gz",
|
||||||
|
"sha256": "62200167bd660bad6d131b21f941acdfebe00e949e353a53c97b6691ac8f0e49"
|
||||||
|
},
|
||||||
|
"case": {
|
||||||
|
"path": "cases/c/unsafe-system/case.yaml",
|
||||||
|
"sha256": "7799a3a629c22ad47197309f44e32aabbc4e6711ef78d606ba57a7a4974787ce"
|
||||||
|
},
|
||||||
|
"coverage": {
|
||||||
|
"path": "cases/c/unsafe-system/outputs/coverage.json",
|
||||||
|
"sha256": "03ba8cf09e7e0ed82e9fa8abb48f92355e894fd56e0c0160a504193a6f6ec48a"
|
||||||
|
},
|
||||||
|
"entrypoints": {
|
||||||
|
"path": "cases/c/unsafe-system/entrypoints.yaml",
|
||||||
|
"sha256": "06afee8350460c9d15b26ea9d4ea293e8eb3f4b86b3179e19401fa99947e4490"
|
||||||
|
},
|
||||||
|
"sbom": {
|
||||||
|
"path": "cases/c/unsafe-system/outputs/sbom.cdx.json",
|
||||||
|
"sha256": "4c72a213fc4c646f44b4d0be3c23711b120b2a386374ebaa4897e5058980e0f5"
|
||||||
|
},
|
||||||
|
"source": {
|
||||||
|
"path": "cases/c/unsafe-system",
|
||||||
|
"sha256": "bc39ab3a3e5cb3944a205912ecad8c1ac4b7d15c64b453c9d34a9a5df7fbbbf4"
|
||||||
|
},
|
||||||
|
"traces": {
|
||||||
|
"path": "cases/c/unsafe-system/outputs/traces/traces.json",
|
||||||
|
"sha256": "f6469e46a57b8a6e8e17c9b8e78168edd6657ea8a5e1e96fe6ab4a0fc88a734e"
|
||||||
|
},
|
||||||
|
"truth": {
|
||||||
|
"path": "benchmark/truth/c-unsafe-system.json",
|
||||||
|
"sha256": "9a8200c2cf549b3ac8b19b170e9d34df063351879f19f401d8492e280ad08c13"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"id": "c-unsafe-system:001",
|
||||||
|
"language": "c",
|
||||||
|
"redaction": {
|
||||||
|
"pii": false,
|
||||||
|
"policy": "benchmark-default/v1"
|
||||||
|
},
|
||||||
|
"sandbox": {
|
||||||
|
"network": "loopback",
|
||||||
|
"privileges": "rootless"
|
||||||
|
},
|
||||||
|
"size": "small",
|
||||||
|
"truth": {
|
||||||
|
"confidence": "high",
|
||||||
|
"label": "reachable",
|
||||||
|
"rationale": "Command injection sink reachable via argv -> system()"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
"createdAt": "2025-12-03T00:00:00Z",
|
"createdAt": "2025-12-03T00:00:00Z",
|
||||||
"sourceDateEpoch": 1730000000,
|
"kitId": "reachability-benchmark:public-v1",
|
||||||
"resourceLimits": {
|
"resourceLimits": {
|
||||||
"cpu": "4",
|
"cpu": "4",
|
||||||
"memory": "8Gi"
|
"memory": "8Gi"
|
||||||
},
|
},
|
||||||
"cases": [
|
"schemaVersion": "1.0.0",
|
||||||
{
|
"signatures": [],
|
||||||
"id": "js-unsafe-eval:001",
|
"sourceDateEpoch": 1730000000,
|
||||||
"language": "js",
|
|
||||||
"size": "small",
|
|
||||||
"hashes": {
|
|
||||||
"source": { "path": "cases/js/unsafe-eval", "sha256": "69b0d1cbae1e2c9ddc0f4dba8c6db507e1d3a1c5ea0a0a545c6f3e785529c91c" },
|
|
||||||
"case": { "path": "cases/js/unsafe-eval/case.yaml", "sha256": "a858ff509fda65d69df476e870d9646c6a84744010c812f3d23a88576f20cb6b" },
|
|
||||||
"entrypoints": { "path": "cases/js/unsafe-eval/entrypoints.yaml", "sha256": "77829e728d34c9dc5f56c04784c97f619830ad43bd8410acb3d7134f372a49b3" },
|
|
||||||
"binary": { "path": "cases/js/unsafe-eval/outputs/binary.tar.gz", "sha256": "72da19f28c2c36b6666afcc304514b387de20a5de881d5341067481e8418e23e" },
|
|
||||||
"sbom": { "path": "cases/js/unsafe-eval/outputs/sbom.cdx.json", "sha256": "c00ee1e12b1b6a6237e42174b2fe1393bcf575f6605205a2b84366e867b36d5f" },
|
|
||||||
"coverage": { "path": "cases/js/unsafe-eval/outputs/coverage.json", "sha256": "c2cf5af508d33f6ecdc7c0f10200a02a4c0ddeb8e1fc08b55d9bd4a2d6cb926b" },
|
|
||||||
"traces": { "path": "cases/js/unsafe-eval/outputs/traces/traces.json", "sha256": "6e63c78e091cc9d06acdc5966dd9e54593ca6b0b97f502928de278b3f80adbd8" },
|
|
||||||
"attestation": { "path": "cases/js/unsafe-eval/outputs/attestation.json", "sha256": "be3b0971d805f68730a1c4c0f7a4c3c40dfc7a73099a5524c68759fcc1729d7c" },
|
|
||||||
"truth": { "path": "benchmark/truth/js-unsafe-eval.json", "sha256": "ab42f28ed229eb657ffcb36c3a99287436e1822a4c7d395a94de784457a08f62" }
|
|
||||||
},
|
|
||||||
"truth": {
|
|
||||||
"label": "reachable",
|
|
||||||
"confidence": "high",
|
|
||||||
"rationale": "Unit test hits eval sink via POST /api/exec"
|
|
||||||
},
|
|
||||||
"sandbox": { "network": "loopback", "privileges": "rootless" },
|
|
||||||
"redaction": { "pii": false, "policy": "benchmark-default/v1" }
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "py-fastapi-guarded:104",
|
|
||||||
"language": "py",
|
|
||||||
"size": "small",
|
|
||||||
"hashes": {
|
|
||||||
"source": { "path": "cases/py/fastapi-guarded", "sha256": "0869cab10767ac7e7b33c9bbd634f811d98ce5cdeb244769f1a81949438460fb" },
|
|
||||||
"case": { "path": "cases/py/fastapi-guarded/case.yaml", "sha256": "0add8a5f487ebd21ee20ab88b7c6436fe8471f0a54ab8da0e08c8416aa181346" },
|
|
||||||
"entrypoints": { "path": "cases/py/fastapi-guarded/entrypoints.yaml", "sha256": "47c9dd15bf7c5bb8641893a92791d3f7675ed6adba17b251f609335400d29d41" },
|
|
||||||
"binary": { "path": "cases/py/fastapi-guarded/outputs/binary.tar.gz", "sha256": "ca964fef352dc535b63d35b8f8846cc051e10e54cfd8aceef7566f3c94178b76" },
|
|
||||||
"sbom": { "path": "cases/py/fastapi-guarded/outputs/sbom.cdx.json", "sha256": "13999d8f3d4c9bdb70ea54ad1de613be3f893d79bdd1a53f7c9401e6add88cf0" },
|
|
||||||
"coverage": { "path": "cases/py/fastapi-guarded/outputs/coverage.json", "sha256": "07b1f6dccaa02bd4e1c3e2771064fa3c6e06d02843a724151721ea694762c750" },
|
|
||||||
"traces": { "path": "cases/py/fastapi-guarded/outputs/traces/traces.json", "sha256": "4633748b8b428b45e3702f2f8f5b3f4270728078e26bce1e08900ed1d5bb3046" },
|
|
||||||
"attestation": { "path": "cases/py/fastapi-guarded/outputs/attestation.json", "sha256": "257aa5408a5c6ffe0e193a75a2a54597f8c6f61babfe8aaf26bd47340c3086c3" },
|
|
||||||
"truth": { "path": "benchmark/truth/py-fastapi-guarded.json", "sha256": "f8c62abeb00006621feeb010d0e47d248918dffd6d6e20e0f47d74e1b3642760" }
|
|
||||||
},
|
|
||||||
"truth": {
|
|
||||||
"label": "unreachable",
|
|
||||||
"confidence": "high",
|
|
||||||
"rationale": "Feature flag ALLOW_EXEC must be true before sink executes"
|
|
||||||
},
|
|
||||||
"sandbox": { "network": "loopback", "privileges": "rootless" },
|
|
||||||
"redaction": { "pii": false, "policy": "benchmark-default/v1" }
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "c-unsafe-system:001",
|
|
||||||
"language": "c",
|
|
||||||
"size": "small",
|
|
||||||
"hashes": {
|
|
||||||
"source": { "path": "cases/c/unsafe-system", "sha256": "bc39ab3a3e5cb3944a205912ecad8c1ac4b7d15c64b453c9d34a9a5df7fbbbf4" },
|
|
||||||
"case": { "path": "cases/c/unsafe-system/case.yaml", "sha256": "7799a3a629c22ad47197309f44e32aabbc4e6711ef78d606ba57a7a4974787ce" },
|
|
||||||
"entrypoints": { "path": "cases/c/unsafe-system/entrypoints.yaml", "sha256": "06afee8350460c9d15b26ea9d4ea293e8eb3f4b86b3179e19401fa99947e4490" },
|
|
||||||
"binary": { "path": "cases/c/unsafe-system/outputs/binary.tar.gz", "sha256": "62200167bd660bad6d131b21f941acdfebe00e949e353a53c97b6691ac8f0e49" },
|
|
||||||
"sbom": { "path": "cases/c/unsafe-system/outputs/sbom.cdx.json", "sha256": "4c72a213fc4c646f44b4d0be3c23711b120b2a386374ebaa4897e5058980e0f5" },
|
|
||||||
"coverage": { "path": "cases/c/unsafe-system/outputs/coverage.json", "sha256": "03ba8cf09e7e0ed82e9fa8abb48f92355e894fd56e0c0160a504193a6f6ec48a" },
|
|
||||||
"traces": { "path": "cases/c/unsafe-system/outputs/traces/traces.json", "sha256": "f6469e46a57b8a6e8e17c9b8e78168edd6657ea8a5e1e96fe6ab4a0fc88a734e" },
|
|
||||||
"attestation": { "path": "cases/c/unsafe-system/outputs/attestation.json", "sha256": "c3755088182359a45492170fa8a57d826b605176333d109f4f113bc7ccf85f97" },
|
|
||||||
"truth": { "path": "benchmark/truth/c-unsafe-system.json", "sha256": "9a8200c2cf549b3ac8b19b170e9d34df063351879f19f401d8492e280ad08c13" }
|
|
||||||
},
|
|
||||||
"truth": {
|
|
||||||
"label": "reachable",
|
|
||||||
"confidence": "high",
|
|
||||||
"rationale": "Command injection sink reachable via argv -> system()"
|
|
||||||
},
|
|
||||||
"sandbox": { "network": "loopback", "privileges": "rootless" },
|
|
||||||
"redaction": { "pii": false, "policy": "benchmark-default/v1" }
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"artifacts": {
|
|
||||||
"submissionSchema": { "path": "schemas/submission.schema.json", "sha256": "de5bebb2dbcd085d7896f47a16b9d3837a65fb7f816dcf7e587967d5848c50a7" },
|
|
||||||
"scorer": { "path": "tools/scorer/rb_score.py", "sha256": "32d4f69f5d1d4b87902d6c4f020efde703487d526bf7d42b4438cb2499813f7f" },
|
|
||||||
"baselineSubmissions": []
|
|
||||||
},
|
|
||||||
"tools": {
|
"tools": {
|
||||||
"builder": { "path": "tools/build/build_all.py", "sha256": "64a73f3df9b6f2cdaf5cbb33852b8e9bf443f67cf9dff1573fb635a0252bda9a" },
|
"builder": {
|
||||||
"validator": { "path": "tools/validate.py", "sha256": "776009ef0f3691e60cc87df3f0468181ee7a827be1bd0f73c77fdb68d3ed31c0" }
|
"path": "tools/build/build_all.py",
|
||||||
|
"sha256": "64a73f3df9b6f2cdaf5cbb33852b8e9bf443f67cf9dff1573fb635a0252bda9a"
|
||||||
},
|
},
|
||||||
"signatures": []
|
"validator": {
|
||||||
|
"path": "tools/validate.py",
|
||||||
|
"sha256": "776009ef0f3691e60cc87df3f0468181ee7a827be1bd0f73c77fdb68d3ed31c0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"version": "1.0.2"
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
{
|
||||||
|
"version": "1.0.0",
|
||||||
|
"cases": [
|
||||||
|
{
|
||||||
|
"case_id": "java-micronaut-deserialize:203",
|
||||||
|
"case_version": "1.0.0",
|
||||||
|
"notes": "Micronaut-style controller deserializes base64 payload",
|
||||||
|
"sinks": [
|
||||||
|
{
|
||||||
|
"sink_id": "MicronautDeserialize::handleUpload",
|
||||||
|
"label": "reachable",
|
||||||
|
"confidence": "high",
|
||||||
|
"dynamic_evidence": {
|
||||||
|
"covered_by_tests": [
|
||||||
|
"src/ControllerTest.java"
|
||||||
|
],
|
||||||
|
"coverage_files": [
|
||||||
|
"outputs/coverage.json"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"static_evidence": {
|
||||||
|
"call_path": [
|
||||||
|
"POST /mn/upload",
|
||||||
|
"Controller.handleUpload",
|
||||||
|
"ObjectInputStream.readObject"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"config_conditions": [],
|
||||||
|
"notes": "No guard; ObjectInputStream invoked on user-controlled bytes"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"version": "1.0.0",
|
||||||
|
"cases": [
|
||||||
|
{
|
||||||
|
"case_id": "java-micronaut-guarded:204",
|
||||||
|
"case_version": "1.0.0",
|
||||||
|
"notes": "Deserialization guarded by ALLOW_MN_DESER flag (unreachable by default)",
|
||||||
|
"sinks": [
|
||||||
|
{
|
||||||
|
"sink_id": "MicronautDeserializeGuarded::handleUpload",
|
||||||
|
"label": "unreachable",
|
||||||
|
"confidence": "high",
|
||||||
|
"dynamic_evidence": {
|
||||||
|
"covered_by_tests": [
|
||||||
|
"src/ControllerTest.java"
|
||||||
|
],
|
||||||
|
"coverage_files": [
|
||||||
|
"outputs/coverage.json"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"static_evidence": {
|
||||||
|
"call_path": [
|
||||||
|
"POST /mn/upload",
|
||||||
|
"Controller.handleUpload"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"config_conditions": [
|
||||||
|
"ALLOW_MN_DESER=true"
|
||||||
|
],
|
||||||
|
"notes": "Feature flag defaults to false; sink not executed without ALLOW_MN_DESER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -14,7 +14,9 @@
|
|||||||
"covered_by_tests": [
|
"covered_by_tests": [
|
||||||
"src/AppTest.java"
|
"src/AppTest.java"
|
||||||
],
|
],
|
||||||
"coverage_files": []
|
"coverage_files": [
|
||||||
|
"outputs/coverage.json"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"static_evidence": {
|
"static_evidence": {
|
||||||
"call_path": [
|
"call_path": [
|
||||||
|
|||||||
@@ -12,7 +12,7 @@
|
|||||||
"confidence": "high",
|
"confidence": "high",
|
||||||
"dynamic_evidence": {
|
"dynamic_evidence": {
|
||||||
"covered_by_tests": ["src/AppTest.java"],
|
"covered_by_tests": ["src/AppTest.java"],
|
||||||
"coverage_files": []
|
"coverage_files": ["outputs/coverage.json"]
|
||||||
},
|
},
|
||||||
"static_evidence": {
|
"static_evidence": {
|
||||||
"call_path": [
|
"call_path": [
|
||||||
|
|||||||
@@ -0,0 +1,34 @@
|
|||||||
|
{
|
||||||
|
"version": "1.0.0",
|
||||||
|
"cases": [
|
||||||
|
{
|
||||||
|
"case_id": "java-spring-reflection:205",
|
||||||
|
"case_version": "1.0.0",
|
||||||
|
"notes": "Reflection endpoint loads arbitrary classes supplied by caller",
|
||||||
|
"sinks": [
|
||||||
|
{
|
||||||
|
"sink_id": "SpringReflection::run",
|
||||||
|
"label": "reachable",
|
||||||
|
"confidence": "high",
|
||||||
|
"dynamic_evidence": {
|
||||||
|
"covered_by_tests": [
|
||||||
|
"src/ReflectControllerTest.java"
|
||||||
|
],
|
||||||
|
"coverage_files": [
|
||||||
|
"outputs/coverage.json"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"static_evidence": {
|
||||||
|
"call_path": [
|
||||||
|
"POST /api/reflect",
|
||||||
|
"ReflectController.run",
|
||||||
|
"Class.forName"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"config_conditions": [],
|
||||||
|
"notes": "User-controlled class name flows into Class.forName and reflection instantiation"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
id: "java-micronaut-deserialize:203"
|
||||||
|
language: java
|
||||||
|
project: micronaut-deserialize
|
||||||
|
version: "1.0.0"
|
||||||
|
description: "Micronaut-style controller performs unsafe deserialization on request payload"
|
||||||
|
entrypoints:
|
||||||
|
- "POST /mn/upload"
|
||||||
|
sinks:
|
||||||
|
- id: "MicronautDeserialize::handleUpload"
|
||||||
|
path: "bench.reachability.micronaut.Controller.handleUpload"
|
||||||
|
kind: "custom"
|
||||||
|
location:
|
||||||
|
file: src/Controller.java
|
||||||
|
line: 10
|
||||||
|
notes: "ObjectInputStream on user-controlled payload"
|
||||||
|
environment:
|
||||||
|
os_image: "eclipse-temurin:21-jdk"
|
||||||
|
runtime:
|
||||||
|
java: "21"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
resource_limits:
|
||||||
|
cpu: "2"
|
||||||
|
memory: "4Gi"
|
||||||
|
build:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
outputs:
|
||||||
|
artifact_path: outputs/binary.tar.gz
|
||||||
|
sbom_path: outputs/sbom.cdx.json
|
||||||
|
coverage_path: outputs/coverage.json
|
||||||
|
traces_dir: outputs/traces
|
||||||
|
attestation_path: outputs/attestation.json
|
||||||
|
test:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
expected_coverage: []
|
||||||
|
expected_traces: []
|
||||||
|
env:
|
||||||
|
JAVA_TOOL_OPTIONS: "-ea"
|
||||||
|
ground_truth:
|
||||||
|
summary: "Deserialization reachable"
|
||||||
|
evidence_files:
|
||||||
|
- "../benchmark/truth/java-micronaut-deserialize.json"
|
||||||
|
sandbox:
|
||||||
|
network: loopback
|
||||||
|
privileges: rootless
|
||||||
|
redaction:
|
||||||
|
pii: false
|
||||||
|
policy: "benchmark-default/v1"
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
case_id: "java-micronaut-deserialize:203"
|
||||||
|
entries:
|
||||||
|
http:
|
||||||
|
- id: "POST /mn/upload"
|
||||||
|
route: "/mn/upload"
|
||||||
|
method: "POST"
|
||||||
|
handler: "Controller.handleUpload"
|
||||||
|
description: "Binary payload base64-deserialized"
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
<groupId>org.stellaops.bench</groupId>
|
||||||
|
<artifactId>micronaut-deserialize</artifactId>
|
||||||
|
<version>1.0.0</version>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
<properties>
|
||||||
|
<maven.compiler.source>17</maven.compiler.source>
|
||||||
|
<maven.compiler.target>17</maven.compiler.target>
|
||||||
|
</properties>
|
||||||
|
</project>
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
package bench.reachability.micronaut;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Base64;
|
||||||
|
import java.io.*;
|
||||||
|
|
||||||
|
public class Controller {
|
||||||
|
// Unsafe deserialization sink (reachable)
|
||||||
|
public static Response handleUpload(Map<String, String> body) {
|
||||||
|
String payload = body.get("payload");
|
||||||
|
if (payload == null) {
|
||||||
|
return new Response(400, "bad request");
|
||||||
|
}
|
||||||
|
try (ObjectInputStream ois = new ObjectInputStream(
|
||||||
|
new ByteArrayInputStream(Base64.getDecoder().decode(payload)))) {
|
||||||
|
Object obj = ois.readObject();
|
||||||
|
return new Response(200, obj.toString());
|
||||||
|
} catch (Exception ex) {
|
||||||
|
return new Response(500, ex.getClass().getSimpleName());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public record Response(int status, String body) {}
|
||||||
|
}
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
package bench.reachability.micronaut;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.Base64;
|
||||||
|
|
||||||
|
// Simple assertion-based oracle (JUnit-free for offline determinism)
|
||||||
|
public class ControllerTest {
|
||||||
|
private static String serialize(Object obj) throws IOException {
|
||||||
|
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||||
|
try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
|
||||||
|
oos.writeObject(obj);
|
||||||
|
}
|
||||||
|
return Base64.getEncoder().encodeToString(bos.toByteArray());
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
Map<String, String> body = Map.of("payload", serialize("micronaut"));
|
||||||
|
var res = Controller.handleUpload(body);
|
||||||
|
assert res.status() == 200 : "status";
|
||||||
|
assert res.body().equals("micronaut") : "body";
|
||||||
|
|
||||||
|
File outDir = new File("outputs");
|
||||||
|
outDir.mkdirs();
|
||||||
|
try (FileWriter fw = new FileWriter(new File(outDir, "SINK_REACHED"))) {
|
||||||
|
fw.write("true");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
id: "java-micronaut-guarded:204"
|
||||||
|
language: java
|
||||||
|
project: micronaut-guarded
|
||||||
|
version: "1.0.0"
|
||||||
|
description: "Micronaut-style controller guards deserialization behind ALLOW_MN_DESER flag (unreachable by default)"
|
||||||
|
entrypoints:
|
||||||
|
- "POST /mn/upload"
|
||||||
|
sinks:
|
||||||
|
- id: "MicronautDeserializeGuarded::handleUpload"
|
||||||
|
path: "bench.reachability.micronautguard.Controller.handleUpload"
|
||||||
|
kind: "custom"
|
||||||
|
location:
|
||||||
|
file: src/Controller.java
|
||||||
|
line: 11
|
||||||
|
notes: "ObjectInputStream gated by ALLOW_MN_DESER"
|
||||||
|
environment:
|
||||||
|
os_image: "eclipse-temurin:21-jdk"
|
||||||
|
runtime:
|
||||||
|
java: "21"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
resource_limits:
|
||||||
|
cpu: "2"
|
||||||
|
memory: "4Gi"
|
||||||
|
build:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
outputs:
|
||||||
|
artifact_path: outputs/binary.tar.gz
|
||||||
|
sbom_path: outputs/sbom.cdx.json
|
||||||
|
coverage_path: outputs/coverage.json
|
||||||
|
traces_dir: outputs/traces
|
||||||
|
attestation_path: outputs/attestation.json
|
||||||
|
test:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
expected_coverage: []
|
||||||
|
expected_traces: []
|
||||||
|
env:
|
||||||
|
JAVA_TOOL_OPTIONS: "-ea"
|
||||||
|
ground_truth:
|
||||||
|
summary: "Guard blocks deserialization unless ALLOW_MN_DESER=true"
|
||||||
|
evidence_files:
|
||||||
|
- "../benchmark/truth/java-micronaut-guarded.json"
|
||||||
|
sandbox:
|
||||||
|
network: loopback
|
||||||
|
privileges: rootless
|
||||||
|
redaction:
|
||||||
|
pii: false
|
||||||
|
policy: "benchmark-default/v1"
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
case_id: "java-micronaut-guarded:204"
|
||||||
|
entries:
|
||||||
|
http:
|
||||||
|
- id: "POST /mn/upload"
|
||||||
|
route: "/mn/upload"
|
||||||
|
method: "POST"
|
||||||
|
handler: "Controller.handleUpload"
|
||||||
|
description: "Deserialization guarded by ALLOW_MN_DESER flag"
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
<groupId>org.stellaops.bench</groupId>
|
||||||
|
<artifactId>micronaut-guarded</artifactId>
|
||||||
|
<version>1.0.0</version>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
<properties>
|
||||||
|
<maven.compiler.source>17</maven.compiler.source>
|
||||||
|
<maven.compiler.target>17</maven.compiler.target>
|
||||||
|
</properties>
|
||||||
|
</project>
|
||||||
@@ -0,0 +1,27 @@
|
|||||||
|
package bench.reachability.micronautguard;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Base64;
|
||||||
|
import java.io.*;
|
||||||
|
|
||||||
|
public class Controller {
|
||||||
|
// Deserialization behind feature flag; unreachable unless ALLOW_MN_DESER=true
|
||||||
|
public static Response handleUpload(Map<String, String> body, Map<String, String> env) {
|
||||||
|
if (!"true".equals(env.getOrDefault("ALLOW_MN_DESER", "false"))) {
|
||||||
|
return new Response(403, "forbidden");
|
||||||
|
}
|
||||||
|
String payload = body.get("payload");
|
||||||
|
if (payload == null) {
|
||||||
|
return new Response(400, "bad request");
|
||||||
|
}
|
||||||
|
try (ObjectInputStream ois = new ObjectInputStream(
|
||||||
|
new ByteArrayInputStream(Base64.getDecoder().decode(payload)))) {
|
||||||
|
Object obj = ois.readObject();
|
||||||
|
return new Response(200, obj.toString());
|
||||||
|
} catch (Exception ex) {
|
||||||
|
return new Response(500, ex.getClass().getSimpleName());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public record Response(int status, String body) {}
|
||||||
|
}
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
package bench.reachability.micronautguard;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.Base64;
|
||||||
|
|
||||||
|
public class ControllerTest {
|
||||||
|
private static String serialize(Object obj) throws IOException {
|
||||||
|
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||||
|
try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
|
||||||
|
oos.writeObject(obj);
|
||||||
|
}
|
||||||
|
return Base64.getEncoder().encodeToString(bos.toByteArray());
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
Map<String, String> body = Map.of("payload", serialize("blocked"));
|
||||||
|
Map<String, String> env = Map.of("ALLOW_MN_DESER", "false");
|
||||||
|
var res = Controller.handleUpload(body, env);
|
||||||
|
assert res.status() == 403 : "status";
|
||||||
|
assert res.body().equals("forbidden") : "body";
|
||||||
|
|
||||||
|
File outDir = new File("outputs");
|
||||||
|
outDir.mkdirs();
|
||||||
|
try (FileWriter fw = new FileWriter(new File(outDir, "SINK_BLOCKED"))) {
|
||||||
|
fw.write("true");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
id: "java-spring-reflection:205"
|
||||||
|
language: java
|
||||||
|
project: spring-reflection
|
||||||
|
version: "1.0.0"
|
||||||
|
description: "Spring-style controller exposes reflection endpoint that loads arbitrary classes"
|
||||||
|
entrypoints:
|
||||||
|
- "POST /api/reflect"
|
||||||
|
sinks:
|
||||||
|
- id: "SpringReflection::run"
|
||||||
|
path: "bench.reachability.springreflection.ReflectController.run"
|
||||||
|
kind: "custom"
|
||||||
|
location:
|
||||||
|
file: src/ReflectController.java
|
||||||
|
line: 7
|
||||||
|
notes: "User-controlled Class.forName + newInstance"
|
||||||
|
environment:
|
||||||
|
os_image: "eclipse-temurin:21-jdk"
|
||||||
|
runtime:
|
||||||
|
java: "21"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
resource_limits:
|
||||||
|
cpu: "2"
|
||||||
|
memory: "4Gi"
|
||||||
|
build:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
source_date_epoch: 1730000000
|
||||||
|
outputs:
|
||||||
|
artifact_path: outputs/binary.tar.gz
|
||||||
|
sbom_path: outputs/sbom.cdx.json
|
||||||
|
coverage_path: outputs/coverage.json
|
||||||
|
traces_dir: outputs/traces
|
||||||
|
attestation_path: outputs/attestation.json
|
||||||
|
test:
|
||||||
|
command: "./build/build.sh"
|
||||||
|
expected_coverage: []
|
||||||
|
expected_traces: []
|
||||||
|
env:
|
||||||
|
JAVA_TOOL_OPTIONS: "-ea"
|
||||||
|
ground_truth:
|
||||||
|
summary: "Reflection sink reachable with user-controlled class name"
|
||||||
|
evidence_files:
|
||||||
|
- "../benchmark/truth/java-spring-reflection.json"
|
||||||
|
sandbox:
|
||||||
|
network: loopback
|
||||||
|
privileges: rootless
|
||||||
|
redaction:
|
||||||
|
pii: false
|
||||||
|
policy: "benchmark-default/v1"
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
case_id: "java-spring-reflection:205"
|
||||||
|
entries:
|
||||||
|
http:
|
||||||
|
- id: "POST /api/reflect"
|
||||||
|
route: "/api/reflect"
|
||||||
|
method: "POST"
|
||||||
|
handler: "ReflectController.run"
|
||||||
|
description: "Reflection endpoint loads arbitrary classes"
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
<groupId>org.stellaops.bench</groupId>
|
||||||
|
<artifactId>spring-reflection</artifactId>
|
||||||
|
<version>1.0.0</version>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
<properties>
|
||||||
|
<maven.compiler.source>17</maven.compiler.source>
|
||||||
|
<maven.compiler.target>17</maven.compiler.target>
|
||||||
|
</properties>
|
||||||
|
</project>
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
package bench.reachability.springreflection;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class ReflectController {
|
||||||
|
// Reflection sink: user controls Class.forName target
|
||||||
|
public static Response run(Map<String, String> body) {
|
||||||
|
String className = body.get("class");
|
||||||
|
if (className == null || className.isBlank()) {
|
||||||
|
return new Response(400, "bad request");
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
Class<?> type = Class.forName(className);
|
||||||
|
Object instance = type.getDeclaredConstructor().newInstance();
|
||||||
|
return new Response(200, instance.toString());
|
||||||
|
} catch (Exception ex) {
|
||||||
|
return new Response(500, ex.getClass().getSimpleName());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public record Response(int status, String body) {}
|
||||||
|
|
||||||
|
public static class Marker {
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return "marker";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
package bench.reachability.springreflection;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.FileWriter;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public class ReflectControllerTest {
|
||||||
|
public static void main(String[] args) throws Exception {
|
||||||
|
Map<String, String> body = Map.of("class", ReflectController.Marker.class.getName());
|
||||||
|
var res = ReflectController.run(body);
|
||||||
|
assert res.status() == 200 : "status";
|
||||||
|
assert res.body().equals("marker") : "body";
|
||||||
|
|
||||||
|
File outDir = new File("outputs");
|
||||||
|
outDir.mkdirs();
|
||||||
|
try (FileWriter fw = new FileWriter(new File(outDir, "SINK_REACHED"))) {
|
||||||
|
fw.write("true");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,11 +9,14 @@ export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
|||||||
export GIT_TERMINAL_PROMPT=0
|
export GIT_TERMINAL_PROMPT=0
|
||||||
export TZ=UTC
|
export TZ=UTC
|
||||||
|
|
||||||
|
source "${ROOT}/tools/java/ensure_jdk.sh"
|
||||||
|
ensure_bench_jdk
|
||||||
|
|
||||||
# 1) Validate schemas (truth + submission samples)
|
# 1) Validate schemas (truth + submission samples)
|
||||||
python "${ROOT}/tools/validate.py" --schemas "${ROOT}/schemas"
|
python "${ROOT}/tools/validate.py" --schemas "${ROOT}/schemas"
|
||||||
|
|
||||||
# 2) Build all cases deterministically (skips Java since JDK may be missing)
|
# 2) Build all cases deterministically (including Java via vendored JDK)
|
||||||
python "${ROOT}/tools/build/build_all.py" --cases "${ROOT}/cases" --skip-lang java
|
python "${ROOT}/tools/build/build_all.py" --cases "${ROOT}/cases"
|
||||||
|
|
||||||
# 3) Run Semgrep baseline (offline-safe)
|
# 3) Run Semgrep baseline (offline-safe)
|
||||||
bash "${ROOT}/baselines/semgrep/run_all.sh" "${ROOT}/cases" "${ROOT}/out/semgrep-baseline"
|
bash "${ROOT}/baselines/semgrep/run_all.sh" "${ROOT}/cases" "${ROOT}/out/semgrep-baseline"
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ This guide explains how to produce a compliant submission for the Stella Ops rea
|
|||||||
python tools/build/build_all.py --cases cases
|
python tools/build/build_all.py --cases cases
|
||||||
```
|
```
|
||||||
- Sets `SOURCE_DATE_EPOCH`.
|
- Sets `SOURCE_DATE_EPOCH`.
|
||||||
- Skips Java by default if JDK is unavailable (pass `--skip-lang` as needed).
|
- Uses vendored Temurin 21 via `tools/java/ensure_jdk.sh` when `JAVA_HOME`/`javac` are missing; pass `--skip-lang` if another toolchain is unavailable on your runner.
|
||||||
|
|
||||||
2) **Run your analyzer**
|
2) **Run your analyzer**
|
||||||
- For each case, produce sink predictions in memory-safe JSON.
|
- For each case, produce sink predictions in memory-safe JSON.
|
||||||
|
|||||||
62
bench/reachability-benchmark/tools/java/ensure_jdk.sh
Normal file
62
bench/reachability-benchmark/tools/java/ensure_jdk.sh
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Offline-friendly helper to make a JDK available for benchmark builds.
|
||||||
|
# Order of preference:
|
||||||
|
# 1) Respect an existing JAVA_HOME when it contains javac.
|
||||||
|
# 2) Use javac from PATH when present.
|
||||||
|
# 3) Extract a vendored archive (jdk-21.0.1.tar.gz) into .jdk/ and use it.
|
||||||
|
|
||||||
|
ensure_bench_jdk() {
|
||||||
|
# Re-use an explicitly provided JAVA_HOME when it already has javac.
|
||||||
|
if [[ -n "${JAVA_HOME:-}" && -x "${JAVA_HOME}/bin/javac" ]]; then
|
||||||
|
export PATH="${JAVA_HOME}/bin:${PATH}"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Use any javac already on PATH.
|
||||||
|
if command -v javac >/dev/null 2>&1; then
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
local script_dir bench_root cache_dir archive_dir archive_path candidate
|
||||||
|
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
bench_root="$(cd "${script_dir}/../.." && pwd)"
|
||||||
|
repo_root="$(cd "${bench_root}/../.." && pwd)"
|
||||||
|
cache_dir="${bench_root}/.jdk"
|
||||||
|
archive_dir="${cache_dir}/jdk-21.0.1+12"
|
||||||
|
|
||||||
|
# Prefer an archive co-located with this script; fall back to the repo copy.
|
||||||
|
for candidate in \
|
||||||
|
"${script_dir}/jdk-21.0.1.tar.gz" \
|
||||||
|
"${repo_root}/src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1.tar.gz"
|
||||||
|
do
|
||||||
|
if [[ -f "${candidate}" ]]; then
|
||||||
|
archive_path="${candidate}"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ -z "${archive_path:-}" ]]; then
|
||||||
|
echo "[ensure_jdk] No JDK found. Set JAVA_HOME or place jdk-21.0.1.tar.gz under tools/java/." >&2
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "${cache_dir}"
|
||||||
|
if [[ ! -d "${archive_dir}" ]]; then
|
||||||
|
tar -xzf "${archive_path}" -C "${cache_dir}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -x "${archive_dir}/bin/javac" ]]; then
|
||||||
|
echo "[ensure_jdk] Extracted archive but javac not found under ${archive_dir}" >&2
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
export JAVA_HOME="${archive_dir}"
|
||||||
|
export PATH="${JAVA_HOME}/bin:${PATH}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Allow running as a script for quick verification.
|
||||||
|
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
|
||||||
|
if ensure_bench_jdk; then
|
||||||
|
java -version
|
||||||
|
fi
|
||||||
|
fi
|
||||||
17
bench/reachability-benchmark/tools/node/node
Normal file
17
bench/reachability-benchmark/tools/node/node
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Lightweight Node shim to support environments where only node.exe (Windows) is present.
|
||||||
|
|
||||||
|
if command -v node >/dev/null 2>&1; then
|
||||||
|
exec node "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if command -v node.exe >/dev/null 2>&1; then
|
||||||
|
exec node.exe "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -x "/mnt/c/Program Files/nodejs/node.exe" ]; then
|
||||||
|
exec "/mnt/c/Program Files/nodejs/node.exe" "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "node not found; install Node.js or adjust PATH" >&2
|
||||||
|
exit 127
|
||||||
@@ -10,6 +10,7 @@ This directory contains deterministic deployment bundles for the core Stella Ops
|
|||||||
- `compose/docker-compose.telemetry.yaml` – optional OpenTelemetry collector overlay (mutual TLS, OTLP pipelines).
|
- `compose/docker-compose.telemetry.yaml` – optional OpenTelemetry collector overlay (mutual TLS, OTLP pipelines).
|
||||||
- `compose/docker-compose.telemetry-storage.yaml` – optional Prometheus/Tempo/Loki stack for observability backends.
|
- `compose/docker-compose.telemetry-storage.yaml` – optional Prometheus/Tempo/Loki stack for observability backends.
|
||||||
- `helm/stellaops/` – multi-profile Helm chart with values files for dev/stage/airgap.
|
- `helm/stellaops/` – multi-profile Helm chart with values files for dev/stage/airgap.
|
||||||
|
- `helm/stellaops/INSTALL.md` – install/runbook for prod and airgap profiles with digest pins.
|
||||||
- `telemetry/` – shared OpenTelemetry collector configuration and certificate artefacts (generated via tooling).
|
- `telemetry/` – shared OpenTelemetry collector configuration and certificate artefacts (generated via tooling).
|
||||||
- `tools/validate-profiles.sh` – helper that runs `docker compose config` and `helm lint/template` for every profile.
|
- `tools/validate-profiles.sh` – helper that runs `docker compose config` and `helm lint/template` for every profile.
|
||||||
|
|
||||||
|
|||||||
@@ -13,7 +13,12 @@ These Compose bundles ship the minimum services required to exercise the scanner
|
|||||||
| `docker-compose.mirror.yaml` | Managed mirror topology for `*.stella-ops.org` distribution (Concelier + Excititor + CDN gateway). |
|
| `docker-compose.mirror.yaml` | Managed mirror topology for `*.stella-ops.org` distribution (Concelier + Excititor + CDN gateway). |
|
||||||
| `docker-compose.telemetry.yaml` | Optional OpenTelemetry collector overlay (mutual TLS, OTLP ingest endpoints). |
|
| `docker-compose.telemetry.yaml` | Optional OpenTelemetry collector overlay (mutual TLS, OTLP ingest endpoints). |
|
||||||
| `docker-compose.telemetry-storage.yaml` | Prometheus/Tempo/Loki storage overlay with multi-tenant defaults. |
|
| `docker-compose.telemetry-storage.yaml` | Prometheus/Tempo/Loki storage overlay with multi-tenant defaults. |
|
||||||
|
| `docker-compose.gpu.yaml` | Optional GPU overlay enabling NVIDIA devices for Advisory AI web/worker. Apply with `-f docker-compose.<env>.yaml -f docker-compose.gpu.yaml`. |
|
||||||
| `env/*.env.example` | Seed `.env` files that document required secrets and ports per profile. |
|
| `env/*.env.example` | Seed `.env` files that document required secrets and ports per profile. |
|
||||||
|
| `scripts/backup.sh` | Pauses workers and creates tar.gz of Mongo/MinIO/Redis volumes (deterministic snapshot). |
|
||||||
|
| `scripts/reset.sh` | Stops the stack and removes Mongo/MinIO/Redis volumes after explicit confirmation. |
|
||||||
|
| `scripts/quickstart.sh` | Helper to validate config and start dev stack; set `USE_MOCK=1` to include `docker-compose.mock.yaml` overlay. |
|
||||||
|
| `docker-compose.mock.yaml` | Dev-only overlay with placeholder digests for missing services (orchestrator, policy-registry, packs, task-runner, VEX/Vuln stack). Use only with mock release manifest `deploy/releases/2025.09-mock-dev.yaml`. |
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
@@ -25,6 +30,8 @@ docker compose --env-file dev.env -f docker-compose.dev.yaml up -d
|
|||||||
|
|
||||||
The stage and airgap variants behave the same way—swap the file names accordingly. All profiles expose 443/8443 for the UI and REST APIs, and they share a `stellaops` Docker network scoped to the compose project.
|
The stage and airgap variants behave the same way—swap the file names accordingly. All profiles expose 443/8443 for the UI and REST APIs, and they share a `stellaops` Docker network scoped to the compose project.
|
||||||
|
|
||||||
|
> **Surface.Secrets:** set `SCANNER_SURFACE_SECRETS_PROVIDER`/`SCANNER_SURFACE_SECRETS_ROOT` in your `.env` and point `SURFACE_SECRETS_HOST_PATH` to the decrypted bundle path (default `./offline/surface-secrets`). The stack mounts that path read-only into Scanner Web/Worker so `secret://` references resolve without embedding plaintext.
|
||||||
|
|
||||||
> **Graph Explorer reminder:** If you enable Cartographer or Graph API containers alongside these profiles, update `etc/authority.yaml` so the `cartographer-service` client is marked with `properties.serviceIdentity: "cartographer"` and carries a tenant hint. The Authority host now refuses `graph:write` tokens without that marker, so apply the configuration change before rolling out the updated images.
|
> **Graph Explorer reminder:** If you enable Cartographer or Graph API containers alongside these profiles, update `etc/authority.yaml` so the `cartographer-service` client is marked with `properties.serviceIdentity: "cartographer"` and carries a tenant hint. The Authority host now refuses `graph:write` tokens without that marker, so apply the configuration change before rolling out the updated images.
|
||||||
|
|
||||||
### Telemetry collector overlay
|
### Telemetry collector overlay
|
||||||
@@ -101,4 +108,29 @@ The Helm chart mirrors these settings under `services.advisory-ai-web` / `adviso
|
|||||||
2. Update image digests in the relevant Compose file(s).
|
2. Update image digests in the relevant Compose file(s).
|
||||||
3. Re-run `docker compose config` to confirm the bundle is deterministic.
|
3. Re-run `docker compose config` to confirm the bundle is deterministic.
|
||||||
|
|
||||||
|
### Mock overlay for missing digests (dev only)
|
||||||
|
|
||||||
|
Until official digests land, you can exercise Compose packaging with mock placeholders:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# assumes docker-compose.dev.yaml as the base profile
|
||||||
|
USE_MOCK=1 ./scripts/quickstart.sh env/dev.env.example
|
||||||
|
```
|
||||||
|
|
||||||
|
The overlay pins the missing services (orchestrator, policy-registry, packs-registry, task-runner, VEX/Vuln stack) to mock digests from `deploy/releases/2025.09-mock-dev.yaml` and starts their real entrypoints so integration flows can be exercised end-to-end. Replace the mock pins with production digests once releases publish; keep the mock overlay dev-only.
|
||||||
|
|
||||||
Keep digests synchronized between Compose, Helm, and the release manifest to preserve reproducibility guarantees. `deploy/tools/validate-profiles.sh` performs a quick audit.
|
Keep digests synchronized between Compose, Helm, and the release manifest to preserve reproducibility guarantees. `deploy/tools/validate-profiles.sh` performs a quick audit.
|
||||||
|
|
||||||
|
### GPU toggle for Advisory AI
|
||||||
|
|
||||||
|
GPU is disabled by default. To run inference on NVIDIA GPUs:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose \
|
||||||
|
--env-file prod.env \
|
||||||
|
-f docker-compose.prod.yaml \
|
||||||
|
-f docker-compose.gpu.yaml \
|
||||||
|
up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
The GPU overlay requests one GPU for `advisory-ai-worker` and `advisory-ai-web` and sets `ADVISORY_AI_INFERENCE_GPU=true`. Ensure the host has the NVIDIA container runtime and that the base compose file still sets the correct digests.
|
||||||
|
|||||||
@@ -15,6 +15,9 @@ volumes:
|
|||||||
nats-data:
|
nats-data:
|
||||||
scanner-surface-cache:
|
scanner-surface-cache:
|
||||||
postgres-data:
|
postgres-data:
|
||||||
|
advisory-ai-queue:
|
||||||
|
advisory-ai-plans:
|
||||||
|
advisory-ai-outputs:
|
||||||
|
|
||||||
services:
|
services:
|
||||||
mongo:
|
mongo:
|
||||||
@@ -210,10 +213,13 @@ services:
|
|||||||
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
||||||
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
||||||
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
||||||
|
SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
|
||||||
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
||||||
|
SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
|
||||||
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
||||||
volumes:
|
volumes:
|
||||||
- scanner-surface-cache:/var/lib/stellaops/surface
|
- scanner-surface-cache:/var/lib/stellaops/surface
|
||||||
|
- ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
|
||||||
ports:
|
ports:
|
||||||
- "${SCANNER_WEB_PORT:-8444}:8444"
|
- "${SCANNER_WEB_PORT:-8444}:8444"
|
||||||
networks:
|
networks:
|
||||||
@@ -243,10 +249,13 @@ services:
|
|||||||
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
||||||
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
||||||
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
||||||
|
SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
|
||||||
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
||||||
|
SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
|
||||||
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
||||||
volumes:
|
volumes:
|
||||||
- scanner-surface-cache:/var/lib/stellaops/surface
|
- scanner-surface-cache:/var/lib/stellaops/surface
|
||||||
|
- ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|||||||
191
deploy/compose/docker-compose.cas.yaml
Normal file
191
deploy/compose/docker-compose.cas.yaml
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
# Content Addressable Storage (CAS) Infrastructure
|
||||||
|
# Uses RustFS for S3-compatible immutable object storage
|
||||||
|
# Aligned with best-in-class vulnerability scanner retention policies
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# docker compose -f docker-compose.cas.yaml up -d
|
||||||
|
# docker compose -f docker-compose.cas.yaml -f docker-compose.dev.yaml up -d
|
||||||
|
|
||||||
|
x-release-labels: &release-labels
|
||||||
|
com.stellaops.release.version: "2025.10.0-edge"
|
||||||
|
com.stellaops.release.channel: "edge"
|
||||||
|
com.stellaops.profile: "cas"
|
||||||
|
|
||||||
|
x-cas-config: &cas-config
|
||||||
|
# Retention policies (aligned with Trivy/Grype/Anchore Enterprise)
|
||||||
|
# - vulnerability-db: 7 days (matches Trivy default)
|
||||||
|
# - sbom-artifacts: 365 days (audit compliance)
|
||||||
|
# - scan-results: 90 days (SOC2/ISO27001 typical)
|
||||||
|
# - evidence-bundles: indefinite (immutable, content-addressed)
|
||||||
|
# - attestations: indefinite (in-toto/DSSE signed)
|
||||||
|
CAS__RETENTION__VULNERABILITY_DB_DAYS: "7"
|
||||||
|
CAS__RETENTION__SBOM_ARTIFACTS_DAYS: "365"
|
||||||
|
CAS__RETENTION__SCAN_RESULTS_DAYS: "90"
|
||||||
|
CAS__RETENTION__EVIDENCE_BUNDLES_DAYS: "0" # 0 = indefinite
|
||||||
|
CAS__RETENTION__ATTESTATIONS_DAYS: "0" # 0 = indefinite
|
||||||
|
CAS__RETENTION__TEMP_ARTIFACTS_DAYS: "1"
|
||||||
|
|
||||||
|
networks:
|
||||||
|
cas:
|
||||||
|
driver: bridge
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
rustfs-cas-data:
|
||||||
|
driver: local
|
||||||
|
driver_opts:
|
||||||
|
type: none
|
||||||
|
o: bind
|
||||||
|
device: ${CAS_DATA_PATH:-/var/lib/stellaops/cas}
|
||||||
|
rustfs-evidence-data:
|
||||||
|
driver: local
|
||||||
|
driver_opts:
|
||||||
|
type: none
|
||||||
|
o: bind
|
||||||
|
device: ${CAS_EVIDENCE_PATH:-/var/lib/stellaops/evidence}
|
||||||
|
rustfs-attestation-data:
|
||||||
|
driver: local
|
||||||
|
driver_opts:
|
||||||
|
type: none
|
||||||
|
o: bind
|
||||||
|
device: ${CAS_ATTESTATION_PATH:-/var/lib/stellaops/attestations}
|
||||||
|
|
||||||
|
services:
|
||||||
|
# Primary CAS storage - runtime facts, signals, replay artifacts
|
||||||
|
rustfs-cas:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
RUSTFS__STORAGE__DEDUP: "true"
|
||||||
|
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||||
|
RUSTFS__STORAGE__COMPRESSION_LEVEL: "${RUSTFS_COMPRESSION_LEVEL:-3}"
|
||||||
|
# Bucket lifecycle (retention enforcement)
|
||||||
|
RUSTFS__LIFECYCLE__ENABLED: "true"
|
||||||
|
RUSTFS__LIFECYCLE__SCAN_INTERVAL_HOURS: "24"
|
||||||
|
RUSTFS__LIFECYCLE__DEFAULT_RETENTION_DAYS: "90"
|
||||||
|
# Access control
|
||||||
|
RUSTFS__AUTH__ENABLED: "${RUSTFS_AUTH_ENABLED:-true}"
|
||||||
|
RUSTFS__AUTH__API_KEY: "${RUSTFS_CAS_API_KEY:-cas-api-key-change-me}"
|
||||||
|
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_CAS_READONLY_KEY:-cas-readonly-key-change-me}"
|
||||||
|
# Service account configuration
|
||||||
|
RUSTFS__ACCOUNTS__SCANNER__KEY: "${RUSTFS_SCANNER_KEY:-scanner-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__SCANNER__BUCKETS: "scanner-artifacts,surface-cache,runtime-facts"
|
||||||
|
RUSTFS__ACCOUNTS__SCANNER__PERMISSIONS: "read,write"
|
||||||
|
RUSTFS__ACCOUNTS__SIGNALS__KEY: "${RUSTFS_SIGNALS_KEY:-signals-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__SIGNALS__BUCKETS: "runtime-facts,signals-data,provenance-feed"
|
||||||
|
RUSTFS__ACCOUNTS__SIGNALS__PERMISSIONS: "read,write"
|
||||||
|
RUSTFS__ACCOUNTS__REPLAY__KEY: "${RUSTFS_REPLAY_KEY:-replay-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__REPLAY__BUCKETS: "replay-bundles,inputs-lock"
|
||||||
|
RUSTFS__ACCOUNTS__REPLAY__PERMISSIONS: "read,write"
|
||||||
|
RUSTFS__ACCOUNTS__READONLY__KEY: "${RUSTFS_READONLY_KEY:-readonly-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__READONLY__BUCKETS: "*"
|
||||||
|
RUSTFS__ACCOUNTS__READONLY__PERMISSIONS: "read"
|
||||||
|
<<: *cas-config
|
||||||
|
volumes:
|
||||||
|
- rustfs-cas-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_CAS_PORT:-8180}:8080"
|
||||||
|
networks:
|
||||||
|
- cas
|
||||||
|
labels: *release-labels
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 10s
|
||||||
|
|
||||||
|
# Evidence storage - Merkle roots, hash chains, evidence bundles (immutable)
|
||||||
|
rustfs-evidence:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data", "--immutable"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
RUSTFS__STORAGE__DEDUP: "true"
|
||||||
|
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||||
|
RUSTFS__STORAGE__IMMUTABLE: "true" # Write-once, never delete
|
||||||
|
# Access control
|
||||||
|
RUSTFS__AUTH__ENABLED: "true"
|
||||||
|
RUSTFS__AUTH__API_KEY: "${RUSTFS_EVIDENCE_API_KEY:-evidence-api-key-change-me}"
|
||||||
|
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_EVIDENCE_READONLY_KEY:-evidence-readonly-key-change-me}"
|
||||||
|
# Service accounts
|
||||||
|
RUSTFS__ACCOUNTS__LEDGER__KEY: "${RUSTFS_LEDGER_KEY:-ledger-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__LEDGER__BUCKETS: "evidence-bundles,merkle-roots,hash-chains"
|
||||||
|
RUSTFS__ACCOUNTS__LEDGER__PERMISSIONS: "read,write"
|
||||||
|
RUSTFS__ACCOUNTS__EXPORTER__KEY: "${RUSTFS_EXPORTER_KEY:-exporter-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__EXPORTER__BUCKETS: "evidence-bundles"
|
||||||
|
RUSTFS__ACCOUNTS__EXPORTER__PERMISSIONS: "read"
|
||||||
|
volumes:
|
||||||
|
- rustfs-evidence-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_EVIDENCE_PORT:-8181}:8080"
|
||||||
|
networks:
|
||||||
|
- cas
|
||||||
|
labels: *release-labels
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 10s
|
||||||
|
|
||||||
|
# Attestation storage - DSSE envelopes, in-toto attestations (immutable)
|
||||||
|
rustfs-attestation:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data", "--immutable"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: "${RUSTFS_LOG_LEVEL:-info}"
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
RUSTFS__STORAGE__DEDUP: "true"
|
||||||
|
RUSTFS__STORAGE__COMPRESSION: "${RUSTFS_COMPRESSION:-zstd}"
|
||||||
|
RUSTFS__STORAGE__IMMUTABLE: "true" # Write-once, never delete
|
||||||
|
# Access control
|
||||||
|
RUSTFS__AUTH__ENABLED: "true"
|
||||||
|
RUSTFS__AUTH__API_KEY: "${RUSTFS_ATTESTATION_API_KEY:-attestation-api-key-change-me}"
|
||||||
|
RUSTFS__AUTH__READONLY_KEY: "${RUSTFS_ATTESTATION_READONLY_KEY:-attestation-readonly-key-change-me}"
|
||||||
|
# Service accounts
|
||||||
|
RUSTFS__ACCOUNTS__ATTESTOR__KEY: "${RUSTFS_ATTESTOR_KEY:-attestor-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__ATTESTOR__BUCKETS: "attestations,dsse-envelopes,rekor-receipts"
|
||||||
|
RUSTFS__ACCOUNTS__ATTESTOR__PERMISSIONS: "read,write"
|
||||||
|
RUSTFS__ACCOUNTS__VERIFIER__KEY: "${RUSTFS_VERIFIER_KEY:-verifier-svc-key}"
|
||||||
|
RUSTFS__ACCOUNTS__VERIFIER__BUCKETS: "attestations,dsse-envelopes,rekor-receipts"
|
||||||
|
RUSTFS__ACCOUNTS__VERIFIER__PERMISSIONS: "read"
|
||||||
|
volumes:
|
||||||
|
- rustfs-attestation-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_ATTESTATION_PORT:-8182}:8080"
|
||||||
|
networks:
|
||||||
|
- cas
|
||||||
|
labels: *release-labels
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 10s
|
||||||
|
|
||||||
|
# Lifecycle manager - enforces retention policies
|
||||||
|
cas-lifecycle:
|
||||||
|
image: registry.stella-ops.org/stellaops/cas-lifecycle:2025.10.0-edge
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
rustfs-cas:
|
||||||
|
condition: service_healthy
|
||||||
|
environment:
|
||||||
|
LIFECYCLE__CAS__ENDPOINT: "http://rustfs-cas:8080"
|
||||||
|
LIFECYCLE__CAS__API_KEY: "${RUSTFS_CAS_API_KEY:-cas-api-key-change-me}"
|
||||||
|
LIFECYCLE__SCHEDULE__CRON: "${LIFECYCLE_CRON:-0 3 * * *}" # 3 AM daily
|
||||||
|
LIFECYCLE__POLICIES__VULNERABILITY_DB: "7d"
|
||||||
|
LIFECYCLE__POLICIES__SBOM_ARTIFACTS: "365d"
|
||||||
|
LIFECYCLE__POLICIES__SCAN_RESULTS: "90d"
|
||||||
|
LIFECYCLE__POLICIES__TEMP_ARTIFACTS: "1d"
|
||||||
|
LIFECYCLE__TELEMETRY__ENABLED: "${LIFECYCLE_TELEMETRY:-true}"
|
||||||
|
LIFECYCLE__TELEMETRY__OTLP_ENDPOINT: "${OTLP_ENDPOINT:-}"
|
||||||
|
networks:
|
||||||
|
- cas
|
||||||
|
labels: *release-labels
|
||||||
@@ -329,3 +329,21 @@ services:
|
|||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
|
cryptopro-csp:
|
||||||
|
build:
|
||||||
|
context: ../..
|
||||||
|
dockerfile: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
args:
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
|
volumes:
|
||||||
|
- ../../opt/cryptopro/downloads:/opt/cryptopro/downloads:ro
|
||||||
|
ports:
|
||||||
|
- "${CRYPTOPRO_PORT:-18080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|||||||
26
deploy/compose/docker-compose.gpu.yaml
Normal file
26
deploy/compose/docker-compose.gpu.yaml
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
version: "3.9"
|
||||||
|
|
||||||
|
services:
|
||||||
|
advisory-ai-worker:
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
reservations:
|
||||||
|
devices:
|
||||||
|
- capabilities: [gpu]
|
||||||
|
driver: nvidia
|
||||||
|
count: 1
|
||||||
|
environment:
|
||||||
|
ADVISORY_AI_INFERENCE_GPU: "true"
|
||||||
|
runtime: nvidia
|
||||||
|
|
||||||
|
advisory-ai-web:
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
reservations:
|
||||||
|
devices:
|
||||||
|
- capabilities: [gpu]
|
||||||
|
driver: nvidia
|
||||||
|
count: 1
|
||||||
|
environment:
|
||||||
|
ADVISORY_AI_INFERENCE_GPU: "true"
|
||||||
|
runtime: nvidia
|
||||||
90
deploy/compose/docker-compose.mock.yaml
Normal file
90
deploy/compose/docker-compose.mock.yaml
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
x-release-labels: &release-labels
|
||||||
|
com.stellaops.release.version: "2025.09.2-mock"
|
||||||
|
com.stellaops.release.channel: "dev-mock"
|
||||||
|
com.stellaops.profile: "mock-overlay"
|
||||||
|
|
||||||
|
services:
|
||||||
|
orchestrator:
|
||||||
|
image: registry.stella-ops.org/stellaops/orchestrator@sha256:97f12856ce870bafd3328bda86833bcccbf56d255941d804966b5557f6610119
|
||||||
|
command: ["dotnet", "StellaOps.Orchestrator.WebService.dll"]
|
||||||
|
depends_on:
|
||||||
|
- mongo
|
||||||
|
- nats
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
policy-registry:
|
||||||
|
image: registry.stella-ops.org/stellaops/policy-registry@sha256:c6cad8055e9827ebcbebb6ad4d6866dce4b83a0a49b0a8a6500b736a5cb26fa7
|
||||||
|
command: ["dotnet", "StellaOps.Policy.Engine.dll"]
|
||||||
|
depends_on:
|
||||||
|
- mongo
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
vex-lens:
|
||||||
|
image: registry.stella-ops.org/stellaops/vex-lens@sha256:b44e63ecfeebc345a70c073c1ce5ace709c58be0ffaad0e2862758aeee3092fb
|
||||||
|
command: ["dotnet", "StellaOps.VexLens.dll"]
|
||||||
|
depends_on:
|
||||||
|
- mongo
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
issuer-directory:
|
||||||
|
image: registry.stella-ops.org/stellaops/issuer-directory@sha256:67e8ef02c97d3156741e857756994888f30c373ace8e84886762edba9dc51914
|
||||||
|
command: ["dotnet", "StellaOps.IssuerDirectory.Web.dll"]
|
||||||
|
depends_on:
|
||||||
|
- mongo
|
||||||
|
- authority
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
findings-ledger:
|
||||||
|
image: registry.stella-ops.org/stellaops/findings-ledger@sha256:71d4c361ba8b2f8b69d652597bc3f2efc8a64f93fab854ce25272a88506df49c
|
||||||
|
command: ["dotnet", "StellaOps.Findings.Ledger.WebService.dll"]
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- authority
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
vuln-explorer-api:
|
||||||
|
image: registry.stella-ops.org/stellaops/vuln-explorer-api@sha256:7fc7e43a05cbeb0106ce7d4d634612e83de6fdc119aaab754a71c1d60b82841d
|
||||||
|
command: ["dotnet", "StellaOps.VulnExplorer.Api.dll"]
|
||||||
|
depends_on:
|
||||||
|
- findings-ledger
|
||||||
|
- authority
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
packs-registry:
|
||||||
|
image: registry.stella-ops.org/stellaops/packs-registry@sha256:1f5e9416c4dc608594ad6fad87c24d72134427f899c192b494e22b268499c791
|
||||||
|
command: ["dotnet", "StellaOps.PacksRegistry.dll"]
|
||||||
|
depends_on:
|
||||||
|
- mongo
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
task-runner:
|
||||||
|
image: registry.stella-ops.org/stellaops/task-runner@sha256:eb5ad992b49a41554f41516be1a6afcfa6522faf2111c08ff2b3664ad2fc954b
|
||||||
|
command: ["dotnet", "StellaOps.TaskRunner.WebService.dll"]
|
||||||
|
depends_on:
|
||||||
|
- packs-registry
|
||||||
|
- postgres
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
|
|
||||||
|
cryptopro-csp:
|
||||||
|
build:
|
||||||
|
context: ../..
|
||||||
|
dockerfile: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
args:
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
|
environment:
|
||||||
|
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
|
volumes:
|
||||||
|
- ../../opt/cryptopro/downloads:/opt/cryptopro/downloads:ro
|
||||||
|
ports:
|
||||||
|
- "${CRYPTOPRO_PORT:-18080}:8080"
|
||||||
|
labels: *release-labels
|
||||||
|
networks: [stellaops]
|
||||||
3
deploy/compose/env/airgap.env.example
vendored
3
deploy/compose/env/airgap.env.example
vendored
@@ -33,7 +33,10 @@ SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
|
|||||||
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
|
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
|
||||||
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
|
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
|
||||||
SCANNER_SURFACE_SECRETS_PROVIDER=file
|
SCANNER_SURFACE_SECRETS_PROVIDER=file
|
||||||
|
SCANNER_SURFACE_SECRETS_NAMESPACE=
|
||||||
SCANNER_SURFACE_SECRETS_ROOT=/etc/stellaops/secrets
|
SCANNER_SURFACE_SECRETS_ROOT=/etc/stellaops/secrets
|
||||||
|
SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER=
|
||||||
|
SURFACE_SECRETS_HOST_PATH=./offline/surface-secrets
|
||||||
SCHEDULER_QUEUE_KIND=Nats
|
SCHEDULER_QUEUE_KIND=Nats
|
||||||
SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
|
SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
|
||||||
SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
|
SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
|
||||||
|
|||||||
118
deploy/compose/env/cas.env.example
vendored
Normal file
118
deploy/compose/env/cas.env.example
vendored
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
# CAS (Content Addressable Storage) Environment Configuration
|
||||||
|
# Copy to .env and customize for your deployment
|
||||||
|
#
|
||||||
|
# Aligned with best-in-class vulnerability scanner retention policies:
|
||||||
|
# - Trivy: 7 days vulnerability DB
|
||||||
|
# - Grype: 5 days DB, configurable
|
||||||
|
# - Anchore Enterprise: 90-365 days typical
|
||||||
|
# - Snyk Enterprise: 365 days
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# DATA PATHS (ensure directories exist with proper permissions)
|
||||||
|
# =============================================================================
|
||||||
|
CAS_DATA_PATH=/var/lib/stellaops/cas
|
||||||
|
CAS_EVIDENCE_PATH=/var/lib/stellaops/evidence
|
||||||
|
CAS_ATTESTATION_PATH=/var/lib/stellaops/attestations
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# RUSTFS CONFIGURATION
|
||||||
|
# =============================================================================
|
||||||
|
RUSTFS_LOG_LEVEL=info
|
||||||
|
RUSTFS_COMPRESSION=zstd
|
||||||
|
RUSTFS_COMPRESSION_LEVEL=3
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# PORTS
|
||||||
|
# =============================================================================
|
||||||
|
RUSTFS_CAS_PORT=8180
|
||||||
|
RUSTFS_EVIDENCE_PORT=8181
|
||||||
|
RUSTFS_ATTESTATION_PORT=8182
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# ACCESS CONTROL - API KEYS
|
||||||
|
# IMPORTANT: Change these in production!
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# CAS Storage (mutable, lifecycle-managed)
|
||||||
|
RUSTFS_CAS_API_KEY=cas-api-key-CHANGE-IN-PRODUCTION
|
||||||
|
RUSTFS_CAS_READONLY_KEY=cas-readonly-key-CHANGE-IN-PRODUCTION
|
||||||
|
|
||||||
|
# Evidence Storage (immutable)
|
||||||
|
RUSTFS_EVIDENCE_API_KEY=evidence-api-key-CHANGE-IN-PRODUCTION
|
||||||
|
RUSTFS_EVIDENCE_READONLY_KEY=evidence-readonly-key-CHANGE-IN-PRODUCTION
|
||||||
|
|
||||||
|
# Attestation Storage (immutable)
|
||||||
|
RUSTFS_ATTESTATION_API_KEY=attestation-api-key-CHANGE-IN-PRODUCTION
|
||||||
|
RUSTFS_ATTESTATION_READONLY_KEY=attestation-readonly-key-CHANGE-IN-PRODUCTION
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# SERVICE ACCOUNT KEYS
|
||||||
|
# Each service has its own key for fine-grained access control
|
||||||
|
# IMPORTANT: Generate unique keys per environment!
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Scanner service - access to scanner artifacts, surface cache, runtime facts
|
||||||
|
RUSTFS_SCANNER_KEY=scanner-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: scanner-artifacts (rw), surface-cache (rw), runtime-facts (rw)
|
||||||
|
|
||||||
|
# Signals service - access to runtime facts, signals data, provenance feed
|
||||||
|
RUSTFS_SIGNALS_KEY=signals-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: runtime-facts (rw), signals-data (rw), provenance-feed (rw)
|
||||||
|
|
||||||
|
# Replay service - access to replay bundles, inputs lock files
|
||||||
|
RUSTFS_REPLAY_KEY=replay-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: replay-bundles (rw), inputs-lock (rw)
|
||||||
|
|
||||||
|
# Ledger service - access to evidence bundles, merkle roots, hash chains
|
||||||
|
RUSTFS_LEDGER_KEY=ledger-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: evidence-bundles (rw), merkle-roots (rw), hash-chains (rw)
|
||||||
|
|
||||||
|
# Exporter service - read-only access to evidence bundles
|
||||||
|
RUSTFS_EXPORTER_KEY=exporter-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: evidence-bundles (r)
|
||||||
|
|
||||||
|
# Attestor service - access to attestations, DSSE envelopes, Rekor receipts
|
||||||
|
RUSTFS_ATTESTOR_KEY=attestor-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: attestations (rw), dsse-envelopes (rw), rekor-receipts (rw)
|
||||||
|
|
||||||
|
# Verifier service - read-only access to attestations
|
||||||
|
RUSTFS_VERIFIER_KEY=verifier-svc-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: attestations (r), dsse-envelopes (r), rekor-receipts (r)
|
||||||
|
|
||||||
|
# Global read-only key (for debugging/auditing)
|
||||||
|
RUSTFS_READONLY_KEY=readonly-global-key-GENERATE-UNIQUE
|
||||||
|
# Bucket access: * (r)
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# LIFECYCLE MANAGEMENT
|
||||||
|
# =============================================================================
|
||||||
|
# Cron schedule for retention policy enforcement (default: 3 AM daily)
|
||||||
|
LIFECYCLE_CRON=0 3 * * *
|
||||||
|
LIFECYCLE_TELEMETRY=true
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# RETENTION POLICIES (days, 0 = indefinite)
|
||||||
|
# Aligned with enterprise vulnerability scanner best practices
|
||||||
|
# =============================================================================
|
||||||
|
# Vulnerability DB: 7 days (matches Trivy default, Grype uses 5)
|
||||||
|
CAS_RETENTION_VULNERABILITY_DB_DAYS=7
|
||||||
|
|
||||||
|
# SBOM artifacts: 365 days (audit compliance - SOC2, ISO27001, FedRAMP)
|
||||||
|
CAS_RETENTION_SBOM_ARTIFACTS_DAYS=365
|
||||||
|
|
||||||
|
# Scan results: 90 days (common compliance window)
|
||||||
|
CAS_RETENTION_SCAN_RESULTS_DAYS=90
|
||||||
|
|
||||||
|
# Evidence bundles: indefinite (content-addressed, immutable, audit trail)
|
||||||
|
CAS_RETENTION_EVIDENCE_BUNDLES_DAYS=0
|
||||||
|
|
||||||
|
# Attestations: indefinite (signed, immutable, verifiable)
|
||||||
|
CAS_RETENTION_ATTESTATIONS_DAYS=0
|
||||||
|
|
||||||
|
# Temporary artifacts: 1 day (work-in-progress, intermediate files)
|
||||||
|
CAS_RETENTION_TEMP_ARTIFACTS_DAYS=1
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# TELEMETRY (optional)
|
||||||
|
# =============================================================================
|
||||||
|
OTLP_ENDPOINT=
|
||||||
12
deploy/compose/env/mock.env.example
vendored
Normal file
12
deploy/compose/env/mock.env.example
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Dev-only overlay env for docker-compose.mock.yaml
|
||||||
|
# Use together with dev.env.example:
|
||||||
|
# docker compose --env-file env/dev.env.example --env-file env/mock.env.example -f docker-compose.dev.yaml -f docker-compose.mock.yaml config
|
||||||
|
|
||||||
|
# Optional: override ports if you expose mock services
|
||||||
|
ORCHESTRATOR_PORT=8450
|
||||||
|
POLICY_REGISTRY_PORT=8451
|
||||||
|
VEX_LENS_PORT=8452
|
||||||
|
FINDINGS_LEDGER_PORT=8453
|
||||||
|
VULN_EXPLORER_API_PORT=8454
|
||||||
|
PACKS_REGISTRY_PORT=8455
|
||||||
|
TASK_RUNNER_PORT=8456
|
||||||
28
deploy/compose/scripts/backup.sh
Normal file
28
deploy/compose/scripts/backup.sh
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "StellaOps Compose Backup"
|
||||||
|
echo "This will create a tar.gz of Mongo, MinIO (object-store), and Redis data volumes."
|
||||||
|
read -rp "Proceed? [y/N] " ans
|
||||||
|
[[ ${ans:-N} =~ ^[Yy]$ ]] || { echo "Aborted."; exit 1; }
|
||||||
|
|
||||||
|
TS=$(date -u +%Y%m%dT%H%M%SZ)
|
||||||
|
OUT_DIR=${BACKUP_DIR:-backups}
|
||||||
|
mkdir -p "$OUT_DIR"
|
||||||
|
|
||||||
|
docker compose ps >/dev/null
|
||||||
|
|
||||||
|
echo "Pausing worker containers for consistency..."
|
||||||
|
docker compose pause scanner-worker scheduler-worker taskrunner-worker || true
|
||||||
|
|
||||||
|
echo "Backing up volumes..."
|
||||||
|
docker run --rm \
|
||||||
|
-v stellaops-mongo:/data/db:ro \
|
||||||
|
-v stellaops-minio:/data/minio:ro \
|
||||||
|
-v stellaops-redis:/data/redis:ro \
|
||||||
|
-v "$PWD/$OUT_DIR":/out \
|
||||||
|
alpine sh -c "cd / && tar czf /out/stellaops-backup-$TS.tar.gz data"
|
||||||
|
|
||||||
|
docker compose unpause scanner-worker scheduler-worker taskrunner-worker || true
|
||||||
|
|
||||||
|
echo "Backup written to $OUT_DIR/stellaops-backup-$TS.tar.gz"
|
||||||
25
deploy/compose/scripts/quickstart.sh
Normal file
25
deploy/compose/scripts/quickstart.sh
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
COMPOSE_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||||
|
|
||||||
|
ENV_FILE="${1:-$COMPOSE_DIR/env/dev.env.example}"
|
||||||
|
USE_MOCK="${USE_MOCK:-0}"
|
||||||
|
|
||||||
|
FILES=(-f "$COMPOSE_DIR/docker-compose.dev.yaml")
|
||||||
|
ENV_FILES=(--env-file "$ENV_FILE")
|
||||||
|
|
||||||
|
if [[ "$USE_MOCK" == "1" ]]; then
|
||||||
|
FILES+=(-f "$COMPOSE_DIR/docker-compose.mock.yaml")
|
||||||
|
ENV_FILES+=(--env-file "$COMPOSE_DIR/env/mock.env.example")
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Validating compose config..."
|
||||||
|
docker compose "${ENV_FILES[@]}" "${FILES[@]}" config > /tmp/compose-validated.yaml
|
||||||
|
echo "Config written to /tmp/compose-validated.yaml"
|
||||||
|
|
||||||
|
echo "Starting stack..."
|
||||||
|
docker compose "${ENV_FILES[@]}" "${FILES[@]}" up -d
|
||||||
|
|
||||||
|
echo "Stack started. To stop: docker compose ${ENV_FILES[*]} ${FILES[*]} down"
|
||||||
15
deploy/compose/scripts/reset.sh
Normal file
15
deploy/compose/scripts/reset.sh
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
echo "WARNING: This will stop the stack and wipe Mongo, MinIO, and Redis volumes."
|
||||||
|
read -rp "Type 'RESET' to continue: " ans
|
||||||
|
[[ ${ans:-} == "RESET" ]] || { echo "Aborted."; exit 1; }
|
||||||
|
|
||||||
|
docker compose down
|
||||||
|
|
||||||
|
for vol in stellaops-mongo stellaops-minio stellaops-redis; do
|
||||||
|
echo "Removing volume $vol"
|
||||||
|
docker volume rm "$vol" || true
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Reset complete. Re-run compose with your env file to recreate volumes."
|
||||||
18
deploy/downloads/manifest.json
Normal file
18
deploy/downloads/manifest.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": "2025.09.2-mock",
|
||||||
|
"generatedAt": "2025-12-06T00:00:00Z",
|
||||||
|
"items": [
|
||||||
|
{
|
||||||
|
"name": "console-web",
|
||||||
|
"type": "container",
|
||||||
|
"image": "registry.stella-ops.org/stellaops/web-ui@sha256:3878c335df50ca958907849b09d43ce397900d32fc7a417c0bf76742e1217ba1",
|
||||||
|
"channel": "dev-mock"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "console-bundle",
|
||||||
|
"type": "archive",
|
||||||
|
"url": "https://downloads.stella-ops.mock/console/2025.09.2-mock/console.tar.gz",
|
||||||
|
"sha256": "12dd89e012b1262ac61188ac5b7721ddab80c4e2b6341251d03925eb49a48521"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
64
deploy/helm/stellaops/INSTALL.md
Normal file
64
deploy/helm/stellaops/INSTALL.md
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
# StellaOps Helm Install Guide
|
||||||
|
|
||||||
|
This guide ships with the `stellaops` chart and provides deterministic install steps for **prod** and **airgap** profiles. All images are pinned by digest from `deploy/releases/<channel>.yaml`.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
- Helm ≥ 3.14 and kubectl configured for the target cluster.
|
||||||
|
- Pull secrets for `registry.stella-ops.org` (or your mirrored registry in air-gapped mode).
|
||||||
|
- TLS/ingress secrets created if you enable ingress in the values files.
|
||||||
|
|
||||||
|
## Channels and values
|
||||||
|
- Prod/stable: `deploy/releases/2025.09-stable.yaml` + `values-prod.yaml`
|
||||||
|
- Airgap: `deploy/releases/2025.09-airgap.yaml` + `values-airgap.yaml`
|
||||||
|
- Mirror (optional): `values-mirror.yaml` overlays registry endpoints when using a private mirror.
|
||||||
|
|
||||||
|
## Quick install (prod)
|
||||||
|
```bash
|
||||||
|
export RELEASE_CHANNEL=2025.09-stable
|
||||||
|
export NAMESPACE=stellaops
|
||||||
|
|
||||||
|
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||||
|
--namespace "$NAMESPACE" --create-namespace \
|
||||||
|
-f deploy/helm/stellaops/values-prod.yaml \
|
||||||
|
--set global.release.channel=stable \
|
||||||
|
--set global.release.version="2025.09.2" \
|
||||||
|
--set global.release.manifestSha256="dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Quick install (airgap)
|
||||||
|
Assumes images are already loaded into your private registry and `values-airgap.yaml` points to that registry.
|
||||||
|
```bash
|
||||||
|
export NAMESPACE=stellaops
|
||||||
|
|
||||||
|
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||||
|
--namespace "$NAMESPACE" --create-namespace \
|
||||||
|
-f deploy/helm/stellaops/values-airgap.yaml \
|
||||||
|
--set global.release.channel=airgap \
|
||||||
|
--set global.release.version="2025.09.0-airgap" \
|
||||||
|
--set global.release.manifestSha256="d422ae3ea01d5f27ea8b5fdc5b19667cb4e3e2c153a35cb761cb53a6ce4f6ba4"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Mirror overlay
|
||||||
|
If using a mirrored registry, layer the mirror values:
|
||||||
|
```bash
|
||||||
|
helm upgrade --install stellaops ./deploy/helm/stellaops \
|
||||||
|
--namespace "$NAMESPACE" --create-namespace \
|
||||||
|
-f deploy/helm/stellaops/values-prod.yaml \
|
||||||
|
-f deploy/helm/stellaops/values-mirror.yaml \
|
||||||
|
--set global.release.version="2025.09.2" \
|
||||||
|
--set global.release.manifestSha256="dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Validate chart and digests
|
||||||
|
```bash
|
||||||
|
deploy/tools/check-channel-alignment.py --manifest deploy/releases/$RELEASE_CHANNEL.yaml \
|
||||||
|
--values deploy/helm/stellaops/values-prod.yaml
|
||||||
|
|
||||||
|
helm lint ./deploy/helm/stellaops
|
||||||
|
helm template stellaops ./deploy/helm/stellaops -f deploy/helm/stellaops/values-prod.yaml >/tmp/stellaops.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
- Surface.Env and Surface.Secrets defaults are defined in `values*.yaml`; adjust endpoints, cache roots, and providers before promotion.
|
||||||
|
- Keep `global.release.*` in sync with the chosen release manifest; never deploy with empty version/channel/manifestSha256.
|
||||||
|
- For offline clusters, run image preload and secret creation before `helm upgrade` to avoid pull failures.
|
||||||
16
deploy/helm/stellaops/README-mock.md
Normal file
16
deploy/helm/stellaops/README-mock.md
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Mock Overlay (Dev Only)
|
||||||
|
|
||||||
|
Purpose: let deployment tasks progress with placeholder digests until real releases land.
|
||||||
|
|
||||||
|
Use:
|
||||||
|
```bash
|
||||||
|
helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
Contents:
|
||||||
|
- Mock deployments for orchestrator, policy-registry, packs-registry, task-runner, VEX Lens, issuer-directory, findings-ledger, vuln-explorer-api.
|
||||||
|
- Image pins pulled from `deploy/releases/2025.09-mock-dev.yaml`.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- Annotated with `stellaops.dev/mock: "true"` to discourage production use.
|
||||||
|
- Swap to real values once official digests publish; keep mock overlay gated behind `mock.enabled`.
|
||||||
@@ -9,7 +9,7 @@ metadata:
|
|||||||
data:
|
data:
|
||||||
{{- range $fileName, $content := $cfg.data }}
|
{{- range $fileName, $content := $cfg.data }}
|
||||||
{{ $fileName }}: |
|
{{ $fileName }}: |
|
||||||
{{ $content | nindent 4 }}
|
{{ tpl $content $root | nindent 4 }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
---
|
---
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
|||||||
@@ -23,14 +23,26 @@ spec:
|
|||||||
metadata:
|
metadata:
|
||||||
labels:
|
labels:
|
||||||
{{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 8 }}
|
{{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 8 }}
|
||||||
|
{{- if $svc.podAnnotations }}
|
||||||
|
annotations:
|
||||||
|
{{ toYaml $svc.podAnnotations | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
annotations:
|
annotations:
|
||||||
stellaops.release/version: {{ $root.Values.global.release.version | quote }}
|
stellaops.release/version: {{ $root.Values.global.release.version | quote }}
|
||||||
stellaops.release/channel: {{ $root.Values.global.release.channel | quote }}
|
stellaops.release/channel: {{ $root.Values.global.release.channel | quote }}
|
||||||
spec:
|
spec:
|
||||||
|
{{- if $svc.podSecurityContext }}
|
||||||
|
securityContext:
|
||||||
|
{{ toYaml $svc.podSecurityContext | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
containers:
|
containers:
|
||||||
- name: {{ $name }}
|
- name: {{ $name }}
|
||||||
image: {{ $svc.image | quote }}
|
image: {{ $svc.image | quote }}
|
||||||
imagePullPolicy: {{ default $root.Values.global.image.pullPolicy $svc.imagePullPolicy }}
|
imagePullPolicy: {{ default $root.Values.global.image.pullPolicy $svc.imagePullPolicy }}
|
||||||
|
{{- if $svc.securityContext }}
|
||||||
|
securityContext:
|
||||||
|
{{ toYaml $svc.securityContext | nindent 12 }}
|
||||||
|
{{- end }}
|
||||||
{{- if $svc.command }}
|
{{- if $svc.command }}
|
||||||
command:
|
command:
|
||||||
{{- range $cmd := $svc.command }}
|
{{- range $cmd := $svc.command }}
|
||||||
@@ -52,6 +64,9 @@ spec:
|
|||||||
{{- end }}
|
{{- end }}
|
||||||
{{- $needsPolicyActivation := and $hasPolicyActivationConfig (hasKey $policyActivationTargets $name) }}
|
{{- $needsPolicyActivation := and $hasPolicyActivationConfig (hasKey $policyActivationTargets $name) }}
|
||||||
{{- $envFrom := default (list) $svc.envFrom }}
|
{{- $envFrom := default (list) $svc.envFrom }}
|
||||||
|
{{- if and (hasKey $root.Values.configMaps "surface-env") (or (hasPrefix "scanner-" $name) (hasPrefix "zastava-" $name)) }}
|
||||||
|
{{- $envFrom = append $envFrom (dict "configMapRef" (dict "name" (include "stellaops.fullname" (dict "root" $root "name" "surface-env")))) }}
|
||||||
|
{{- end }}
|
||||||
{{- if and $needsPolicyActivation (ne $policyActivationConfigName "") }}
|
{{- if and $needsPolicyActivation (ne $policyActivationConfigName "") }}
|
||||||
{{- $hasActivationReference := false }}
|
{{- $hasActivationReference := false }}
|
||||||
{{- range $envFromEntry := $envFrom }}
|
{{- range $envFromEntry := $envFrom }}
|
||||||
@@ -85,6 +100,14 @@ spec:
|
|||||||
resources:
|
resources:
|
||||||
{{ toYaml $svc.resources | nindent 12 }}
|
{{ toYaml $svc.resources | nindent 12 }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
{{- if $svc.securityContext }}
|
||||||
|
securityContext:
|
||||||
|
{{ toYaml $svc.securityContext | nindent 12 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if $svc.securityContext }}
|
||||||
|
securityContext:
|
||||||
|
{{ toYaml $svc.securityContext | nindent 12 }}
|
||||||
|
{{- end }}
|
||||||
{{- if $svc.livenessProbe }}
|
{{- if $svc.livenessProbe }}
|
||||||
livenessProbe:
|
livenessProbe:
|
||||||
{{ toYaml $svc.livenessProbe | nindent 12 }}
|
{{ toYaml $svc.livenessProbe | nindent 12 }}
|
||||||
@@ -93,6 +116,15 @@ spec:
|
|||||||
readinessProbe:
|
readinessProbe:
|
||||||
{{ toYaml $svc.readinessProbe | nindent 12 }}
|
{{ toYaml $svc.readinessProbe | nindent 12 }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
{{- if $svc.prometheus }}
|
||||||
|
{{- $pr := $svc.prometheus }}
|
||||||
|
{{- if $pr.enabled }}
|
||||||
|
{{- if not $svc.podAnnotations }}
|
||||||
|
{{- $svc = merge $svc (dict "podAnnotations" (dict)) }}
|
||||||
|
{{- end }}
|
||||||
|
{{- $svc.podAnnotations = merge $svc.podAnnotations (dict "prometheus.io/scrape" "true" "prometheus.io/path" (default "/metrics" $pr.path) "prometheus.io/port" (toString (default 8080 $pr.port)) "prometheus.io/scheme" (default "http" $pr.scheme))) }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
{{- if or $svc.volumeMounts $configMounts }}
|
{{- if or $svc.volumeMounts $configMounts }}
|
||||||
volumeMounts:
|
volumeMounts:
|
||||||
{{- if $svc.volumeMounts }}
|
{{- if $svc.volumeMounts }}
|
||||||
@@ -152,6 +184,25 @@ spec:
|
|||||||
tolerations:
|
tolerations:
|
||||||
{{ toYaml $svc.tolerations | nindent 8 }}
|
{{ toYaml $svc.tolerations | nindent 8 }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
{{- if $svc.pdb }}
|
||||||
|
---
|
||||||
|
apiVersion: policy/v1
|
||||||
|
kind: PodDisruptionBudget
|
||||||
|
metadata:
|
||||||
|
name: {{ include "stellaops.fullname" (dict "root" $root "name" $name) }}
|
||||||
|
labels:
|
||||||
|
{{- include "stellaops.labels" (dict "root" $root "name" $name "svc" $svc) | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
{{- if $svc.pdb.minAvailable }}
|
||||||
|
minAvailable: {{ $svc.pdb.minAvailable }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if $svc.pdb.maxUnavailable }}
|
||||||
|
maxUnavailable: {{ $svc.pdb.maxUnavailable }}
|
||||||
|
{{- end }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "stellaops.selectorLabels" (dict "root" $root "name" $name "svc" $svc) | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
---
|
---
|
||||||
{{- if $svc.service }}
|
{{- if $svc.service }}
|
||||||
apiVersion: v1
|
apiVersion: v1
|
||||||
|
|||||||
28
deploy/helm/stellaops/templates/externalsecrets.yaml
Normal file
28
deploy/helm/stellaops/templates/externalsecrets.yaml
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{{- if and .Values.externalSecrets.enabled .Values.externalSecrets.secrets }}
|
||||||
|
{{- range $secret := .Values.externalSecrets.secrets }}
|
||||||
|
apiVersion: external-secrets.io/v1beta1
|
||||||
|
kind: ExternalSecret
|
||||||
|
metadata:
|
||||||
|
name: {{ include "stellaops.fullname" $ }}-{{ $secret.name }}
|
||||||
|
labels:
|
||||||
|
{{- include "stellaops.labels" $ | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
refreshInterval: {{ default "1h" $secret.refreshInterval }}
|
||||||
|
secretStoreRef:
|
||||||
|
name: {{ $secret.storeRef.name }}
|
||||||
|
kind: {{ default "ClusterSecretStore" $secret.storeRef.kind }}
|
||||||
|
target:
|
||||||
|
name: {{ $secret.target.name | default (printf "%s-%s" (include "stellaops.fullname" $) $secret.name) }}
|
||||||
|
creationPolicy: {{ default "Owner" $secret.target.creationPolicy }}
|
||||||
|
data:
|
||||||
|
{{- range $secret.data }}
|
||||||
|
- secretKey: {{ .key }}
|
||||||
|
remoteRef:
|
||||||
|
key: {{ .remoteKey }}
|
||||||
|
{{- if .property }}
|
||||||
|
property: {{ .property }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
---
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
39
deploy/helm/stellaops/templates/hpa.yaml
Normal file
39
deploy/helm/stellaops/templates/hpa.yaml
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
{{- if and .Values.hpa.enabled .Values.services }}
|
||||||
|
{{- range $name, $svc := .Values.services }}
|
||||||
|
{{- if and $svc.hpa $svc.hpa.enabled }}
|
||||||
|
apiVersion: autoscaling/v2
|
||||||
|
kind: HorizontalPodAutoscaler
|
||||||
|
metadata:
|
||||||
|
name: {{ include "stellaops.fullname" (dict "root" $ "name" $name) }}
|
||||||
|
labels:
|
||||||
|
{{- include "stellaops.labels" (dict "root" $ "name" $name "svc" $svc) | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
scaleTargetRef:
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
name: {{ include "stellaops.fullname" (dict "root" $ "name" $name) }}
|
||||||
|
minReplicas: {{ default $.Values.hpa.minReplicas $svc.hpa.minReplicas }}
|
||||||
|
maxReplicas: {{ default $.Values.hpa.maxReplicas $svc.hpa.maxReplicas }}
|
||||||
|
metrics:
|
||||||
|
{{- $cpu := coalesce $svc.hpa.cpu.targetPercentage $.Values.hpa.cpu.targetPercentage -}}
|
||||||
|
{{- if $cpu }}
|
||||||
|
- type: Resource
|
||||||
|
resource:
|
||||||
|
name: cpu
|
||||||
|
target:
|
||||||
|
type: Utilization
|
||||||
|
averageUtilization: {{ $cpu }}
|
||||||
|
{{- end }}
|
||||||
|
{{- $mem := coalesce $svc.hpa.memory.targetPercentage $.Values.hpa.memory.targetPercentage -}}
|
||||||
|
{{- if $mem }}
|
||||||
|
- type: Resource
|
||||||
|
resource:
|
||||||
|
name: memory
|
||||||
|
target:
|
||||||
|
type: Utilization
|
||||||
|
averageUtilization: {{ $mem }}
|
||||||
|
{{- end }}
|
||||||
|
---
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
32
deploy/helm/stellaops/templates/ingress.yaml
Normal file
32
deploy/helm/stellaops/templates/ingress.yaml
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{{- if and .Values.ingress.enabled .Values.ingress.hosts }}
|
||||||
|
apiVersion: networking.k8s.io/v1
|
||||||
|
kind: Ingress
|
||||||
|
metadata:
|
||||||
|
name: {{ include "stellaops.fullname" . }}
|
||||||
|
labels:
|
||||||
|
{{- include "stellaops.labels" . | nindent 4 }}
|
||||||
|
annotations:
|
||||||
|
{{- range $k, $v := .Values.ingress.annotations }}
|
||||||
|
{{ $k }}: {{ $v | quote }}
|
||||||
|
{{- end }}
|
||||||
|
spec:
|
||||||
|
ingressClassName: {{ .Values.ingress.className | default "nginx" | quote }}
|
||||||
|
tls:
|
||||||
|
{{- range .Values.ingress.tls }}
|
||||||
|
- hosts: {{ toYaml .hosts | nindent 6 }}
|
||||||
|
secretName: {{ .secretName }}
|
||||||
|
{{- end }}
|
||||||
|
rules:
|
||||||
|
{{- range .Values.ingress.hosts }}
|
||||||
|
- host: {{ .host }}
|
||||||
|
http:
|
||||||
|
paths:
|
||||||
|
- path: {{ .path | default "/" }}
|
||||||
|
pathType: Prefix
|
||||||
|
backend:
|
||||||
|
service:
|
||||||
|
name: {{ include "stellaops.fullname" $ }}-gateway
|
||||||
|
port:
|
||||||
|
number: {{ .servicePort | default 80 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
50
deploy/helm/stellaops/templates/migrations.yaml
Normal file
50
deploy/helm/stellaops/templates/migrations.yaml
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
{{- /*
One-shot migration Jobs, one Kubernetes Job document per entry in
.Values.migrations.jobs. Each job entry supports: name, image,
imagePullPolicy, command, args, env (map), envFrom, resources,
backoffLimit, ttlSecondsAfterFinished, restartPolicy, serviceAccountName.
*/}}
{{- if and .Values.migrations.enabled .Values.migrations.jobs }}
{{- range $job := .Values.migrations.jobs }}
{{- /* A document separator is required here: without it, two or more
       jobs would be concatenated into a single invalid YAML document. */}}
---
apiVersion: batch/v1
kind: Job
metadata:
  {{- /* trunc keeps the generated name within Kubernetes name limits. */}}
  name: {{ include "stellaops.fullname" $ }}-migration-{{ $job.name | trunc 30 | trimSuffix "-" }}
  labels:
    {{- include "stellaops.labels" $ | nindent 4 }}
    stellaops.io/component: migration
    stellaops.io/migration-name: {{ $job.name | quote }}
spec:
  backoffLimit: {{ default 3 $job.backoffLimit }}
  ttlSecondsAfterFinished: {{ default 3600 $job.ttlSecondsAfterFinished }}
  template:
    metadata:
      labels:
        {{- include "stellaops.selectorLabels" $ | nindent 8 }}
        stellaops.io/component: migration
        stellaops.io/migration-name: {{ $job.name | quote }}
    spec:
      restartPolicy: {{ default "Never" $job.restartPolicy }}
      serviceAccountName: {{ default "default" $job.serviceAccountName }}
      containers:
        - name: {{ $job.name | trunc 50 | trimSuffix "-" }}
          image: {{ $job.image | quote }}
          imagePullPolicy: {{ default "IfNotPresent" $job.imagePullPolicy }}
          command: {{- if $job.command }} {{ toJson $job.command }} {{- else }} null {{- end }}
          args: {{- if $job.args }} {{ toJson $job.args }} {{- else }} null {{- end }}
          {{- with $job.env }}
          env:
            {{- range $k, $v := . }}
            - name: {{ $k }}
              value: {{ $v | quote }}
            {{- end }}
          {{- end }}
          {{- with $job.envFrom }}
          envFrom:
            {{- toYaml . | nindent 12 }}
          {{- end }}
          {{- /* The previous `{{- else }}{}` form rendered as `resources:{}`
                 (left-trim swallowed the newline), which is not a valid
                 block-mapping entry. Emit the key per branch instead. */}}
          {{- if $job.resources }}
          resources:
            {{- toYaml $job.resources | nindent 12 }}
          {{- else }}
          resources: {}
          {{- end }}
      {{- with $.Values.global.image.pullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
{{- end }}
{{- end }}
|
||||||
45
deploy/helm/stellaops/templates/networkpolicy.yaml
Normal file
45
deploy/helm/stellaops/templates/networkpolicy.yaml
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
{{- /*
Default NetworkPolicy for chart-managed pods (selected via
"stellaops.selectorLabelsRoot"). Restricts both Ingress and Egress.
Values consumed: .Values.networkPolicy.{enabled,ingressNamespaces,
ingressPods,ingressPort,egressNamespaces,egressPods,egressPort}.
NOTE(review): when neither ingressNamespaces nor ingressPods is set, the
`from:` key renders with no peers — confirm the resulting policy matches
the intended allow/deny behavior for this cluster.
*/}}
{{- if .Values.networkPolicy.enabled }}
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: {{ include "stellaops.fullname" . }}-default
  labels:
    {{- include "stellaops.labels" . | nindent 4 }}
spec:
  podSelector:
    matchLabels:
      {{- include "stellaops.selectorLabelsRoot" . | nindent 6 }}
  policyTypes:
    - Ingress
    - Egress
  ingress:
    - from:
        {{- /* Optional namespace-label peer selector. */}}
        {{- if .Values.networkPolicy.ingressNamespaces }}
        - namespaceSelector:
            matchLabels:
              {{- toYaml .Values.networkPolicy.ingressNamespaces | nindent 14 }}
        {{- end }}
        {{- /* Optional pod-label peer selector. */}}
        {{- if .Values.networkPolicy.ingressPods }}
        - podSelector:
            matchLabels:
              {{- toYaml .Values.networkPolicy.ingressPods | nindent 14 }}
        {{- end }}
      ports:
        - protocol: TCP
          port: {{ default 80 .Values.networkPolicy.ingressPort }}
  egress:
    - to:
        {{- if .Values.networkPolicy.egressNamespaces }}
        - namespaceSelector:
            matchLabels:
              {{- toYaml .Values.networkPolicy.egressNamespaces | nindent 14 }}
        {{- end }}
        {{- if .Values.networkPolicy.egressPods }}
        - podSelector:
            matchLabels:
              {{- toYaml .Values.networkPolicy.egressPods | nindent 14 }}
        {{- end }}
      ports:
        - protocol: TCP
          port: {{ default 443 .Values.networkPolicy.egressPort }}
{{- end }}
|
||||||
22
deploy/helm/stellaops/templates/orchestrator-mock.yaml
Normal file
22
deploy/helm/stellaops/templates/orchestrator-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{{- /*
Mock orchestrator Deployment for development/test installs, gated on
.Values.mock.enabled and tagged with the stellaops.dev/mock annotation.
NOTE(review): "dotnet" is passed via `args` rather than `command`, which
relies on the image entrypoint to exec it — confirm against the image's
ENTRYPOINT.
*/}}
{{- if .Values.mock.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: orchestrator-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: orchestrator-mock
  template:
    metadata:
      labels:
        app: orchestrator-mock
    spec:
      containers:
        - name: orchestrator
          image: "{{ .Values.mock.orchestrator.image }}"
          args: ["dotnet", "StellaOps.Orchestrator.WebService.dll"]
{{- end }}
|
||||||
44
deploy/helm/stellaops/templates/packs-mock.yaml
Normal file
44
deploy/helm/stellaops/templates/packs-mock.yaml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
{{- /*
Mock Deployments for the packs registry and task runner, for
development/test installs; gated on .Values.mock.enabled and tagged with
the stellaops.dev/mock annotation.
NOTE(review): "dotnet" is passed via `args` rather than `command` in both
containers — relies on the image entrypoint to exec it; confirm against
the images' ENTRYPOINT.
*/}}
{{- if .Values.mock.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: packs-registry-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: packs-registry-mock
  template:
    metadata:
      labels:
        app: packs-registry-mock
    spec:
      containers:
        - name: packs-registry
          image: "{{ .Values.mock.packsRegistry.image }}"
          args: ["dotnet", "StellaOps.PacksRegistry.dll"]

---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: task-runner-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: task-runner-mock
  template:
    metadata:
      labels:
        app: task-runner-mock
    spec:
      containers:
        - name: task-runner
          image: "{{ .Values.mock.taskRunner.image }}"
          args: ["dotnet", "StellaOps.TaskRunner.WebService.dll"]
{{- end }}
|
||||||
22
deploy/helm/stellaops/templates/policy-mock.yaml
Normal file
22
deploy/helm/stellaops/templates/policy-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{{- /*
Mock policy registry Deployment for development/test installs, gated on
.Values.mock.enabled and tagged with the stellaops.dev/mock annotation.
NOTE(review): the container is named "policy-registry" but runs
StellaOps.Policy.Engine.dll — confirm the intended assembly.
*/}}
{{- if .Values.mock.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: policy-registry-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: policy-registry-mock
  template:
    metadata:
      labels:
        app: policy-registry-mock
    spec:
      containers:
        - name: policy-registry
          image: "{{ .Values.mock.policyRegistry.image }}"
          args: ["dotnet", "StellaOps.Policy.Engine.dll"]
{{- end }}
|
||||||
22
deploy/helm/stellaops/templates/vex-mock.yaml
Normal file
22
deploy/helm/stellaops/templates/vex-mock.yaml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{{- /*
Mock VEX Lens Deployment for development/test installs, gated on
.Values.mock.enabled and tagged with the stellaops.dev/mock annotation.
NOTE(review): "dotnet" is passed via `args` rather than `command` —
relies on the image entrypoint to exec it; confirm against the image's
ENTRYPOINT.
*/}}
{{- if .Values.mock.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: vex-lens-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: vex-lens-mock
  template:
    metadata:
      labels:
        app: vex-lens-mock
    spec:
      containers:
        - name: vex-lens
          image: "{{ .Values.mock.vexLens.image }}"
          args: ["dotnet", "StellaOps.VexLens.dll"]
{{- end }}
|
||||||
44
deploy/helm/stellaops/templates/vuln-mock.yaml
Normal file
44
deploy/helm/stellaops/templates/vuln-mock.yaml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
{{- /*
Mock Deployments for the findings ledger and vuln explorer API, for
development/test installs; gated on .Values.mock.enabled and tagged with
the stellaops.dev/mock annotation.
NOTE(review): "dotnet" is passed via `args` rather than `command` in both
containers — relies on the image entrypoint to exec it; confirm against
the images' ENTRYPOINT.
*/}}
{{- if .Values.mock.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: findings-ledger-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: findings-ledger-mock
  template:
    metadata:
      labels:
        app: findings-ledger-mock
    spec:
      containers:
        - name: findings-ledger
          image: "{{ .Values.mock.findingsLedger.image }}"
          args: ["dotnet", "StellaOps.Findings.Ledger.WebService.dll"]

---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: vuln-explorer-api-mock
  annotations:
    stellaops.dev/mock: "true"
spec:
  replicas: 1
  selector:
    matchLabels:
      app: vuln-explorer-api-mock
  template:
    metadata:
      labels:
        app: vuln-explorer-api-mock
    spec:
      containers:
        - name: vuln-explorer-api
          image: "{{ .Values.mock.vulnExplorerApi.image }}"
          args: ["dotnet", "StellaOps.VulnExplorer.Api.dll"]
{{- end }}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user