Compare commits
82 Commits
feature/do ... 0987cd6ac8
0987cd6ac8
b83aa1aa0b
ce1f282ce0
b8b493913a
49922dff5a
92bc4d3a07
0ad4777259
2bd189387e
3a92c77a04
b7059d523e
96e5646977
a3c7fe5e88
199aaf74d8
f30805ad7f
689c656f20
108d1c64b3
bc0762e97d
3d01bf9edc
68bc53a07b
4b124fb056
7c24ed96ee
11597679ed
e3f28a21ab
a403979177
b8641b1959
98e6b76584
862bb6ed80
bd2529502e
965cbf9574
af30fc322f
e53a282fbe
d907729778
8a72779c16
e0f6efecce
98934170ca
69651212ec
53889d85e7
0de92144d2
9bd6a73926
4042fc2184
dd0067ea0b
f6c22854a4
05597616d6
a6f1406509
0a8f8c14af
7efee7dd41
952ba77924
23e463e346
849a70f9d1
868f8e0bb6
84c42ca2d8
efd6850c38
2b892ad1b2
e16d2b5224
5e514532df
2141196496
bca02ec295
8cabdce3b6
6145d89468
ee317d3f61
4cc8bdb460
95ff83e0f0
3954615e81
8948b1a3e2
5cfcf0723a
ba733b9f69
79d562ea5d
a7cd10020a
b978ae399f
570746b7d9
8318b26370
1f76650b7e
37304cf819
6beb9d7c4e
be8c623e04
dd4bb50076
bf6ab6ba6f
02849cc955
2eaf0f699b
6c1177a6ce
582a88e8f8
f0662dd45f
@@ -1,8 +1,33 @@
{
  "permissions": {
    "allow": [
      "Bash(dotnet --list-sdks:*)",
      "Bash(winget install:*)",
      "Bash(dotnet restore:*)",
      "Bash(dotnet nuget:*)",
      "Bash(csc -parse:*)",
      "Bash(grep:*)",
      "Bash(dotnet build:*)",
      "Bash(cat:*)",
      "Bash(copy:*)",
      "Bash(dotnet test:*)",
      "Bash(dir:*)",
      "Bash(Select-Object -ExpandProperty FullName)",
      "Bash(echo:*)",
      "Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
      "Bash(wc:*)",
      "Bash(sort:*)",
      "Bash(find:*)",
      "WebFetch(domain:docs.gradle.org)",
      "WebSearch",
      "Bash(dotnet msbuild:*)",
      "Bash(test:*)",
      "Bash(taskkill:*)",
      "Bash(timeout /t)",
      "Bash(dotnet clean:*)",
      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")",
      "Bash(rm:*)",
      "Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")"
    ],
    "deny": [],
    "ask": []
  }
}
23 .dockerignore Normal file
@@ -0,0 +1,23 @@
.git
.gitignore
.gitea
.venv
bin
obj
**/bin
**/obj
local-nugets
.nuget
**/node_modules
**/dist
**/coverage
**/*.user
**/*.suo
**/*.cache
**/.vscode
**/.idea
**/.DS_Store
**/TestResults
**/out
**/packages
/tmp
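A quick way to confirm what the build context still carries after these ignore rules is to probe it with a throwaway image (a sketch; the `ctx-probe` tag is illustrative, not part of this change):

    printf 'FROM busybox\nCOPY . /ctx\nRUN find /ctx -maxdepth 2\n' | docker build -f - -t ctx-probe .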
@@ -24,7 +24,7 @@ jobs:
  aoc-guard:
    runs-on: ubuntu-22.04
    env:
-     DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+     DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
@@ -72,7 +72,7 @@ jobs:
    runs-on: ubuntu-22.04
    if: github.event_name != 'schedule'
    env:
-     DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+     DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
    steps:
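Both hunks swap the RC pin for the GA SDK; a quick local check that the pinned version actually resolves on a runner (sketch):

    dotnet --list-sdks | grep '^10\.0\.100'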
128 .gitea/workflows/artifact-signing.yml Normal file
@@ -0,0 +1,128 @@
name: Artifact Signing

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      artifact_path:
        description: 'Path to artifact to sign'
        required: false
        default: ''

env:
  COSIGN_VERSION: 'v2.2.0'

jobs:
  sign-containers:
    name: Sign Container Images
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
      packages: write
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Log in to registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Sign images (keyless)
        if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --yes "${img}:${{ github.ref_name }}"
            fi
          done

      - name: Sign images (with key)
        if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
        run: |
          echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
            fi
          done
          rm -f /tmp/cosign.key

  sign-sbom:
    name: Sign SBOM Artifacts
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Generate and sign SBOM
        run: |
          # Generate SBOM using syft
          if command -v syft &> /dev/null; then
            syft . -o cyclonedx-json > sbom.cdx.json
            cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
          else
            echo "syft not installed, skipping SBOM generation"
          fi

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
          name: signed-sbom
          path: |
            sbom.cdx.json
            sbom.cdx.json.sig
          if-no-files-found: ignore

  verify-signatures:
    name: Verify Existing Signatures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Verify DSSE envelopes
        run: |
          find . -name "*.dsse" -o -name "*.dsse.json" | while read f; do
            echo "Checking $f..."
            # Basic JSON validation
            if ! jq empty "$f" 2>/dev/null; then
              echo "Warning: Invalid JSON in $f"
            fi
          done
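For consumers of the signed tags, a minimal keyless verification sketch (the identity and issuer patterns are placeholders to adapt, not values taken from this workflow):

    cosign verify \
      --certificate-identity-regexp '.*' \
      --certificate-oidc-issuer-regexp '.*' \
      ghcr.io/<org>/<repo>/scanner:<tag>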
@@ -37,7 +37,7 @@ on:
        type: boolean

env:
- DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+ DOTNET_VERSION: '10.0.100'
  BUILD_CONFIGURATION: Release
  CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
  RUNNER_TOOL_CACHE: /toolcache
@@ -28,7 +28,7 @@ jobs:
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: "10.0.100-rc.2.25502.107"
+         dotnet-version: "10.0.100"

      - name: Install syft (SBOM)
        uses: anchore/sbom-action/download-syft@v0
@@ -24,7 +24,7 @@ jobs:
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: "10.0.100-rc.2.25502.107"
+         dotnet-version: "10.0.100"

      - name: Chaos smoke
        if: ${{ github.event.inputs.chaos == 'true' }}
@@ -23,7 +23,7 @@ jobs:
      - name: Setup .NET 10 preview
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: '10.0.100-rc.2.25502.107'
+         dotnet-version: '10.0.100'

      - name: Restore Concelier solution
        run: dotnet restore src/Concelier/StellaOps.Concelier.sln
32 .gitea/workflows/concelier-store-aoc-19-005.yml Normal file
@@ -0,0 +1,32 @@
name: Concelier STORE-AOC-19-005 Dataset

on:
  workflow_dispatch: {}

jobs:
  build-dataset:
    runs-on: ubuntu-22.04
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install dependencies
        run: sudo apt-get update && sudo apt-get install -y zstd

      - name: Build dataset tarball
        run: |
          chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
          scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Validate dataset
        run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Upload dataset artifacts
        uses: actions/upload-artifact@v4
        with:
          name: concelier-store-aoc-19-005-dataset
          path: |
            ${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
            ${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
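One caveat: actions/upload-artifact does not shell-expand `${ARTIFACT_DIR}` in `path`, so the upload step likely needs the expression form instead (sketch):

          path: |
            ${{ env.ARTIFACT_DIR }}/linksets-stage-backfill.tar.zst
            ${{ env.ARTIFACT_DIR }}/linksets-stage-backfill.tar.zst.sha256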
@@ -1,86 +1,58 @@
-name: Console CI
+name: console-ci

on:
  push:
    branches: [ main ]
    paths:
      - 'src/UI/**'
      - '.gitea/workflows/console-ci.yml'
      - 'docs/modules/devops/console-ci-contract.md'
  workflow_dispatch:
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/UI/**'
      - 'src/Web/**'
      - '.gitea/workflows/console-ci.yml'
      - 'docs/modules/devops/console-ci-contract.md'
      - 'ops/devops/console/**'

jobs:
-  console-ci:
-    runs-on: ubuntu-22.04
+  lint-test-build:
+    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
        working-directory: src/Web
    env:
-     PNPM_HOME: ~/.pnpm
-     PLAYWRIGHT_BROWSERS_PATH: ./.playwright
-     SOURCE_DATE_EPOCH: ${{ github.run_id }}
+     PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
+     CI: true
    steps:
      - name: Checkout
        uses: actions/checkout@v4
-       - name: Task Pack offline bundle fixtures
-         run: python3 scripts/packs/run-fixtures-check.sh
+       with:
+         fetch-depth: 0

-     - name: Set up Node.js 20
+     - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: '20'
+         cache: npm
+         cache-dependency-path: src/Web/package-lock.json

-     - name: Enable pnpm
+     - name: Install deps (offline-friendly)
+       run: npm ci --prefer-offline --no-audit --progress=false
+
+     - name: Lint
+       run: npm run lint -- --no-progress
+
+     - name: Console export specs (targeted)
+       run: bash ./scripts/ci-console-exports.sh
+
+     - name: Build
+       run: npm run build -- --configuration=production --progress=false
+
+     - name: Collect artifacts
+       if: always()
+       run: |
-       run: |
-         corepack enable
-         corepack prepare pnpm@9 --activate
-
-     - name: Cache pnpm store & node_modules
-       uses: actions/cache@v4
-       with:
-         path: |
-           ~/.pnpm-store
-           node_modules
-           ./.pnpm-store
-           ./.playwright
-         key: console-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml') }}
-
-     - name: Install dependencies (offline-first)
-       env:
-         PNPM_FETCH_RETRIES: 0
-         PNPM_OFFLINE: 1
-       run: |
-         pnpm install --frozen-lockfile || PNPM_OFFLINE=0 pnpm install --frozen-lockfile --prefer-offline
-
-     - name: Lint / Types
-       run: pnpm lint && pnpm format:check && pnpm typecheck
-
-     - name: Unit tests
-       run: pnpm test -- --runInBand --reporter=junit --outputFile=.artifacts/junit.xml
-
-     - name: Storybook a11y
-       run: |
-         pnpm storybook:build
-         pnpm storybook:a11y --ci --output .artifacts/storybook-a11y.json
-
-     - name: Playwright smoke
-       run: pnpm playwright test --config=playwright.config.ci.ts --reporter=list,junit=.artifacts/playwright.xml
-
-     - name: Lighthouse (CI budgets)
-       run: |
-         pnpm serve --port 4173 &
-         pnpm lhci autorun --config=lighthouserc.ci.js --upload.target=filesystem --upload.outputDir=.artifacts/lhci
-
-     - name: SBOM
-       run: pnpm exec syft packages dir:dist --output=spdx-json=.artifacts/console.spdx.json
-
+         mkdir -p ../artifacts
+         cp -r dist ../artifacts/dist || true
+         cp -r coverage ../artifacts/coverage || true
+         find . -maxdepth 3 -type f -name "*.xml" -o -name "*.trx" -o -name "*.json" -path "*test*" -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true

      - name: Upload artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
-         name: console-ci-artifacts
-         path: .artifacts
+         name: console-ci-${{ github.run_id }}
+         path: artifacts
+         retention-days: 14
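One caveat in the new collect step: without grouping, `find`'s `-type f` binds only to the first `-o` branch and `-print0` only to the last; an explicitly grouped form (sketch):

    find . -maxdepth 3 -type f \( -name "*.xml" -o -name "*.trx" -o \( -name "*.json" -path "*test*" \) \) -print0 \
      | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true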
32 .gitea/workflows/console-runner-image.yml Normal file
@@ -0,0 +1,32 @@
name: console-runner-image

on:
  workflow_dispatch:
  push:
    paths:
      - 'ops/devops/console/**'
      - '.gitea/workflows/console-runner-image.yml'

jobs:
  build-runner-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Build runner image tarball (baked caches)
        env:
          RUN_ID: ${{ github.run_id }}
        run: |
          set -euo pipefail
          chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
          ops/devops/console/build-runner-image-ci.sh

      - name: Upload runner image artifact
        uses: actions/upload-artifact@v4
        with:
          name: console-runner-image-${{ github.run_id }}
          path: ops/devops/artifacts/console-runner/
          retention-days: 14
41 .gitea/workflows/crypto-sim-smoke.yml Normal file
@@ -0,0 +1,41 @@
name: crypto-sim-smoke

on:
  workflow_dispatch:
  push:
    paths:
      - "ops/crypto/sim-crypto-service/**"
      - "ops/crypto/sim-crypto-smoke/**"
      - "scripts/crypto/run-sim-smoke.ps1"
      - "docs/security/crypto-simulation-services.md"
      - ".gitea/workflows/crypto-sim-smoke.yml"

jobs:
  sim-smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.x"

      - name: Build sim service and smoke harness
        run: |
          dotnet build ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release
          dotnet build ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release

      - name: Run smoke (sim profile: sm)
        env:
          ASPNETCORE_URLS: http://localhost:5000
          STELLAOPS_CRYPTO_SIM_URL: http://localhost:5000
          SIM_PROFILE: sm
        run: |
          set -euo pipefail
          dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release &
          service_pid=$!
          sleep 6
          dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
          kill $service_pid
55 .gitea/workflows/cryptopro-linux-csp.yml Normal file
@@ -0,0 +1,55 @@
name: cryptopro-linux-csp
on:
  push:
    branches: [main, develop]
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'
  pull_request:
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'

env:
  IMAGE_NAME: cryptopro-linux-csp
  DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build image (accept EULA explicitly)
        run: |
          docker build -t $IMAGE_NAME \
            --build-arg CRYPTOPRO_ACCEPT_EULA=1 \
            -f $DOCKERFILE .

      - name: Run container
        run: |
          docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
          for i in {1..20}; do
            if curl -sf http://127.0.0.1:18080/health >/dev/null; then
              exit 0
            fi
            sleep 3
          done
          echo "Service failed to start" && exit 1

      - name: Test endpoints
        run: |
          curl -sf http://127.0.0.1:18080/health
          curl -sf http://127.0.0.1:18080/license || true
          curl -sf -X POST http://127.0.0.1:18080/hash \
            -H "Content-Type: application/json" \
            -d '{"data_b64":"SGVsbG8="}'

      - name: Stop container
        if: always()
        run: docker rm -f $IMAGE_NAME || true
@@ -25,7 +25,7 @@ jobs:
      - name: Setup .NET 10 (preview)
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: 10.0.100-rc.2.25502.107
+         dotnet-version: 10.0.100

      - name: Build CryptoPro plugin
        run: |
@@ -47,7 +47,7 @@ jobs:
      - name: Setup .NET SDK
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: '10.0.100-rc.2.25502.107'
+         dotnet-version: '10.0.100'

      - name: Link check
        run: |
@@ -20,7 +20,7 @@ jobs:
  export-ci:
    runs-on: ubuntu-22.04
    env:
-     DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+     DOTNET_VERSION: '10.0.100'
      MINIO_ACCESS_KEY: exportci
      MINIO_SECRET_KEY: exportci123
      BUCKET: export-ci
325 .gitea/workflows/findings-ledger-ci.yml Normal file
@@ -0,0 +1,325 @@
# .gitea/workflows/findings-ledger-ci.yml
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)

name: Findings Ledger CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'
      - 'deploy/releases/2025.09-stable.yaml'
      - 'deploy/releases/2025.09-airgap.yaml'
      - 'deploy/downloads/manifest.json'
      - 'ops/devops/release/check_release_manifest.py'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'

env:
  DOTNET_VERSION: '10.0.100'
  POSTGRES_IMAGE: postgres:16-alpine
  BUILD_CONFIGURATION: Release

jobs:
  build-test:
    runs-on: ubuntu-22.04
    env:
      TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: |
          dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
          dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj

      - name: Build
        run: |
          dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            /p:ContinuousIntegrationBuild=true

      - name: Run unit tests
        run: |
          mkdir -p $TEST_RESULTS_DIR
          dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            --logger "trx;LogFileName=ledger-tests.trx" \
            --results-directory $TEST_RESULTS_DIR

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: ledger-test-results
          path: ${{ env.TEST_RESULTS_DIR }}

  migration-validation:
    runs-on: ubuntu-22.04
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: ledgertest
          POSTGRES_PASSWORD: ledgertest
          POSTGRES_DB: ledger_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    env:
      PGHOST: localhost
      PGPORT: 5432
      PGUSER: ledgertest
      PGPASSWORD: ledgertest
      PGDATABASE: ledger_test
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install PostgreSQL client
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client

      - name: Wait for PostgreSQL
        run: |
          until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
            echo "Waiting for PostgreSQL..."
            sleep 2
          done

      - name: Apply prerequisite migrations (001-006)
        run: |
          set -euo pipefail
          MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
          for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
                           003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
                           005_risk_fields.sql 006_orchestrator_airgap.sql; do
            if [ -f "$MIGRATION_DIR/$migration" ]; then
              echo "Applying migration: $migration"
              psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
            fi
          done

      - name: Apply RLS migration (007_enable_rls.sql)
        run: |
          set -euo pipefail
          echo "Applying RLS migration..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql

      - name: Validate RLS configuration
        run: |
          set -euo pipefail
          echo "Validating RLS is enabled on all protected tables..."

          # Check RLS enabled
          TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = 'public'
              AND c.relrowsecurity = true
              AND c.relname IN (
                'ledger_events', 'ledger_merkle_roots', 'findings_projection',
                'finding_history', 'triage_actions', 'ledger_attestations',
                'orchestrator_exports', 'airgap_imports'
              );
          ")

          if [ "$TABLES_WITH_RLS" -ne 8 ]; then
            echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
            exit 1
          fi
          echo "✓ All 8 tables have RLS enabled"

          # Check policies exist
          POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(DISTINCT tablename)
            FROM pg_policies
            WHERE schemaname = 'public'
              AND policyname LIKE '%_tenant_isolation';
          ")

          if [ "$POLICIES" -ne 8 ]; then
            echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
            exit 1
          fi
          echo "✓ All 8 tenant isolation policies created"

          # Check tenant function exists
          FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_proc p
            JOIN pg_namespace n ON p.pronamespace = n.oid
            WHERE p.proname = 'require_current_tenant'
              AND n.nspname = 'findings_ledger_app';
          ")

          if [ "$FUNC_EXISTS" -ne 1 ]; then
            echo "::error::Tenant function 'require_current_tenant' not found"
            exit 1
          fi
          echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"

          echo ""
          echo "=== RLS Migration Validation PASSED ==="

      - name: Test rollback migration
        run: |
          set -euo pipefail
          echo "Testing rollback migration..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql

          # Verify RLS is disabled
          TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = 'public'
              AND c.relrowsecurity = true
              AND c.relname IN (
                'ledger_events', 'ledger_merkle_roots', 'findings_projection',
                'finding_history', 'triage_actions', 'ledger_attestations',
                'orchestrator_exports', 'airgap_imports'
              );
          ")

          if [ "$TABLES_WITH_RLS" -ne 0 ]; then
            echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
            exit 1
          fi
          echo "✓ Rollback successful - RLS disabled on all tables"

      - name: Validate release manifests (production)
        run: |
          set -euo pipefail
          python ops/devops/release/check_release_manifest.py

      - name: Re-apply RLS migration (idempotency check)
        run: |
          set -euo pipefail
          echo "Re-applying RLS migration to verify idempotency..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
          echo "✓ Migration is idempotent"

  generate-manifest:
    runs-on: ubuntu-22.04
    needs: [build-test, migration-validation]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Generate migration manifest
        run: |
          set -euo pipefail
          MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
          ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
          MANIFEST_DIR="out/findings-ledger/migrations"
          mkdir -p "$MANIFEST_DIR"

          # Compute SHA256 hashes
          MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
          ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
          CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

          cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
          {
            "\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
            "schemaVersion": "1.0.0",
            "migrationId": "007_enable_rls",
            "module": "findings-ledger",
            "version": "2025.12.0",
            "createdAt": "$CREATED_AT",
            "description": "Enable Row-Level Security for Findings Ledger tenant isolation",
            "taskId": "LEDGER-TEN-48-001-DEV",
            "contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
            "database": {
              "engine": "postgresql",
              "minVersion": "16.0"
            },
            "files": {
              "apply": {
                "path": "007_enable_rls.sql",
                "sha256": "$MIGRATION_SHA"
              },
              "rollback": {
                "path": "007_enable_rls_rollback.sql",
                "sha256": "$ROLLBACK_SHA"
              }
            },
            "affects": {
              "tables": [
                "ledger_events",
                "ledger_merkle_roots",
                "findings_projection",
                "finding_history",
                "triage_actions",
                "ledger_attestations",
                "orchestrator_exports",
                "airgap_imports"
              ],
              "schemas": ["public", "findings_ledger_app"],
              "roles": ["findings_ledger_admin"]
            },
            "prerequisites": [
              "006_orchestrator_airgap"
            ],
            "validation": {
              "type": "rls-check",
              "expectedTables": 8,
              "expectedPolicies": 8,
              "tenantFunction": "findings_ledger_app.require_current_tenant"
            },
            "offlineKit": {
              "includedInBundle": true,
              "requiresManualApply": true,
              "applyOrder": 7
            }
          }
          EOF

          echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
          cat "$MANIFEST_DIR/007_enable_rls.manifest.json"

      - name: Copy migration files for offline-kit
        run: |
          set -euo pipefail
          OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
          mkdir -p "$OFFLINE_DIR"
          cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
          cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
          cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
          echo "Offline-kit migration files prepared"
          ls -la "$OFFLINE_DIR"

      - name: Upload migration artefacts
        uses: actions/upload-artifact@v4
        with:
          name: findings-ledger-migrations
          path: out/findings-ledger/
          if-no-files-found: error
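For reference, the shape of tenant-isolation policy the validation above counts, as it could be exercised against the service container (a sketch assuming a `tenant_id` column; the real 007_enable_rls.sql is authoritative):

    psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE <<'SQL'
    ALTER TABLE ledger_events ENABLE ROW LEVEL SECURITY;
    CREATE POLICY ledger_events_tenant_isolation ON ledger_events
      USING (tenant_id = findings_ledger_app.require_current_tenant());
    SQL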
68 .gitea/workflows/icscisa-kisa-refresh.yml Normal file
@@ -0,0 +1,68 @@
name: ICS/KISA Feed Refresh

on:
  schedule:
    - cron: '0 2 * * MON'
  workflow_dispatch:
    inputs:
      live_fetch:
        description: 'Attempt live RSS fetch (fallback to samples on failure)'
        required: false
        default: true
        type: boolean
      offline_snapshot:
        description: 'Force offline samples only (no network)'
        required: false
        default: false
        type: boolean

jobs:
  refresh:
    runs-on: ubuntu-22.04
    permissions:
      contents: read
    env:
      ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
      KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
      FEED_GATEWAY_HOST: concelier-webservice
      FEED_GATEWAY_SCHEME: http
      LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
      OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set run metadata
        id: meta
        run: |
          RUN_DATE=$(date -u +%Y%m%d)
          RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
          echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
          echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
          echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run ICS/KISA refresh
        run: |
          python scripts/feeds/run_icscisa_kisa_refresh.py \
            --out-dir out/feeds/icscisa-kisa \
            --run-date "${{ steps.meta.outputs.run_date }}" \
            --run-id "${{ steps.meta.outputs.run_id }}"

      - name: Show fetch log
        run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log

      - name: Upload refresh artifacts
        uses: actions/upload-artifact@v4
        with:
          name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
          path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
          if-no-files-found: error
          retention-days: 21
@@ -21,7 +21,7 @@ jobs:
  lnm-backfill:
    runs-on: ubuntu-22.04
    env:
-     DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+     DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
@@ -25,7 +25,7 @@ jobs:
  vex-backfill:
    runs-on: ubuntu-22.04
    env:
-     DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+     DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
125 .gitea/workflows/manifest-integrity.yml Normal file
@@ -0,0 +1,125 @@
name: Manifest Integrity

on:
  push:
    branches: [main]
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'
  pull_request:
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'

jobs:
  validate-schemas:
    name: Validate Schema Integrity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: npm install -g ajv-cli ajv-formats

      - name: Validate JSON schemas
        run: |
          EXIT_CODE=0
          for schema in docs/schemas/*.schema.json; do
            echo "Validating $schema..."
            if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
              echo "Error: $schema is invalid"
              EXIT_CODE=1
            fi
          done
          exit $EXIT_CODE

  validate-contracts:
    name: Validate Contract Documents
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check contract structure
        run: |
          for contract in docs/contracts/*.md; do
            echo "Checking $contract..."
            # Verify required sections exist
            if ! grep -q "^## " "$contract"; then
              echo "Warning: $contract missing section headers"
            fi
            # Check for decision ID
            if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
              echo "Warning: $contract missing decision ID format"
            fi
          done

  validate-pack-fixtures:
    name: Validate Pack Fixtures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: pip install jsonschema

      - name: Run fixture validation
        run: |
          if [ -f scripts/packs/run-fixtures-check.sh ]; then
            chmod +x scripts/packs/run-fixtures-check.sh
            ./scripts/packs/run-fixtures-check.sh
          fi

  checksum-audit:
    name: Audit SHA256SUMS Files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Validate checksums
        run: |
          find . -name "SHA256SUMS" -type f | while read f; do
            dir=$(dirname "$f")
            echo "Validating checksums in $dir..."
            cd "$dir"
            # Check if all referenced files exist
            while read hash file; do
              if [ ! -f "$file" ]; then
                echo "Warning: $file referenced in SHA256SUMS but not found"
              fi
            done < SHA256SUMS
            cd - > /dev/null
          done

  merkle-consistency:
    name: Verify Merkle Roots
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check DSSE Merkle roots
        run: |
          find . -name "*.dsse.json" -type f | while read f; do
            echo "Checking Merkle root in $f..."
            # Extract and validate Merkle root if present
            if jq -e '.payload' "$f" > /dev/null 2>&1; then
              PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
              if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
                MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
                echo "  Merkle root: $MERKLE"
              fi
            fi
          done
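The checksum audit above only confirms that referenced files exist; verifying content too is one flag away (sketch, GNU coreutils):

    (cd "$dir" && sha256sum -c SHA256SUMS --ignore-missing)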
@@ -18,10 +18,18 @@ jobs:
        with:
          fetch-depth: 0

+     - name: Fallback to dev signing key when secret is absent (non-prod only)
+       run: |
+         if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
+           echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
+           echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
+           echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
+         fi

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: 10.0.100-rc.2.25502.107
+         dotnet-version: 10.0.100
          include-prerelease: true

      - name: Task Pack offline bundle fixtures
@@ -38,6 +46,16 @@ jobs:
        run: |
          scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz

+     - name: Prepare Export Center handoff (metadata + optional schedule)
+       run: |
+         scripts/mirror/export-center-wire.sh
+       env:
+         EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
+         EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
+         EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
+         EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
+         EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
@@ -49,5 +67,8 @@ jobs:
            out/mirror/thin/tuf/
            out/mirror/thin/oci/
            out/mirror/thin/milestone.json
+           out/mirror/thin/export-center/export-center-handoff.json
+           out/mirror/thin/export-center/export-center-targets.json
+           out/mirror/thin/export-center/schedule-response.json
          if-no-files-found: error
          retention-days: 14
44 .gitea/workflows/mock-dev-release.yml Normal file
@@ -0,0 +1,44 @@
name: mock-dev-release

on:
  push:
    paths:
      - deploy/releases/2025.09-mock-dev.yaml
      - deploy/downloads/manifest.json
      - ops/devops/mock-release/**
  workflow_dispatch:

jobs:
  package-mock-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Package mock dev artefacts
        run: |
          set -euo pipefail
          mkdir -p out/mock-release
          cp deploy/releases/2025.09-mock-dev.yaml out/mock-release/
          cp deploy/downloads/manifest.json out/mock-release/
          tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .

      - name: Compose config (dev + mock overlay)
        run: |
          set -euo pipefail
          ops/devops/mock-release/config_check.sh

      - name: Helm template (mock overlay)
        run: |
          set -euo pipefail
          helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
          ls -lh /tmp/helm-mock.yaml

      - name: Upload mock release bundle
        uses: actions/upload-artifact@v3
        with:
          name: mock-dev-release
          path: |
            out/mock-release/mock-dev-release.tgz
            /tmp/compose-mock-config.yaml
            /tmp/helm-mock.yaml
102 .gitea/workflows/notify-smoke-test.yml Normal file
@@ -0,0 +1,102 @@
name: Notify Smoke Test

on:
  push:
    branches: [main]
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  pull_request:
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.x'

jobs:
  unit-tests:
    name: Notify Unit Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notify/

      - name: Build
        run: dotnet build src/Notify/ --no-restore

      - name: Run tests
        run: dotnet test src/Notify/ --no-build --verbosity normal

  notifier-tests:
    name: Notifier Service Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notifier/

      - name: Build
        run: dotnet build src/Notifier/ --no-restore

      - name: Run tests
        run: dotnet test src/Notifier/ --no-build --verbosity normal

  smoke-test:
    name: Notification Smoke Test
    runs-on: ubuntu-latest
    needs: [unit-tests, notifier-tests]
    services:
      mongodb:
        image: mongo:7.0
        ports:
          - 27017:27017
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Build Notifier
        run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/

      - name: Start service
        run: |
          dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
          sleep 10

      - name: Health check
        run: |
          for i in {1..30}; do
            if curl -s http://localhost:5000/health > /dev/null; then
              echo "Service is healthy"
              exit 0
            fi
            sleep 1
          done
          echo "Service failed to start"
          exit 1

      - name: Test notification endpoint
        run: |
          # Test dry-run notification
          curl -X POST http://localhost:5000/api/v1/notifications/test \
            -H "Content-Type: application/json" \
            -d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
            || echo "Warning: Notification test endpoint not available"
@@ -35,7 +35,7 @@ jobs:
      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: 10.0.100-rc.2.25502.107
+         dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
@@ -36,7 +36,7 @@ jobs:
      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
-         dotnet-version: 10.0.100-rc.2.25502.107
+         dotnet-version: 10.0.100
          include-prerelease: true

      - name: Install Cosign
@@ -1,27 +1,27 @@
# .gitea/workflows/promote.yml
# Manual promotion workflow to copy staged artefacts to production

name: Promote Feedser (Manual)

on:
  workflow_dispatch:
    inputs:
      include_docs:
        description: 'Also promote the generated documentation bundle'
        required: false
        default: 'true'
        type: boolean
      tag:
        description: 'Optional build identifier to record in the summary'
        required: false
        default: 'latest'
        type: string

jobs:
  promote:
    runs-on: ubuntu-22.04
    environment: production
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
@@ -32,178 +32,178 @@ jobs:
        id: staging
        run: |
          missing=()

          host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi

          user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.STAGING_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi

          key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing staging configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/staging_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Resolve production credentials
        id: production
        run: |
          missing=()

          host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi

          user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi

          key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing production configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/production_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Install rsync
        run: |
          if command -v rsync >/dev/null 2>&1; then
            exit 0
          fi
          CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
          mkdir -p "$CACHE_DIR"
          KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
          DEB_DIR="$CACHE_DIR/$KEY"
          mkdir -p "$DEB_DIR"
          if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
            apt-get update
            apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
          else
            apt-get update
            apt-get download rsync libpopt0
            mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
            dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
          fi

      - name: Fetch staging artefacts
        id: fetch
        run: |
          staging_root="${{ runner.temp }}/staging"
          mkdir -p "$staging_root/service" "$staging_root/docs"

          echo "📥 Copying service bundle from staging"
          rsync -az --delete \
            -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
            "$staging_root/service/"

          if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
            echo "📥 Copying documentation bundle from staging"
            rsync -az --delete \
              -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
              "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
              "$staging_root/docs/"
          else
            echo "ℹ️ Documentation promotion skipped"
          fi

          echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
          echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT

      - name: Backup production service content
        run: |
          ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
            "set -e; TARGET='${{ steps.production.outputs.path }}'; \
             if [ -d \"$TARGET\" ]; then \
               parent=\$(dirname \"$TARGET\"); \
               base=\$(basename \"$TARGET\"); \
               backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
               mkdir -p \"\$backup\"; \
               rsync -a --delete \"$TARGET/\" \"\$backup/\"; \
               ls -dt \"\$parent/\${base}.backup.*\" 2>/dev/null | tail -n +6 | xargs rm -rf || true; \
               echo 'Backup created at ' \"\$backup\"; \
             else \
               echo 'Production service path missing; skipping backup'; \
             fi"

      - name: Publish service to production
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['service-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"

      - name: Promote documentation bundle
        if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['docs-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"

      - name: Promotion summary
        run: |
          echo "✅ Promotion completed"
          echo "  Tag: ${{ github.event.inputs.tag }}"
          echo "  Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
          if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
            echo "  Docs: included"
          else
            echo "  Docs: skipped"
          fi
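One quoting caveat in the backup step: the remote glob in `ls -dt \"\$parent/\${base}.backup.*\"` sits inside double quotes, so it likely never expands and old backups are never pruned; keeping the suffix outside the quotes is the usual fix (sketch):

    ls -dt "$parent/${base}".backup.* 2>/dev/null | tail -n +6 | xargs rm -rf || true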
fi"
|
||||
|
||||
- name: Publish service to production
|
||||
run: |
|
||||
rsync -az --delete \
|
||||
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||
"${{ steps.fetch.outputs['service-dir'] }}/" \
|
||||
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"
|
||||
|
||||
- name: Promote documentation bundle
|
||||
if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
|
||||
run: |
|
||||
rsync -az --delete \
|
||||
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
|
||||
"${{ steps.fetch.outputs['docs-dir'] }}/" \
|
||||
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"
|
||||
|
||||
- name: Promotion summary
|
||||
run: |
|
||||
echo "✅ Promotion completed"
|
||||
echo " Tag: ${{ github.event.inputs.tag }}"
|
||||
echo " Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
|
||||
if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
|
||||
echo " Docs: included"
|
||||
else
|
||||
echo " Docs: skipped"
|
||||
fi
|
||||
|
||||
19
.gitea/workflows/release-manifest-verify.yml
Normal file
@@ -0,0 +1,19 @@
name: release-manifest-verify

on:
  push:
    paths:
      - deploy/releases/2025.09-stable.yaml
      - deploy/releases/2025.09-airgap.yaml
      - deploy/downloads/manifest.json
      - ops/devops/release/check_release_manifest.py
  workflow_dispatch:

jobs:
  verify:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Validate release & downloads manifests
        run: |
          python ops/devops/release/check_release_manifest.py

120
.gitea/workflows/release-validation.yml
Normal file
@@ -0,0 +1,120 @@
|
||||
name: Release Validation
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'deploy/**'
|
||||
- 'scripts/release/**'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.x'
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_PREFIX: stellaops
|
||||
|
||||
jobs:
|
||||
validate-manifests:
|
||||
name: Validate Release Manifests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate Helm charts
|
||||
run: |
|
||||
helm lint deploy/helm/stellaops
|
||||
helm template stellaops deploy/helm/stellaops --dry-run
|
||||
|
||||
- name: Validate Kubernetes manifests
|
||||
run: |
|
||||
for f in deploy/k8s/*.yaml; do
|
||||
kubectl apply --dry-run=client -f "$f" || exit 1
|
||||
done
|
||||
|
||||
- name: Check required images exist
|
||||
run: |
|
||||
REQUIRED_IMAGES=(
|
||||
"concelier"
|
||||
"scanner"
|
||||
"authority"
|
||||
"signer"
|
||||
"attestor"
|
||||
"excititor"
|
||||
"policy"
|
||||
"scheduler"
|
||||
"notify"
|
||||
)
|
||||
for img in "${REQUIRED_IMAGES[@]}"; do
|
||||
echo "Checking $img..."
|
||||
# Validate Dockerfile exists
|
||||
if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "deploy/docker/${img}/Dockerfile" ]; then
|
||||
echo "Warning: Dockerfile not found for $img"
|
||||
fi
|
||||
done
|
||||
|
||||
validate-checksums:
|
||||
name: Validate Artifact Checksums
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Verify SHA256SUMS files
|
||||
run: |
|
||||
find . -name "SHA256SUMS" -type f | while read f; do
|
||||
dir=$(dirname "$f")
|
||||
echo "Validating $f..."
|
||||
cd "$dir"
|
||||
if ! sha256sum -c SHA256SUMS --quiet 2>/dev/null; then
|
||||
echo "Warning: Checksum mismatch in $dir"
|
||||
fi
|
||||
cd - > /dev/null
|
||||
done
|
||||
|
||||
validate-schemas:
|
||||
name: Validate Schema Integrity
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install ajv-cli
|
||||
run: npm install -g ajv-cli ajv-formats
|
||||
|
||||
- name: Validate JSON schemas
|
||||
run: |
|
||||
for schema in docs/schemas/*.schema.json; do
|
||||
echo "Validating $schema..."
|
||||
ajv compile -s "$schema" --spec=draft2020 || echo "Warning: $schema validation issue"
|
||||
done
|
||||
|
||||
release-notes:
|
||||
name: Generate Release Notes
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
needs: [validate-manifests, validate-checksums, validate-schemas]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate changelog
|
||||
run: |
|
||||
PREV_TAG=$(git describe --abbrev=0 --tags HEAD^ 2>/dev/null || echo "")
|
||||
if [ -n "$PREV_TAG" ]; then
|
||||
echo "## Changes since $PREV_TAG" > RELEASE_NOTES.md
|
||||
git log --pretty=format:"- %s (%h)" "$PREV_TAG"..HEAD >> RELEASE_NOTES.md
|
||||
else
|
||||
echo "## Initial Release" > RELEASE_NOTES.md
|
||||
fi
|
||||
|
||||
- name: Upload release notes
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release-notes
|
||||
path: RELEASE_NOTES.md
|
||||
@@ -36,7 +36,7 @@ jobs:
  build-release:
    runs-on: ubuntu-22.04
    env:
      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
      DOTNET_VERSION: '10.0.100'
      REGISTRY: registry.stella-ops.org
    steps:
      - name: Checkout repository

198
.gitea/workflows/risk-bundle-ci.yml
Normal file
@@ -0,0 +1,198 @@
|
||||
name: Risk Bundle CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||
- 'ops/devops/risk-bundle/**'
|
||||
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
paths:
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
|
||||
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
|
||||
- 'ops/devops/risk-bundle/**'
|
||||
- '.gitea/workflows/risk-bundle-ci.yml'
|
||||
- 'docs/modules/export-center/operations/risk-bundle-*.md'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
include_osv:
|
||||
description: 'Include OSV providers (larger bundle)'
|
||||
type: boolean
|
||||
default: false
|
||||
publish_checksums:
|
||||
description: 'Publish checksums to artifact store'
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
risk-bundle-build:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true
|
||||
|
||||
- name: Test RiskBundle unit tests
|
||||
run: |
|
||||
mkdir -p $ARTIFACT_DIR
|
||||
dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
|
||||
-c Release \
|
||||
--filter "FullyQualifiedName~RiskBundle" \
|
||||
--logger "trx;LogFileName=risk-bundle-tests.trx" \
|
||||
--results-directory $ARTIFACT_DIR
|
||||
|
||||
- name: Build risk bundle (fixtures)
|
||||
run: |
|
||||
mkdir -p $BUNDLE_OUTPUT
|
||||
ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only
|
||||
|
||||
- name: Verify bundle integrity
|
||||
run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"
|
||||
|
||||
- name: Generate checksums
|
||||
run: |
|
||||
cd $BUNDLE_OUTPUT
|
||||
sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
|
||||
sha256sum manifest.json > manifest.json.sha256
|
||||
cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
|
||||
echo "Bundle checksums:"
|
||||
cat checksums.txt
|
||||
|
||||
- name: Upload risk bundle artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: |
|
||||
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
|
||||
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
|
||||
${{ env.BUNDLE_OUTPUT }}/manifest.json
|
||||
${{ env.BUNDLE_OUTPUT }}/checksums.txt
|
||||
${{ env.ARTIFACT_DIR }}/*.trx
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: risk-bundle-test-results
|
||||
path: ${{ env.ARTIFACT_DIR }}/*.trx
|
||||
|
||||
risk-bundle-offline-kit:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: risk-bundle-build
|
||||
env:
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download risk bundle artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: ${{ env.ARTIFACT_DIR }}
|
||||
|
||||
- name: Package for offline kit
|
||||
run: |
|
||||
mkdir -p $OFFLINE_KIT_DIR/risk-bundles
|
||||
cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
|
||||
cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
|
||||
cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
|
||||
cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/
|
||||
|
||||
# Create offline kit manifest entry
|
||||
cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
|
||||
{
|
||||
"component": "risk-bundle",
|
||||
"version": "$(date -u +%Y%m%d-%H%M%S)",
|
||||
"files": [
|
||||
{"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
|
||||
{"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
|
||||
],
|
||||
"verification": {
|
||||
"checksums": "checksums.txt",
|
||||
"signature": "risk-bundle.tar.gz.sig"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Verify offline kit structure
|
||||
run: |
|
||||
echo "Offline kit structure:"
|
||||
find $OFFLINE_KIT_DIR -type f
|
||||
echo ""
|
||||
echo "Checksum verification:"
|
||||
cd $OFFLINE_KIT_DIR/risk-bundles
|
||||
sha256sum -c checksums.txt
|
||||
|
||||
- name: Upload offline kit
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-offline-kit
|
||||
path: ${{ env.OFFLINE_KIT_DIR }}
|
||||
|
||||
publish-checksums:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: risk-bundle-build
|
||||
if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
|
||||
env:
|
||||
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download risk bundle artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-artifacts
|
||||
path: ${{ env.ARTIFACT_DIR }}
|
||||
|
||||
- name: Publish checksums
|
||||
run: |
|
||||
echo "Publishing checksums for risk bundle..."
|
||||
CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
|
||||
mkdir -p $CHECKSUM_DIR
|
||||
cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
|
||||
cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/
|
||||
|
||||
# Create latest symlink manifest
|
||||
cat > out/checksums/risk-bundle/latest.json <<EOF
|
||||
{
|
||||
"date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||
"path": "$(date -u +%Y-%m-%d)/checksums.txt",
|
||||
"manifest": "$(date -u +%Y-%m-%d)/manifest.json"
|
||||
}
|
||||
EOF
|
||||
|
||||
echo "Checksums published to $CHECKSUM_DIR"
|
||||
cat $CHECKSUM_DIR/checksums.txt
|
||||
|
||||
- name: Upload published checksums
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: risk-bundle-published-checksums
|
||||
path: out/checksums/risk-bundle/
|
||||
@@ -20,7 +20,7 @@ jobs:
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100-rc.2.25502.107"
          dotnet-version: "10.0.100"

      - name: Install syft (SBOM)
        uses: anchore/sbom-action/download-syft@v0

133
.gitea/workflows/scanner-analyzers.yml
Normal file
@@ -0,0 +1,133 @@
|
||||
name: Scanner Analyzers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
|
||||
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.x'
|
||||
|
||||
jobs:
|
||||
discover-analyzers:
|
||||
name: Discover Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
analyzers: ${{ steps.find.outputs.analyzers }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Find analyzer projects
|
||||
id: find
|
||||
run: |
|
||||
ANALYZERS=$(find src/Scanner/__Libraries -name "StellaOps.Scanner.Analyzers.*.csproj" -exec dirname {} \; | xargs -I {} basename {} | sort -u | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo "analyzers=$ANALYZERS" >> $GITHUB_OUTPUT
|
||||
|
||||
build-analyzers:
|
||||
name: Build Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
needs: discover-analyzers
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
analyzer: ${{ fromJson(needs.discover-analyzers.outputs.analyzers) }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/Scanner/__Libraries/${{ matrix.analyzer }}/
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/Scanner/__Libraries/${{ matrix.analyzer }}/ --no-restore
|
||||
|
||||
test-lang-analyzers:
|
||||
name: Test Language Analyzers
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-analyzers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Run Bun analyzer tests
|
||||
run: |
|
||||
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests" ]; then
|
||||
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/ --verbosity normal
|
||||
fi
|
||||
|
||||
- name: Run Node analyzer tests
|
||||
run: |
|
||||
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests" ]; then
|
||||
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/ --verbosity normal
|
||||
fi
|
||||
|
||||
fixture-validation:
|
||||
name: Validate Test Fixtures
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate fixture structure
|
||||
run: |
|
||||
find src/Scanner/__Tests -name "expected.json" | while read f; do
|
||||
echo "Validating $f..."
|
||||
if ! jq empty "$f" 2>/dev/null; then
|
||||
echo "Error: Invalid JSON in $f"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check fixture completeness
|
||||
run: |
|
||||
find src/Scanner/__Tests -type d -name "Fixtures" | while read fixtures_dir; do
|
||||
echo "Checking $fixtures_dir..."
|
||||
find "$fixtures_dir" -mindepth 1 -maxdepth 1 -type d | while read test_case; do
|
||||
if [ ! -f "$test_case/expected.json" ]; then
|
||||
echo "Warning: $test_case missing expected.json"
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
determinism-check:
|
||||
name: Verify Deterministic Output
|
||||
runs-on: ubuntu-latest
|
||||
needs: test-lang-analyzers
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
|
||||
- name: Run determinism tests
|
||||
run: |
|
||||
# Run scanner on same input twice, compare outputs
|
||||
if [ -d "tests/fixtures/determinism" ]; then
|
||||
dotnet test --filter "Category=Determinism" --verbosity normal
|
||||
fi
|
||||
@@ -15,7 +15,7 @@ jobs:
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100-rc.2.25502.107"
          dotnet-version: "10.0.100"

      - name: Run determinism harness
        run: |

@@ -39,7 +39,7 @@ jobs:
      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100-rc.2.25502.107
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages

@@ -37,7 +37,7 @@ jobs:
      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100-rc.2.25502.107
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages

@@ -28,6 +28,8 @@ jobs:
|
||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-01' }}
|
||||
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@@ -42,6 +44,16 @@ jobs:
|
||||
with:
|
||||
cosign-release: 'v2.2.4'
|
||||
|
||||
- name: Check signing key configured
|
||||
run: |
|
||||
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||
fi
|
||||
|
||||
- name: Verify artifacts exist
|
||||
run: |
|
||||
cd docs/modules/signals
|
||||
@@ -90,9 +102,9 @@ jobs:
|
||||
retention-days: 90
|
||||
|
||||
- name: Push to Evidence Locker
|
||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||
env:
|
||||
TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||
run: |
|
||||
tar -cf /tmp/signals-dsse.tar -C "$OUT_DIR" .
|
||||
@@ -102,7 +114,7 @@ jobs:
|
||||
echo "Pushed to Evidence Locker"
|
||||
|
||||
- name: Evidence Locker skip notice
|
||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||
run: |
|
||||
echo "::notice::Evidence Locker push skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
|
||||
echo "Artifacts available as workflow artifact for manual ingestion"
|
||||
|
||||
@@ -2,6 +2,14 @@ name: signals-evidence-locker
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
out_dir:
|
||||
description: "Output directory containing signed artifacts"
|
||||
required: false
|
||||
default: "evidence-locker/signals/2025-12-05"
|
||||
allow_dev_key:
|
||||
description: "Allow dev key fallback (1=yes, 0=no)"
|
||||
required: false
|
||||
default: "0"
|
||||
retention_target:
|
||||
description: "Retention days target"
|
||||
required: false
|
||||
@@ -12,7 +20,12 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
MODULE_ROOT: docs/modules/signals
|
||||
OUT_DIR: evidence-locker/signals/2025-12-05
|
||||
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-05' }}
|
||||
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@@ -20,6 +33,31 @@ jobs:
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3
|
||||
with:
|
||||
cosign-release: 'v2.2.4'
|
||||
|
||||
- name: Check signing key configured
|
||||
run: |
|
||||
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||
fi
|
||||
|
||||
- name: Verify artifacts exist
|
||||
run: |
|
||||
cd "$MODULE_ROOT"
|
||||
sha256sum -c SHA256SUMS
|
||||
|
||||
- name: Sign signals artifacts
|
||||
run: |
|
||||
chmod +x tools/cosign/sign-signals.sh
|
||||
OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
|
||||
|
||||
- name: Build deterministic signals evidence tar
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -52,16 +90,17 @@ jobs:
|
||||
/tmp/signals-evidence.tar.sha256
|
||||
|
||||
- name: Push to Evidence Locker
|
||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||
env:
|
||||
TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||
run: |
|
||||
curl -f -X PUT "$URL/signals/2025-12-05/signals-evidence.tar" \
|
||||
upload_path="${OUT_DIR#evidence-locker/}"
|
||||
curl -f -X PUT "$URL/${upload_path}/signals-evidence.tar" \
|
||||
-H "Authorization: Bearer $TOKEN" \
|
||||
--data-binary @/tmp/signals-evidence.tar
|
||||
|
||||
- name: Skip push (missing secret or URL)
|
||||
if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||
run: |
|
||||
echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
|
||||
|
||||
127
.gitea/workflows/signals-reachability.yml
Normal file
@@ -0,0 +1,127 @@
|
||||
name: Signals Reachability Scoring & Events
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
allow_dev_key:
|
||||
description: "Allow dev signing key fallback (1=yes, 0=no)"
|
||||
required: false
|
||||
default: "0"
|
||||
evidence_out_dir:
|
||||
description: "Evidence output dir for signing/upload"
|
||||
required: false
|
||||
default: "evidence-locker/signals/2025-12-05"
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'src/Signals/**'
|
||||
- 'scripts/signals/reachability-smoke.sh'
|
||||
- '.gitea/workflows/signals-reachability.yml'
|
||||
- 'tools/cosign/sign-signals.sh'
|
||||
|
||||
jobs:
|
||||
reachability-smoke:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
DOTNET_NOLOGO: 1
|
||||
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||
TZ: UTC
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
|
||||
- name: Setup .NET 10 RC
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/Signals/StellaOps.Signals.sln --configfile nuget.config
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/Signals/StellaOps.Signals.sln -c Release --no-restore
|
||||
|
||||
- name: Reachability scoring + cache/events smoke
|
||||
run: |
|
||||
chmod +x scripts/signals/reachability-smoke.sh
|
||||
scripts/signals/reachability-smoke.sh
|
||||
|
||||
sign-and-upload:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: reachability-smoke
|
||||
env:
|
||||
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
|
||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
|
||||
OUT_DIR: ${{ github.event.inputs.evidence_out_dir || 'evidence-locker/signals/2025-12-05' }}
|
||||
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
|
||||
- name: Install cosign
|
||||
uses: sigstore/cosign-installer@v3
|
||||
with:
|
||||
cosign-release: 'v2.2.4'
|
||||
|
||||
- name: Check signing key configured
|
||||
run: |
|
||||
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
|
||||
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
|
||||
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
|
||||
fi
|
||||
|
||||
- name: Verify artifacts exist
|
||||
run: |
|
||||
cd docs/modules/signals
|
||||
sha256sum -c SHA256SUMS
|
||||
|
||||
- name: Sign signals artifacts
|
||||
run: |
|
||||
chmod +x tools/cosign/sign-signals.sh
|
||||
OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
|
||||
|
||||
- name: Upload signed artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: signals-evidence-${{ github.run_number }}
|
||||
path: |
|
||||
${{ env.OUT_DIR }}/*.sigstore.json
|
||||
${{ env.OUT_DIR }}/*.dsse
|
||||
${{ env.OUT_DIR }}/SHA256SUMS
|
||||
if-no-files-found: error
|
||||
retention-days: 30
|
||||
|
||||
- name: Push to Evidence Locker
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
|
||||
env:
|
||||
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
|
||||
URL: ${{ env.EVIDENCE_LOCKER_URL }}
|
||||
run: |
|
||||
tar -cf /tmp/signals-evidence.tar -C "$OUT_DIR" .
|
||||
sha256sum /tmp/signals-evidence.tar
|
||||
curl -f -X PUT "$URL/signals/$(date -u +%Y-%m-%d)/signals-evidence.tar" \
|
||||
-H "Authorization: Bearer $TOKEN" \
|
||||
--data-binary @/tmp/signals-evidence.tar
|
||||
echo "Uploaded to Evidence Locker"
|
||||
|
||||
- name: Evidence Locker skip notice
|
||||
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
|
||||
run: |
|
||||
echo "::notice::Evidence Locker upload skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
|
||||
33
.gitea/workflows/sm-remote-ci.yml
Normal file
@@ -0,0 +1,33 @@
name: sm-remote-ci

on:
  push:
    paths:
      - "src/SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
      - "ops/sm-remote/**"
      - ".gitea/workflows/sm-remote-ci.yml"
  pull_request:
    paths:
      - "src/SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
      - "ops/sm-remote/**"

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.x
      - name: Restore
        run: dotnet restore src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj
      - name: Test
        run: dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj --no-build --verbosity normal
      - name: Publish service
        run: dotnet publish src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj -c Release -o out/sm-remote

3
.gitignore
vendored
@@ -64,3 +64,6 @@ coverage/
local-nugets/
local-nuget/
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
.nuget-cache/
.nuget-packages2/
.nuget-temp/

@@ -1,52 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
|
||||
<metadata>
|
||||
<id>Microsoft.Extensions.Logging.Abstractions</id>
|
||||
<version>10.0.0-rc.2.25502.107</version>
|
||||
<authors>Microsoft</authors>
|
||||
<license type="expression">MIT</license>
|
||||
<licenseUrl>https://licenses.nuget.org/MIT</licenseUrl>
|
||||
<icon>Icon.png</icon>
|
||||
<readme>PACKAGE.md</readme>
|
||||
<projectUrl>https://dot.net/</projectUrl>
|
||||
<description>Logging abstractions for Microsoft.Extensions.Logging.
|
||||
|
||||
Commonly Used Types:
|
||||
Microsoft.Extensions.Logging.ILogger
|
||||
Microsoft.Extensions.Logging.ILoggerFactory
|
||||
Microsoft.Extensions.Logging.ILogger<TCategoryName>
|
||||
Microsoft.Extensions.Logging.LogLevel
|
||||
Microsoft.Extensions.Logging.Logger<T>
|
||||
Microsoft.Extensions.Logging.LoggerMessage
|
||||
Microsoft.Extensions.Logging.Abstractions.NullLogger</description>
|
||||
<releaseNotes>https://go.microsoft.com/fwlink/?LinkID=799421</releaseNotes>
|
||||
<copyright>© Microsoft Corporation. All rights reserved.</copyright>
|
||||
<serviceable>true</serviceable>
|
||||
<repository type="git" url="https://github.com/dotnet/dotnet" commit="89c8f6a112d37d2ea8b77821e56d170a1bccdc5a" />
|
||||
<dependencies>
|
||||
<group targetFramework=".NETFramework4.6.2">
|
||||
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
|
||||
</group>
|
||||
<group targetFramework="net8.0">
|
||||
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
</group>
|
||||
<group targetFramework="net9.0">
|
||||
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
</group>
|
||||
<group targetFramework="net10.0">
|
||||
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
</group>
|
||||
<group targetFramework=".NETStandard2.0">
|
||||
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
|
||||
<dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
|
||||
</group>
|
||||
</dependencies>
|
||||
</metadata>
|
||||
</package>
|
||||
Binary file not shown.
86
AGENTS.md
@@ -58,7 +58,7 @@ When you are told you are working in a particular module or directory, assume yo
|
||||
|
||||
* **Runtime**: .NET 10 (`net10.0`) with latest C# preview features. Microsoft.* dependencies should target the closest compatible versions.
|
||||
* **Frontend**: Angular v17 for the UI.
|
||||
* **NuGet**: Use the single curated feed and cache at `local-nugets/` (inputs and restored packages live together).
|
||||
* **NuGet**: Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org). Packages restore to the global NuGet cache.
|
||||
* **Data**: MongoDB as canonical store and for job/export state. Use a MongoDB driver version ≥ 3.0.
|
||||
* **Observability**: Structured logs, counters, and (optional) OpenTelemetry traces.
|
||||
* **Ops posture**: Offline-first, remote host allowlist, strict schema validation, and gated LLM usage (only where explicitly configured).
|
||||
@@ -269,12 +269,12 @@ In this role you act as:
|
||||
* **Angular v17 engineer** (UI).
|
||||
* **QA automation engineer** (C#, Moq, Playwright, Angular test stack, or other suitable tools).
|
||||
|
||||
Implementation principles:
|
||||
|
||||
* Always follow .NET 10 and Angular v17 best practices.
|
||||
* Apply SOLID design principles (SRP, OCP, LSP, ISP, DIP) in service and library code.
|
||||
* Maximise reuse and composability.
|
||||
* Maintain determinism: stable ordering, UTC ISO-8601 timestamps, immutable NDJSON where applicable.
|
||||
Implementation principles:
|
||||
|
||||
* Always follow .NET 10 and Angular v17 best practices.
|
||||
* Apply SOLID design principles (SRP, OCP, LSP, ISP, DIP) in service and library code.
|
||||
* Maximise reuse and composability.
|
||||
* Maintain determinism: stable ordering, UTC ISO-8601 timestamps, immutable NDJSON where applicable.
|
||||
|
||||
Execution rules (very important):
|
||||
|
||||
@@ -330,7 +330,7 @@ If no design decision is required, you proceed autonomously, implementing the ch
|
||||
|
||||
---
|
||||
|
||||
### 5) Working Agreement (Global)
|
||||
### 5) Working Agreement (Global)
|
||||
|
||||
1. **Task status discipline**
|
||||
|
||||
@@ -353,41 +353,41 @@ If no design decision is required, you proceed autonomously, implementing the ch
|
||||
5. **Completion**
|
||||
|
||||
* When you complete all tasks in scope for your current instruction set, explicitly state that you are done with those tasks.
|
||||
6. **AGENTS.md discipline**
|
||||
* Project / technical managers ensure each module’s `AGENTS.md` exists, is up to date, and reflects current design and advisory decisions.
|
||||
* Implementers must read and follow the relevant `AGENTS.md` before coding in a module.
|
||||
* If a mismatch or gap is found, implementers log it via `BLOCKED` status and the sprint’s **Decisions & Risks**, and then continue with other work instead of asking for live clarification.
|
||||
|
||||
---
|
||||
|
||||
### 7) Advisory Handling (do this every time a new advisory lands)
|
||||
|
||||
**Trigger:** Any new or updated file under `docs/product-advisories/` (including archived) automatically starts this workflow. No chat approval required.
|
||||
|
||||
1) **Doc sync (must happen for every advisory):**
|
||||
- Create/update **two layers**:
|
||||
- **High-level**: `docs/` (vision/key-features/market) to capture the moat/positioning and the headline promise.
|
||||
- **Detailed**: closest deep area (`docs/reachability/*`, `docs/market/*`, `docs/benchmarks/*`, `docs/modules/<module>/*`, etc.).
|
||||
- **Code & samples:**
|
||||
- Inline only short fragments (≤ ~20 lines) directly in the updated doc for readability.
|
||||
- Place runnable or longer samples/harnesses in `docs/benchmarks/**` or `tests/**` with deterministic, offline-friendly defaults (no network, fixed seeds), and link to them from the doc.
|
||||
- If the advisory already contains code, carry it over verbatim into the benchmark/test file (with minor formatting only); don’t paraphrase away executable value.
|
||||
- **Cross-links:** whenever moats/positioning change, add links from `docs/07_HIGH_LEVEL_ARCHITECTURE.md`, `docs/key-features.md`, and the relevant module dossier(s).
|
||||
|
||||
2) **Sprint sync (must happen for every advisory):**
|
||||
- Add Delivery Tracker rows in the relevant `SPRINT_*.md` with owners, deps, and doc paths; add an Execution Log entry for the change.
|
||||
- If code/bench/dataset work is implied, create tasks and point to the new benchmark/test paths; add risks/interlocks for schema/feed freeze or transparency caps as needed.
|
||||
|
||||
3) **De-duplication:**
|
||||
- Check `docs/product-advisories/archived/` for overlaps. If similar, mark “supersedes/extends <advisory>` in the new doc and avoid duplicate tasks.
|
||||
|
||||
4) **Defaults to apply (unless advisory overrides):**
|
||||
- Hybrid reachability posture: graph DSSE mandatory; edge-bundle DSSE optional/targeted; deterministic outputs only.
|
||||
- Offline-friendly benches/tests; frozen feeds; deterministic ordering/hashes.
|
||||
|
||||
5) **Do not defer:** Execute steps 1–4 immediately; reporting is after the fact, not a gating step.
|
||||
|
||||
**Lessons baked in:** Past delays came from missing code carry-over and missing sprint tasks. Always move advisory code into benchmarks/tests and open the corresponding sprint rows the same session you read the advisory.
|
||||
6. **AGENTS.md discipline**
|
||||
* Project / technical managers ensure each module’s `AGENTS.md` exists, is up to date, and reflects current design and advisory decisions.
|
||||
* Implementers must read and follow the relevant `AGENTS.md` before coding in a module.
|
||||
* If a mismatch or gap is found, implementers log it via `BLOCKED` status and the sprint’s **Decisions & Risks**, and then continue with other work instead of asking for live clarification.
|
||||
|
||||
---
|
||||
|
||||
### 7) Advisory Handling (do this every time a new advisory lands)
|
||||
|
||||
**Trigger:** Any new or updated file under `docs/product-advisories/` (including archived) automatically starts this workflow. No chat approval required.
|
||||
|
||||
1) **Doc sync (must happen for every advisory):**
|
||||
- Create/update **two layers**:
|
||||
- **High-level**: `docs/` (vision/key-features/market) to capture the moat/positioning and the headline promise.
|
||||
- **Detailed**: closest deep area (`docs/reachability/*`, `docs/market/*`, `docs/benchmarks/*`, `docs/modules/<module>/*`, etc.).
|
||||
- **Code & samples:**
|
||||
- Inline only short fragments (≤ ~20 lines) directly in the updated doc for readability.
|
||||
- Place runnable or longer samples/harnesses in `docs/benchmarks/**` or `tests/**` with deterministic, offline-friendly defaults (no network, fixed seeds), and link to them from the doc.
|
||||
- If the advisory already contains code, carry it over verbatim into the benchmark/test file (with minor formatting only); don’t paraphrase away executable value.
|
||||
- **Cross-links:** whenever moats/positioning change, add links from `docs/07_HIGH_LEVEL_ARCHITECTURE.md`, `docs/key-features.md`, and the relevant module dossier(s).
|
||||
|
||||
2) **Sprint sync (must happen for every advisory):**
|
||||
- Add Delivery Tracker rows in the relevant `SPRINT_*.md` with owners, deps, and doc paths; add an Execution Log entry for the change.
|
||||
- If code/bench/dataset work is implied, create tasks and point to the new benchmark/test paths; add risks/interlocks for schema/feed freeze or transparency caps as needed.
|
||||
|
||||
3) **De-duplication:**
|
||||
- Check `docs/product-advisories/archived/` for overlaps. If similar, mark “supersedes/extends <advisory>` in the new doc and avoid duplicate tasks.
|
||||
|
||||
4) **Defaults to apply (unless advisory overrides):**
|
||||
- Hybrid reachability posture: graph DSSE mandatory; edge-bundle DSSE optional/targeted; deterministic outputs only.
|
||||
- Offline-friendly benches/tests; frozen feeds; deterministic ordering/hashes.
|
||||
|
||||
5) **Do not defer:** Execute steps 1–4 immediately; reporting is after the fact, not a gating step.
|
||||
|
||||
**Lessons baked in:** Past delays came from missing code carry-over and missing sprint tasks. Always move advisory code into benchmarks/tests and open the corresponding sprint rows the same session you read the advisory.
|
||||
---
|
||||
|
||||
### 6) Role Switching
|
||||
|
||||
16
CLAUDE.md
@@ -41,7 +41,7 @@ dotnet test --filter "FullyQualifiedName~TestMethodName"
|
||||
dotnet test src/StellaOps.sln --verbosity normal
|
||||
```
|
||||
|
||||
**Note:** Tests use Mongo2Go which requires OpenSSL 1.1 on Linux. Run `scripts/enable-openssl11-shim.sh` before testing if needed.
|
||||
**Note:** Integration tests use Testcontainers for PostgreSQL. Ensure Docker is running before executing tests.
|
||||
|
||||
## Linting and Validation
|
||||
|
||||
@@ -61,10 +61,10 @@ helm lint deploy/helm/stellaops
|
||||
### Technology Stack
|
||||
- **Runtime:** .NET 10 (`net10.0`) with latest C# preview features
|
||||
- **Frontend:** Angular v17 (in `src/UI/StellaOps.UI`)
|
||||
- **Database:** MongoDB (driver version ≥ 3.0)
|
||||
- **Testing:** xUnit with Mongo2Go, Moq, Microsoft.AspNetCore.Mvc.Testing
|
||||
- **Database:** PostgreSQL (≥16) with per-module schema isolation; see `docs/db/` for specification
|
||||
- **Testing:** xUnit with Testcontainers (PostgreSQL), Moq, Microsoft.AspNetCore.Mvc.Testing
|
||||
- **Observability:** Structured logging, OpenTelemetry traces
|
||||
- **NuGet:** Use the single curated feed and cache at `local-nugets/`
|
||||
- **NuGet:** Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org)
|
||||
|
||||
### Module Structure
|
||||
|
||||
@@ -89,7 +89,7 @@ The codebase follows a monorepo pattern with modules under `src/`:
|
||||
- **Libraries:** `src/<Module>/__Libraries/StellaOps.<Module>.*`
|
||||
- **Tests:** `src/<Module>/__Tests/StellaOps.<Module>.*.Tests/`
|
||||
- **Plugins:** Follow naming `StellaOps.<Module>.Connector.*` or `StellaOps.<Module>.Plugin.*`
|
||||
- **Shared test infrastructure:** `StellaOps.Concelier.Testing` provides MongoDB fixtures
|
||||
- **Shared test infrastructure:** `StellaOps.Concelier.Testing` and `StellaOps.Infrastructure.Postgres.Testing` provide PostgreSQL fixtures
|
||||
|
||||
### Naming Conventions
|
||||
|
||||
@@ -127,7 +127,7 @@ The codebase follows a monorepo pattern with modules under `src/`:
|
||||
|
||||
- Module tests: `StellaOps.<Module>.<Component>.Tests`
|
||||
- Shared fixtures/harnesses: `StellaOps.<Module>.Testing`
|
||||
- Tests use xUnit, Mongo2Go for MongoDB integration tests
|
||||
- Tests use xUnit, Testcontainers for PostgreSQL integration tests
|
||||
|
||||
### Documentation Updates
|
||||
|
||||
@@ -200,6 +200,8 @@ Before coding, confirm required docs are read:
|
||||
|
||||
- **Architecture overview:** `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- **Module dossiers:** `docs/modules/<module>/architecture.md`
|
||||
- **Database specification:** `docs/db/SPECIFICATION.md`
|
||||
- **PostgreSQL operations:** `docs/operations/postgresql-guide.md`
|
||||
- **API/CLI reference:** `docs/09_API_CLI_REFERENCE.md`
|
||||
- **Offline operation:** `docs/24_OFFLINE_KIT.md`
|
||||
- **Quickstart:** `docs/10_CONCELIER_CLI_QUICKSTART.md`
|
||||
@@ -216,5 +218,5 @@ Workflows are in `.gitea/workflows/`. Key workflows:
|
||||
## Environment Variables
|
||||
|
||||
- `STELLAOPS_BACKEND_URL` - Backend API URL for CLI
|
||||
- `STELLAOPS_TEST_MONGO_URI` - MongoDB connection string for integration tests
|
||||
- `STELLAOPS_TEST_POSTGRES_CONNECTION` - PostgreSQL connection string for integration tests
|
||||
- `StellaOpsEnableCryptoPro` - Enable GOST crypto support (set to `true` in build)
|
||||
|
||||
@@ -1,23 +1,91 @@
|
||||
<Project>
|
||||
|
||||
<PropertyGroup>
|
||||
|
||||
<StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
|
||||
<StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
|
||||
<StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
|
||||
<StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
|
||||
<_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
|
||||
<_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
|
||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
|
||||
<RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
|
||||
<RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
|
||||
<NoWarn>$(NoWarn);NU1608;NU1605;NU1202</NoWarn>
|
||||
<WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605;NU1202</WarningsNotAsErrors>
|
||||
<RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605;NU1202</RestoreNoWarn>
|
||||
<RestoreWarningsAsErrors></RestoreWarningsAsErrors>
|
||||
<RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
|
||||
<RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
|
||||
<RestoreFallbackFolders>clear</RestoreFallbackFolders>
|
||||
<RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes>
|
||||
<RestoreAdditionalProjectFallbackFolders>clear</RestoreAdditionalProjectFallbackFolders>
|
||||
<RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes>
|
||||
<RestoreAdditionalFallbackFolders>clear</RestoreAdditionalFallbackFolders>
|
||||
<RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes>
|
||||
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<PackageTargetFallback>$(PackageTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</PackageTargetFallback>
|
||||
<AssetTargetFallback>$(AssetTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</AssetTargetFallback>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
|
||||
<DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<!-- .NET 10 compatible package version overrides -->
|
||||
<ItemGroup>
|
||||
<!-- Cryptography packages - updated for net10.0 compatibility -->
|
||||
<PackageReference Update="BouncyCastle.Cryptography" Version="2.6.2" />
|
||||
<PackageReference Update="Pkcs11Interop" Version="5.1.2" />
|
||||
|
||||
<!-- Resilience - Polly 8.x for .NET 6+ -->
|
||||
<PackageReference Update="Polly" Version="8.5.2" />
|
||||
<PackageReference Update="Polly.Core" Version="8.5.2" />
|
||||
|
||||
<!-- YAML - updated for net10.0 -->
|
||||
<PackageReference Update="YamlDotNet" Version="16.3.0" />
|
||||
|
||||
<!-- JSON Schema packages -->
|
||||
<PackageReference Update="JsonSchema.Net" Version="7.3.2" />
|
||||
<PackageReference Update="Json.More.Net" Version="2.1.0" />
|
||||
<PackageReference Update="JsonPointer.Net" Version="5.1.0" />
|
||||
|
||||
<!-- HTML parsing -->
|
||||
<PackageReference Update="AngleSharp" Version="1.2.0" />
|
||||
|
||||
<!-- Scheduling -->
|
||||
<PackageReference Update="Cronos" Version="0.9.0" />
|
||||
|
||||
<!-- Testing - xUnit 2.9.3 for .NET 10 -->
|
||||
<PackageReference Update="xunit" Version="2.9.3" />
|
||||
<PackageReference Update="xunit.assert" Version="2.9.3" />
|
||||
<PackageReference Update="xunit.extensibility.core" Version="2.9.3" />
|
||||
<PackageReference Update="xunit.extensibility.execution" Version="2.9.3" />
|
||||
<PackageReference Update="xunit.runner.visualstudio" Version="3.0.1" />
|
||||
<PackageReference Update="xunit.abstractions" Version="2.0.3" />
|
||||
|
||||
<!-- JSON -->
|
||||
<PackageReference Update="Newtonsoft.Json" Version="13.0.4" />
|
||||
|
||||
<!-- Annotations -->
|
||||
<PackageReference Update="JetBrains.Annotations" Version="2024.3.0" />
|
||||
|
||||
<!-- Async interfaces -->
|
||||
<PackageReference Update="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
|
||||
|
||||
<!-- HTTP Resilience integration (replaces Http.Polly) -->
|
||||
<PackageReference Update="Microsoft.Extensions.Http.Resilience" Version="10.0.0" />
|
||||
|
||||
<!-- Testing packages - aligned to 10.0.0 -->
|
||||
<PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -1 +1,4 @@
/nowarn:CA2022
/p:DisableWorkloadResolver=true
/p:RestoreAdditionalProjectFallbackFolders=
/p:RestoreFallbackFolders=

@@ -1,8 +1,10 @@
# Third-Party Notices

This project bundles or links against the following third-party components in the scanner Ruby analyzer implementation:
This project bundles or links against the following third-party components:

- **tree-sitter** (MIT License, © 2018 Max Brunsfeld)
- **tree-sitter-ruby** (MIT License, © 2016 Rob Rix)
- **tree-sitter** (MIT License, (c) 2018 Max Brunsfeld)
- **tree-sitter-ruby** (MIT License, (c) 2016 Rob Rix)
- **GostCryptography (fork)** (MIT License, (c) 2014-2024 AlexMAS) — vendored under `third_party/forks/AlexMAS.GostCryptography` for GOST support in `StellaOps.Cryptography.Plugin.CryptoPro` and related sovereign crypto plug-ins.
- **CryptoPro CSP integration** (Commercial, customer-provided) — StellaOps ships only integration code; CryptoPro CSP binaries and licenses are not redistributed and must be supplied by the operator per vendor EULA.

License texts are available under third-party-licenses/.

17
NuGet.config
@@ -2,18 +2,9 @@
<configuration>
  <packageSources>
    <clear />
    <add key="local" value="local-nugets" />
    <add key="ablera-mirror" value="https://mirrors.ablera.dev/nuget/nuget-mirror/v3/index.json" />
    <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
  </packageSources>
  <config>
    <add key="globalPackagesFolder" value="local-nugets/packages" />
  </config>
  <packageSourceMapping>
    <packageSource key="local">
      <package pattern="*" />
    </packageSource>
    <packageSource key="ablera-mirror">
      <package pattern="*" />
    </packageSource>
  </packageSourceMapping>
  <fallbackPackageFolders>
    <clear />
  </fallbackPackageFolders>
</configuration>

bench/reachability-benchmark/.gitignore (vendored, new file)
@@ -0,0 +1 @@
.jdk/
@@ -20,6 +20,7 @@
## Working Agreements
- Determinism: pin toolchains; set `SOURCE_DATE_EPOCH`; sort file lists; stable JSON/YAML ordering; fixed seeds for any sampling (a minimal emitter is sketched after this list).
- Offline posture: no network at build/test time; vendored toolchains; registry pulls are forbidden—use cached/bundled images.
- Java builds: use vendored Temurin 21 via `tools/java/ensure_jdk.sh` when `JAVA_HOME`/`javac` are absent; keep `.jdk/` out of VCS and use `build_all.py --skip-lang` when a toolchain is missing.
- Licensing: all benchmark content is Apache-2.0; include LICENSE in the repo root; third-party cases must have compatible licenses and attributions.
- Evidence: each case must include oracle tests/coverage proving its reachability label; store truth and submissions under `benchmark/truth/` and `benchmark/submissions/` with JSON Schema validation.
- Security: no secrets; scrub URLs/tokens; deterministic CI artifacts only.

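A minimal sketch of what the determinism bullet implies in practice, assuming the Python tooling used elsewhere in the benchmark (the file names and helper below are illustrative, not part of the repo):

```python
import hashlib
import json
import os
from pathlib import Path

# Pin the embedded timestamp instead of calling time.time().
SOURCE_DATE_EPOCH = int(os.environ.get("SOURCE_DATE_EPOCH", "1730000000"))

def emit_deterministic_json(payload: dict, out_path: Path) -> str:
    """Write JSON with sorted keys and stable separators; return its sha256."""
    text = json.dumps(payload, sort_keys=True, indent=2, separators=(",", ": ")) + "\n"
    out_path.parent.mkdir(parents=True, exist_ok=True)
    out_path.write_text(text, encoding="utf-8")
    return hashlib.sha256(text.encode("utf-8")).hexdigest()

# Sorted file lists keep archive/manifest ordering stable across machines.
files = sorted(str(p) for p in Path("cases").rglob("*.yaml"))
digest = emit_deterministic_json(
    {"files": files, "epoch": SOURCE_DATE_EPOCH}, Path("outputs/listing.json")
)
```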
@@ -8,38 +8,42 @@ Deterministic, reproducible benchmark for reachability analysis tools.
- Enable fair scoring via the `rb-score` CLI and published schemas.

## Layout
- `cases/<lang>/<project>/` — benchmark cases with deterministic Dockerfiles, pinned deps, oracle tests.
- `schemas/` — JSON/YAML schemas for cases, entrypoints, truth, submissions.
- `benchmark/truth/` — ground-truth labels (hidden/internal split optional).
- `benchmark/submissions/` — sample submissions and format reference.
- `tools/scorer/` — `rb-score` CLI and tests.
- `tools/build/` — `build_all.py` (run all cases) and `validate_builds.py` (run twice and compare hashes).
- `baselines/` — reference runners (Semgrep, CodeQL, Stella) with normalized outputs.
- `ci/` — deterministic CI workflows and scripts.
- `website/` — static site (leaderboard/docs/downloads).

Sample cases added (JS track):
- `cases/js/unsafe-eval` (reachable sink) → `benchmark/truth/js-unsafe-eval.json`.
- `cases/js/guarded-eval` (unreachable by default) → `benchmark/truth/js-guarded-eval.json`.
- `cases/js/express-eval` (admin eval reachable) → `benchmark/truth/js-express-eval.json`.
- `cases/js/express-guarded` (admin eval gated by env) → `benchmark/truth/js-express-guarded.json`.
- `cases/js/fastify-template` (template rendering reachable) → `benchmark/truth/js-fastify-template.json`.

Sample cases added (Python track):
- `cases/py/unsafe-exec` (reachable eval) → `benchmark/truth/py-unsafe-exec.json`.
- `cases/py/guarded-exec` (unreachable when FEATURE_ENABLE != 1) → `benchmark/truth/py-guarded-exec.json`.
- `cases/py/flask-template` (template rendering reachable) → `benchmark/truth/py-flask-template.json`.
- `cases/py/fastapi-guarded` (unreachable unless ALLOW_EXEC=true) → `benchmark/truth/py-fastapi-guarded.json`.
- `cases/py/django-ssti` (template rendering reachable, autoescape off) → `benchmark/truth/py-django-ssti.json`.

Sample cases added (Java track):
- `cases/java/spring-deserialize` (reachable Java deserialization) → `benchmark/truth/java-spring-deserialize.json`.
- `cases/java/spring-guarded` (deserialization unreachable unless ALLOW_DESER=true) → `benchmark/truth/java-spring-guarded.json`.
- `cases/java/micronaut-deserialize` (reachable Micronaut-style deserialization) → `benchmark/truth/java-micronaut-deserialize.json`.
- `cases/java/micronaut-guarded` (unreachable unless ALLOW_MN_DESER=true) → `benchmark/truth/java-micronaut-guarded.json`.
- `cases/java/spring-reflection` (reflection sink reachable via Class.forName) → `benchmark/truth/java-spring-reflection.json`.

## Determinism & Offline Rules
- No network during build/test; pin images/deps; set `SOURCE_DATE_EPOCH`.
- Sort file lists; use stable JSON/YAML emitters; fix RNG seeds (determinism is verified by the run-twice check sketched below).
- All scripts must succeed on a clean machine with cached toolchain tarballs only.
- Java builds auto-use vendored Temurin 21 via `tools/java/ensure_jdk.sh` when `JAVA_HOME`/`javac` are absent.

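A minimal sketch of the run-twice determinism check that `validate_builds.py` performs, under assumed conventions from the samples below (`./build/build.sh` entry point, `outputs/` artifact directory); it is illustrative, not the actual script:

```python
import hashlib
import shutil
import subprocess
from pathlib import Path

def hash_tree(root: Path) -> dict[str, str]:
    """sha256 every file under root, keyed by sorted relative path."""
    return {
        str(p.relative_to(root)): hashlib.sha256(p.read_bytes()).hexdigest()
        for p in sorted(root.rglob("*")) if p.is_file()
    }

def build_twice(case_dir: Path) -> None:
    """Build a case twice from clean outputs and require identical hashes."""
    digests = []
    for _ in range(2):
        out = case_dir / "outputs"
        shutil.rmtree(out, ignore_errors=True)
        subprocess.run(["./build/build.sh"], cwd=case_dir, check=True)
        digests.append(hash_tree(out))
    assert digests[0] == digests[1], "non-deterministic build outputs"
```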
## Licensing
- Apache-2.0 for all benchmark assets. Third-party snippets must be license-compatible and attributed.

@@ -50,8 +54,10 @@ Sample cases added (Java track):
python tools/validate.py all schemas/examples

# score a submission (coming in task 513-008)
cd tools/scorer
./rb-score --cases ../cases --truth ../benchmark/truth --submission ../benchmark/submissions/sample.json
./tools/scorer/rb-score --cases cases --truth benchmark/truth --submission benchmark/submissions/sample.json

# deterministic case builds (skip a language when a toolchain is unavailable)
python tools/build/build_all.py --cases cases --skip-lang js
```

## Contributing

@@ -1,11 +1,16 @@
# Reachability Benchmark Changelog

## 1.0.2 · 2025-12-05
- Unblocked Java track with vendored Temurin 21 (`tools/java/ensure_jdk.sh`) and deterministic build artifacts (coverage + traces).
- Added three more Java cases (`micronaut-deserialize`, `micronaut-guarded`, `spring-reflection`) to reach 5/5 required cases.
- `tools/build/build_all.py` now supports `--skip-lang` and runs under WSL-aware bash; CI builds Java cases by default.

## 1.0.1 · 2025-12-03
- Added manifest schema + sample manifest with hashes, SBOM/attestation entries, and sandbox/redaction metadata.
- Added coverage/trace schemas and extended the validator to cover them.
- Introduced `tools/verify_manifest.py` and a deterministic offline-kit packaging script.
- Added per-language determinism env templates and a dataset safety checklist.
- Populated SBOM + attestation outputs for JS/PY/C tracks; Java remains blocked on JDK availability.
- Populated SBOM + attestation outputs for JS/PY/C tracks.

## 1.0.0 · 2025-12-01
- Initial public dataset, scorer, baselines, and website.

@@ -8,7 +8,7 @@ Version: 1.0.1 · Date: 2025-12-03
- [x] Published schemas/validators: truth/submission/coverage/trace + manifest schemas; validated via `tools/validate.py` and `tools/verify_manifest.py`.
- [x] Evidence bundles: coverage + traces + attestation + SBOM recorded per case (sample manifest).
- [x] Binary case recipe: `cases/**/build/build.sh` pins `SOURCE_DATE_EPOCH`; env templates live under `benchmark/templates/determinism/`.
- [x] Determinism CI: `ci/run-ci.sh` + `tools/verify_manifest.py` run twice to compare hashes; Java track still blocked on JDK availability.
- [x] Determinism CI: `ci/run-ci.sh` + `tools/verify_manifest.py` run twice to compare hashes; Java track uses vendored Temurin 21 via `tools/java/ensure_jdk.sh`.
- [x] Signed baselines: baseline submissions may include a DSSE path in the manifest (not required for the sample kit); rulepack hashes recorded separately.
- [x] Submission policy: CLA/DSSE optional in the sample; production kits require a DSSE envelope recorded in `signatures`.
- [x] Semantic versioning & changelog: see `benchmark/CHANGELOG.md`; manifest `version` mirrors the dataset release.

@@ -1,92 +1,203 @@
{
  "schemaVersion": "1.0.0",
  "kitId": "reachability-benchmark:public-v1",
  "version": "1.0.1",
  "artifacts": {
    "baselineSubmissions": [],
    "scorer": {
      "path": "tools/scorer/rb_score.py",
      "sha256": "32d4f69f5d1d4b87902d6c4f020efde703487d526bf7d42b4438cb2499813f7f"
    },
    "submissionSchema": {
      "path": "schemas/submission.schema.json",
      "sha256": "de5bebb2dbcd085d7896f47a16b9d3837a65fb7f816dcf7e587967d5848c50a7"
    }
  },
  "cases": [
    {
      "hashes": {
        "attestation": {
          "path": "cases/js/unsafe-eval/outputs/attestation.json",
          "sha256": "be3b0971d805f68730a1c4c0f7a4c3c40dfc7a73099a5524c68759fcc1729d7c"
        },
        "binary": {
          "path": "cases/js/unsafe-eval/outputs/binary.tar.gz",
          "sha256": "72da19f28c2c36b6666afcc304514b387de20a5de881d5341067481e8418e23e"
        },
        "case": {
          "path": "cases/js/unsafe-eval/case.yaml",
          "sha256": "a858ff509fda65d69df476e870d9646c6a84744010c812f3d23a88576f20cb6b"
        },
        "coverage": {
          "path": "cases/js/unsafe-eval/outputs/coverage.json",
          "sha256": "c2cf5af508d33f6ecdc7c0f10200a02a4c0ddeb8e1fc08b55d9bd4a2d6cb926b"
        },
        "entrypoints": {
          "path": "cases/js/unsafe-eval/entrypoints.yaml",
          "sha256": "77829e728d34c9dc5f56c04784c97f619830ad43bd8410acb3d7134f372a49b3"
        },
        "sbom": {
          "path": "cases/js/unsafe-eval/outputs/sbom.cdx.json",
          "sha256": "c00ee1e12b1b6a6237e42174b2fe1393bcf575f6605205a2b84366e867b36d5f"
        },
        "source": {
          "path": "cases/js/unsafe-eval",
          "sha256": "69b0d1cbae1e2c9ddc0f4dba8c6db507e1d3a1c5ea0a0a545c6f3e785529c91c"
        },
        "traces": {
          "path": "cases/js/unsafe-eval/outputs/traces/traces.json",
          "sha256": "6e63c78e091cc9d06acdc5966dd9e54593ca6b0b97f502928de278b3f80adbd8"
        },
        "truth": {
          "path": "benchmark/truth/js-unsafe-eval.json",
          "sha256": "ab42f28ed229eb657ffcb36c3a99287436e1822a4c7d395a94de784457a08f62"
        }
      },
      "id": "js-unsafe-eval:001",
      "language": "js",
      "redaction": {
        "pii": false,
        "policy": "benchmark-default/v1"
      },
      "sandbox": {
        "network": "loopback",
        "privileges": "rootless"
      },
      "size": "small",
      "truth": {
        "confidence": "high",
        "label": "reachable",
        "rationale": "Unit test hits eval sink via POST /api/exec"
      }
    },
    {
      "hashes": {
        "attestation": {
          "path": "cases/py/fastapi-guarded/outputs/attestation.json",
          "sha256": "257aa5408a5c6ffe0e193a75a2a54597f8c6f61babfe8aaf26bd47340c3086c3"
        },
        "binary": {
          "path": "cases/py/fastapi-guarded/outputs/binary.tar.gz",
          "sha256": "ca964fef352dc535b63d35b8f8846cc051e10e54cfd8aceef7566f3c94178b76"
        },
        "case": {
          "path": "cases/py/fastapi-guarded/case.yaml",
          "sha256": "0add8a5f487ebd21ee20ab88b7c6436fe8471f0a54ab8da0e08c8416aa181346"
        },
        "coverage": {
          "path": "cases/py/fastapi-guarded/outputs/coverage.json",
          "sha256": "07b1f6dccaa02bd4e1c3e2771064fa3c6e06d02843a724151721ea694762c750"
        },
        "entrypoints": {
          "path": "cases/py/fastapi-guarded/entrypoints.yaml",
          "sha256": "47c9dd15bf7c5bb8641893a92791d3f7675ed6adba17b251f609335400d29d41"
        },
        "sbom": {
          "path": "cases/py/fastapi-guarded/outputs/sbom.cdx.json",
          "sha256": "13999d8f3d4c9bdb70ea54ad1de613be3f893d79bdd1a53f7c9401e6add88cf0"
        },
        "source": {
          "path": "cases/py/fastapi-guarded",
          "sha256": "0869cab10767ac7e7b33c9bbd634f811d98ce5cdeb244769f1a81949438460fb"
        },
        "traces": {
          "path": "cases/py/fastapi-guarded/outputs/traces/traces.json",
          "sha256": "4633748b8b428b45e3702f2f8f5b3f4270728078e26bce1e08900ed1d5bb3046"
        },
        "truth": {
          "path": "benchmark/truth/py-fastapi-guarded.json",
          "sha256": "f8c62abeb00006621feeb010d0e47d248918dffd6d6e20e0f47d74e1b3642760"
        }
      },
      "id": "py-fastapi-guarded:104",
      "language": "py",
      "redaction": {
        "pii": false,
        "policy": "benchmark-default/v1"
      },
      "sandbox": {
        "network": "loopback",
        "privileges": "rootless"
      },
      "size": "small",
      "truth": {
        "confidence": "high",
        "label": "unreachable",
        "rationale": "Feature flag ALLOW_EXEC must be true before sink executes"
      }
    },
    {
      "hashes": {
        "attestation": {
          "path": "cases/c/unsafe-system/outputs/attestation.json",
          "sha256": "c3755088182359a45492170fa8a57d826b605176333d109f4f113bc7ccf85f97"
        },
        "binary": {
          "path": "cases/c/unsafe-system/outputs/binary.tar.gz",
          "sha256": "62200167bd660bad6d131b21f941acdfebe00e949e353a53c97b6691ac8f0e49"
        },
        "case": {
          "path": "cases/c/unsafe-system/case.yaml",
          "sha256": "7799a3a629c22ad47197309f44e32aabbc4e6711ef78d606ba57a7a4974787ce"
        },
        "coverage": {
          "path": "cases/c/unsafe-system/outputs/coverage.json",
          "sha256": "03ba8cf09e7e0ed82e9fa8abb48f92355e894fd56e0c0160a504193a6f6ec48a"
        },
        "entrypoints": {
          "path": "cases/c/unsafe-system/entrypoints.yaml",
          "sha256": "06afee8350460c9d15b26ea9d4ea293e8eb3f4b86b3179e19401fa99947e4490"
        },
        "sbom": {
          "path": "cases/c/unsafe-system/outputs/sbom.cdx.json",
          "sha256": "4c72a213fc4c646f44b4d0be3c23711b120b2a386374ebaa4897e5058980e0f5"
        },
        "source": {
          "path": "cases/c/unsafe-system",
          "sha256": "bc39ab3a3e5cb3944a205912ecad8c1ac4b7d15c64b453c9d34a9a5df7fbbbf4"
        },
        "traces": {
          "path": "cases/c/unsafe-system/outputs/traces/traces.json",
          "sha256": "f6469e46a57b8a6e8e17c9b8e78168edd6657ea8a5e1e96fe6ab4a0fc88a734e"
        },
        "truth": {
          "path": "benchmark/truth/c-unsafe-system.json",
          "sha256": "9a8200c2cf549b3ac8b19b170e9d34df063351879f19f401d8492e280ad08c13"
        }
      },
      "id": "c-unsafe-system:001",
      "language": "c",
      "redaction": {
        "pii": false,
        "policy": "benchmark-default/v1"
      },
      "sandbox": {
        "network": "loopback",
        "privileges": "rootless"
      },
      "size": "small",
      "truth": {
        "confidence": "high",
        "label": "reachable",
        "rationale": "Command injection sink reachable via argv -> system()"
      }
    }
  ],
  "createdAt": "2025-12-03T00:00:00Z",
  "sourceDateEpoch": 1730000000,
  "kitId": "reachability-benchmark:public-v1",
  "resourceLimits": {
    "cpu": "4",
    "memory": "8Gi"
  },
  "cases": [
    {
      "id": "js-unsafe-eval:001",
      "language": "js",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/js/unsafe-eval", "sha256": "69b0d1cbae1e2c9ddc0f4dba8c6db507e1d3a1c5ea0a0a545c6f3e785529c91c" },
        "case": { "path": "cases/js/unsafe-eval/case.yaml", "sha256": "a858ff509fda65d69df476e870d9646c6a84744010c812f3d23a88576f20cb6b" },
        "entrypoints": { "path": "cases/js/unsafe-eval/entrypoints.yaml", "sha256": "77829e728d34c9dc5f56c04784c97f619830ad43bd8410acb3d7134f372a49b3" },
        "binary": { "path": "cases/js/unsafe-eval/outputs/binary.tar.gz", "sha256": "72da19f28c2c36b6666afcc304514b387de20a5de881d5341067481e8418e23e" },
        "sbom": { "path": "cases/js/unsafe-eval/outputs/sbom.cdx.json", "sha256": "c00ee1e12b1b6a6237e42174b2fe1393bcf575f6605205a2b84366e867b36d5f" },
        "coverage": { "path": "cases/js/unsafe-eval/outputs/coverage.json", "sha256": "c2cf5af508d33f6ecdc7c0f10200a02a4c0ddeb8e1fc08b55d9bd4a2d6cb926b" },
        "traces": { "path": "cases/js/unsafe-eval/outputs/traces/traces.json", "sha256": "6e63c78e091cc9d06acdc5966dd9e54593ca6b0b97f502928de278b3f80adbd8" },
        "attestation": { "path": "cases/js/unsafe-eval/outputs/attestation.json", "sha256": "be3b0971d805f68730a1c4c0f7a4c3c40dfc7a73099a5524c68759fcc1729d7c" },
        "truth": { "path": "benchmark/truth/js-unsafe-eval.json", "sha256": "ab42f28ed229eb657ffcb36c3a99287436e1822a4c7d395a94de784457a08f62" }
      },
      "truth": {
        "label": "reachable",
        "confidence": "high",
        "rationale": "Unit test hits eval sink via POST /api/exec"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    },
    {
      "id": "py-fastapi-guarded:104",
      "language": "py",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/py/fastapi-guarded", "sha256": "0869cab10767ac7e7b33c9bbd634f811d98ce5cdeb244769f1a81949438460fb" },
        "case": { "path": "cases/py/fastapi-guarded/case.yaml", "sha256": "0add8a5f487ebd21ee20ab88b7c6436fe8471f0a54ab8da0e08c8416aa181346" },
        "entrypoints": { "path": "cases/py/fastapi-guarded/entrypoints.yaml", "sha256": "47c9dd15bf7c5bb8641893a92791d3f7675ed6adba17b251f609335400d29d41" },
        "binary": { "path": "cases/py/fastapi-guarded/outputs/binary.tar.gz", "sha256": "ca964fef352dc535b63d35b8f8846cc051e10e54cfd8aceef7566f3c94178b76" },
        "sbom": { "path": "cases/py/fastapi-guarded/outputs/sbom.cdx.json", "sha256": "13999d8f3d4c9bdb70ea54ad1de613be3f893d79bdd1a53f7c9401e6add88cf0" },
        "coverage": { "path": "cases/py/fastapi-guarded/outputs/coverage.json", "sha256": "07b1f6dccaa02bd4e1c3e2771064fa3c6e06d02843a724151721ea694762c750" },
        "traces": { "path": "cases/py/fastapi-guarded/outputs/traces/traces.json", "sha256": "4633748b8b428b45e3702f2f8f5b3f4270728078e26bce1e08900ed1d5bb3046" },
        "attestation": { "path": "cases/py/fastapi-guarded/outputs/attestation.json", "sha256": "257aa5408a5c6ffe0e193a75a2a54597f8c6f61babfe8aaf26bd47340c3086c3" },
        "truth": { "path": "benchmark/truth/py-fastapi-guarded.json", "sha256": "f8c62abeb00006621feeb010d0e47d248918dffd6d6e20e0f47d74e1b3642760" }
      },
      "truth": {
        "label": "unreachable",
        "confidence": "high",
        "rationale": "Feature flag ALLOW_EXEC must be true before sink executes"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    },
    {
      "id": "c-unsafe-system:001",
      "language": "c",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/c/unsafe-system", "sha256": "bc39ab3a3e5cb3944a205912ecad8c1ac4b7d15c64b453c9d34a9a5df7fbbbf4" },
        "case": { "path": "cases/c/unsafe-system/case.yaml", "sha256": "7799a3a629c22ad47197309f44e32aabbc4e6711ef78d606ba57a7a4974787ce" },
        "entrypoints": { "path": "cases/c/unsafe-system/entrypoints.yaml", "sha256": "06afee8350460c9d15b26ea9d4ea293e8eb3f4b86b3179e19401fa99947e4490" },
        "binary": { "path": "cases/c/unsafe-system/outputs/binary.tar.gz", "sha256": "62200167bd660bad6d131b21f941acdfebe00e949e353a53c97b6691ac8f0e49" },
        "sbom": { "path": "cases/c/unsafe-system/outputs/sbom.cdx.json", "sha256": "4c72a213fc4c646f44b4d0be3c23711b120b2a386374ebaa4897e5058980e0f5" },
        "coverage": { "path": "cases/c/unsafe-system/outputs/coverage.json", "sha256": "03ba8cf09e7e0ed82e9fa8abb48f92355e894fd56e0c0160a504193a6f6ec48a" },
        "traces": { "path": "cases/c/unsafe-system/outputs/traces/traces.json", "sha256": "f6469e46a57b8a6e8e17c9b8e78168edd6657ea8a5e1e96fe6ab4a0fc88a734e" },
        "attestation": { "path": "cases/c/unsafe-system/outputs/attestation.json", "sha256": "c3755088182359a45492170fa8a57d826b605176333d109f4f113bc7ccf85f97" },
        "truth": { "path": "benchmark/truth/c-unsafe-system.json", "sha256": "9a8200c2cf549b3ac8b19b170e9d34df063351879f19f401d8492e280ad08c13" }
      },
      "truth": {
        "label": "reachable",
        "confidence": "high",
        "rationale": "Command injection sink reachable via argv -> system()"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    }
  ],
  "artifacts": {
    "submissionSchema": { "path": "schemas/submission.schema.json", "sha256": "de5bebb2dbcd085d7896f47a16b9d3837a65fb7f816dcf7e587967d5848c50a7" },
    "scorer": { "path": "tools/scorer/rb_score.py", "sha256": "32d4f69f5d1d4b87902d6c4f020efde703487d526bf7d42b4438cb2499813f7f" },
    "baselineSubmissions": []
  },
  "schemaVersion": "1.0.0",
  "signatures": [],
  "sourceDateEpoch": 1730000000,
  "tools": {
    "builder": { "path": "tools/build/build_all.py", "sha256": "64a73f3df9b6f2cdaf5cbb33852b8e9bf443f67cf9dff1573fb635a0252bda9a" },
    "validator": { "path": "tools/validate.py", "sha256": "776009ef0f3691e60cc87df3f0468181ee7a827be1bd0f73c77fdb68d3ed31c0" }
    "builder": {
      "path": "tools/build/build_all.py",
      "sha256": "64a73f3df9b6f2cdaf5cbb33852b8e9bf443f67cf9dff1573fb635a0252bda9a"
    },
    "validator": {
      "path": "tools/validate.py",
      "sha256": "776009ef0f3691e60cc87df3f0468181ee7a827be1bd0f73c77fdb68d3ed31c0"
    }
  },
  "signatures": []
}
  "version": "1.0.2"
}
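Since each manifest entry pairs a `path` with a `sha256`, the kit can be checked fully offline. A minimal sketch of that check, independent of the real `tools/verify_manifest.py` (whose exact interface is not shown in this diff):

```python
import hashlib
import json
from pathlib import Path

def verify_manifest(manifest_path: Path, root: Path) -> list[str]:
    """Return the manifest paths whose on-disk sha256 does not match."""
    manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
    mismatches = []

    def check(entry: dict) -> None:
        target = root / entry["path"]
        if target.is_dir():
            return  # "source" entries point at directories; tree hashing omitted here
        digest = hashlib.sha256(target.read_bytes()).hexdigest()
        if digest != entry["sha256"]:
            mismatches.append(entry["path"])

    for case in manifest["cases"]:
        for entry in case["hashes"].values():
            check(entry)
    for entry in manifest["artifacts"].values():
        if isinstance(entry, dict):  # skip lists such as baselineSubmissions
            check(entry)
    for entry in manifest["tools"].values():
        check(entry)
    return mismatches
```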
@@ -0,0 +1,34 @@
{
  "version": "1.0.0",
  "cases": [
    {
      "case_id": "java-micronaut-deserialize:203",
      "case_version": "1.0.0",
      "notes": "Micronaut-style controller deserializes base64 payload",
      "sinks": [
        {
          "sink_id": "MicronautDeserialize::handleUpload",
          "label": "reachable",
          "confidence": "high",
          "dynamic_evidence": {
            "covered_by_tests": [
              "src/ControllerTest.java"
            ],
            "coverage_files": [
              "outputs/coverage.json"
            ]
          },
          "static_evidence": {
            "call_path": [
              "POST /mn/upload",
              "Controller.handleUpload",
              "ObjectInputStream.readObject"
            ]
          },
          "config_conditions": [],
          "notes": "No guard; ObjectInputStream invoked on user-controlled bytes"
        }
      ]
    }
  ]
}
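Each truth file keys a sink by its label and confidence plus dynamic and static evidence. A hypothetical loader showing just the fields a scorer would consume (field names taken from the sample above):

```python
import json
from pathlib import Path

VALID_LABELS = {"reachable", "unreachable"}  # assumed from the samples in this diff

def load_truth(path: Path) -> dict[str, str]:
    """Map sink_id -> label, sanity-checking the fields used by scoring."""
    doc = json.loads(path.read_text(encoding="utf-8"))
    labels = {}
    for case in doc["cases"]:
        for sink in case["sinks"]:
            assert sink["label"] in VALID_LABELS, sink["sink_id"]
            assert sink["static_evidence"]["call_path"], "evidence required"
            labels[sink["sink_id"]] = sink["label"]
    return labels
```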
@@ -0,0 +1,35 @@
{
  "version": "1.0.0",
  "cases": [
    {
      "case_id": "java-micronaut-guarded:204",
      "case_version": "1.0.0",
      "notes": "Deserialization guarded by ALLOW_MN_DESER flag (unreachable by default)",
      "sinks": [
        {
          "sink_id": "MicronautDeserializeGuarded::handleUpload",
          "label": "unreachable",
          "confidence": "high",
          "dynamic_evidence": {
            "covered_by_tests": [
              "src/ControllerTest.java"
            ],
            "coverage_files": [
              "outputs/coverage.json"
            ]
          },
          "static_evidence": {
            "call_path": [
              "POST /mn/upload",
              "Controller.handleUpload"
            ]
          },
          "config_conditions": [
            "ALLOW_MN_DESER=true"
          ],
          "notes": "Feature flag defaults to false; sink not executed without ALLOW_MN_DESER"
        }
      ]
    }
  ]
}
@@ -14,7 +14,9 @@
          "covered_by_tests": [
            "src/AppTest.java"
          ],
          "coverage_files": []
          "coverage_files": [
            "outputs/coverage.json"
          ]
        },
        "static_evidence": {
          "call_path": [
@@ -12,7 +12,7 @@
          "confidence": "high",
          "dynamic_evidence": {
            "covered_by_tests": ["src/AppTest.java"],
            "coverage_files": []
            "coverage_files": ["outputs/coverage.json"]
          },
          "static_evidence": {
            "call_path": [
@@ -0,0 +1,34 @@
{
  "version": "1.0.0",
  "cases": [
    {
      "case_id": "java-spring-reflection:205",
      "case_version": "1.0.0",
      "notes": "Reflection endpoint loads arbitrary classes supplied by caller",
      "sinks": [
        {
          "sink_id": "SpringReflection::run",
          "label": "reachable",
          "confidence": "high",
          "dynamic_evidence": {
            "covered_by_tests": [
              "src/ReflectControllerTest.java"
            ],
            "coverage_files": [
              "outputs/coverage.json"
            ]
          },
          "static_evidence": {
            "call_path": [
              "POST /api/reflect",
              "ReflectController.run",
              "Class.forName"
            ]
          },
          "config_conditions": [],
          "notes": "User-controlled class name flows into Class.forName and reflection instantiation"
        }
      ]
    }
  ]
}
@@ -0,0 +1,48 @@
id: "java-micronaut-deserialize:203"
language: java
project: micronaut-deserialize
version: "1.0.0"
description: "Micronaut-style controller performs unsafe deserialization on request payload"
entrypoints:
  - "POST /mn/upload"
sinks:
  - id: "MicronautDeserialize::handleUpload"
    path: "bench.reachability.micronaut.Controller.handleUpload"
    kind: "custom"
    location:
      file: src/Controller.java
      line: 10
    notes: "ObjectInputStream on user-controlled payload"
environment:
  os_image: "eclipse-temurin:21-jdk"
  runtime:
    java: "21"
source_date_epoch: 1730000000
resource_limits:
  cpu: "2"
  memory: "4Gi"
build:
  command: "./build/build.sh"
  source_date_epoch: 1730000000
outputs:
  artifact_path: outputs/binary.tar.gz
  sbom_path: outputs/sbom.cdx.json
  coverage_path: outputs/coverage.json
  traces_dir: outputs/traces
  attestation_path: outputs/attestation.json
test:
  command: "./build/build.sh"
  expected_coverage: []
  expected_traces: []
  env:
    JAVA_TOOL_OPTIONS: "-ea"
ground_truth:
  summary: "Deserialization reachable"
  evidence_files:
    - "../benchmark/truth/java-micronaut-deserialize.json"
sandbox:
  network: loopback
  privileges: rootless
redaction:
  pii: false
  policy: "benchmark-default/v1"
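A quick sketch of how tooling might load and sanity-check a `case.yaml` (key names taken from the sample above; the authoritative schema lives under `schemas/` and is not reproduced in this diff):

```python
import yaml  # PyYAML, assumed available in the tooling environment
from pathlib import Path

REQUIRED_KEYS = {"id", "language", "entrypoints", "sinks", "build", "outputs", "ground_truth"}

def load_case(case_dir: Path) -> dict:
    """Parse case.yaml and assert the keys the build/score pipeline relies on."""
    case = yaml.safe_load((case_dir / "case.yaml").read_text(encoding="utf-8"))
    missing = REQUIRED_KEYS - case.keys()
    assert not missing, f"case.yaml missing keys: {sorted(missing)}"
    # Every sink needs a stable id and a source location for evidence.
    for sink in case["sinks"]:
        assert {"id", "path", "location"} <= sink.keys()
    return case
```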
@@ -0,0 +1,8 @@
case_id: "java-micronaut-deserialize:203"
entries:
  http:
    - id: "POST /mn/upload"
      route: "/mn/upload"
      method: "POST"
      handler: "Controller.handleUpload"
      description: "Binary payload base64-deserialized"
@@ -0,0 +1,22 @@
{
  "_type": "https://in-toto.io/Statement/v0.1",
  "predicate": {
    "buildType": "stub",
    "builder": {
      "id": "stub"
    },
    "metadata": {
      "buildFinishedOn": "1970-01-01T00:00:00Z",
      "buildStartedOn": "1970-01-01T00:00:00Z"
    }
  },
  "predicateType": "https://slsa.dev/provenance/v0.2",
  "subject": [
    {
      "digest": {
        "sha256": "stub"
      },
      "name": "java-micronaut-deserialize:203"
    }
  ]
}
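The stub above follows the in-toto v0.1 statement layout with a SLSA v0.2 provenance predicate. A hedged sketch of filling the `subject` digest for a real artifact (field layout copied from the stub; everything else is illustrative):

```python
import hashlib
import json
from pathlib import Path

def make_statement(case_id: str, artifact: Path) -> dict:
    """Build an in-toto statement whose subject digest covers the case binary."""
    return {
        "_type": "https://in-toto.io/Statement/v0.1",
        "predicateType": "https://slsa.dev/provenance/v0.2",
        "subject": [{
            "name": case_id,
            "digest": {"sha256": hashlib.sha256(artifact.read_bytes()).hexdigest()},
        }],
        "predicate": {
            "buildType": "stub",
            "builder": {"id": "stub"},
            "metadata": {
                # Pinned epoch timestamps keep the statement deterministic.
                "buildStartedOn": "1970-01-01T00:00:00Z",
                "buildFinishedOn": "1970-01-01T00:00:00Z",
            },
        },
    }

statement = make_statement("java-micronaut-deserialize:203", Path("outputs/binary.tar.gz"))
print(json.dumps(statement, sort_keys=True, indent=2))
```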
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
true
@@ -0,0 +1,13 @@
{
  "files": {
    "src/Controller.java": {
      "lines_covered": [
        11,
        14,
        15,
        17
      ],
      "lines_total": 40
    }
  }
}
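Coverage files reduce to covered/total line sets per file; a tiny sketch of the ratio a scorer or validator might report (schema fields from the sample above):

```python
import json
from pathlib import Path

def coverage_ratio(path: Path) -> float:
    """Overall fraction of covered lines across all files in a coverage.json."""
    doc = json.loads(path.read_text(encoding="utf-8"))
    covered = sum(len(f["lines_covered"]) for f in doc["files"].values())
    total = sum(f["lines_total"] for f in doc["files"].values())
    return covered / total if total else 0.0
```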
@@ -0,0 +1,14 @@
{
  "bomFormat": "CycloneDX",
  "components": [],
  "metadata": {
    "component": {
      "name": "micronaut-deserialize",
      "type": "application",
      "version": "1.0.0"
    },
    "timestamp": "1970-01-01T00:00:00Z"
  },
  "specVersion": "1.5",
  "version": 1
}
@@ -0,0 +1,9 @@
{
  "entry": "POST /mn/upload",
  "notes": "Base64 payload flows into ObjectInputStream without guard",
  "path": [
    "Controller.handleUpload",
    "ObjectInputStream.readObject"
  ],
  "sink": "MicronautDeserialize::handleUpload"
}
@@ -0,0 +1,12 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.stellaops.bench</groupId>
  <artifactId>micronaut-deserialize</artifactId>
  <version>1.0.0</version>
  <packaging>jar</packaging>
  <properties>
    <maven.compiler.source>17</maven.compiler.source>
    <maven.compiler.target>17</maven.compiler.target>
  </properties>
</project>
@@ -0,0 +1,24 @@
package bench.reachability.micronaut;

import java.util.Map;
import java.util.Base64;
import java.io.*;

public class Controller {
    // Unsafe deserialization sink (reachable)
    public static Response handleUpload(Map<String, String> body) {
        String payload = body.get("payload");
        if (payload == null) {
            return new Response(400, "bad request");
        }
        try (ObjectInputStream ois = new ObjectInputStream(
                new ByteArrayInputStream(Base64.getDecoder().decode(payload)))) {
            Object obj = ois.readObject();
            return new Response(200, obj.toString());
        } catch (Exception ex) {
            return new Response(500, ex.getClass().getSimpleName());
        }
    }

    public record Response(int status, String body) {}
}
@@ -0,0 +1,29 @@
package bench.reachability.micronaut;

import java.io.*;
import java.util.*;
import java.util.Base64;

// Simple assertion-based oracle (JUnit-free for offline determinism)
public class ControllerTest {
    private static String serialize(Object obj) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(obj);
        }
        return Base64.getEncoder().encodeToString(bos.toByteArray());
    }

    public static void main(String[] args) throws Exception {
        Map<String, String> body = Map.of("payload", serialize("micronaut"));
        var res = Controller.handleUpload(body);
        assert res.status() == 200 : "status";
        assert res.body().equals("micronaut") : "body";

        File outDir = new File("outputs");
        outDir.mkdirs();
        try (FileWriter fw = new FileWriter(new File(outDir, "SINK_REACHED"))) {
            fw.write("true");
        }
    }
}
@@ -0,0 +1,48 @@
id: "java-micronaut-guarded:204"
language: java
project: micronaut-guarded
version: "1.0.0"
description: "Micronaut-style controller guards deserialization behind ALLOW_MN_DESER flag (unreachable by default)"
entrypoints:
  - "POST /mn/upload"
sinks:
  - id: "MicronautDeserializeGuarded::handleUpload"
    path: "bench.reachability.micronautguard.Controller.handleUpload"
    kind: "custom"
    location:
      file: src/Controller.java
      line: 11
    notes: "ObjectInputStream gated by ALLOW_MN_DESER"
environment:
  os_image: "eclipse-temurin:21-jdk"
  runtime:
    java: "21"
source_date_epoch: 1730000000
resource_limits:
  cpu: "2"
  memory: "4Gi"
build:
  command: "./build/build.sh"
  source_date_epoch: 1730000000
outputs:
  artifact_path: outputs/binary.tar.gz
  sbom_path: outputs/sbom.cdx.json
  coverage_path: outputs/coverage.json
  traces_dir: outputs/traces
  attestation_path: outputs/attestation.json
test:
  command: "./build/build.sh"
  expected_coverage: []
  expected_traces: []
  env:
    JAVA_TOOL_OPTIONS: "-ea"
ground_truth:
  summary: "Guard blocks deserialization unless ALLOW_MN_DESER=true"
  evidence_files:
    - "../benchmark/truth/java-micronaut-guarded.json"
sandbox:
  network: loopback
  privileges: rootless
redaction:
  pii: false
  policy: "benchmark-default/v1"
@@ -0,0 +1,8 @@
case_id: "java-micronaut-guarded:204"
entries:
  http:
    - id: "POST /mn/upload"
      route: "/mn/upload"
      method: "POST"
      handler: "Controller.handleUpload"
      description: "Deserialization guarded by ALLOW_MN_DESER flag"
@@ -0,0 +1,22 @@
{
  "_type": "https://in-toto.io/Statement/v0.1",
  "predicate": {
    "buildType": "stub",
    "builder": {
      "id": "stub"
    },
    "metadata": {
      "buildFinishedOn": "1970-01-01T00:00:00Z",
      "buildStartedOn": "1970-01-01T00:00:00Z"
    }
  },
  "predicateType": "https://slsa.dev/provenance/v0.2",
  "subject": [
    {
      "digest": {
        "sha256": "stub"
      },
      "name": "java-micronaut-guarded:204"
    }
  ]
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
true
@@ -0,0 +1,13 @@
{
  "files": {
    "src/Controller.java": {
      "lines_covered": [
        12,
        13,
        15,
        17
      ],
      "lines_total": 42
    }
  }
}
@@ -0,0 +1,14 @@
{
  "bomFormat": "CycloneDX",
  "components": [],
  "metadata": {
    "component": {
      "name": "micronaut-guarded",
      "type": "application",
      "version": "1.0.0"
    },
    "timestamp": "1970-01-01T00:00:00Z"
  },
  "specVersion": "1.5",
  "version": 1
}
@@ -0,0 +1,8 @@
{
  "entry": "POST /mn/upload",
  "notes": "Guard enforces ALLOW_MN_DESER feature flag; sink not reached by default",
  "path": [
    "Controller.handleUpload"
  ],
  "sink": "MicronautDeserializeGuarded::handleUpload"
}
@@ -0,0 +1,12 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>org.stellaops.bench</groupId>
  <artifactId>micronaut-guarded</artifactId>
  <version>1.0.0</version>
  <packaging>jar</packaging>
  <properties>
    <maven.compiler.source>17</maven.compiler.source>
    <maven.compiler.target>17</maven.compiler.target>
  </properties>
</project>
@@ -0,0 +1,27 @@
package bench.reachability.micronautguard;

import java.util.Map;
import java.util.Base64;
import java.io.*;

public class Controller {
    // Deserialization behind feature flag; unreachable unless ALLOW_MN_DESER=true
    public static Response handleUpload(Map<String, String> body, Map<String, String> env) {
        if (!"true".equals(env.getOrDefault("ALLOW_MN_DESER", "false"))) {
            return new Response(403, "forbidden");
        }
        String payload = body.get("payload");
        if (payload == null) {
            return new Response(400, "bad request");
        }
        try (ObjectInputStream ois = new ObjectInputStream(
                new ByteArrayInputStream(Base64.getDecoder().decode(payload)))) {
            Object obj = ois.readObject();
            return new Response(200, obj.toString());
        } catch (Exception ex) {
            return new Response(500, ex.getClass().getSimpleName());
        }
    }

    public record Response(int status, String body) {}
}
@@ -0,0 +1,29 @@
package bench.reachability.micronautguard;

import java.io.*;
import java.util.*;
import java.util.Base64;

// Assertion-based oracle: with the flag off, the guard must block the sink
public class ControllerTest {
    private static String serialize(Object obj) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(obj);
        }
        return Base64.getEncoder().encodeToString(bos.toByteArray());
    }

    public static void main(String[] args) throws Exception {
        Map<String, String> body = Map.of("payload", serialize("blocked"));
        Map<String, String> env = Map.of("ALLOW_MN_DESER", "false");
        var res = Controller.handleUpload(body, env);
        assert res.status() == 403 : "status";
        assert res.body().equals("forbidden") : "body";

        File outDir = new File("outputs");
        outDir.mkdirs();
        try (FileWriter fw = new FileWriter(new File(outDir, "SINK_BLOCKED"))) {
            fw.write("true");
        }
    }
}
@@ -0,0 +1,22 @@
{
  "_type": "https://in-toto.io/Statement/v0.1",
  "predicate": {
    "buildType": "stub",
    "builder": {
      "id": "stub"
    },
    "metadata": {
      "buildFinishedOn": "1970-01-01T00:00:00Z",
      "buildStartedOn": "1970-01-01T00:00:00Z"
    }
  },
  "predicateType": "https://slsa.dev/provenance/v0.2",
  "subject": [
    {
      "digest": {
        "sha256": "stub"
      },
      "name": "java-spring-deserialize:201"
    }
  ]
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
true
@@ -0,0 +1,14 @@
{
  "files": {
    "src/App.java": {
      "lines_covered": [
        9,
        15,
        16,
        17,
        19
      ],
      "lines_total": 26
    }
  }
}
@@ -0,0 +1,14 @@
{
  "bomFormat": "CycloneDX",
  "components": [],
  "metadata": {
    "component": {
      "name": "spring-deserialize",
      "type": "application",
      "version": "1.0.0"
    },
    "timestamp": "1970-01-01T00:00:00Z"
  },
  "specVersion": "1.5",
  "version": 1
}
@@ -0,0 +1,9 @@
{
  "entry": "POST /api/upload",
  "notes": "No guard; base64 payload deserialized",
  "path": [
    "App.handleRequest",
    "ObjectInputStream.readObject"
  ],
  "sink": "JavaDeserialize::handleRequest"
}
@@ -0,0 +1,22 @@
{
  "_type": "https://in-toto.io/Statement/v0.1",
  "predicate": {
    "buildType": "stub",
    "builder": {
      "id": "stub"
    },
    "metadata": {
      "buildFinishedOn": "1970-01-01T00:00:00Z",
      "buildStartedOn": "1970-01-01T00:00:00Z"
    }
  },
  "predicateType": "https://slsa.dev/provenance/v0.2",
  "subject": [
    {
      "digest": {
        "sha256": "stub"
      },
      "name": "java-spring-guarded:202"
    }
  ]
}
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff.