tests pipeline run attempt
@@ -1,71 +0,0 @@
name: Advisory AI Feed Release

on:
  workflow_dispatch:
    inputs:
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/AdvisoryAI/feeds/**'
      - 'docs/samples/advisory-feeds/**'

jobs:
  package-feeds:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Fallback to dev key when secret is absent
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[warn] COSIGN_PRIVATE_KEY_B64 not set; using dev key for non-production"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          # Manual override
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package advisory feeds
        run: |
          chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
          ops/deployment/advisory-ai/package-advisory-feeds.sh

      - name: Generate SBOM
        run: |
          # Install syft
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0

          # Generate SBOM for feed bundle
          syft dir:out/advisory-ai/feeds/stage \
            -o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
            --name advisory-feeds

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: advisory-feeds-${{ github.run_number }}
          path: |
            out/advisory-ai/feeds/advisory-feeds.tar.gz
            out/advisory-ai/feeds/advisory-feeds.manifest.json
            out/advisory-ai/feeds/advisory-feeds.manifest.dsse.json
            out/advisory-ai/feeds/advisory-feeds.sbom.json
            out/advisory-ai/feeds/provenance.json
          if-no-files-found: warn
          retention-days: 30
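The dev-key fallback above also describes what a local rehearsal of this workflow needs. A minimal sketch, assuming syft is on PATH and the dev key is acceptable for a non-production run:

    export COSIGN_ALLOW_DEV_KEY=1
    export COSIGN_PASSWORD=stellaops-dev
    chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
    ops/deployment/advisory-ai/package-advisory-feeds.sh
    # Same SBOM step the workflow runs:
    syft dir:out/advisory-ai/feeds/stage \
      -o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
      --name advisory-feeds
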
@@ -1,29 +0,0 @@
name: Airgap Sealed CI Smoke

on:
  push:
    branches: [ main ]
    paths:
      - 'devops/airgap/**'
      - '.gitea/workflows/airgap-sealed-ci.yml'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'devops/airgap/**'
      - '.gitea/workflows/airgap-sealed-ci.yml'

jobs:
  sealed-smoke:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh
      - name: Install dnslib
        run: pip install dnslib
      - name: Run sealed-mode smoke
        run: sudo devops/airgap/sealed-ci-smoke.sh
@@ -1,84 +0,0 @@
name: AOC Backfill Release

on:
  workflow_dispatch:
    inputs:
      dataset_hash:
        description: 'Dataset hash from dev rehearsal (leave empty for dev mode)'
        required: false
        default: ''
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'

jobs:
  package-backfill:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          dotnet-quality: 'preview'

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Restore AOC CLI
        run: dotnet restore src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[info] No production key; using dev key"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package AOC backfill release
        run: |
          chmod +x devops/aoc/package-backfill-release.sh
          devops/aoc/package-backfill-release.sh
        env:
          DATASET_HASH: ${{ github.event.inputs.dataset_hash }}

      - name: Generate SBOM with syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
          syft dir:out/aoc/cli \
            -o spdx-json=out/aoc/aoc-backfill-runner.sbom.json \
            --name aoc-backfill-runner || true

      - name: Verify checksums
        run: |
          cd out/aoc
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: aoc-backfill-release-${{ github.run_number }}
          path: |
            out/aoc/aoc-backfill-runner.tar.gz
            out/aoc/aoc-backfill-runner.manifest.json
            out/aoc/aoc-backfill-runner.sbom.json
            out/aoc/aoc-backfill-runner.provenance.json
            out/aoc/aoc-backfill-runner.dsse.json
            out/aoc/SHA256SUMS
          if-no-files-found: warn
          retention-days: 30
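The dispatch defaults make a dev-mode rehearsal easy to reproduce outside CI; a sketch, assuming the dev-key fallback and an empty dataset hash exactly as the defaults provide:

    export COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev
    DATASET_HASH="" devops/aoc/package-backfill-release.sh
    # Repeat the same integrity check the workflow runs:
    cd out/aoc && sha256sum -c SHA256SUMS
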
@@ -1,171 +0,0 @@
name: AOC Guard CI

on:
  push:
    branches: [ main ]
    paths:
      - 'src/Aoc/**'
      - 'src/Concelier/**'
      - 'src/Authority/**'
      - 'src/Excititor/**'
      - 'devops/aoc/**'
      - '.gitea/workflows/aoc-guard.yml'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/Aoc/**'
      - 'src/Concelier/**'
      - 'src/Authority/**'
      - 'src/Excititor/**'
      - 'devops/aoc/**'
      - '.gitea/workflows/aoc-guard.yml'

jobs:
  aoc-guard:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: .gitea/scripts/util/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          dotnet-quality: 'preview'

      - name: Restore analyzers
        run: dotnet restore src/Aoc/__Analyzers/StellaOps.Aoc.Analyzers/StellaOps.Aoc.Analyzers.csproj

      - name: Build analyzers
        run: dotnet build src/Aoc/__Analyzers/StellaOps.Aoc.Analyzers/StellaOps.Aoc.Analyzers.csproj -c Release

      - name: Run analyzers against ingestion projects
        run: |
          dotnet build src/Concelier/StellaOps.Concelier.Ingestion/StellaOps.Concelier.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
          dotnet build src/Authority/StellaOps.Authority.Ingestion/StellaOps.Authority.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
          dotnet build src/Excititor/StellaOps.Excititor.Ingestion/StellaOps.Excititor.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true

      - name: Run analyzer tests with coverage
        run: |
          mkdir -p $ARTIFACT_DIR
          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release \
            --settings src/Aoc/aoc.runsettings \
            --collect:"XPlat Code Coverage" \
            --logger "trx;LogFileName=aoc-analyzers-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Run AOC library tests with coverage
        run: |
          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj -c Release \
            --settings src/Aoc/aoc.runsettings \
            --collect:"XPlat Code Coverage" \
            --logger "trx;LogFileName=aoc-lib-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Run AOC CLI tests with coverage
        run: |
          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Cli.Tests/StellaOps.Aoc.Cli.Tests.csproj -c Release \
            --settings src/Aoc/aoc.runsettings \
            --collect:"XPlat Code Coverage" \
            --logger "trx;LogFileName=aoc-cli-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Generate coverage report
        run: |
          dotnet tool install --global dotnet-reportgenerator-globaltool || true
          reportgenerator \
            -reports:"$ARTIFACT_DIR/**/coverage.cobertura.xml" \
            -targetdir:"$ARTIFACT_DIR/coverage-report" \
            -reporttypes:"Html;Cobertura;TextSummary" || true
          if [ -f "$ARTIFACT_DIR/coverage-report/Summary.txt" ]; then
            cat "$ARTIFACT_DIR/coverage-report/Summary.txt"
          fi

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: aoc-guard-artifacts
          path: ${{ env.ARTIFACT_DIR }}

  aoc-verify:
    needs: aoc-guard
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    if: github.event_name != 'schedule'
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: .gitea/scripts/util/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          dotnet-quality: 'preview'

      - name: Run AOC verify
        env:
          STAGING_MONGO_URI: ${{ secrets.STAGING_MONGO_URI || vars.STAGING_MONGO_URI }}
          STAGING_POSTGRES_URI: ${{ secrets.STAGING_POSTGRES_URI || vars.STAGING_POSTGRES_URI }}
        run: |
          mkdir -p $ARTIFACT_DIR

          # Prefer PostgreSQL, fall back to MongoDB (legacy)
          if [ -n "${STAGING_POSTGRES_URI:-}" ]; then
            echo "Using PostgreSQL for AOC verification"
            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
              --since "$AOC_VERIFY_SINCE" \
              --postgres "$STAGING_POSTGRES_URI" \
              --output "$ARTIFACT_DIR/aoc-verify.json" \
              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
              --verbose || VERIFY_EXIT=$?
          elif [ -n "${STAGING_MONGO_URI:-}" ]; then
            echo "Using MongoDB for AOC verification (deprecated)"
            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
              --since "$AOC_VERIFY_SINCE" \
              --mongo "$STAGING_MONGO_URI" \
              --output "$ARTIFACT_DIR/aoc-verify.json" \
              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
              --verbose || VERIFY_EXIT=$?
          else
            echo "::warning::Neither STAGING_POSTGRES_URI nor STAGING_MONGO_URI set; running dry-run verification"
            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
              --since "$AOC_VERIFY_SINCE" \
              --postgres "placeholder" \
              --dry-run \
              --verbose
            exit 0
          fi

          if [ -n "${VERIFY_EXIT:-}" ] && [ "${VERIFY_EXIT}" -ne 0 ]; then
            echo "::error::AOC verify reported violations"; exit ${VERIFY_EXIT}
          fi

      - name: Upload verify artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: aoc-verify-artifacts
          path: ${{ env.ARTIFACT_DIR }}
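The aoc-verify job reduces to a single CLI invocation, so reported violations can be reproduced locally; a sketch, where the connection string is a placeholder for a real staging endpoint:

    dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
      --since HEAD~1 \
      --postgres "Host=localhost;Port=5432;Database=aoc;Username=dev;Password=dev" \
      --output aoc-verify.json \
      --ndjson aoc-verify.ndjson \
      --verbose
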
@@ -1,51 +0,0 @@
name: api-governance
on:
  push:
    paths:
      - "src/Api/**"
      - ".spectral.yaml"
      - "package.json"
  pull_request:
    paths:
      - "src/Api/**"
      - ".spectral.yaml"
      - "package.json"

jobs:
  spectral-lint:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "18"
      - name: Install npm deps
        run: npm install --ignore-scripts --no-progress
      - name: Compose aggregate OpenAPI
        run: npm run api:compose
      - name: Validate examples coverage
        run: npm run api:examples
      - name: Compatibility diff (previous commit)
        run: |
          set -e
          if git show HEAD~1:src/Api/StellaOps.Api.OpenApi/stella.yaml > /tmp/stella-prev.yaml 2>/dev/null; then
            node scripts/api-compat-diff.mjs /tmp/stella-prev.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text --fail-on-breaking
          else
            echo "[api:compat] previous stella.yaml not found; skipping"
          fi
      - name: Compatibility diff (baseline)
        run: |
          set -e
          if [ -f src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml ]; then
            node scripts/api-compat-diff.mjs src/Api/StellaOps.Api.OpenApi/baselines/stella-baseline.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text
          else
            echo "[api:compat] baseline file missing; skipping"
          fi
      - name: Generate changelog
        run: npm run api:changelog
      - name: Spectral lint (fail on warning+)
        run: npm run api:lint
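Both compatibility-diff steps call the same script, so any two revisions of the aggregate spec can be compared by hand; <base-sha> below stands in for whichever commit you want as the baseline:

    git show <base-sha>:src/Api/StellaOps.Api.OpenApi/stella.yaml > /tmp/stella-base.yaml
    node scripts/api-compat-diff.mjs /tmp/stella-base.yaml \
      src/Api/StellaOps.Api.OpenApi/stella.yaml --output text --fail-on-breaking
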
@@ -1,128 +0,0 @@
name: Artifact Signing

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      artifact_path:
        description: 'Path to artifact to sign'
        required: false
        default: ''

env:
  COSIGN_VERSION: 'v2.2.0'

jobs:
  sign-containers:
    name: Sign Container Images
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
      packages: write
    env:
      # Surface the signing secret so the step-level `if` checks below can
      # distinguish keyless from key-based signing.
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Log in to registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Sign images (keyless)
        if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --yes "${img}:${{ github.ref_name }}"
            fi
          done

      - name: Sign images (with key)
        if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
        run: |
          echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
            fi
          done
          rm -f /tmp/cosign.key

  sign-sbom:
    name: Sign SBOM Artifacts
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Generate and sign SBOM
        run: |
          # Generate SBOM using syft
          if command -v syft &> /dev/null; then
            syft . -o cyclonedx-json > sbom.cdx.json
            cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
          else
            echo "syft not installed, skipping SBOM generation"
          fi

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
          name: signed-sbom
          path: |
            sbom.cdx.json
            sbom.cdx.json.sig
          if-no-files-found: ignore

  verify-signatures:
    name: Verify Existing Signatures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Verify DSSE envelopes
        run: |
          find . \( -name "*.dsse" -o -name "*.dsse.json" \) | while read f; do
            echo "Checking $f..."
            # Basic JSON validation
            if ! jq empty "$f" 2>/dev/null; then
              echo "Warning: Invalid JSON in $f"
            fi
          done
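Signatures produced by either path can be checked by consumers. A sketch, where cosign.pub is assumed to be the public half of COSIGN_PRIVATE_KEY_B64 and the image/tag are examples; for keyless signatures the regexps should be tightened to the workflow's real OIDC identity:

    # Key-based path:
    cosign verify --key cosign.pub ghcr.io/<org>/<repo>/scanner:v1.2.3
    # Keyless path:
    cosign verify \
      --certificate-identity-regexp '.*' \
      --certificate-oidc-issuer-regexp '.*' \
      ghcr.io/<org>/<repo>/scanner:v1.2.3
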
@@ -1,29 +0,0 @@
name: attestation-bundle
on:
  workflow_dispatch:
    inputs:
      attest_dir:
        description: "Directory containing attestation artefacts"
        required: true
        default: "out/attest"

jobs:
  bundle:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Build bundle
        run: |
          chmod +x scripts/attest/build-attestation-bundle.sh
          scripts/attest/build-attestation-bundle.sh "${{ github.event.inputs.attest_dir }}"

      - name: Upload bundle
        uses: actions/upload-artifact@v4
        with:
          name: attestation-bundle
          path: out/attest-bundles/**
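The same bundle can be produced locally with the dispatch default for attest_dir:

    chmod +x scripts/attest/build-attestation-bundle.sh
    scripts/attest/build-attestation-bundle.sh out/attest
    ls out/attest-bundles/
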
@@ -1,272 +0,0 @@
# Attestation Linkage Workflow
# Sprint: Testing Enhancement Advisory - Phase 1.3
# Generates test run attestations linking outputs to inputs (SBOMs, VEX)

name: attestation-linkage

on:
  push:
    branches: [main]
    paths:
      - 'src/__Tests/**'
      - 'src/__Libraries/StellaOps.Testing.Manifests/**'
  pull_request:
    paths:
      - 'src/__Tests/**'
      - 'src/__Libraries/StellaOps.Testing.Manifests/**'
  workflow_dispatch:
    inputs:
      sign_attestations:
        description: 'Sign attestations with production key'
        type: boolean
        default: false
      verify_existing:
        description: 'Verify existing attestations in evidence locker'
        type: boolean
        default: false

concurrency:
  group: attestation-linkage-${{ github.ref }}
  cancel-in-progress: true

env:
  DETERMINISM_OUTPUT_DIR: ${{ github.workspace }}/attestation-output

jobs:
  # ==========================================================================
  # Build Attestation Infrastructure
  # ==========================================================================
  build-attestation:
    name: Build Attestation Infrastructure
    runs-on: ubuntu-latest
    timeout-minutes: 10

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj

      - name: Build attestation library
        run: |
          dotnet build src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj \
            --configuration Release \
            --no-restore

      - name: Verify attestation types compile
        run: |
          # Verify the attestation generator compiles correctly
          dotnet build src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj \
            --configuration Release \
            -warnaserror

  # ==========================================================================
  # Generate Test Run Attestations
  # ==========================================================================
  generate-attestations:
    name: Generate Test Run Attestations
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: build-attestation

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Create output directory
        run: mkdir -p $DETERMINISM_OUTPUT_DIR/attestations

      - name: Restore and build test projects
        run: |
          dotnet restore src/StellaOps.sln
          dotnet build src/StellaOps.sln --configuration Release --no-restore

      - name: Run determinism tests with attestation
        run: |
          # Run determinism tests and capture results for attestation
          dotnet test src/__Tests/__Libraries/StellaOps.HybridLogicalClock.Tests \
            --configuration Release \
            --no-build \
            --filter "Category=Unit" \
            --logger "trx;LogFileName=hlc-unit.trx" \
            --results-directory $DETERMINISM_OUTPUT_DIR/results \
            || true

      - name: Collect test evidence
        run: |
          # Collect test run evidence for attestation generation.
          # The unquoted EOF lets $(date ...) expand; the ${{ }} values are
          # substituted by the runner before the shell sees the script.
          cat > $DETERMINISM_OUTPUT_DIR/test-evidence.json << EOF
          {
            "testFramework": "xunit",
            "executedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "gitCommitSha": "${{ github.sha }}",
            "gitBranch": "${{ github.ref_name }}",
            "ciBuildId": "${{ github.run_id }}",
            "ciWorkflow": "${{ github.workflow }}"
          }
          EOF

      - name: Generate attestation manifest
        run: |
          # Generate a manifest of test outputs for attestation
          echo "Generating attestation manifest..."

          # Compute digests of test result files
          if [ -d "$DETERMINISM_OUTPUT_DIR/results" ]; then
            find $DETERMINISM_OUTPUT_DIR/results -name "*.trx" -exec sha256sum {} \; \
              > $DETERMINISM_OUTPUT_DIR/attestations/output-digests.txt
          fi

          # Create attestation metadata
          cat > $DETERMINISM_OUTPUT_DIR/attestations/attestation-metadata.json << EOF
          {
            "schemaVersion": "1.0.0",
            "generatedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "runId": "${{ github.run_id }}-${{ github.run_attempt }}",
            "predicateType": "https://stellaops.io/attestation/test-run/v1",
            "signed": ${{ github.event.inputs.sign_attestations == 'true' && 'true' || 'false' }}
          }
          EOF

      - name: Upload attestation artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: attestation-artifacts
          path: |
            ${{ env.DETERMINISM_OUTPUT_DIR }}/attestations/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/results/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/test-evidence.json

  # ==========================================================================
  # Verify Attestation Linkage
  # ==========================================================================
  verify-attestation-linkage:
    name: Verify Attestation Linkage
    runs-on: ubuntu-latest
    timeout-minutes: 10
    needs: generate-attestations

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download attestation artifacts
        uses: actions/download-artifact@v4
        with:
          name: attestation-artifacts
          path: ${{ env.DETERMINISM_OUTPUT_DIR }}

      - name: Verify attestation structure
        run: |
          echo "Verifying attestation structure..."

          # Check that metadata file exists and is valid JSON
          if [ -f "$DETERMINISM_OUTPUT_DIR/attestations/attestation-metadata.json" ]; then
            cat $DETERMINISM_OUTPUT_DIR/attestations/attestation-metadata.json | jq .
            echo "Attestation metadata is valid JSON"
          else
            echo "::warning::No attestation metadata found"
          fi

          # Check output digests
          if [ -f "$DETERMINISM_OUTPUT_DIR/attestations/output-digests.txt" ]; then
            echo "Output digests recorded:"
            cat $DETERMINISM_OUTPUT_DIR/attestations/output-digests.txt
          fi

      - name: Verify SBOM linkage
        run: |
          echo "Verifying SBOM linkage..."
          # In a full implementation, this would:
          # 1. Load the test run manifest
          # 2. Verify all SBOM digests are referenced in the attestation
          # 3. Verify the attestation subject digests match actual outputs
          echo "SBOM linkage verification: PASS (placeholder)"

      - name: Verify VEX linkage
        run: |
          echo "Verifying VEX linkage..."
          # In a full implementation, this would:
          # 1. Load VEX documents referenced in the test run
          # 2. Verify they were considered in the test execution
          # 3. Verify the attestation predicate includes VEX digests
          echo "VEX linkage verification: PASS (placeholder)"

  # ==========================================================================
  # Attestation Unit Tests
  # ==========================================================================
  attestation-unit-tests:
    name: Attestation Unit Tests
    runs-on: ubuntu-latest
    timeout-minutes: 15

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj

      - name: Build
        run: |
          dotnet build src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj \
            --configuration Release \
            --no-restore

      - name: Run attestation tests
        run: |
          # Run tests for the attestation infrastructure
          # Note: Tests would be in a .Tests project
          echo "Attestation unit tests: Would run from StellaOps.Testing.Manifests.Tests"

          # For now, verify the types are correctly structured
          dotnet build src/__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj \
            --configuration Release \
            -warnaserror

  # ==========================================================================
  # Gate Status
  # ==========================================================================
  attestation-gate:
    name: Attestation Linkage Gate
    runs-on: ubuntu-latest
    needs: [build-attestation, generate-attestations, verify-attestation-linkage, attestation-unit-tests]
    if: always()

    steps:
      - name: Check gate status
        run: |
          if [ "${{ needs.build-attestation.result }}" == "failure" ]; then
            echo "::error::Attestation build failed"
            exit 1
          fi
          if [ "${{ needs.generate-attestations.result }}" == "failure" ]; then
            echo "::error::Attestation generation failed"
            exit 1
          fi
          if [ "${{ needs.verify-attestation-linkage.result }}" == "failure" ]; then
            echo "::error::Attestation linkage verification failed"
            exit 1
          fi
          if [ "${{ needs.attestation-unit-tests.result }}" == "failure" ]; then
            echo "::error::Attestation unit tests failed"
            exit 1
          fi
          echo "All attestation linkage checks passed!"
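The SBOM/VEX linkage steps are explicit placeholders. A minimal sketch of the subject-digest check they describe, assuming an in-toto-style attestation whose subjects carry sha256 digests (file names follow the artifacts above):

    # Fail if any attestation subject digest is absent from the recorded outputs.
    jq -r '.subject[].digest.sha256' attestation.json | while read -r digest; do
      grep -q "$digest" output-digests.txt || { echo "unlinked digest: $digest"; exit 1; }
    done
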
@@ -1,167 +0,0 @@
# .gitea/workflows/authority-key-rotation.yml
# Manual workflow to push a new Authority signing key using OPS3 tooling

name: Authority Key Rotation

on:
  workflow_dispatch:
    inputs:
      environment:
        description: 'Target environment name (used to select secrets/vars)'
        required: true
        default: 'staging'
        type: choice
        options:
          - staging
          - production
      authority_url:
        description: 'Override Authority URL (leave blank to use env-specific secret)'
        required: false
        default: ''
        type: string
      key_id:
        description: 'New signing key identifier (kid)'
        required: true
        type: string
      key_path:
        description: 'Path (as Authority sees it) to the PEM key'
        required: true
        type: string
      source:
        description: 'Signing key source loader (default: file)'
        required: false
        default: 'file'
        type: string
      algorithm:
        description: 'Signing algorithm (default: ES256)'
        required: false
        default: 'ES256'
        type: string
      provider:
        description: 'Preferred crypto provider hint'
        required: false
        default: ''
        type: string
      metadata:
        description: 'Optional key=value metadata entries (comma-separated)'
        required: false
        default: ''
        type: string

jobs:
  rotate:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    environment: ${{ inputs.environment }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Resolve Authority configuration
        id: config
        env:
          # Secrets/vars are passed in as JSON so the shell can look them up
          # by a dynamically composed name (e.g. STAGING_AUTHORITY_URL).
          SECRETS_JSON: ${{ toJSON(secrets) }}
          VARS_JSON: ${{ toJSON(vars) }}
        run: |
          set -euo pipefail

          env_name="${{ inputs.environment }}"
          echo "Environment: $env_name"

          bootstrap_key=""
          authority_url="${{ inputs.authority_url }}"

          # Helper to prefer secrets over variables and fall back to shared defaults
          resolve_var() {
            local name="$1"
            local default="$2"
            local value
            value="$(jq -r --arg k "$name" '.[$k] // empty' <<< "$SECRETS_JSON")"
            if [ -z "$value" ]; then
              value="$(jq -r --arg k "$name" '.[$k] // empty' <<< "$VARS_JSON")"
            fi
            if [ -z "$value" ]; then value="$default"; fi
            printf '%s' "$value"
          }

          key_name="${env_name^^}_AUTHORITY_BOOTSTRAP_KEY"
          bootstrap_key="$(resolve_var "$key_name" "")"
          if [ -z "$bootstrap_key" ]; then
            bootstrap_key="$(resolve_var "AUTHORITY_BOOTSTRAP_KEY" "")"
          fi

          if [ -z "$bootstrap_key" ]; then
            echo "::error::Missing bootstrap key secret (expected $key_name or AUTHORITY_BOOTSTRAP_KEY)"
            exit 1
          fi

          if [ -z "$authority_url" ]; then
            url_name="${env_name^^}_AUTHORITY_URL"
            authority_url="$(resolve_var "$url_name" "")"
            if [ -z "$authority_url" ]; then
              authority_url="$(resolve_var "AUTHORITY_URL" "")"
            fi
          fi

          if [ -z "$authority_url" ]; then
            echo "::error::Authority URL not provided and no secret/var found"
            exit 1
          fi

          key_file="${RUNNER_TEMP}/authority-bootstrap-key"
          printf '%s\n' "$bootstrap_key" > "$key_file"
          chmod 600 "$key_file"

          echo "bootstrap-key-file=$key_file" >> "$GITHUB_OUTPUT"
          echo "authority-url=$authority_url" >> "$GITHUB_OUTPUT"

      - name: Execute key rotation
        id: rotate
        shell: bash
        env:
          AUTHORITY_BOOTSTRAP_KEY_FILE: ${{ steps.config.outputs['bootstrap-key-file'] }}
          AUTHORITY_URL: ${{ steps.config.outputs['authority-url'] }}
          KEY_ID: ${{ inputs.key_id }}
          KEY_PATH: ${{ inputs.key_path }}
          KEY_SOURCE: ${{ inputs.source }}
          KEY_ALGORITHM: ${{ inputs.algorithm }}
          KEY_PROVIDER: ${{ inputs.provider }}
          KEY_METADATA: ${{ inputs.metadata }}
        run: |
          set -euo pipefail

          bootstrap_key=$(cat "$AUTHORITY_BOOTSTRAP_KEY_FILE")

          metadata_args=()
          if [ -n "$KEY_METADATA" ]; then
            IFS=',' read -ra META <<< "$KEY_METADATA"
            for entry in "${META[@]}"; do
              trimmed="$(echo "$entry" | xargs)"
              [ -z "$trimmed" ] && continue
              metadata_args+=(-m "$trimmed")
            done
          fi

          provider_args=()
          if [ -n "$KEY_PROVIDER" ]; then
            provider_args+=(--provider "$KEY_PROVIDER")
          fi

          ./ops/authority/key-rotation.sh \
            --authority-url "$AUTHORITY_URL" \
            --api-key "$bootstrap_key" \
            --key-id "$KEY_ID" \
            --key-path "$KEY_PATH" \
            --source "$KEY_SOURCE" \
            --algorithm "$KEY_ALGORITHM" \
            "${provider_args[@]}" \
            "${metadata_args[@]}"

      - name: JWKS summary
        run: |
          echo "✅ Rotation complete"
          echo "Environment: ${{ inputs.environment }}"
          echo "Authority: ${{ steps.config.outputs['authority-url'] }}"
          echo "Key ID: ${{ inputs.key_id }}"
          echo "Key Path: ${{ inputs.key_path }}"
          echo "Source: ${{ inputs.source }}"
          echo "Algorithm: ${{ inputs.algorithm }}"
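The metadata input is split on commas and forwarded as repeated -m flags; with illustrative values:

    # metadata input: "owner=platform, rotation=quarterly"
    # expands to:     key-rotation.sh ... -m "owner=platform" -m "rotation=quarterly"
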
@@ -1,30 +0,0 @@
name: bench-determinism
on:
  workflow_dispatch: {}

jobs:
  bench-determinism:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Run determinism bench
        env:
          BENCH_DETERMINISM_THRESHOLD: "0.95"
        run: |
          chmod +x scripts/bench/determinism-run.sh
          scripts/bench/determinism-run.sh

      - name: Upload determinism artifacts
        uses: actions/upload-artifact@v4
        with:
          name: bench-determinism
          path: out/bench-determinism/**
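Outside CI the bench is a one-liner; the threshold matches the value pinned in the workflow env:

    BENCH_DETERMINISM_THRESHOLD=0.95 scripts/bench/determinism-run.sh
    # results land in out/bench-determinism/
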
@@ -1,173 +0,0 @@
name: Benchmark vs Competitors

on:
  schedule:
    # Run weekly on Sunday at 00:00 UTC
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      competitors:
        description: 'Comma-separated list of competitors to benchmark against'
        required: false
        default: 'trivy,grype'
      corpus_size:
        description: 'Number of images from corpus to test'
        required: false
        default: '50'
  push:
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/**'
      - 'src/__Tests/__Benchmarks/competitors/**'

env:
  DOTNET_VERSION: '10.0.x'
  TRIVY_VERSION: '0.50.1'
  GRYPE_VERSION: '0.74.0'
  SYFT_VERSION: '0.100.0'

jobs:
  benchmark:
    name: Run Competitive Benchmark
    runs-on: ubuntu-latest
    timeout-minutes: 60

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Install Trivy
        run: |
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
          trivy --version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
          grype version

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
          syft version

      - name: Build benchmark library
        run: |
          dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/StellaOps.Scanner.Benchmark.csproj -c Release

      - name: Load corpus manifest
        id: corpus
        run: |
          echo "corpus_path=src/__Tests/__Benchmarks/competitors/corpus/corpus-manifest.json" >> $GITHUB_OUTPUT

      - name: Run Stella Ops scanner
        run: |
          echo "Running Stella Ops scanner on corpus..."
          # TODO: Implement actual scan command
          # stella scan --corpus ${{ steps.corpus.outputs.corpus_path }} --output src/__Tests/__Benchmarks/results/stellaops.json

      - name: Run Trivy on corpus
        run: |
          echo "Running Trivy on corpus images..."
          # Process each image in corpus
          mkdir -p src/__Tests/__Benchmarks/results/trivy

      - name: Run Grype on corpus
        run: |
          echo "Running Grype on corpus images..."
          mkdir -p src/__Tests/__Benchmarks/results/grype

      - name: Calculate metrics
        run: |
          echo "Calculating precision/recall/F1 metrics..."
          # dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
          #   --calculate-metrics \
          #   --ground-truth ${{ steps.corpus.outputs.corpus_path }} \
          #   --results src/__Tests/__Benchmarks/results/ \
          #   --output src/__Tests/__Benchmarks/results/metrics.json

      - name: Generate comparison report
        run: |
          echo "Generating comparison report..."
          mkdir -p src/__Tests/__Benchmarks/results
          # Unquoted EOF so $(date ...) expands at generation time.
          cat > src/__Tests/__Benchmarks/results/summary.json << EOF
          {
            "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "competitors": ["trivy", "grype", "syft"],
            "status": "pending_implementation"
          }
          EOF

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.run_id }}
          path: src/__Tests/__Benchmarks/results/
          retention-days: 90

      - name: Update claims index
        if: github.ref == 'refs/heads/main'
        run: |
          echo "Updating claims index with new evidence..."
          # dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
          #   --update-claims \
          #   --metrics src/__Tests/__Benchmarks/results/metrics.json \
          #   --output docs/claims-index.md

      - name: Comment on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const metrics = fs.existsSync('src/__Tests/__Benchmarks/results/metrics.json')
              ? JSON.parse(fs.readFileSync('src/__Tests/__Benchmarks/results/metrics.json', 'utf8'))
              : { status: 'pending' };

            const body = `## Benchmark Results

            | Tool | Precision | Recall | F1 Score |
            |------|-----------|--------|----------|
            | Stella Ops | ${metrics.stellaops?.precision || 'N/A'} | ${metrics.stellaops?.recall || 'N/A'} | ${metrics.stellaops?.f1 || 'N/A'} |
            | Trivy | ${metrics.trivy?.precision || 'N/A'} | ${metrics.trivy?.recall || 'N/A'} | ${metrics.trivy?.f1 || 'N/A'} |
            | Grype | ${metrics.grype?.precision || 'N/A'} | ${metrics.grype?.recall || 'N/A'} | ${metrics.grype?.f1 || 'N/A'} |

            [Full report](${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID})
            `;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

  verify-claims:
    name: Verify Claims
    runs-on: ubuntu-latest
    needs: benchmark
    if: github.ref == 'refs/heads/main'

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download benchmark results
        uses: actions/download-artifact@v4
        with:
          name: benchmark-results-${{ github.run_id }}
          path: src/__Tests/__Benchmarks/results/

      - name: Verify all claims
        run: |
          echo "Verifying all claims against new evidence..."
          # stella benchmark verify --all

      - name: Report claim status
        run: |
          echo "Generating claim verification report..."
          # Output claim status summary
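The precision/recall/F1 columns in the PR comment follow the standard definitions, which makes any metrics.json easy to sanity-check by hand. With hypothetical counts TP=45, FP=5, FN=10:

    precision = TP / (TP + FP) = 45 / 50 = 0.900
    recall    = TP / (TP + FN) = 45 / 55 ≈ 0.818
    F1        = 2 * precision * recall / (precision + recall) ≈ 0.857
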
File diff suppressed because it is too large
@@ -1,48 +0,0 @@
name: cli-build
on:
  workflow_dispatch:
    inputs:
      rids:
        description: "Comma-separated RIDs (e.g., linux-x64,win-x64,osx-arm64)"
        required: false
        default: "linux-x64,win-x64,osx-arm64"
      config:
        description: "Build configuration"
        required: false
        default: "Release"
      sign:
        description: "Enable cosign signing (requires COSIGN_KEY)"
        required: false
        default: "false"

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Install syft (SBOM)
        uses: anchore/sbom-action/download-syft@v0

      - name: Build CLI artifacts
        run: |
          chmod +x .gitea/scripts/build/build-cli.sh
          RIDS="${{ github.event.inputs.rids }}" \
          CONFIG="${{ github.event.inputs.config }}" \
          SBOM_TOOL=syft \
          SIGN="${{ github.event.inputs.sign }}" \
          COSIGN_KEY="${{ secrets.COSIGN_KEY }}" \
            .gitea/scripts/build/build-cli.sh

      - name: List artifacts
        run: find out/cli -maxdepth 3 -type f -print

      - name: Upload CLI artifacts
        uses: actions/upload-artifact@v4
        with:
          name: stella-cli
          path: out/cli/**
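Because build-cli.sh takes all its knobs from the environment, a single-RID local build with signing off is simply:

    RIDS="linux-x64" CONFIG="Release" SBOM_TOOL=syft SIGN="false" \
      .gitea/scripts/build/build-cli.sh
    find out/cli -maxdepth 3 -type f -print
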
@@ -1,47 +0,0 @@
name: cli-chaos-parity
on:
  workflow_dispatch:
    inputs:
      chaos:
        description: "Run chaos smoke (true/false)"
        required: false
        default: "true"
      parity:
        description: "Run parity diff (true/false)"
        required: false
        default: "true"

jobs:
  cli-checks:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Chaos smoke
        if: ${{ github.event.inputs.chaos == 'true' }}
        run: |
          chmod +x scripts/cli/chaos-smoke.sh
          scripts/cli/chaos-smoke.sh

      - name: Parity diff
        if: ${{ github.event.inputs.parity == 'true' }}
        run: |
          chmod +x scripts/cli/parity-diff.sh
          scripts/cli/parity-diff.sh

      - name: Upload evidence
        uses: actions/upload-artifact@v4
        with:
          name: cli-chaos-parity
          path: |
            out/cli-chaos/**
            out/cli-goldens/**
@@ -1,209 +0,0 @@
# -----------------------------------------------------------------------------
# cold-warm-latency.yml
# Sprint: Testing Enhancement Advisory - Phase 3.4
# Description: CI workflow for warm-path vs cold-path latency budget tests
# Schedule: Nightly
# -----------------------------------------------------------------------------

name: Cold/Warm Path Latency Tests

on:
  schedule:
    # Run nightly at 2:30 AM UTC
    - cron: '30 2 * * *'
  workflow_dispatch:
    inputs:
      test_filter:
        description: 'Test filter (e.g., FullyQualifiedName~Scanner)'
        required: false
        default: ''
      sample_count:
        description: 'Number of samples for statistical tests'
        required: false
        default: '50'
      verbosity:
        description: 'Test verbosity level'
        required: false
        default: 'normal'
        type: choice
        options:
          - minimal
          - normal
          - detailed
          - diagnostic

env:
  DOTNET_NOLOGO: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true
  DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true

jobs:
  latency-tests:
    name: Latency Budget Tests
    runs-on: ubuntu-latest
    timeout-minutes: 45

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'
          dotnet-quality: 'preview'

      - name: Restore dependencies
        run: |
          dotnet restore src/__Tests/Integration/StellaOps.Integration.Performance/StellaOps.Integration.Performance.csproj

      - name: Build performance test project
        run: |
          dotnet build src/__Tests/Integration/StellaOps.Integration.Performance/StellaOps.Integration.Performance.csproj \
            --configuration Release \
            --no-restore

      - name: Run cold-path latency tests
        id: cold-tests
        run: |
          FILTER="${{ github.event.inputs.test_filter }}"
          VERBOSITY="${{ github.event.inputs.verbosity || 'normal' }}"

          dotnet test src/__Tests/Integration/StellaOps.Integration.Performance/StellaOps.Integration.Performance.csproj \
            --configuration Release \
            --no-build \
            --verbosity $VERBOSITY \
            --logger "trx;LogFileName=cold-path-results.trx" \
            --logger "console;verbosity=$VERBOSITY" \
            --results-directory ./TestResults \
            --filter "Category=ColdPath${FILTER:+&$FILTER}" \
            -- \
            RunConfiguration.CollectSourceInformation=true
        continue-on-error: true

      - name: Run warm-path latency tests
        id: warm-tests
        run: |
          FILTER="${{ github.event.inputs.test_filter }}"
          VERBOSITY="${{ github.event.inputs.verbosity || 'normal' }}"

          dotnet test src/__Tests/Integration/StellaOps.Integration.Performance/StellaOps.Integration.Performance.csproj \
            --configuration Release \
            --no-build \
            --verbosity $VERBOSITY \
            --logger "trx;LogFileName=warm-path-results.trx" \
            --logger "console;verbosity=$VERBOSITY" \
            --results-directory ./TestResults \
            --filter "Category=WarmPath${FILTER:+&$FILTER}" \
            -- \
            RunConfiguration.CollectSourceInformation=true
        continue-on-error: true

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: latency-test-results
          path: |
            ./TestResults/*.trx
            ./TestResults/output/*.txt
          retention-days: 30

      - name: Generate latency test summary
        if: always()
        run: |
          echo "## Cold/Warm Path Latency Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Test Execution" >> $GITHUB_STEP_SUMMARY
          echo "| Test Suite | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|------------|--------|" >> $GITHUB_STEP_SUMMARY

          if [ "${{ steps.cold-tests.outcome }}" == "success" ]; then
            echo "| Cold Path Tests | :white_check_mark: Passed |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| Cold Path Tests | :x: Failed |" >> $GITHUB_STEP_SUMMARY
          fi

          if [ "${{ steps.warm-tests.outcome }}" == "success" ]; then
            echo "| Warm Path Tests | :white_check_mark: Passed |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| Warm Path Tests | :x: Failed |" >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Latency Budgets" >> $GITHUB_STEP_SUMMARY
          echo "| Service | Cold Start Budget | Warm Path Budget |" >> $GITHUB_STEP_SUMMARY
          echo "|---------|-------------------|------------------|" >> $GITHUB_STEP_SUMMARY
          echo "| Scanner | 5000ms | 500ms |" >> $GITHUB_STEP_SUMMARY
          echo "| Concelier | 2000ms | 100ms |" >> $GITHUB_STEP_SUMMARY
          echo "| Policy | 2000ms | 200ms |" >> $GITHUB_STEP_SUMMARY
          echo "| Authority | 1000ms | 50ms |" >> $GITHUB_STEP_SUMMARY
          echo "| Attestor | 2000ms | 200ms |" >> $GITHUB_STEP_SUMMARY

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Test Coverage" >> $GITHUB_STEP_SUMMARY
          echo "- Cold start latency (first request after service initialization)" >> $GITHUB_STEP_SUMMARY
          echo "- Warm path latency (subsequent requests)" >> $GITHUB_STEP_SUMMARY
          echo "- Sustained load performance (100 consecutive requests)" >> $GITHUB_STEP_SUMMARY
          echo "- Burst load handling (parallel requests)" >> $GITHUB_STEP_SUMMARY
          echo "- Latency variance (P95/P99 metrics)" >> $GITHUB_STEP_SUMMARY
          echo "- Cold-to-warm transition smoothness" >> $GITHUB_STEP_SUMMARY

      - name: Check test results
        if: always()
        run: |
          if [ "${{ steps.cold-tests.outcome }}" != "success" ] || [ "${{ steps.warm-tests.outcome }}" != "success" ]; then
            echo "::error::One or more latency test suites failed"
            exit 1
          fi
          echo "All latency tests passed successfully"

  latency-regression-check:
    name: Latency Regression Analysis
    runs-on: ubuntu-latest
    needs: latency-tests
    if: always()

    steps:
      - name: Download test results
        uses: actions/download-artifact@v4
        with:
          name: latency-test-results
          path: ./TestResults

      - name: Analyze latency trends
        run: |
          echo "## Latency Trend Analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check for latency report
          if [ -f "./TestResults/output/latency-report.txt" ]; then
            echo "### Latency Report" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            cat ./TestResults/output/latency-report.txt >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
          else
            echo "No detailed latency report available." >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Recommendations" >> $GITHUB_STEP_SUMMARY
          echo "- Monitor P95 latency trends over time" >> $GITHUB_STEP_SUMMARY
          echo "- Investigate any budget violations" >> $GITHUB_STEP_SUMMARY
          echo "- Consider adjusting budgets if consistent overages occur" >> $GITHUB_STEP_SUMMARY

      - name: Alert on regression
        if: needs.latency-tests.result == 'failure'
        run: |
          echo "::warning::Latency regression detected. Review the test results for details."
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### :warning: Latency Regression Alert" >> $GITHUB_STEP_SUMMARY
          echo "Latency tests have failed, indicating potential performance regression." >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Recommended Actions:**" >> $GITHUB_STEP_SUMMARY
          echo "1. Review recent code changes that might affect performance" >> $GITHUB_STEP_SUMMARY
          echo "2. Check for resource contention or new dependencies" >> $GITHUB_STEP_SUMMARY
          echo "3. Profile affected services to identify bottlenecks" >> $GITHUB_STEP_SUMMARY
          echo "4. Consider reverting recent changes if regression is severe" >> $GITHUB_STEP_SUMMARY
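The test-filter composition relies on bash's ${var:+word} expansion, which appends the &-joined clause only when a filter was supplied:

    FILTER=""
    echo "Category=ColdPath${FILTER:+&$FILTER}"    # -> Category=ColdPath
    FILTER="FullyQualifiedName~Scanner"
    echo "Category=ColdPath${FILTER:+&$FILTER}"    # -> Category=ColdPath&FullyQualifiedName~Scanner
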
@@ -1,297 +0,0 @@
|
||||
# Sprint: Testing Enhancement Advisory - Phase 3.1
|
||||
# Competitor parity benchmarks with expanded 50+ image corpus
|
||||
# Compares StellaOps against Trivy, Grype, and Syft
|
||||
|
||||
name: competitor-parity
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run weekly on Sundays at 03:00 UTC
|
||||
- cron: '0 3 * * 0'
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/__Tests/parity/**'
|
||||
- 'src/Scanner/__Libraries/**'
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
paths:
|
||||
- 'src/__Tests/parity/**'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run_full_corpus:
|
||||
description: 'Run against full 50+ image corpus'
|
||||
type: boolean
|
||||
default: false
|
||||
ground_truth_mode:
|
||||
description: 'Enable ground truth validation'
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
concurrency:
|
||||
group: competitor-parity-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true
|
||||
DOTNET_CLI_TELEMETRY_OPTOUT: true
|
||||
|
||||
jobs:
|
||||
# ==========================================================================
|
||||
# Install Competitor Tools
|
||||
# ==========================================================================
|
||||
setup-tools:
|
||||
name: Setup Scanner Tools
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
tools_installed: ${{ steps.check.outputs.installed }}
|
||||
|
||||
steps:
|
||||
- name: Install Syft
|
||||
run: |
|
||||
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.9.0
|
||||
syft --version
|
||||
|
||||
- name: Install Grype
|
||||
run: |
|
||||
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.79.3
|
||||
grype --version
|
||||
grype db update
|
||||
|
||||
- name: Install Trivy
|
||||
run: |
|
||||
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.54.1
|
||||
trivy --version
|
||||
trivy image --download-db-only
|
||||
|
||||
- name: Check tools
|
||||
id: check
|
||||
run: |
|
||||
syft --version && grype --version && trivy --version
|
||||
echo "installed=true" >> $GITHUB_OUTPUT
|
||||
|
||||
# ==========================================================================
|
||||
# Quick Parity Check (PR Gate)
|
||||
# ==========================================================================
|
||||
quick-parity:
|
||||
name: Quick Parity Check
|
||||
runs-on: ubuntu-latest
|
||||
needs: setup-tools
|
||||
if: github.event_name == 'pull_request'
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: "10.0.100"
|
||||
|
||||
- name: Install scanner tools
|
||||
run: |
|
||||
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.9.0
|
||||
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.79.3
|
||||
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.54.1
|
||||
grype db update
|
||||
trivy image --download-db-only
|
||||
|
||||
- name: Build parity tests
|
||||
run: dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj --configuration Release
|
||||
|
||||
- name: Run quick parity tests
|
||||
run: |
          dotnet test src/__Tests/parity/StellaOps.Parity.Tests \
            --filter "Category=CompetitorParity&FullyQualifiedName~BaseImages" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=parity-quick.trx" \
            --results-directory ./TestResults
        timeout-minutes: 20

      - name: Upload results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: quick-parity-results
          path: TestResults/**/*.trx

  # ==========================================================================
  # Full Corpus Benchmark (Scheduled)
  # ==========================================================================
  full-corpus-benchmark:
    name: Full Corpus Benchmark
    runs-on: ubuntu-latest
    needs: setup-tools
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_full_corpus == 'true')
    timeout-minutes: 180

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Install scanner tools
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.9.0
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.79.3
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.54.1
          grype db update
          trivy image --download-db-only

      - name: Build parity tests
        run: dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj --configuration Release

      - name: Pull corpus images
        run: |
          echo "Pulling base images..."
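          # Each pull is backgrounded with '&'; 'wait' blocks until all of them finish.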
          docker pull alpine:3.18 &
          docker pull alpine:3.19 &
          docker pull alpine:3.20 &
          docker pull debian:bullseye-slim &
          docker pull debian:bookworm-slim &
          docker pull ubuntu:20.04 &
          docker pull ubuntu:22.04 &
          docker pull ubuntu:24.04 &
          wait

          echo "Pulling language runtimes..."
          docker pull node:18-alpine &
          docker pull node:20-alpine &
          docker pull python:3.11-alpine &
          docker pull python:3.12-slim &
          docker pull golang:1.22-bookworm &
          docker pull rust:1.75-bookworm &
          wait

      - name: Run base image benchmarks
        run: |
          dotnet test src/__Tests/parity/StellaOps.Parity.Tests \
            --filter "Category=CompetitorParity&FullyQualifiedName~BaseImages" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=benchmark-base.trx" \
            --results-directory ./TestResults/base
        timeout-minutes: 45
        continue-on-error: true

      - name: Run language runtime benchmarks
        run: |
          dotnet test src/__Tests/parity/StellaOps.Parity.Tests \
            --filter "Category=CompetitorParity&FullyQualifiedName~LanguageRuntime" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=benchmark-runtimes.trx" \
            --results-directory ./TestResults/runtimes
        timeout-minutes: 60
        continue-on-error: true

      - name: Run vulnerable image benchmarks
        run: |
          dotnet test src/__Tests/parity/StellaOps.Parity.Tests \
            --filter "Category=CompetitorParity&FullyQualifiedName~Vulnerable" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=benchmark-vulnerable.trx" \
            --results-directory ./TestResults/vulnerable
        timeout-minutes: 30
        continue-on-error: true

      - name: Generate benchmark report
        if: always()
        run: |
          echo "# Competitor Parity Benchmark Report" > ./TestResults/report.md
          echo "" >> ./TestResults/report.md
          echo "**Date:** $(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> ./TestResults/report.md
          echo "**Corpus:** Expanded (50+ images)" >> ./TestResults/report.md
          echo "" >> ./TestResults/report.md
          echo "## Tool Versions" >> ./TestResults/report.md
          echo "- Syft: $(syft --version | head -1)" >> ./TestResults/report.md
          echo "- Grype: $(grype --version | head -1)" >> ./TestResults/report.md
          echo "- Trivy: $(trivy --version | head -1)" >> ./TestResults/report.md
          echo "" >> ./TestResults/report.md
          echo "## Test Results" >> ./TestResults/report.md
          find ./TestResults -name "*.trx" -exec basename {} \; | while read f; do
            echo "- $f" >> ./TestResults/report.md
          done

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: full-corpus-benchmark-results
          path: TestResults/**

  # ==========================================================================
  # Corpus Validation
  # ==========================================================================
  corpus-validation:
    name: Corpus Validation
    runs-on: ubuntu-latest
    if: github.event_name != 'schedule'

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Build tests
        run: dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj --configuration Release

      - name: Validate corpus coverage
        run: |
          dotnet test src/__Tests/parity/StellaOps.Parity.Tests \
            --filter "FullyQualifiedName~ExpandedCorpus" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=corpus-validation.trx" \
            --results-directory ./TestResults

      - name: Upload validation results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: corpus-validation-results
          path: TestResults/**/*.trx

  # ==========================================================================
  # Metrics Summary
  # ==========================================================================
  metrics-summary:
    name: Metrics Summary
    runs-on: ubuntu-latest
    needs: [full-corpus-benchmark]
    if: always() && (github.event_name == 'schedule' || github.event.inputs.run_full_corpus == 'true')

    steps:
      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: full-corpus-benchmark-results
          path: ./Results

      - name: Generate summary
        run: |
          echo "## Competitor Parity Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Full corpus benchmark completed." >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Categories Tested" >> $GITHUB_STEP_SUMMARY
          echo "- Base OS images (Alpine, Debian, Ubuntu, Rocky)" >> $GITHUB_STEP_SUMMARY
          echo "- Language runtimes (Node, Python, Go, Java, Rust, .NET)" >> $GITHUB_STEP_SUMMARY
          echo "- Application stacks (Postgres, Redis, nginx, etc.)" >> $GITHUB_STEP_SUMMARY
          echo "- Enterprise images (WordPress, Prometheus, Jenkins)" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Scanners Compared" >> $GITHUB_STEP_SUMMARY
          echo "- Syft v1.9.0 (SBOM generation)" >> $GITHUB_STEP_SUMMARY
          echo "- Grype v0.79.3 (Vulnerability scanning)" >> $GITHUB_STEP_SUMMARY
          echo "- Trivy v0.54.1 (Vulnerability scanning)" >> $GITHUB_STEP_SUMMARY
@@ -1,47 +0,0 @@
name: Concelier Attestation Tests

on:
  push:
    paths:
      - 'src/Concelier/**'
      - '.gitea/workflows/concelier-attestation-tests.yml'
  pull_request:
    paths:
      - 'src/Concelier/**'
      - '.gitea/workflows/concelier-attestation-tests.yml'

jobs:
  attestation-tests:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET 10 preview
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'

      - name: Restore Concelier solution
        run: dotnet restore src/Concelier/StellaOps.Concelier.sln

      - name: Build WebService Tests (no analyzers)
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj -c Release -p:DisableAnalyzers=true

      - name: Run WebService attestation test
        run: dotnet test src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj -c Release --filter InternalAttestationVerify --no-build --logger trx --results-directory TestResults

      - name: Build Core Tests (no analyzers)
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj -c Release -p:DisableAnalyzers=true

      - name: Run Core attestation builder tests
        run: dotnet test src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj -c Release --filter EvidenceBundleAttestationBuilderTests --no-build --logger trx --results-directory TestResults

      - name: Upload TRX results
        uses: actions/upload-artifact@v4
        with:
          name: concelier-attestation-tests-trx
          path: '**/TestResults/*.trx'
@@ -1,33 +0,0 @@
name: Concelier STORE-AOC-19-005 Dataset

on:
  workflow_dispatch: {}

jobs:
  build-dataset:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install dependencies
        run: sudo apt-get update && sudo apt-get install -y zstd

      - name: Build dataset tarball
        run: |
          chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
          scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Validate dataset
        run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Upload dataset artifacts
        uses: actions/upload-artifact@v4
        with:
          name: concelier-store-aoc-19-005-dataset
          # Shell-style ${ARTIFACT_DIR} is not expanded in 'with:' values; use the env context.
          path: |
            ${{ env.ARTIFACT_DIR }}/linksets-stage-backfill.tar.zst
            ${{ env.ARTIFACT_DIR }}/linksets-stage-backfill.tar.zst.sha256
@@ -1,248 +0,0 @@
# -----------------------------------------------------------------------------
# connector-fixture-drift.yml
# Sprint: SPRINT_5100_0007_0005_connector_fixtures
# Task: CONN-FIX-016
# Description: Weekly schema drift detection for connector fixtures with auto-PR
# -----------------------------------------------------------------------------

name: Connector Fixture Drift

on:
  # Weekly schedule: Sunday at 2:00 UTC
  schedule:
    - cron: '0 2 * * 0'
  # Manual trigger for on-demand drift detection
  workflow_dispatch:
    inputs:
      auto_update:
        description: 'Auto-update fixtures if drift detected'
        required: false
        default: true
        type: boolean
      create_pr:
        description: 'Create PR for updated fixtures'
        required: false
        default: true
        type: boolean

env:
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  TZ: UTC

jobs:
  detect-drift:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: write
      pull-requests: write
    outputs:
      has_drift: ${{ steps.drift.outputs.has_drift }}
      drift_count: ${{ steps.drift.outputs.drift_count }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          dotnet-quality: 'preview'

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            .nuget/packages
          key: fixture-drift-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln --configfile nuget.config

      - name: Build test projects
        run: |
          dotnet build src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj -c Release --no-restore
          dotnet build src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj -c Release --no-restore

      - name: Run Live schema drift tests
        id: drift
        env:
          STELLAOPS_LIVE_TESTS: 'true'
          STELLAOPS_UPDATE_FIXTURES: ${{ inputs.auto_update || 'true' }}
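          # On scheduled runs the inputs context is empty, so the fallback keeps auto-update on.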
        run: |
          set +e

          # Run Live tests and capture output
          dotnet test src/StellaOps.sln \
            --filter "Category=Live" \
            --no-build \
            -c Release \
            --logger "console;verbosity=detailed" \
            --results-directory out/drift-results \
            2>&1 | tee out/drift-output.log

          EXIT_CODE=$?

          # Check for fixture changes
          CHANGED_FILES=$(git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json' | wc -l)

          if [ "$CHANGED_FILES" -gt 0 ]; then
            echo "has_drift=true" >> $GITHUB_OUTPUT
            echo "drift_count=$CHANGED_FILES" >> $GITHUB_OUTPUT
            echo "::warning::Schema drift detected in $CHANGED_FILES fixture files"
          else
            echo "has_drift=false" >> $GITHUB_OUTPUT
            echo "drift_count=0" >> $GITHUB_OUTPUT
            echo "::notice::No schema drift detected"
          fi

          # Don't fail workflow on test failures (drift is expected)
          exit 0

      - name: Show changed fixtures
        if: steps.drift.outputs.has_drift == 'true'
        run: |
          echo "## Changed fixture files:"
          git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json'
          echo ""
          echo "## Diff summary:"
          git diff --stat -- '**/Fixtures/*.json' '**/Expected/*.json'

      - name: Upload drift report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: drift-report-${{ github.run_id }}
          path: |
            out/drift-output.log
            out/drift-results/**
          retention-days: 30

  create-pr:
    needs: detect-drift
    if: needs.detect-drift.outputs.has_drift == 'true' && (github.event.inputs.create_pr == 'true' || github.event_name == 'schedule')
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          dotnet-quality: 'preview'

      - name: Restore and run Live tests with updates
        env:
          STELLAOPS_LIVE_TESTS: 'true'
          STELLAOPS_UPDATE_FIXTURES: 'true'
        run: |
          dotnet restore src/StellaOps.sln --configfile nuget.config
          dotnet test src/StellaOps.sln \
            --filter "Category=Live" \
            -c Release \
            --logger "console;verbosity=minimal" \
            || true

      - name: Configure Git
        run: |
          git config user.name "StellaOps Bot"
          git config user.email "bot@stellaops.local"

      - name: Create branch and commit
        id: commit
        run: |
          BRANCH_NAME="fixture-drift/$(date +%Y-%m-%d)"
          echo "branch=$BRANCH_NAME" >> $GITHUB_OUTPUT

          # Check for changes
          if git diff --quiet -- '**/Fixtures/*.json' '**/Expected/*.json'; then
            echo "No fixture changes to commit"
            echo "has_changes=false" >> $GITHUB_OUTPUT
            exit 0
          fi

          echo "has_changes=true" >> $GITHUB_OUTPUT

          # Create branch
          git checkout -b "$BRANCH_NAME"

          # Stage fixture changes
          git add '**/Fixtures/*.json' '**/Expected/*.json'

          # Get list of changed connectors
          CHANGED_DIRS=$(git diff --cached --name-only | xargs -I{} dirname {} | sort -u | head -10)

          # Create commit message
          COMMIT_MSG="chore(fixtures): Update connector fixtures for schema drift

          Detected schema drift in live upstream sources.
          Updated fixture files to match current API responses.

          Changed directories:
          $CHANGED_DIRS

          This commit was auto-generated by the connector-fixture-drift workflow.

          🤖 Generated with [StellaOps CI](https://stellaops.local)"

          git commit -m "$COMMIT_MSG"
          git push origin "$BRANCH_NAME"

      - name: Create Pull Request
        if: steps.commit.outputs.has_changes == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const branch = '${{ steps.commit.outputs.branch }}';
            const driftCount = '${{ needs.detect-drift.outputs.drift_count }}';

            const { data: pr } = await github.rest.pulls.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: `chore(fixtures): Update ${driftCount} connector fixtures for schema drift`,
              head: branch,
              base: 'main',
              body: `## Summary

            Automated fixture update due to schema drift detected in live upstream sources.

            - **Fixtures Updated**: ${driftCount}
            - **Detection Date**: ${new Date().toISOString().split('T')[0]}
            - **Workflow Run**: [#${{ github.run_id }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

            ## Review Checklist

            - [ ] Review fixture diffs for expected schema changes
            - [ ] Verify no sensitive data in fixtures
            - [ ] Check that tests still pass with updated fixtures
            - [ ] Update Expected/ snapshots if normalization changed

            ## Test Plan

            - [ ] Run \`dotnet test --filter "Category=Snapshot"\` to verify fixture-based tests

            ---
            🤖 Generated by [connector-fixture-drift workflow](${{ github.server_url }}/${{ github.repository }}/actions/workflows/connector-fixture-drift.yml)
            `
            });

            console.log(`Created PR #${pr.number}: ${pr.html_url}`);

            // Add labels
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: pr.number,
              labels: ['automated', 'fixtures', 'schema-drift']
            });
@@ -1,64 +0,0 @@
name: console-ci

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'src/Web/**'
      - '.gitea/workflows/console-ci.yml'
      - 'devops/console/**'

jobs:
  lint-test-build:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
        working-directory: src/Web/StellaOps.Web
    env:
      PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
      CI: true
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install deps (offline-friendly)
        run: npm ci --prefer-offline --no-audit --progress=false

      - name: Lint
        run: npm run lint -- --no-progress

      - name: Console export specs (targeted)
        run: bash ./scripts/ci-console-exports.sh
        continue-on-error: true

      - name: Unit tests
        run: npm run test:ci
        env:
          CHROME_BIN: chromium

      - name: Build
        run: npm run build -- --configuration=production --progress=false

      - name: Collect artifacts
        if: always()
        run: |
          mkdir -p ../artifacts
          cp -r dist ../artifacts/dist || true
          cp -r coverage ../artifacts/coverage || true
          find . -maxdepth 3 -type f \( -name "*.xml" -o -name "*.trx" -o \( -name "*.json" -path "*test*" \) \) -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true

      - name: Upload artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: console-ci-${{ github.run_id }}
          # Upload paths are relative to the workspace, not the run working-directory.
          path: src/Web/artifacts
          retention-days: 14
@@ -1,32 +0,0 @@
name: console-runner-image

on:
  workflow_dispatch:
  push:
    paths:
      - 'devops/console/**'
      - '.gitea/workflows/console-runner-image.yml'

jobs:
  build-runner-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Build runner image tarball (baked caches)
        env:
          RUN_ID: ${{ github.run_id }}
        run: |
          set -euo pipefail
          chmod +x devops/console/build-runner-image.sh devops/console/build-runner-image-ci.sh
          devops/console/build-runner-image-ci.sh

      - name: Upload runner image artifact
        uses: actions/upload-artifact@v4
        with:
          name: console-runner-image-${{ github.run_id }}
          path: devops/artifacts/console-runner/
          retention-days: 14
@@ -1,227 +0,0 @@
# Container Security Scanning Workflow
# Sprint: CI/CD Enhancement - Security Scanning
#
# Purpose: Scan container images for vulnerabilities beyond SBOM generation
# Triggers: Dockerfile changes, scheduled daily, manual dispatch
#
# Tool: PLACEHOLDER - Choose one: Trivy, Grype, or Snyk

name: Container Security Scan

on:
  push:
    paths:
      - '**/Dockerfile'
      - '**/Dockerfile.*'
      - 'devops/docker/**'
  pull_request:
    paths:
      - '**/Dockerfile'
      - '**/Dockerfile.*'
      - 'devops/docker/**'
  schedule:
    # Run daily at 4 AM UTC
    - cron: '0 4 * * *'
  workflow_dispatch:
    inputs:
      severity_threshold:
        description: 'Minimum severity to fail'
        required: false
        type: choice
        options:
          - CRITICAL
          - HIGH
          - MEDIUM
          - LOW
        default: HIGH
      image:
        description: 'Specific image to scan (optional)'
        required: false
        type: string

env:
  SEVERITY_THRESHOLD: ${{ github.event.inputs.severity_threshold || 'HIGH' }}

jobs:
  discover-images:
    name: Discover Container Images
    runs-on: ubuntu-latest
    outputs:
      images: ${{ steps.discover.outputs.images }}
      count: ${{ steps.discover.outputs.count }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Discover Dockerfiles
        id: discover
        run: |
          # Find all Dockerfiles
          DOCKERFILES=$(find . -name "Dockerfile" -o -name "Dockerfile.*" | grep -v node_modules | grep -v bin | grep -v obj || true)

          # Build image list
          IMAGES='[]'
          COUNT=0

          while IFS= read -r dockerfile; do
            if [[ -n "$dockerfile" ]]; then
              DIR=$(dirname "$dockerfile")
              NAME=$(basename "$DIR" | tr '[:upper:]' '[:lower:]' | tr '.' '-')

              # Get image name from directory structure
              if [[ "$DIR" == *"devops/docker"* ]]; then
                NAME=$(echo "$dockerfile" | sed 's|.*devops/docker/||' | sed 's|/Dockerfile.*||' | tr '/' '-')
              fi

              IMAGES=$(echo "$IMAGES" | jq --arg name "$NAME" --arg path "$dockerfile" '. + [{"name": $name, "dockerfile": $path}]')
              COUNT=$((COUNT + 1))
            fi
          done <<< "$DOCKERFILES"
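          # Example of the emitted matrix JSON (hypothetical name/path for illustration):
          #   [{"name":"web","dockerfile":"./devops/docker/web/Dockerfile"}]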

          echo "Found $COUNT Dockerfile(s)"
          echo "images=$(echo "$IMAGES" | jq -c .)" >> $GITHUB_OUTPUT
          echo "count=$COUNT" >> $GITHUB_OUTPUT

  scan-images:
    name: Scan ${{ matrix.image.name }}
    runs-on: ubuntu-latest
    needs: [discover-images]
    if: needs.discover-images.outputs.count != '0'
    strategy:
      fail-fast: false
      matrix:
        image: ${{ fromJson(needs.discover-images.outputs.images) }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build image for scanning
        id: build
        run: |
          IMAGE_TAG="scan-${{ matrix.image.name }}:${{ github.sha }}"
          DOCKERFILE="${{ matrix.image.dockerfile }}"
          CONTEXT=$(dirname "$DOCKERFILE")

          echo "Building $IMAGE_TAG from $DOCKERFILE..."
          docker build -t "$IMAGE_TAG" -f "$DOCKERFILE" "$CONTEXT" || {
            echo "::warning::Failed to build $IMAGE_TAG - skipping scan"
            echo "skip=true" >> $GITHUB_OUTPUT
            exit 0
          }

          echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
          echo "skip=false" >> $GITHUB_OUTPUT

      # PLACEHOLDER: Choose your container scanner
      # Option 1: Trivy (recommended - comprehensive, free)
      # Option 2: Grype (Anchore - good integration with Syft SBOMs)
      # Option 3: Snyk (commercial, comprehensive)

      - name: Trivy Vulnerability Scan
        if: steps.build.outputs.skip != 'true'
        id: trivy
        # Uncomment when ready to use Trivy:
        # uses: aquasecurity/trivy-action@master
        # with:
        #   image-ref: ${{ steps.build.outputs.image_tag }}
        #   format: 'sarif'
        #   output: 'trivy-${{ matrix.image.name }}.sarif'
        #   severity: ${{ env.SEVERITY_THRESHOLD }},CRITICAL
        #   exit-code: '1'
        run: |
          echo "::notice::Container scanning placeholder - configure scanner below"
          echo ""
          echo "Image: ${{ steps.build.outputs.image_tag }}"
          echo "Severity threshold: ${{ env.SEVERITY_THRESHOLD }}"
          echo ""
          echo "Available scanners:"
          echo "  1. Trivy: aquasecurity/trivy-action@master"
          echo "  2. Grype: anchore/scan-action@v3"
          echo "  3. Snyk: snyk/actions/docker@master"

          # Create placeholder report
          mkdir -p scan-results
          echo '{"placeholder": true, "image": "${{ matrix.image.name }}"}' > scan-results/scan-${{ matrix.image.name }}.json

      # Alternative: Grype (works well with existing Syft SBOM workflow)
      # - name: Grype Vulnerability Scan
      #   if: steps.build.outputs.skip != 'true'
      #   uses: anchore/scan-action@v3
      #   with:
      #     image: ${{ steps.build.outputs.image_tag }}
      #     severity-cutoff: ${{ env.SEVERITY_THRESHOLD }}
      #     fail-build: true

      # Alternative: Snyk Container
      # - name: Snyk Container Scan
      #   if: steps.build.outputs.skip != 'true'
      #   uses: snyk/actions/docker@master
      #   env:
      #     SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
      #   with:
      #     image: ${{ steps.build.outputs.image_tag }}
      #     args: --severity-threshold=${{ env.SEVERITY_THRESHOLD }}

      - name: Upload scan results
        if: always() && steps.build.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: container-scan-${{ matrix.image.name }}
          path: |
            scan-results/
            *.sarif
            *.json
          retention-days: 30
          if-no-files-found: ignore

      - name: Cleanup
        if: always()
        run: |
          docker rmi "${{ steps.build.outputs.image_tag }}" 2>/dev/null || true

  summary:
    name: Scan Summary
    runs-on: ubuntu-latest
    needs: [discover-images, scan-images]
    if: always()

    steps:
      - name: Download all scan results
        uses: actions/download-artifact@v4
        with:
          pattern: container-scan-*
          path: all-results/
          merge-multiple: true
        continue-on-error: true

      - name: Generate summary
        run: |
          echo "## Container Security Scan Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Image | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY

          IMAGES='${{ needs.discover-images.outputs.images }}'
          SCAN_RESULT="${{ needs.scan-images.result }}"

          echo "$IMAGES" | jq -r '.[] | .name' | while read -r name; do
            if [[ "$SCAN_RESULT" == "success" ]]; then
              echo "| $name | No vulnerabilities found |" >> $GITHUB_STEP_SUMMARY
            elif [[ "$SCAN_RESULT" == "failure" ]]; then
              echo "| $name | Vulnerabilities detected |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| $name | $SCAN_RESULT |" >> $GITHUB_STEP_SUMMARY
            fi
          done

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Configuration" >> $GITHUB_STEP_SUMMARY
          echo "- **Scanner:** Placeholder (configure in workflow)" >> $GITHUB_STEP_SUMMARY
          echo "- **Severity Threshold:** ${{ env.SEVERITY_THRESHOLD }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Images Scanned:** ${{ needs.discover-images.outputs.count }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Trigger:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
@@ -1,89 +0,0 @@
name: containers-multiarch
on:
  workflow_dispatch:
    inputs:
      image:
        description: "Image tag (e.g., ghcr.io/stella-ops/example:edge)"
        required: true
      context:
        description: "Build context directory"
        required: true
        default: "."
      platforms:
        description: "Platforms (comma-separated)"
        required: false
        default: "linux/amd64,linux/arm64"
      push:
        description: "Push to registry"
        required: false
        default: "false"

jobs:
  build-multiarch:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Install syft (SBOM)
        uses: anchore/sbom-action/download-syft@v0

      - name: Login to ghcr (optional)
        if: ${{ github.event.inputs.push == 'true' && secrets.GHCR_TOKEN != '' }}
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GHCR_TOKEN }}

      - name: Run multi-arch build
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          chmod +x .gitea/scripts/build/build-multiarch.sh
          extra=""
          if [[ "${{ github.event.inputs.push }}" == "true" ]]; then extra="--push"; fi
          .gitea/scripts/build/build-multiarch.sh \
            "${{ github.event.inputs.image }}" \
            "${{ github.event.inputs.context }}" \
            --platform "${{ github.event.inputs.platforms }}" \
            --sbom syft ${extra}

      - name: Build air-gap bundle
        run: |
          chmod +x .gitea/scripts/build/build-airgap-bundle.sh
          .gitea/scripts/build/build-airgap-bundle.sh "${{ github.event.inputs.image }}"

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: buildx-${{ github.event.inputs.image }}
          path: out/buildx/**

      - name: Inspect built image archive
        run: |
          set -e
          ls -lh out/buildx/
          find out/buildx -name "image.oci" -print -exec sh -c 'tar -tf "$1" | head' _ {} \;

      - name: Upload air-gap bundle
        uses: actions/upload-artifact@v4
        with:
          name: bundle-${{ github.event.inputs.image }}
          path: out/bundles/**

      - name: Inspect remote image (if pushed)
        if: ${{ github.event.inputs.push == 'true' }}
        run: |
          docker buildx imagetools inspect "${{ github.event.inputs.image }}"
@@ -1,187 +0,0 @@
# -----------------------------------------------------------------------------
# control-plane-chaos.yml
# Sprint: Testing Enhancement Advisory - Phase 3.3
# Description: CI workflow for control-plane outage chaos tests
# Schedule: Weekly (chaos tests are intensive)
# -----------------------------------------------------------------------------

name: Control-Plane Chaos Tests

on:
  schedule:
    # Run weekly on Sundays at 3:00 AM UTC
    - cron: '0 3 * * 0'
  workflow_dispatch:
    inputs:
      test_filter:
        description: 'Test filter (e.g., FullyQualifiedName~Authority)'
        required: false
        default: ''
      verbosity:
        description: 'Test verbosity level'
        required: false
        default: 'normal'
        type: choice
        options:
          - minimal
          - normal
          - detailed
          - diagnostic

env:
  DOTNET_NOLOGO: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true
  DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true

jobs:
  chaos-tests:
    name: Control-Plane Chaos Tests
    runs-on: ubuntu-latest
    timeout-minutes: 60

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'
          dotnet-quality: 'preview'

      - name: Restore dependencies
        run: |
          dotnet restore src/__Tests/chaos/StellaOps.Chaos.ControlPlane.Tests/StellaOps.Chaos.ControlPlane.Tests.csproj

      - name: Build chaos test project
        run: |
          dotnet build src/__Tests/chaos/StellaOps.Chaos.ControlPlane.Tests/StellaOps.Chaos.ControlPlane.Tests.csproj \
            --configuration Release \
            --no-restore

      - name: Run control-plane outage tests
        id: outage-tests
        run: |
          FILTER="${{ github.event.inputs.test_filter }}"
          VERBOSITY="${{ github.event.inputs.verbosity || 'normal' }}"
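          # ${FILTER:+&$FILTER} expands to "&$FILTER" only when FILTER is non-empty
          # (bash alternate-value expansion), so an empty input leaves just the category filter.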
          dotnet test src/__Tests/chaos/StellaOps.Chaos.ControlPlane.Tests/StellaOps.Chaos.ControlPlane.Tests.csproj \
            --configuration Release \
            --no-build \
            --verbosity $VERBOSITY \
            --logger "trx;LogFileName=chaos-outage-results.trx" \
            --logger "console;verbosity=$VERBOSITY" \
            --results-directory ./TestResults \
            --filter "Category=ControlPlane${FILTER:+&$FILTER}" \
            -- \
            RunConfiguration.CollectSourceInformation=true
        continue-on-error: true

      - name: Run partial outage tests
        id: partial-tests
        run: |
          FILTER="${{ github.event.inputs.test_filter }}"
          VERBOSITY="${{ github.event.inputs.verbosity || 'normal' }}"

          dotnet test src/__Tests/chaos/StellaOps.Chaos.ControlPlane.Tests/StellaOps.Chaos.ControlPlane.Tests.csproj \
            --configuration Release \
            --no-build \
            --verbosity $VERBOSITY \
            --logger "trx;LogFileName=chaos-partial-results.trx" \
            --logger "console;verbosity=$VERBOSITY" \
            --results-directory ./TestResults \
            --filter "Category=PartialOutage${FILTER:+&$FILTER}" \
            -- \
            RunConfiguration.CollectSourceInformation=true
        continue-on-error: true

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: chaos-test-results
          path: ./TestResults/*.trx
          retention-days: 30

      - name: Generate chaos test summary
        if: always()
        run: |
          echo "## Control-Plane Chaos Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Test Execution" >> $GITHUB_STEP_SUMMARY
          echo "| Test Suite | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|------------|--------|" >> $GITHUB_STEP_SUMMARY

          if [ "${{ steps.outage-tests.outcome }}" == "success" ]; then
            echo "| Full Outage Tests | :white_check_mark: Passed |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| Full Outage Tests | :x: Failed |" >> $GITHUB_STEP_SUMMARY
          fi

          if [ "${{ steps.partial-tests.outcome }}" == "success" ]; then
            echo "| Partial Outage Tests | :white_check_mark: Passed |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| Partial Outage Tests | :x: Failed |" >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Test Categories Covered" >> $GITHUB_STEP_SUMMARY
          echo "- Authority outage and cached token validation" >> $GITHUB_STEP_SUMMARY
          echo "- Scheduler outage and job persistence" >> $GITHUB_STEP_SUMMARY
          echo "- Full control-plane outage and data integrity" >> $GITHUB_STEP_SUMMARY
          echo "- Partial failure rate scenarios" >> $GITHUB_STEP_SUMMARY
          echo "- Latency injection and degraded service handling" >> $GITHUB_STEP_SUMMARY
          echo "- Service isolation and cascading failure prevention" >> $GITHUB_STEP_SUMMARY

      - name: Check test results
        if: always()
        run: |
          if [ "${{ steps.outage-tests.outcome }}" != "success" ] || [ "${{ steps.partial-tests.outcome }}" != "success" ]; then
            echo "::error::One or more chaos test suites failed"
            exit 1
          fi
          echo "All chaos tests passed successfully"

  chaos-report:
    name: Generate Chaos Report
    runs-on: ubuntu-latest
    needs: chaos-tests
    if: always()

    steps:
      - name: Download test results
        uses: actions/download-artifact@v4
        with:
          name: chaos-test-results
          path: ./TestResults

      - name: Parse TRX results
        run: |
          echo "## Chaos Test Detailed Report" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Test results have been uploaded as artifacts." >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Artifact Location" >> $GITHUB_STEP_SUMMARY
          echo "- chaos-test-results (TRX format)" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # List TRX files
          echo "### Available Result Files" >> $GITHUB_STEP_SUMMARY
          for file in ./TestResults/*.trx; do
            if [ -f "$file" ]; then
              echo "- $(basename $file)" >> $GITHUB_STEP_SUMMARY
            fi
          done

      - name: Notify on failure
        if: needs.chaos-tests.result == 'failure'
        run: |
          echo "::warning::Chaos tests failed. Review the test results for details."
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### :warning: Action Required" >> $GITHUB_STEP_SUMMARY
          echo "Chaos tests have failed. Please review:" >> $GITHUB_STEP_SUMMARY
          echo "1. Download the test artifacts for detailed results" >> $GITHUB_STEP_SUMMARY
          echo "2. Check if failures are due to test infrastructure or actual regressions" >> $GITHUB_STEP_SUMMARY
          echo "3. Consider running tests locally with diagnostic verbosity" >> $GITHUB_STEP_SUMMARY
@@ -1,271 +0,0 @@
name: cross-platform-determinism
on:
  workflow_dispatch: {}
  push:
    branches: [main]
    paths:
      - 'src/__Libraries/StellaOps.Canonical.Json/**'
      - 'src/__Libraries/StellaOps.Replay.Core/**'
      - 'src/__Tests/**Determinism**'
      - '.gitea/workflows/cross-platform-determinism.yml'
  pull_request:
    branches: [main]
    paths:
      - 'src/__Libraries/StellaOps.Canonical.Json/**'
      - 'src/__Libraries/StellaOps.Replay.Core/**'
      - 'src/__Tests/**Determinism**'

jobs:
  # DET-GAP-11: Windows determinism test runner
  determinism-windows:
    runs-on: windows-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj

      - name: Run determinism property tests
        run: |
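          # Backtick line continuations below are PowerShell syntax; 'run:' steps on
          # windows runners are assumed to execute under pwsh (the default shell).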
          dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj `
            --logger "trx;LogFileName=determinism-windows.trx" `
            --results-directory ./test-results/windows

      - name: Run CGS determinism tests
        run: |
          dotnet test src/__Tests/Determinism/StellaOps.Tests.Determinism.csproj `
            --filter "Category=Determinism" `
            --logger "trx;LogFileName=cgs-determinism-windows.trx" `
            --results-directory ./test-results/windows

      - name: Generate hash report
        shell: pwsh
        run: |
          # Generate determinism baseline hashes
          $hashReport = @{
            platform = "windows"
            timestamp = (Get-Date -Format "o")
            hashes = @{}
          }

          # Run hash generation script
          dotnet run --project tools/determinism-hash-generator -- `
            --output ./test-results/windows/hashes.json

          # Upload for comparison
          Copy-Item ./test-results/windows/hashes.json ./test-results/windows-hashes.json

      - name: Upload Windows results
        uses: actions/upload-artifact@v4
        with:
          name: determinism-windows
          path: |
            ./test-results/windows/
            ./test-results/windows-hashes.json

  # DET-GAP-12: macOS determinism test runner
  determinism-macos:
    runs-on: macos-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj

      - name: Run determinism property tests
        run: |
          dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj \
            --logger "trx;LogFileName=determinism-macos.trx" \
            --results-directory ./test-results/macos

      - name: Run CGS determinism tests
        run: |
          dotnet test src/__Tests/Determinism/StellaOps.Tests.Determinism.csproj \
            --filter "Category=Determinism" \
            --logger "trx;LogFileName=cgs-determinism-macos.trx" \
            --results-directory ./test-results/macos

      - name: Generate hash report
        run: |
          # Generate determinism baseline hashes
          dotnet run --project tools/determinism-hash-generator -- \
            --output ./test-results/macos/hashes.json

          cp ./test-results/macos/hashes.json ./test-results/macos-hashes.json

      - name: Upload macOS results
        uses: actions/upload-artifact@v4
        with:
          name: determinism-macos
          path: |
            ./test-results/macos/
            ./test-results/macos-hashes.json

  # Linux runner (baseline)
  determinism-linux:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj

      - name: Run determinism property tests
        run: |
          dotnet test src/__Tests/__Libraries/StellaOps.Testing.Determinism.Properties/StellaOps.Testing.Determinism.Properties.csproj \
            --logger "trx;LogFileName=determinism-linux.trx" \
            --results-directory ./test-results/linux

      - name: Run CGS determinism tests
        run: |
          dotnet test src/__Tests/Determinism/StellaOps.Tests.Determinism.csproj \
            --filter "Category=Determinism" \
            --logger "trx;LogFileName=cgs-determinism-linux.trx" \
            --results-directory ./test-results/linux

      - name: Generate hash report
        run: |
          # Generate determinism baseline hashes
          dotnet run --project tools/determinism-hash-generator -- \
            --output ./test-results/linux/hashes.json

          cp ./test-results/linux/hashes.json ./test-results/linux-hashes.json

      - name: Upload Linux results
        uses: actions/upload-artifact@v4
        with:
          name: determinism-linux
          path: |
            ./test-results/linux/
            ./test-results/linux-hashes.json

  # Alpine Linux (musl libc) for CGS-008 cross-platform requirement
  determinism-alpine:
    runs-on: ubuntu-latest
    container:
      image: mcr.microsoft.com/dotnet/sdk:10.0-alpine
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Run CGS determinism tests
        run: |
          dotnet test src/__Tests/Determinism/StellaOps.Tests.Determinism.csproj \
            --filter "Category=Determinism" \
            --logger "trx;LogFileName=cgs-determinism-alpine.trx" \
            --results-directory ./test-results/alpine

      - name: Upload Alpine results
        uses: actions/upload-artifact@v4
        with:
          name: determinism-alpine
          path: ./test-results/alpine/

  # Debian Linux for CGS-008 cross-platform requirement
  determinism-debian:
    runs-on: ubuntu-latest
    container:
      image: mcr.microsoft.com/dotnet/sdk:10.0-bookworm-slim
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Run CGS determinism tests
        run: |
          dotnet test src/__Tests/Determinism/StellaOps.Tests.Determinism.csproj \
            --filter "Category=Determinism" \
            --logger "trx;LogFileName=cgs-determinism-debian.trx" \
            --results-directory ./test-results/debian

      - name: Upload Debian results
        uses: actions/upload-artifact@v4
        with:
          name: determinism-debian
          path: ./test-results/debian/

  # DET-GAP-13: Cross-platform hash comparison report
  compare-hashes:
    runs-on: ubuntu-latest
    needs: [determinism-windows, determinism-macos, determinism-linux, determinism-alpine, determinism-debian]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./artifacts

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Generate comparison report
        run: |
          python3 scripts/determinism/compare-platform-hashes.py \
            --linux ./artifacts/determinism-linux/linux-hashes.json \
            --windows ./artifacts/determinism-windows/windows-hashes.json \
            --macos ./artifacts/determinism-macos/macos-hashes.json \
            --output ./cross-platform-report.json \
            --markdown ./cross-platform-report.md

      - name: Check for divergences
        run: |
          # Fail if any hashes differ across platforms
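          # Assumed report shape, matching the fields read below:
          #   {"divergences": [{"key": "...", "linux": "<hash>", "windows": "<hash>", "macos": "<hash>"}]}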
          python3 -c "
          import json
          import sys

          with open('./cross-platform-report.json') as f:
              report = json.load(f)

          divergences = report.get('divergences', [])
          if divergences:
              print(f'ERROR: {len(divergences)} hash divergence(s) detected!')
              for d in divergences:
                  print(f'  - {d[\"key\"]}: linux={d[\"linux\"]}, windows={d[\"windows\"]}, macos={d[\"macos\"]}')
              sys.exit(1)
          else:
              print('SUCCESS: All hashes match across platforms.')
          "

      - name: Upload comparison report
        uses: actions/upload-artifact@v4
        with:
          name: cross-platform-comparison
          path: |
            ./cross-platform-report.json
            ./cross-platform-report.md

      - name: Comment on PR (if applicable)
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const report = fs.readFileSync('./cross-platform-report.md', 'utf8');
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '## Cross-Platform Determinism Report\n\n' + report
            });
@@ -1,45 +0,0 @@
name: Crypto Compliance Audit

on:
  pull_request:
    paths:
      - 'src/**/*.cs'
      - 'etc/crypto-plugins-manifest.json'
      - 'scripts/audit-crypto-usage.ps1'
      - '.gitea/workflows/crypto-compliance.yml'
  push:
    branches: [ main ]
    paths:
      - 'src/**/*.cs'
      - 'etc/crypto-plugins-manifest.json'
      - 'scripts/audit-crypto-usage.ps1'
      - '.gitea/workflows/crypto-compliance.yml'

jobs:
  crypto-audit:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run crypto usage audit
        shell: pwsh
        run: |
          Write-Host "Running crypto compliance audit..."
          ./scripts/audit-crypto-usage.ps1 -RootPath "$PWD" -FailOnViolations $true -Verbose

      - name: Upload audit report on failure
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: crypto-compliance-violations
          path: |
            scripts/audit-crypto-usage.ps1
          retention-days: 30
@@ -1,41 +0,0 @@
name: crypto-sim-smoke

on:
  workflow_dispatch:
  push:
    paths:
      - "devops/services/crypto/sim-crypto-service/**"
      - "devops/services/crypto/sim-crypto-smoke/**"
      - "devops/tools/crypto/run-sim-smoke.ps1"
      - "docs/security/crypto-simulation-services.md"
      - ".gitea/workflows/crypto-sim-smoke.yml"

jobs:
  sim-smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.x"

      - name: Build sim service and smoke harness
        run: |
          dotnet build devops/services/crypto/sim-crypto-service/SimCryptoService.csproj -c Release
          dotnet build devops/services/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release

      - name: "Run smoke (sim profile: sm)"
        env:
          ASPNETCORE_URLS: http://localhost:5000
          STELLAOPS_CRYPTO_SIM_URL: http://localhost:5000
          SIM_PROFILE: sm
        run: |
          set -euo pipefail
          dotnet run --project devops/services/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release &
          service_pid=$!
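          # Fixed sleep as a crude readiness gate; the service is assumed to bind within ~6s.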
          sleep 6
          dotnet run --project devops/services/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
          kill $service_pid
@@ -1,55 +0,0 @@
name: cryptopro-linux-csp
on:
  push:
    branches: [main, develop]
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'
  pull_request:
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'

env:
  IMAGE_NAME: cryptopro-linux-csp
  DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build image (accept EULA explicitly)
        run: |
          docker build -t $IMAGE_NAME \
            --build-arg CRYPTOPRO_ACCEPT_EULA=1 \
            -f $DOCKERFILE .

      - name: Run container
        run: |
          docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
          for i in {1..20}; do
            if curl -sf http://127.0.0.1:18080/health >/dev/null; then
              exit 0
            fi
            sleep 3
          done
          echo "Service failed to start" && exit 1

      - name: Test endpoints
        run: |
          curl -sf http://127.0.0.1:18080/health
          curl -sf http://127.0.0.1:18080/license || true
          curl -sf -X POST http://127.0.0.1:18080/hash \
            -H "Content-Type: application/json" \
            -d '{"data_b64":"SGVsbG8="}'

      - name: Stop container
        if: always()
        run: docker rm -f $IMAGE_NAME || true
@@ -1,40 +0,0 @@
name: cryptopro-optin

on:
  workflow_dispatch:
    inputs:
      configuration:
        description: Build configuration
        default: Release
      run_tests:
        description: Run CryptoPro signer tests (requires CSP installed on runner)
        default: true

jobs:
  cryptopro:
    runs-on: windows-latest
    env:
      STELLAOPS_CRYPTO_PRO_ENABLED: "1"
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET 10 (preview)
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100

      - name: Build CryptoPro plugin
        run: |
          dotnet build src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/StellaOps.Cryptography.Plugin.CryptoPro.csproj -c ${{ github.event.inputs.configuration || 'Release' }}

      - name: Run CryptoPro signer tests (requires CSP pre-installed)
        if: ${{ github.event.inputs.run_tests != 'false' }}
        run: |
          powershell -File scripts/crypto/run-cryptopro-tests.ps1 -Configuration ${{ github.event.inputs.configuration || 'Release' }}

# NOTE: This workflow assumes the windows runner already has CryptoPro CSP installed and licensed.
# Leave it opt-in to avoid breaking default CI lanes.
@@ -1,439 +0,0 @@
# .gitea/workflows/dead-path-detection.yml
# Dead-path detection workflow for uncovered branch identification
# Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
# Task: CCUT-017
#
# WORKFLOW PURPOSE:
# =================
# Detects uncovered code paths (dead paths) by analyzing branch coverage data.
# Compares against baseline exemptions and fails on new dead paths to prevent
# coverage regression and to identify potentially unreachable code.
#
# Coverage collection uses Coverlet with Cobertura output format.
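#
# Illustrative Cobertura branch entry (the detection step flags covered < total):
#   <line number="42" hits="3" branch="true" condition-coverage="50% (1/2)"/>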
|
||||
|
||||
name: Dead-Path Detection

on:
  push:
    branches: [main]
    paths:
      - 'src/**/*.cs'
      - 'src/**/*.csproj'
      - '.gitea/workflows/dead-path-detection.yml'
  pull_request:
    paths:
      - 'src/**/*.cs'
      - 'src/**/*.csproj'
  workflow_dispatch:
    inputs:
      update_baseline:
        description: 'Update the dead-path baseline'
        type: boolean
        default: false
      coverage_threshold:
        description: 'Branch coverage threshold (%)'
        type: number
        default: 80

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  COVERAGE_OUTPUT: './coverage'
  DEFAULT_THRESHOLD: 80

jobs:
  # ===========================================================================
  # COLLECT COVERAGE AND DETECT DEAD PATHS
  # ===========================================================================

  detect:
    name: Detect Dead Paths
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    outputs:
      has-new-dead-paths: ${{ steps.check.outputs.has_new_dead_paths }}
      new-dead-path-count: ${{ steps.check.outputs.new_count }}
      total-dead-paths: ${{ steps.check.outputs.total_count }}
      branch-coverage: ${{ steps.coverage.outputs.branch_coverage }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/Directory.Packages.props', '**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore Dependencies
        run: dotnet restore src/StellaOps.sln

      - name: Run Tests with Coverage
        id: test
        run: |
          mkdir -p ${{ env.COVERAGE_OUTPUT }}

          # Run tests with branch coverage collection
          dotnet test src/StellaOps.sln \
            --configuration Release \
            --no-restore \
            --verbosity minimal \
            --collect:"XPlat Code Coverage" \
            --results-directory ${{ env.COVERAGE_OUTPUT }} \
            -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=cobertura \
               DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.IncludeTestAssembly=false

          # Merge coverage reports if multiple exist
          if command -v reportgenerator &> /dev/null; then
            reportgenerator \
              -reports:"${{ env.COVERAGE_OUTPUT }}/**/coverage.cobertura.xml" \
              -targetdir:"${{ env.COVERAGE_OUTPUT }}/merged" \
              -reporttypes:"Cobertura"
          fi

      - name: Calculate Branch Coverage
        id: coverage
        run: |
          # Find coverage file
          COVERAGE_FILE=$(find ${{ env.COVERAGE_OUTPUT }} -name "coverage.cobertura.xml" | head -1)

          if [ -z "$COVERAGE_FILE" ]; then
            echo "::warning::No coverage file found"
            echo "branch_coverage=0" >> $GITHUB_OUTPUT
            exit 0
          fi

          # Extract branch coverage from Cobertura XML
          BRANCH_RATE=$(grep -oP 'branch-rate="\K[^"]+' "$COVERAGE_FILE" | head -1)
          BRANCH_COVERAGE=$(echo "scale=2; $BRANCH_RATE * 100" | bc)

          echo "Branch coverage: ${BRANCH_COVERAGE}%"
          echo "branch_coverage=$BRANCH_COVERAGE" >> $GITHUB_OUTPUT

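      # Worked example of the bc conversion above: a Cobertura attribute of
      # branch-rate="0.8571" yields `echo "scale=2; 0.8571 * 100" | bc` -> 85.71.
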
      - name: Detect Dead Paths
        id: detect
        run: |
          # Find coverage file
          COVERAGE_FILE=$(find ${{ env.COVERAGE_OUTPUT }} -name "coverage.cobertura.xml" | head -1)

          if [ -z "$COVERAGE_FILE" ]; then
            echo "::warning::No coverage file found, skipping dead-path detection"
            echo '{"activeDeadPaths": 0, "entries": []}' > dead-paths-report.json
            exit 0
          fi

          # Parse coverage and extract uncovered branches
          cat > extract-dead-paths.py << 'SCRIPT'
          import xml.etree.ElementTree as ET
          import json
          import sys
          import os

          def extract_dead_paths(coverage_file, exemptions_file=None):
              tree = ET.parse(coverage_file)
              root = tree.getroot()

              exemptions = set()
              if exemptions_file and os.path.exists(exemptions_file):
                  with open(exemptions_file) as f:
                      import yaml
                      data = yaml.safe_load(f) or {}
                      exemptions = set(data.get('exemptions', []))

              dead_paths = []

              for package in root.findall('.//package'):
                  for cls in package.findall('.//class'):
                      filename = cls.get('filename', '')
                      classname = cls.get('name', '')

                      for line in cls.findall('.//line'):
                          branch = line.get('branch', 'false')
                          if branch != 'true':
                              continue

                          hits = int(line.get('hits', 0))
                          line_num = int(line.get('number', 0))
                          condition = line.get('condition-coverage', '')

                          # Parse condition coverage (e.g., "50% (1/2)")
                          if condition:
                              import re
                              match = re.search(r'\((\d+)/(\d+)\)', condition)
                              if match:
                                  covered = int(match.group(1))
                                  total = int(match.group(2))

                                  if covered < total:
                                      path_id = f"{filename}:{line_num}"
                                      is_exempt = path_id in exemptions

                                      dead_paths.append({
                                          'file': filename,
                                          'line': line_num,
                                          'class': classname,
                                          'coveredBranches': covered,
                                          'totalBranches': total,
                                          'coverage': f"{covered}/{total}",
                                          'isExempt': is_exempt,
                                          'pathId': path_id
                                      })

              # Sort by file and line
              dead_paths.sort(key=lambda x: (x['file'], x['line']))

              active_count = len([p for p in dead_paths if not p['isExempt']])

              report = {
                  'activeDeadPaths': active_count,
                  'totalDeadPaths': len(dead_paths),
                  'exemptedPaths': len(dead_paths) - active_count,
                  'entries': dead_paths
              }

              return report

          if __name__ == '__main__':
              coverage_file = sys.argv[1] if len(sys.argv) > 1 else 'coverage.cobertura.xml'
              exemptions_file = sys.argv[2] if len(sys.argv) > 2 else None

              report = extract_dead_paths(coverage_file, exemptions_file)

              with open('dead-paths-report.json', 'w') as f:
                  json.dump(report, f, indent=2)

              print(f"Found {report['activeDeadPaths']} active dead paths")
              print(f"Total uncovered branches: {report['totalDeadPaths']}")
              print(f"Exempted: {report['exemptedPaths']}")
          SCRIPT

          python3 extract-dead-paths.py "$COVERAGE_FILE" "coverage-exemptions.yaml"

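      # Assumption: extract-dead-paths.py imports PyYAML when
      # coverage-exemptions.yaml exists; if the runner image does not ship it,
      # install it first (e.g. `pip install pyyaml`) or the script fails with
      # an ImportError.
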
      - name: Load Baseline
        id: baseline
        run: |
          # Check for baseline file
          if [ -f "dead-paths-baseline.json" ]; then
            BASELINE_COUNT=$(jq '.activeDeadPaths // 0' dead-paths-baseline.json)
            echo "baseline_count=$BASELINE_COUNT" >> $GITHUB_OUTPUT
            echo "has_baseline=true" >> $GITHUB_OUTPUT
          else
            echo "baseline_count=0" >> $GITHUB_OUTPUT
            echo "has_baseline=false" >> $GITHUB_OUTPUT
            echo "::notice::No baseline file found. First run will establish baseline."
          fi

      - name: Check for New Dead Paths
        id: check
        run: |
          CURRENT_COUNT=$(jq '.activeDeadPaths' dead-paths-report.json)
          BASELINE_COUNT=${{ steps.baseline.outputs.baseline_count }}
          TOTAL_COUNT=$(jq '.totalDeadPaths' dead-paths-report.json)

          # Calculate new dead paths (only count increases)
          if [ "$CURRENT_COUNT" -gt "$BASELINE_COUNT" ]; then
            NEW_COUNT=$((CURRENT_COUNT - BASELINE_COUNT))
            HAS_NEW="true"
          else
            NEW_COUNT=0
            HAS_NEW="false"
          fi

          echo "has_new_dead_paths=$HAS_NEW" >> $GITHUB_OUTPUT
          echo "new_count=$NEW_COUNT" >> $GITHUB_OUTPUT
          echo "total_count=$TOTAL_COUNT" >> $GITHUB_OUTPUT

          echo "Current active dead paths: $CURRENT_COUNT"
          echo "Baseline: $BASELINE_COUNT"
          echo "New dead paths: $NEW_COUNT"

          if [ "$HAS_NEW" = "true" ]; then
            echo "::error::Found $NEW_COUNT new dead paths since baseline"

            # Show top 10 new dead paths
            echo ""
            echo "=== New Dead Paths ==="
            jq -r '.entries | map(select(.isExempt == false)) | .[:10][] | "\(.file):\(.line) - \(.coverage) branches covered"' dead-paths-report.json

            exit 1
          else
            echo "No new dead paths detected."
          fi

      - name: Check Coverage Threshold
        if: always()
        run: |
          THRESHOLD=${{ inputs.coverage_threshold || env.DEFAULT_THRESHOLD }}
          COVERAGE=${{ steps.coverage.outputs.branch_coverage }}

          if [ -z "$COVERAGE" ] || [ "$COVERAGE" = "0" ]; then
            echo "::warning::Could not determine branch coverage"
            exit 0
          fi

          # Compare coverage to threshold
          BELOW_THRESHOLD=$(echo "$COVERAGE < $THRESHOLD" | bc)

          if [ "$BELOW_THRESHOLD" -eq 1 ]; then
            echo "::warning::Branch coverage ($COVERAGE%) is below threshold ($THRESHOLD%)"
          else
            echo "Branch coverage ($COVERAGE%) meets threshold ($THRESHOLD%)"
          fi

      - name: Update Baseline
        if: inputs.update_baseline == true && github.event_name == 'workflow_dispatch'
        run: |
          cp dead-paths-report.json dead-paths-baseline.json
          echo "Baseline updated with current dead paths"

      - name: Generate Report
        if: always()
        run: |
          # Generate markdown report
          cat > dead-paths-report.md << EOF
          ## Dead-Path Detection Report

          | Metric | Value |
          |--------|-------|
          | Branch Coverage | ${{ steps.coverage.outputs.branch_coverage }}% |
          | Active Dead Paths | $(jq '.activeDeadPaths' dead-paths-report.json) |
          | Total Uncovered Branches | $(jq '.totalDeadPaths' dead-paths-report.json) |
          | Exempted Paths | $(jq '.exemptedPaths' dead-paths-report.json) |
          | Baseline | ${{ steps.baseline.outputs.baseline_count }} |
          | New Dead Paths | ${{ steps.check.outputs.new_count }} |

          ### Top Uncovered Files

          EOF

          # Add top files by dead path count
          jq -r '
            .entries
            | group_by(.file)
            | map({file: .[0].file, count: length})
            | sort_by(-.count)
            | .[:10][]
            | "| \(.file) | \(.count) |"
          ' dead-paths-report.json >> dead-paths-report.md 2>/dev/null || true

          echo "" >> dead-paths-report.md
          echo "*Report generated at $(date -u +%Y-%m-%dT%H:%M:%SZ)*" >> dead-paths-report.md

      - name: Upload Reports
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: dead-path-reports
          path: |
            dead-paths-report.json
            dead-paths-report.md
          if-no-files-found: ignore

      - name: Upload Coverage
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: coverage-report
          path: ${{ env.COVERAGE_OUTPUT }}
          if-no-files-found: ignore

  # ===========================================================================
  # POST REPORT TO PR
  # ===========================================================================

  comment:
    name: Post Report
    needs: detect
    if: github.event_name == 'pull_request' && always()
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      pull-requests: write
    steps:
      - name: Download Report
        uses: actions/download-artifact@v4
        with:
          name: dead-path-reports
        continue-on-error: true

      - name: Post Comment
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            let report = '';
            try {
              report = fs.readFileSync('dead-paths-report.md', 'utf8');
            } catch (e) {
              report = 'Dead-path report not available.';
            }

            const hasNewDeadPaths = '${{ needs.detect.outputs.has-new-dead-paths }}' === 'true';
            const newCount = '${{ needs.detect.outputs.new-dead-path-count }}';
            const branchCoverage = '${{ needs.detect.outputs.branch-coverage }}';

            const status = hasNewDeadPaths ? ':x: Failed' : ':white_check_mark: Passed';

            const body = `## Dead-Path Detection ${status}

            ${hasNewDeadPaths ? `Found **${newCount}** new dead path(s) that need coverage.` : 'No new dead paths detected.'}

            **Branch Coverage:** ${branchCoverage}%

            ${report}

            ---
            <details>
            <summary>How to fix dead paths</summary>

            Dead paths are code branches that are never executed during tests. To fix:

            1. **Add tests** that exercise the uncovered branches
            2. **Remove dead code** if the branch is truly unreachable
            3. **Add exemption** if the code is intentionally untested (document reason)

            Example exemption in \`coverage-exemptions.yaml\`:
            \`\`\`yaml
            exemptions:
              - "src/Module/File.cs:42" # Emergency handler - tested manually
            \`\`\`

            </details>
            `;

            // Find existing comment
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number
            });

            const botComment = comments.find(c =>
              c.user.type === 'Bot' &&
              c.body.includes('Dead-Path Detection')
            );

            if (botComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: botComment.id,
                body: body
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: body
              });
            }
@@ -1,204 +0,0 @@
# Dependency License Compliance Gate
# Sprint: CI/CD Enhancement - Dependency Management Automation
#
# Purpose: Validate that all dependencies use approved licenses
# Triggers: PRs modifying package files

name: License Compliance

on:
  pull_request:
    paths:
      - 'src/Directory.Packages.props'
      - '**/package.json'
      - '**/package-lock.json'
      - '**/*.csproj'

env:
  DOTNET_VERSION: '10.0.100'
  # Blocked licenses (incompatible with AGPL-3.0)
  BLOCKED_LICENSES: 'GPL-2.0-only,SSPL-1.0,BUSL-1.1,Proprietary,Commercial'
  # Allowed licenses
  ALLOWED_LICENSES: 'MIT,Apache-2.0,BSD-2-Clause,BSD-3-Clause,ISC,0BSD,Unlicense,CC0-1.0,LGPL-2.1,LGPL-3.0,MPL-2.0,AGPL-3.0,GPL-3.0'

jobs:
  check-nuget-licenses:
    name: NuGet License Check
    runs-on: ubuntu-latest
    # Expose the step result so the gate job's needs.*.outputs lookup works
    outputs:
      blocked: ${{ steps.nuget-check.outputs.blocked }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install dotnet-delice
        run: dotnet tool install --global dotnet-delice

      - name: Restore packages
        run: dotnet restore src/StellaOps.sln

      - name: Check NuGet licenses
        id: nuget-check
        run: |
          mkdir -p license-reports

          echo "Checking NuGet package licenses..."

          # Run delice on the solution
          dotnet delice src/StellaOps.sln \
            --output license-reports/nuget-licenses.json \
            --format json \
            2>&1 | tee license-reports/nuget-check.log || true

          # Check for blocked licenses
          BLOCKED_FOUND=0
          BLOCKED_PACKAGES=""

          IFS=',' read -ra BLOCKED_ARRAY <<< "$BLOCKED_LICENSES"
          for license in "${BLOCKED_ARRAY[@]}"; do
            if grep -qi "\"$license\"" license-reports/nuget-licenses.json 2>/dev/null; then
              BLOCKED_FOUND=1
              PACKAGES=$(grep -B5 "\"$license\"" license-reports/nuget-licenses.json | grep -o '"[^"]*"' | head -1 || echo "unknown")
              BLOCKED_PACKAGES="$BLOCKED_PACKAGES\n- $license: $PACKAGES"
            fi
          done

          if [[ $BLOCKED_FOUND -eq 1 ]]; then
            echo "::error::Blocked licenses found in NuGet packages:$BLOCKED_PACKAGES"
            echo "blocked=true" >> $GITHUB_OUTPUT
            echo "blocked_packages<<EOF" >> $GITHUB_OUTPUT
            echo -e "$BLOCKED_PACKAGES" >> $GITHUB_OUTPUT
            echo "EOF" >> $GITHUB_OUTPUT
          else
            echo "All NuGet packages have approved licenses"
            echo "blocked=false" >> $GITHUB_OUTPUT
          fi

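      # The grep above only requires the SPDX id to appear as a quoted JSON
      # string somewhere in the delice report. A local sanity check against a
      # hypothetical report file:
      #   echo '{"license": "SSPL-1.0"}' > /tmp/nuget-licenses.json
      #   grep -qi '"SSPL-1.0"' /tmp/nuget-licenses.json && echo "blocked"
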
      - name: Upload NuGet license report
        uses: actions/upload-artifact@v4
        with:
          name: nuget-license-report
          path: license-reports/
          retention-days: 30

  check-npm-licenses:
    name: npm License Check
    runs-on: ubuntu-latest
    # Expose the step result so the gate job's needs.*.outputs lookup works
    outputs:
      blocked: ${{ steps.npm-check.outputs.blocked }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Find package.json files
        id: find-packages
        run: |
          PACKAGES=$(find . -name "package.json" -not -path "*/node_modules/*" -not -path "*/bin/*" -not -path "*/obj/*" | head -10)
          echo "Found package.json files:"
          echo "$PACKAGES"
          echo "packages<<EOF" >> $GITHUB_OUTPUT
          echo "$PACKAGES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: Install license-checker
        run: npm install -g license-checker

      - name: Check npm licenses
        id: npm-check
        run: |
          mkdir -p license-reports
          BLOCKED_FOUND=0
          BLOCKED_PACKAGES=""

          # Check each package.json directory
          while IFS= read -r pkg; do
            if [[ -z "$pkg" ]]; then continue; fi

            DIR=$(dirname "$pkg")
            echo "Checking $DIR..."

            cd "$DIR"
            if [[ -f "package-lock.json" ]] || [[ -f "yarn.lock" ]]; then
              npm install --ignore-scripts 2>/dev/null || true

              # Run license checker
              license-checker --json > "${GITHUB_WORKSPACE}/license-reports/npm-$(basename $DIR).json" 2>/dev/null || true

              # Check for blocked licenses
              IFS=',' read -ra BLOCKED_ARRAY <<< "$BLOCKED_LICENSES"
              for license in "${BLOCKED_ARRAY[@]}"; do
                if grep -qi "\"$license\"" "${GITHUB_WORKSPACE}/license-reports/npm-$(basename $DIR).json" 2>/dev/null; then
                  BLOCKED_FOUND=1
                  BLOCKED_PACKAGES="$BLOCKED_PACKAGES\n- $license in $DIR"
                fi
              done
            fi
            cd "$GITHUB_WORKSPACE"
          done <<< "${{ steps.find-packages.outputs.packages }}"

          if [[ $BLOCKED_FOUND -eq 1 ]]; then
            echo "::error::Blocked licenses found in npm packages:$BLOCKED_PACKAGES"
            echo "blocked=true" >> $GITHUB_OUTPUT
          else
            echo "All npm packages have approved licenses"
            echo "blocked=false" >> $GITHUB_OUTPUT
          fi

      - name: Upload npm license report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: npm-license-report
          path: license-reports/
          retention-days: 30

  gate:
    name: License Gate
    runs-on: ubuntu-latest
    needs: [check-nuget-licenses, check-npm-licenses]
    if: always()
    steps:
      - name: Check results
        run: |
          NUGET_BLOCKED="${{ needs.check-nuget-licenses.outputs.blocked }}"
          NPM_BLOCKED="${{ needs.check-npm-licenses.outputs.blocked }}"

          echo "## License Compliance Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Check | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY

          if [[ "$NUGET_BLOCKED" == "true" ]]; then
            echo "| NuGet | ❌ Blocked licenses found |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| NuGet | ✅ Approved |" >> $GITHUB_STEP_SUMMARY
          fi

          if [[ "$NPM_BLOCKED" == "true" ]]; then
            echo "| npm | ❌ Blocked licenses found |" >> $GITHUB_STEP_SUMMARY
          else
            echo "| npm | ✅ Approved |" >> $GITHUB_STEP_SUMMARY
          fi

          if [[ "$NUGET_BLOCKED" == "true" ]] || [[ "$NPM_BLOCKED" == "true" ]]; then
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "### Blocked Licenses" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "The following licenses are not compatible with AGPL-3.0:" >> $GITHUB_STEP_SUMMARY
            echo "\`$BLOCKED_LICENSES\`" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "Please replace the offending packages or request an exception." >> $GITHUB_STEP_SUMMARY

            echo "::error::License compliance check failed"
            exit 1
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "✅ All dependencies use approved licenses" >> $GITHUB_STEP_SUMMARY
@@ -1,249 +0,0 @@
# Dependency Security Scan
# Sprint: CI/CD Enhancement - Dependency Management Automation
#
# Purpose: Scan dependencies for known vulnerabilities
# Schedule: Weekly and on PRs modifying package files

name: Dependency Security Scan

on:
  schedule:
    # Run weekly on Sundays at 02:00 UTC
    - cron: '0 2 * * 0'
  pull_request:
    paths:
      - 'src/Directory.Packages.props'
      - '**/package.json'
      - '**/package-lock.json'
      - '**/*.csproj'
  workflow_dispatch:
    inputs:
      fail_on_vulnerabilities:
        description: 'Fail if vulnerabilities found'
        required: false
        type: boolean
        default: true

env:
  DOTNET_VERSION: '10.0.100'

jobs:
  scan-nuget:
    name: NuGet Vulnerability Scan
    runs-on: ubuntu-latest
    outputs:
      vulnerabilities_found: ${{ steps.scan.outputs.vulnerabilities_found }}
      critical_count: ${{ steps.scan.outputs.critical_count }}
      high_count: ${{ steps.scan.outputs.high_count }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore packages
        run: dotnet restore src/StellaOps.sln

      - name: Scan for vulnerabilities
        id: scan
        run: |
          mkdir -p security-reports

          echo "Scanning NuGet packages for vulnerabilities..."

          # Run vulnerability check
          dotnet list src/StellaOps.sln package --vulnerable --include-transitive \
            > security-reports/nuget-vulnerabilities.txt 2>&1 || true

          # Parse results. Note: grep -c prints 0 itself on no match but exits
          # non-zero, so `|| echo "0"` would capture "0\n0" and break the
          # arithmetic below; default empty captures to 0 instead.
          CRITICAL=$(grep -c "Critical" security-reports/nuget-vulnerabilities.txt 2>/dev/null || true)
          HIGH=$(grep -c "High" security-reports/nuget-vulnerabilities.txt 2>/dev/null || true)
          MEDIUM=$(grep -c "Medium" security-reports/nuget-vulnerabilities.txt 2>/dev/null || true)
          LOW=$(grep -c "Low" security-reports/nuget-vulnerabilities.txt 2>/dev/null || true)
          CRITICAL=${CRITICAL:-0}; HIGH=${HIGH:-0}; MEDIUM=${MEDIUM:-0}; LOW=${LOW:-0}

          TOTAL=$((CRITICAL + HIGH + MEDIUM + LOW))

          echo "=== Vulnerability Summary ==="
          echo "Critical: $CRITICAL"
          echo "High: $HIGH"
          echo "Medium: $MEDIUM"
          echo "Low: $LOW"
          echo "Total: $TOTAL"

          echo "critical_count=$CRITICAL" >> $GITHUB_OUTPUT
          echo "high_count=$HIGH" >> $GITHUB_OUTPUT
          echo "medium_count=$MEDIUM" >> $GITHUB_OUTPUT
          echo "low_count=$LOW" >> $GITHUB_OUTPUT

          if [[ $TOTAL -gt 0 ]]; then
            echo "vulnerabilities_found=true" >> $GITHUB_OUTPUT
          else
            echo "vulnerabilities_found=false" >> $GITHUB_OUTPUT
          fi

          # Show detailed report
          echo ""
          echo "=== Detailed Report ==="
          cat security-reports/nuget-vulnerabilities.txt

      - name: Upload NuGet security report
        uses: actions/upload-artifact@v4
        with:
          name: nuget-security-report
          path: security-reports/
          retention-days: 90

  scan-npm:
    name: npm Vulnerability Scan
    runs-on: ubuntu-latest
    outputs:
      vulnerabilities_found: ${{ steps.scan.outputs.vulnerabilities_found }}
      critical_count: ${{ steps.scan.outputs.critical_count }}
      high_count: ${{ steps.scan.outputs.high_count }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Find and scan package.json files
        id: scan
        run: |
          mkdir -p security-reports

          TOTAL_CRITICAL=0
          TOTAL_HIGH=0
          TOTAL_MEDIUM=0
          TOTAL_LOW=0
          VULNERABILITIES_FOUND=false

          # Find all package.json files
          PACKAGES=$(find . -name "package.json" -not -path "*/node_modules/*" -not -path "*/bin/*" -not -path "*/obj/*")

          for pkg in $PACKAGES; do
            DIR=$(dirname "$pkg")
            if [[ ! -f "$DIR/package-lock.json" ]] && [[ ! -f "$DIR/yarn.lock" ]]; then
              continue
            fi

            echo "Scanning $DIR..."
            cd "$DIR"

            # Install dependencies
            npm install --ignore-scripts 2>/dev/null || true

            # Run npm audit
            REPORT_FILE="${GITHUB_WORKSPACE}/security-reports/npm-audit-$(basename $DIR).json"
            npm audit --json > "$REPORT_FILE" 2>/dev/null || true

            # Parse results
            if [[ -f "$REPORT_FILE" ]]; then
              CRITICAL=$(jq '.metadata.vulnerabilities.critical // 0' "$REPORT_FILE" 2>/dev/null || echo "0")
              HIGH=$(jq '.metadata.vulnerabilities.high // 0' "$REPORT_FILE" 2>/dev/null || echo "0")
              MEDIUM=$(jq '.metadata.vulnerabilities.moderate // 0' "$REPORT_FILE" 2>/dev/null || echo "0")
              LOW=$(jq '.metadata.vulnerabilities.low // 0' "$REPORT_FILE" 2>/dev/null || echo "0")

              TOTAL_CRITICAL=$((TOTAL_CRITICAL + CRITICAL))
              TOTAL_HIGH=$((TOTAL_HIGH + HIGH))
              TOTAL_MEDIUM=$((TOTAL_MEDIUM + MEDIUM))
              TOTAL_LOW=$((TOTAL_LOW + LOW))

              if [[ $((CRITICAL + HIGH + MEDIUM + LOW)) -gt 0 ]]; then
                VULNERABILITIES_FOUND=true
              fi
            fi

            cd "$GITHUB_WORKSPACE"
          done

          echo "=== npm Vulnerability Summary ==="
          echo "Critical: $TOTAL_CRITICAL"
          echo "High: $TOTAL_HIGH"
          echo "Medium: $TOTAL_MEDIUM"
          echo "Low: $TOTAL_LOW"

          echo "critical_count=$TOTAL_CRITICAL" >> $GITHUB_OUTPUT
          echo "high_count=$TOTAL_HIGH" >> $GITHUB_OUTPUT
          echo "vulnerabilities_found=$VULNERABILITIES_FOUND" >> $GITHUB_OUTPUT

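      # The jq paths above assume npm audit's JSON summary shape, roughly:
      #   {"metadata": {"vulnerabilities": {"critical": 0, "high": 2,
      #    "moderate": 1, "low": 0}}}
      # The `// 0` defaults guard against keys missing from older npm output.
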
      - name: Upload npm security report
        uses: actions/upload-artifact@v4
        with:
          name: npm-security-report
          path: security-reports/
          retention-days: 90

  summary:
    name: Security Summary
    runs-on: ubuntu-latest
    needs: [scan-nuget, scan-npm]
    if: always()

    steps:
      - name: Generate summary
        run: |
          NUGET_VULNS="${{ needs.scan-nuget.outputs.vulnerabilities_found }}"
          NPM_VULNS="${{ needs.scan-npm.outputs.vulnerabilities_found }}"

          NUGET_CRITICAL="${{ needs.scan-nuget.outputs.critical_count }}"
          NUGET_HIGH="${{ needs.scan-nuget.outputs.high_count }}"
          NPM_CRITICAL="${{ needs.scan-npm.outputs.critical_count }}"
          NPM_HIGH="${{ needs.scan-npm.outputs.high_count }}"

          echo "## Dependency Security Scan Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### NuGet Packages" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Critical | ${NUGET_CRITICAL:-0} |" >> $GITHUB_STEP_SUMMARY
          echo "| High | ${NUGET_HIGH:-0} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          echo "### npm Packages" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Critical | ${NPM_CRITICAL:-0} |" >> $GITHUB_STEP_SUMMARY
          echo "| High | ${NPM_HIGH:-0} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Determine overall status
          TOTAL_CRITICAL=$((${NUGET_CRITICAL:-0} + ${NPM_CRITICAL:-0}))
          TOTAL_HIGH=$((${NUGET_HIGH:-0} + ${NPM_HIGH:-0}))

          if [[ $TOTAL_CRITICAL -gt 0 ]]; then
            echo "### ⚠️ Critical Vulnerabilities Found" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "Please review and remediate critical vulnerabilities before merging." >> $GITHUB_STEP_SUMMARY
          elif [[ $TOTAL_HIGH -gt 0 ]]; then
            echo "### ⚠️ High Severity Vulnerabilities Found" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "Please review high severity vulnerabilities." >> $GITHUB_STEP_SUMMARY
          else
            echo "### ✅ No Critical or High Vulnerabilities" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Check gate
        if: github.event.inputs.fail_on_vulnerabilities == 'true' || github.event_name == 'pull_request'
        run: |
          NUGET_CRITICAL="${{ needs.scan-nuget.outputs.critical_count }}"
          NPM_CRITICAL="${{ needs.scan-npm.outputs.critical_count }}"

          TOTAL_CRITICAL=$((${NUGET_CRITICAL:-0} + ${NPM_CRITICAL:-0}))

          if [[ $TOTAL_CRITICAL -gt 0 ]]; then
            echo "::error::$TOTAL_CRITICAL critical vulnerabilities found in dependencies"
            exit 1
          fi

          echo "Security scan passed - no critical vulnerabilities"
@@ -1,205 +0,0 @@
# .gitea/workflows/deploy-keyless-verify.yml
# Verification gate for deployments using keyless signatures
#
# This workflow verifies all required attestations before
# allowing deployment to production environments.
#
# Dogfooding the StellaOps keyless verification feature.

name: Deployment Verification Gate

on:
  workflow_dispatch:
    inputs:
      image:
        description: 'Image to deploy (with digest)'
        required: true
        type: string
      environment:
        description: 'Target environment'
        required: true
        type: choice
        options:
          - staging
          - production
      require_sbom:
        description: 'Require SBOM attestation'
        required: false
        default: true
        type: boolean
      require_verdict:
        description: 'Require policy verdict attestation'
        required: false
        default: true
        type: boolean

env:
  STELLAOPS_URL: "https://api.stella-ops.internal"

jobs:
  pre-flight:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    outputs:
      identity-pattern: ${{ steps.config.outputs.identity-pattern }}

    steps:
      - name: Configure Identity Constraints
        id: config
        run: |
          ENV="${{ github.event.inputs.environment }}"

          if [[ "$ENV" == "production" ]]; then
            # Production: only allow signed releases from main or tags
            PATTERN="stella-ops.org/git.stella-ops.org:ref:refs/(heads/main|tags/v.*)"
          else
            # Staging: allow any branch
            PATTERN="stella-ops.org/git.stella-ops.org:ref:refs/heads/.*"
          fi

          echo "identity-pattern=${PATTERN}" >> $GITHUB_OUTPUT
          echo "Using identity pattern: ${PATTERN}"

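  # Illustrative matches for the identity patterns above (common prefix
  # stella-ops.org/git.stella-ops.org:ref: omitted):
  #   refs/heads/main      -> accepted by both patterns
  #   refs/tags/v1.2.3     -> accepted by the production pattern
  #   refs/heads/feature-x -> accepted by the staging pattern only
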
  verify-attestations:
    needs: pre-flight
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: read

    outputs:
      verified: ${{ steps.verify.outputs.verified }}
      attestation-count: ${{ steps.verify.outputs.count }}

    steps:
      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Verify All Attestations
        id: verify
        run: |
          set -euo pipefail

          IMAGE="${{ github.event.inputs.image }}"
          IDENTITY="${{ needs.pre-flight.outputs.identity-pattern }}"
          ISSUER="https://git.stella-ops.org"

          VERIFY_ARGS=(
            --artifact "${IMAGE}"
            --certificate-identity "${IDENTITY}"
            --certificate-oidc-issuer "${ISSUER}"
            --require-rekor
            --output json
          )

          if [[ "${{ github.event.inputs.require_sbom }}" == "true" ]]; then
            VERIFY_ARGS+=(--require-sbom)
          fi

          if [[ "${{ github.event.inputs.require_verdict }}" == "true" ]]; then
            VERIFY_ARGS+=(--require-verdict)
          fi

          echo "Verifying: ${IMAGE}"
          echo "Identity: ${IDENTITY}"
          echo "Issuer: ${ISSUER}"

          RESULT=$(stella attest verify "${VERIFY_ARGS[@]}" 2>&1)
          echo "$RESULT" | jq .

          VERIFIED=$(echo "$RESULT" | jq -r '.valid')
          COUNT=$(echo "$RESULT" | jq -r '.attestationCount')

          echo "verified=${VERIFIED}" >> $GITHUB_OUTPUT
          echo "count=${COUNT}" >> $GITHUB_OUTPUT

          if [[ "$VERIFIED" != "true" ]]; then
            echo "::error::Verification failed"
            echo "$RESULT" | jq -r '.issues[]? | "::error::\(.code): \(.message)"'
            exit 1
          fi

          echo "Verification passed with ${COUNT} attestations"

  verify-provenance:
    needs: pre-flight
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: read

    outputs:
      valid: ${{ steps.verify.outputs.valid }}

    steps:
      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Verify Build Provenance
        id: verify
        run: |
          IMAGE="${{ github.event.inputs.image }}"

          echo "Verifying provenance for: ${IMAGE}"

          RESULT=$(stella provenance verify \
            --artifact "${IMAGE}" \
            --require-source-repo "stella-ops.org/git.stella-ops.org" \
            --output json)

          echo "$RESULT" | jq .

          VALID=$(echo "$RESULT" | jq -r '.valid')
          echo "valid=${VALID}" >> $GITHUB_OUTPUT

          if [[ "$VALID" != "true" ]]; then
            echo "::error::Provenance verification failed"
            exit 1
          fi

  create-audit-entry:
    needs: [verify-attestations, verify-provenance]
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}

    steps:
      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Log Deployment Verification
        run: |
          stella audit log \
            --event "deployment-verification" \
            --artifact "${{ github.event.inputs.image }}" \
            --environment "${{ github.event.inputs.environment }}" \
            --verified true \
            --attestations "${{ needs.verify-attestations.outputs.attestation-count }}" \
            --provenance-valid "${{ needs.verify-provenance.outputs.valid }}" \
            --actor "${{ github.actor }}" \
            --workflow "${{ github.workflow }}" \
            --run-id "${{ github.run_id }}"

  approve-deployment:
    needs: [verify-attestations, verify-provenance, create-audit-entry]
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    environment: ${{ github.event.inputs.environment }}

    steps:
      - name: Deployment Approved
        run: |
          cat >> $GITHUB_STEP_SUMMARY << EOF
          ## Deployment Approved

          | Field | Value |
          |-------|-------|
          | **Image** | \`${{ github.event.inputs.image }}\` |
          | **Environment** | ${{ github.event.inputs.environment }} |
          | **Attestations** | ${{ needs.verify-attestations.outputs.attestation-count }} |
          | **Provenance Valid** | ${{ needs.verify-provenance.outputs.valid }} |
          | **Approved By** | @${{ github.actor }} |

          Deployment can now proceed.
          EOF

@@ -1,331 +0,0 @@
# .gitea/workflows/determinism-gate.yml
# Determinism gate for artifact reproducibility validation
# Implements Tasks 10-11 from SPRINT 5100.0007.0003
# Updated: Task 13 from SPRINT 8200.0001.0003 - Add schema validation dependency

name: Determinism Gate

on:
  push:
    branches: [ main ]
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/StellaOps.Integration.Determinism/**'
      - 'src/__Tests/baselines/determinism/**'
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
      - 'docs/schemas/**'
      - '.gitea/workflows/determinism-gate.yml'
  pull_request:
    branches: [ main ]
    types: [ closed ]
  workflow_dispatch:
    inputs:
      update_baselines:
        description: 'Update baselines with current hashes'
        required: false
        default: false
        type: boolean
      fail_on_missing:
        description: 'Fail if baselines are missing'
        required: false
        default: false
        type: boolean
      skip_schema_validation:
        description: 'Skip schema validation step'
        required: false
        default: false
        type: boolean

env:
  DOTNET_VERSION: '10.0.100'
  BUILD_CONFIGURATION: Release
  DETERMINISM_OUTPUT_DIR: ${{ github.workspace }}/out/determinism
  BASELINE_DIR: src/__Tests/baselines/determinism

jobs:
  # ===========================================================================
  # Schema Validation Gate (runs before determinism checks)
  # ===========================================================================
  schema-validation:
    name: Schema Validation
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    if: github.event.inputs.skip_schema_validation != 'true'
    timeout-minutes: 10

    env:
      SBOM_UTILITY_VERSION: "0.16.0"

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install sbom-utility
        run: |
          curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
          sudo mv sbom-utility /usr/local/bin/
          sbom-utility --version

      - name: Validate CycloneDX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/fixtures"
            "src/__Tests/__Datasets/seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              # Skip invalid fixtures directory (used for negative testing)
              while IFS= read -r -d '' file; do
                if [[ "$file" == *"/invalid/"* ]]; then
                  continue
                fi
                if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"
                  if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
                    echo "✅ PASS: $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "CycloneDX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED CycloneDX fixtures failed validation"
            exit 1
          fi

      - name: Schema validation summary
        run: |
          echo "## Schema Validation" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "✅ All SBOM fixtures passed schema validation" >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # Determinism Validation Gate
  # ===========================================================================
  determinism-gate:
    needs: [schema-validation]
    if: always() && (needs.schema-validation.result == 'success' || needs.schema-validation.result == 'skipped')
    name: Determinism Validation
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 30

    outputs:
      status: ${{ steps.check.outputs.status }}
      drifted: ${{ steps.check.outputs.drifted }}
      missing: ${{ steps.check.outputs.missing }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Create output directories
        run: |
          mkdir -p "$DETERMINISM_OUTPUT_DIR"
          mkdir -p "$DETERMINISM_OUTPUT_DIR/hashes"
          mkdir -p "$DETERMINISM_OUTPUT_DIR/manifests"

      - name: Run determinism tests
        id: tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism/StellaOps.Integration.Determinism.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=determinism-tests.trx" \
            --results-directory "$DETERMINISM_OUTPUT_DIR" \
            --verbosity normal
        env:
          DETERMINISM_OUTPUT_DIR: ${{ env.DETERMINISM_OUTPUT_DIR }}
          UPDATE_BASELINES: ${{ github.event.inputs.update_baselines || 'false' }}
          FAIL_ON_MISSING: ${{ github.event.inputs.fail_on_missing || 'false' }}

      - name: Generate determinism summary
        id: check
        run: |
          # Create determinism.json summary. The EOF marker is deliberately
          # unquoted so $(date ...) expands; a quoted 'EOF' would leave the
          # command substitution as a literal string in the JSON.
          cat > "$DETERMINISM_OUTPUT_DIR/determinism.json" << EOF
          {
            "schemaVersion": "1.0",
            "generatedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "sourceRef": "${{ github.sha }}",
            "ciRunId": "${{ github.run_id }}",
            "status": "pass",
            "statistics": {
              "total": 0,
              "matched": 0,
              "drifted": 0,
              "missing": 0
            }
          }
          EOF

          # Output status for downstream jobs
          echo "status=pass" >> $GITHUB_OUTPUT
          echo "drifted=0" >> $GITHUB_OUTPUT
          echo "missing=0" >> $GITHUB_OUTPUT

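      # Heredoc quoting recap for the step above: `<< 'EOF'` suppresses all
      # expansion and would leave $(date ...) literal in the JSON, while the
      # unquoted `<< EOF` form lets the shell substitute it at generation time.
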
      - name: Upload determinism artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: determinism-artifacts
          path: |
            ${{ env.DETERMINISM_OUTPUT_DIR }}/determinism.json
            ${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/manifests/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/*.trx
          if-no-files-found: warn
          retention-days: 30

      - name: Upload hash files as individual artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: determinism-hashes
          path: ${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
          if-no-files-found: ignore
          retention-days: 30

      - name: Generate summary
        if: always()
        run: |
          echo "## Determinism Gate Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Status | ${{ steps.check.outputs.status || 'unknown' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Source Ref | \`${{ github.sha }}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| CI Run | ${{ github.run_id }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Artifact Summary" >> $GITHUB_STEP_SUMMARY
          echo "- **Drifted**: ${{ steps.check.outputs.drifted || '0' }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Missing Baselines**: ${{ steps.check.outputs.missing || '0' }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "See \`determinism.json\` artifact for full details." >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # Baseline Update (only on workflow_dispatch with update_baselines=true)
  # ===========================================================================
  update-baselines:
    name: Update Baselines
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [schema-validation, determinism-gate]
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.update_baselines == 'true'

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Download determinism artifacts
        uses: actions/download-artifact@v4
        with:
          name: determinism-hashes
          path: new-hashes

      - name: Update baseline files
        run: |
          mkdir -p "$BASELINE_DIR"
          if [ -d "new-hashes" ]; then
            cp -r new-hashes/* "$BASELINE_DIR/" || true
            echo "Updated baseline files from new-hashes"
          fi

      - name: Commit baseline updates
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

          git add "$BASELINE_DIR"

          if git diff --cached --quiet; then
            echo "No baseline changes to commit"
          else
            git commit -m "chore: update determinism baselines

          Updated by Determinism Gate workflow run #${{ github.run_id }}
          Source: ${{ github.sha }}

          Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"

            git push
            echo "Baseline updates committed and pushed"
          fi

  # ===========================================================================
  # Drift Detection Gate (fails workflow if drift detected)
  # ===========================================================================
  drift-check:
    name: Drift Detection Gate
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [schema-validation, determinism-gate]
    if: always()

    steps:
      - name: Check for drift
        run: |
          SCHEMA_STATUS="${{ needs.schema-validation.result || 'skipped' }}"
          DRIFTED="${{ needs.determinism-gate.outputs.drifted || '0' }}"
          STATUS="${{ needs.determinism-gate.outputs.status || 'unknown' }}"

          echo "Schema Validation: $SCHEMA_STATUS"
          echo "Determinism Status: $STATUS"
          echo "Drifted Artifacts: $DRIFTED"

          # Fail if schema validation failed
          if [ "$SCHEMA_STATUS" = "failure" ]; then
            echo "::error::Schema validation failed! Fix SBOM schema issues before determinism check."
            exit 1
          fi

          if [ "$STATUS" = "fail" ] || [ "$DRIFTED" != "0" ]; then
            echo "::error::Determinism drift detected! $DRIFTED artifact(s) have changed."
            echo "Run workflow with 'update_baselines=true' to update baselines if changes are intentional."
            exit 1
          fi

          echo "No determinism drift detected. All artifacts match baselines."

      - name: Gate status
        run: |
          echo "## Drift Detection Gate" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Schema Validation: ${{ needs.schema-validation.result || 'skipped' }}" >> $GITHUB_STEP_SUMMARY
          echo "Determinism Status: ${{ needs.determinism-gate.outputs.status || 'pass' }}" >> $GITHUB_STEP_SUMMARY

@@ -1,32 +0,0 @@
name: devportal-offline
on:
  schedule:
    - cron: "0 5 * * *"
  workflow_dispatch: {}

jobs:
  build-offline:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

      # setup-node's pnpm cache resolution needs pnpm on PATH first
      - name: Enable corepack (pnpm)
        run: corepack enable

      - name: Setup Node (corepack/pnpm)
        uses: actions/setup-node@v4
        with:
          node-version: "18"
          cache: "pnpm"

      - name: Build devportal (offline bundle)
        run: |
          chmod +x scripts/devportal/build-devportal.sh
          scripts/devportal/build-devportal.sh

      - name: Upload bundle
        uses: actions/upload-artifact@v4
        with:
          name: devportal-offline
          path: out/devportal/**.tgz
@@ -1,218 +0,0 @@
name: Regional Docker Builds

on:
  push:
    branches:
      - main
    paths:
      - 'devops/docker/**'
      - 'devops/compose/docker-compose.*.yml'
      - 'etc/appsettings.crypto.*.yaml'
      - 'etc/crypto-plugins-manifest.json'
      - 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
      - '.gitea/workflows/docker-regional-builds.yml'
  pull_request:
    paths:
      - 'devops/docker/**'
      - 'devops/compose/docker-compose.*.yml'
      - 'etc/appsettings.crypto.*.yaml'
      - 'etc/crypto-plugins-manifest.json'
      - 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
  workflow_dispatch:

env:
  REGISTRY: registry.stella-ops.org
  PLATFORM_IMAGE_NAME: stellaops/platform
  DOCKER_BUILDKIT: 1

jobs:
  # Build the base platform image containing all crypto plugins
  build-platform:
    name: Build Platform Image (All Plugins)
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Extract metadata (tags, labels)
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push platform image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./devops/docker/Dockerfile.platform
          target: runtime-base
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache
          cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache,mode=max
          build-args: |
            BUILDKIT_INLINE_CACHE=1

      - name: Export platform image tag
        id: platform
        run: |
          echo "tag=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:${{ github.sha }}" >> $GITHUB_OUTPUT

    outputs:
      platform-tag: ${{ steps.platform.outputs.tag }}

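  # The registry-backed cache on the platform build (cache-from/cache-to with
  # type=registry and mode=max) keeps intermediate layers in the :buildcache
  # tag, so repeat builds and the per-service builds below can reuse them.
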
  # Build regional profile images for each service
  build-regional-profiles:
    name: Build Regional Profiles
    runs-on: ubuntu-latest
    needs: build-platform
    permissions:
      contents: read
      packages: write

    strategy:
      fail-fast: false
      matrix:
        profile: [international, russia, eu, china]
        service:
          - authority
          - signer
          - attestor
          - concelier
          - scanner
          - excititor
          - policy
          - scheduler
          - notify
          - zastava
          - gateway
          - airgap-importer
          - airgap-exporter
          - cli

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/stellaops/${{ matrix.service }}
          tags: |
            type=raw,value=${{ matrix.profile }},enable={{is_default_branch}}
            type=raw,value=${{ matrix.profile }}-${{ github.sha }}
            type=raw,value=${{ matrix.profile }}-pr-${{ github.event.pull_request.number }},enable=${{ github.event_name == 'pull_request' }}

      - name: Build and push regional service image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./devops/docker/Dockerfile.crypto-profile
          target: ${{ matrix.service }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            CRYPTO_PROFILE=${{ matrix.profile }}
            BASE_IMAGE=${{ needs.build-platform.outputs.platform-tag }}
            SERVICE_NAME=${{ matrix.service }}

  # Validate regional configurations
  validate-configs:
    name: Validate Regional Configurations
    runs-on: ubuntu-latest
    needs: build-regional-profiles

    strategy:
      fail-fast: false
      matrix:
        profile: [international, russia, eu, china]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate crypto configuration YAML
        run: |
          # Install yq for YAML validation
          sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
          sudo chmod +x /usr/local/bin/yq

          # Validate YAML syntax
          yq eval 'true' etc/appsettings.crypto.${{ matrix.profile }}.yaml

      - name: Validate docker-compose file
        run: |
          docker compose -f devops/compose/docker-compose.${{ matrix.profile }}.yml config --quiet

      - name: Check required crypto configuration fields
        run: |
          # Verify ManifestPath is set
          MANIFEST_PATH=$(yq eval '.StellaOps.Crypto.Plugins.ManifestPath' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
          if [ -z "$MANIFEST_PATH" ] || [ "$MANIFEST_PATH" == "null" ]; then
            echo "Error: ManifestPath not set in ${{ matrix.profile }} configuration"
            exit 1
          fi

          # Verify at least one plugin is enabled
          ENABLED_COUNT=$(yq eval '.StellaOps.Crypto.Plugins.Enabled | length' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
          if [ "$ENABLED_COUNT" -eq 0 ]; then
            echo "Error: No plugins enabled in ${{ matrix.profile }} configuration"
            exit 1
          fi

          echo "Configuration valid: ${{ matrix.profile }}"

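  # A minimal appsettings.crypto.<profile>.yaml satisfying the checks above
  # (hypothetical values; only the two yq paths are asserted):
  #   StellaOps:
  #     Crypto:
  #       Plugins:
  #         ManifestPath: /etc/stellaops/crypto-plugins-manifest.json
  #         Enabled:
  #           - default-plugin
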
  # Summary job
  summary:
    name: Build Summary
    runs-on: ubuntu-latest
    needs: [build-platform, build-regional-profiles, validate-configs]
    if: always()

    steps:
      - name: Generate summary
        run: |
          echo "## Regional Docker Builds Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Platform image built successfully: ${{ needs.build-platform.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "Regional profiles built: ${{ needs.build-regional-profiles.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "Configurations validated: ${{ needs.validate-configs.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Build Details" >> $GITHUB_STEP_SUMMARY
          echo "- Commit: ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
          echo "- Branch: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
          echo "- Event: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
@@ -1,102 +0,0 @@
# .gitea/workflows/docs.yml
# Documentation quality checks and preview artefacts

name: Docs CI

on:
  push:
    paths:
      - 'docs/**'
      - 'scripts/render_docs.py'
      - '.gitea/workflows/docs.yml'
  pull_request:
    paths:
      - 'docs/**'
      - 'scripts/render_docs.py'
      - '.gitea/workflows/docs.yml'
  workflow_dispatch: {}

env:
  NODE_VERSION: '20'
  PYTHON_VERSION: '3.11'

jobs:
  lint-and-preview:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-preview
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
- name: Install documentation toolchain
|
||||
run: |
|
||||
npm install --no-save markdown-link-check remark-cli remark-preset-lint-recommended ajv ajv-cli ajv-formats
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.100'
|
||||
|
||||
- name: Link check
|
||||
run: |
|
||||
find docs -name '*.md' -print0 | \
|
||||
xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'
|
||||
|
||||
- name: Remark lint
|
||||
run: |
|
||||
npx remark docs -qf
|
||||
|
||||
- name: Validate event schemas
|
||||
run: |
|
||||
set -euo pipefail
|
||||
for schema in docs/events/*.json; do
|
||||
npx ajv compile -c ajv-formats -s "$schema"
|
||||
done
|
||||
for sample in docs/events/samples/*.json; do
|
||||
schema_name=$(basename "$sample" .sample.json)
|
||||
schema_path="docs/events/${schema_name}.json"
|
||||
if [ ! -f "$schema_path" ]; then
|
||||
echo "Missing schema for sample ${sample}" >&2
|
||||
exit 1
|
||||
fi
|
||||
npx ajv validate -c ajv-formats -s "$schema_path" -d "$sample"
|
||||
done
|
||||
|
||||
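
      # The loop above relies on the pairing convention
      # docs/events/samples/<name>.sample.json <-> docs/events/<name>.json.
      # A hedged local spot-check for one pair (file name illustrative):
      #
      #   npx ajv validate -c ajv-formats \
      #     -s docs/events/scanner.report.ready.json \
      #     -d docs/events/samples/scanner.report.ready.sample.json
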
      - name: Run Notify schema validation tests
        run: |
          dotnet test src/Notify/__Tests/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj --configuration Release --nologo

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install documentation dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install markdown pygments

      - name: Render documentation preview bundle
        run: |
          python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean

      - name: Upload documentation preview
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: feedser-docs-preview
          path: ${{ env.DOCS_OUTPUT_DIR }}
          retention-days: 7
@@ -1,473 +0,0 @@
# =============================================================================
# e2e-reproducibility.yml
# Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
# Tasks: E2E-8200-015 to E2E-8200-024 - CI Workflow for E2E Reproducibility
# Description: CI workflow for end-to-end reproducibility verification.
# Runs tests across multiple platforms and compares results.
# =============================================================================

name: E2E Reproducibility

on:
  pull_request:
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
      - 'src/__Tests/fixtures/**'
      - '.gitea/workflows/e2e-reproducibility.yml'
  push:
    branches:
      - main
      - develop
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
  schedule:
    # Nightly at 2am UTC
    - cron: '0 2 * * *'
  workflow_dispatch:
    inputs:
      run_cross_platform:
        description: 'Run cross-platform tests'
        type: boolean
        default: false
      update_baseline:
        description: 'Update golden baseline (requires approval)'
        type: boolean
        default: false

env:
  DOTNET_VERSION: '10.0.x'
  DOTNET_NOLOGO: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true

jobs:
  # =============================================================================
  # Job: Run E2E reproducibility tests on primary platform
  # =============================================================================
  reproducibility-ubuntu:
    name: E2E Reproducibility (Ubuntu)
    runs-on: ubuntu-latest
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: test_user
          POSTGRES_PASSWORD: test_password
          POSTGRES_DB: stellaops_e2e_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
            --no-build \
            -c Release \
            --logger "trx;LogFileName=e2e-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./TestResults \
            -- RunConfiguration.CollectSourceInformation=true

          # Extract hashes from test output for cross-platform comparison
          echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
        env:
          ConnectionStrings__ScannerDb: "Host=localhost;Port=5432;Database=stellaops_e2e_test;Username=test_user;Password=test_password"
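
      # The extraction above assumes the E2E suite writes each hash as a single
      # hex digest to ./TestResults/{verdict,manifest,envelope}_hash.txt; missing
      # files degrade to the NOT_FOUND sentinel instead of failing the step.
      # A hedged local check of that contract (SHA-256 length is an assumption):
      #
      #   grep -Eq '^[0-9a-f]{64}$' ./TestResults/verdict_hash.txt \
      #     && echo "verdict hash looks like a SHA-256 digest"
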
      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-ubuntu
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-ubuntu
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14

  # =============================================================================
  # Job: Run E2E tests on Windows (conditional)
  # =============================================================================
  reproducibility-windows:
    name: E2E Reproducibility (Windows)
    runs-on: windows-latest
    if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj `
            --no-build `
            -c Release `
            --logger "trx;LogFileName=e2e-results.trx" `
            --logger "console;verbosity=detailed" `
            --results-directory ./TestResults

          # Extract hashes for comparison
          $verdictHash = Get-Content -Path ./TestResults/verdict_hash.txt -ErrorAction SilentlyContinue
          $manifestHash = Get-Content -Path ./TestResults/manifest_hash.txt -ErrorAction SilentlyContinue
          $envelopeHash = Get-Content -Path ./TestResults/envelope_hash.txt -ErrorAction SilentlyContinue

          "verdict_hash=$($verdictHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
          "manifest_hash=$($manifestHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
          "envelope_hash=$($envelopeHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-windows
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-windows
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14

  # =============================================================================
  # Job: Run E2E tests on macOS (conditional)
  # =============================================================================
  reproducibility-macos:
    name: E2E Reproducibility (macOS)
    runs-on: macos-latest
    if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
            --no-build \
            -c Release \
            --logger "trx;LogFileName=e2e-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./TestResults

          # Extract hashes for comparison
          echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-macos
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-macos
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14

  # =============================================================================
  # Job: Cross-platform hash comparison
  # =============================================================================
  cross-platform-compare:
    name: Cross-Platform Hash Comparison
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu, reproducibility-windows, reproducibility-macos]
    if: always() && (github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download Ubuntu hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-ubuntu
          path: ./hashes/ubuntu

      - name: Download Windows hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-windows
          path: ./hashes/windows
        continue-on-error: true

      - name: Download macOS hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-macos
          path: ./hashes/macos
        continue-on-error: true

      - name: Compare hashes across platforms
        run: |
          echo "=== Cross-Platform Hash Comparison ==="
          echo ""

          ubuntu_verdict=$(cat ./hashes/ubuntu/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          windows_verdict=$(cat ./hashes/windows/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          macos_verdict=$(cat ./hashes/macos/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")

          echo "Verdict Hashes:"
          echo "  Ubuntu:  $ubuntu_verdict"
          echo "  Windows: $windows_verdict"
          echo "  macOS:   $macos_verdict"
          echo ""

          ubuntu_manifest=$(cat ./hashes/ubuntu/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          windows_manifest=$(cat ./hashes/windows/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          macos_manifest=$(cat ./hashes/macos/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")

          echo "Manifest Hashes:"
          echo "  Ubuntu:  $ubuntu_manifest"
          echo "  Windows: $windows_manifest"
          echo "  macOS:   $macos_manifest"
          echo ""

          # Check if all available hashes match
          all_match=true

          if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$windows_verdict" != "NOT_AVAILABLE" ]; then
            if [ "$ubuntu_verdict" != "$windows_verdict" ]; then
              echo "❌ FAIL: Ubuntu and Windows verdict hashes differ!"
              all_match=false
            fi
          fi

          if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$macos_verdict" != "NOT_AVAILABLE" ]; then
            if [ "$ubuntu_verdict" != "$macos_verdict" ]; then
              echo "❌ FAIL: Ubuntu and macOS verdict hashes differ!"
              all_match=false
            fi
          fi

          if [ "$all_match" = true ]; then
            echo "✅ All available platform hashes match!"
          else
            echo ""
            echo "Cross-platform reproducibility verification FAILED."
            exit 1
          fi
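
      # Note: the gate above enforces only verdict hashes; manifest hashes are
      # printed but never compared. A hedged sketch of the analogous manifest
      # check, should it be wanted (same variables as above):
      #
      #   if [ "$ubuntu_manifest" != "NOT_AVAILABLE" ] && [ "$windows_manifest" != "NOT_AVAILABLE" ] \
      #      && [ "$ubuntu_manifest" != "$windows_manifest" ]; then
      #     echo "❌ FAIL: Ubuntu and Windows manifest hashes differ!"
      #     all_match=false
      #   fi
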
      - name: Create comparison report
        run: |
          cat > ./cross-platform-report.md << 'EOF'
          # Cross-Platform Reproducibility Report

          ## Test Run Information
          - **Workflow Run:** ${{ github.run_id }}
          - **Trigger:** ${{ github.event_name }}
          - **Commit:** ${{ github.sha }}
          - **Branch:** ${{ github.ref_name }}

          ## Hash Comparison

          | Platform | Verdict Hash | Manifest Hash | Status |
          |----------|--------------|---------------|--------|
          | Ubuntu | ${{ needs.reproducibility-ubuntu.outputs.verdict_hash }} | ${{ needs.reproducibility-ubuntu.outputs.manifest_hash }} | ✅ |
          | Windows | ${{ needs.reproducibility-windows.outputs.verdict_hash }} | ${{ needs.reproducibility-windows.outputs.manifest_hash }} | ${{ needs.reproducibility-windows.result == 'success' && '✅' || '⚠️' }} |
          | macOS | ${{ needs.reproducibility-macos.outputs.verdict_hash }} | ${{ needs.reproducibility-macos.outputs.manifest_hash }} | ${{ needs.reproducibility-macos.result == 'success' && '✅' || '⚠️' }} |

          ## Conclusion

          Cross-platform reproducibility: **${{ job.status == 'success' && 'VERIFIED' || 'NEEDS REVIEW' }}**
          EOF

          cat ./cross-platform-report.md

      - name: Upload comparison report
        uses: actions/upload-artifact@v4
        with:
          name: cross-platform-report
          path: ./cross-platform-report.md
          retention-days: 30

  # =============================================================================
  # Job: Golden baseline comparison
  # =============================================================================
  golden-baseline:
    name: Golden Baseline Verification
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download current hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-ubuntu
          path: ./current

      - name: Compare with golden baseline
        run: |
          echo "=== Golden Baseline Comparison ==="

          baseline_file="./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json"

          if [ ! -f "$baseline_file" ]; then
            echo "⚠️ Golden baseline not found. Skipping comparison."
            echo "To create baseline, run with update_baseline=true"
            exit 0
          fi

          current_verdict=$(cat ./current/verdict_hash.txt 2>/dev/null || echo "NOT_FOUND")
          baseline_verdict=$(jq -r '.verdict_hash' "$baseline_file" 2>/dev/null || echo "NOT_FOUND")

          echo "Current verdict hash:  $current_verdict"
          echo "Baseline verdict hash: $baseline_verdict"

          if [ "$current_verdict" != "$baseline_verdict" ]; then
            echo ""
            echo "❌ FAIL: Current run does not match golden baseline!"
            echo ""
            echo "This may indicate:"
            echo "  1. An intentional change requiring baseline update"
            echo "  2. An unintentional regression in reproducibility"
            echo ""
            echo "To update baseline, run workflow with update_baseline=true"
            exit 1
          fi

          echo ""
          echo "✅ Current run matches golden baseline!"

      - name: Update golden baseline (if requested)
        if: github.event.inputs.update_baseline == 'true'
        run: |
          mkdir -p ./src/__Tests/__Benchmarks/determinism/golden-baseline

          cat > ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json << EOF
          {
            "verdict_hash": "$(cat ./current/verdict_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "manifest_hash": "$(cat ./current/manifest_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "envelope_hash": "$(cat ./current/envelope_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "updated_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "updated_by": "${{ github.actor }}",
            "commit": "${{ github.sha }}"
          }
          EOF

          echo "Golden baseline updated:"
          cat ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json

      - name: Commit baseline update
        if: github.event.inputs.update_baseline == 'true'
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "chore: Update E2E reproducibility golden baseline"
          file_pattern: src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json

  # =============================================================================
  # Job: Status check gate
  # =============================================================================
  reproducibility-gate:
    name: Reproducibility Gate
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu, golden-baseline]
    if: always()

    steps:
      - name: Check reproducibility status
        run: |
          ubuntu_status="${{ needs.reproducibility-ubuntu.result }}"
          baseline_status="${{ needs.golden-baseline.result }}"

          echo "Ubuntu E2E tests: $ubuntu_status"
          echo "Golden baseline:  $baseline_status"

          if [ "$ubuntu_status" != "success" ]; then
            echo "❌ E2E reproducibility tests failed!"
            exit 1
          fi

          if [ "$baseline_status" == "failure" ]; then
            echo "⚠️ Golden baseline comparison failed (may require review)"
            # Don't fail the gate for baseline mismatch - it may be intentional
          fi

          echo "✅ Reproducibility gate passed!"
@@ -1,327 +0,0 @@
name: eBPF Reachability Determinism

on:
  pull_request:
    paths:
      - 'src/Signals/__Libraries/StellaOps.Signals.Ebpf/**'
      - 'src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/**'
      - 'tests/reachability/**'
      - '.gitea/workflows/ebpf-reachability-determinism.yml'
      - 'scripts/ebpf/**'
  push:
    branches: [ main ]
    paths:
      - 'src/Signals/__Libraries/StellaOps.Signals.Ebpf/**'
      - 'src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/**'
      - 'tests/reachability/**'
      - '.gitea/workflows/ebpf-reachability-determinism.yml'
      - 'scripts/ebpf/**'
  workflow_dispatch: {}

jobs:
  # ============================================================================
  # Multi-Kernel eBPF CO-RE Testing (3 major kernel versions)
  # ============================================================================
  multi-kernel-tests:
    strategy:
      fail-fast: false
      matrix:
        include:
          # Kernel 5.4 LTS (Ubuntu 20.04)
          - kernel_version: "5.4"
            distro: "focal"
            image: "ubuntu:20.04"
            dotnet_install: "true"
            runner: ${{ vars.KERNEL_5_4_RUNNER || 'ubuntu-latest' }}
          # Kernel 5.15 LTS (Ubuntu 22.04)
          - kernel_version: "5.15"
            distro: "jammy"
            image: "ubuntu:22.04"
            dotnet_install: "true"
            runner: ${{ vars.KERNEL_5_15_RUNNER || 'ubuntu-22.04' }}
          # Kernel 6.x (Ubuntu 24.04)
          - kernel_version: "6.x"
            distro: "noble"
            image: "ubuntu:24.04"
            dotnet_install: "true"
            runner: ${{ vars.KERNEL_6_X_RUNNER || 'ubuntu-24.04' }}
    runs-on: ${{ matrix.runner }}
    name: "Kernel ${{ matrix.kernel_version }} (${{ matrix.distro }})"
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_UPDATE_FIXTURES: "false"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Report kernel version
        run: |
          echo "=============================================="
          echo "Kernel ${{ matrix.kernel_version }} Test (${{ matrix.distro }})"
          echo "=============================================="
          uname -a
          head -5 /etc/os-release
          echo ""
          echo "BTF availability:"
          if [ -f /sys/kernel/btf/vmlinux ]; then
            echo "  Built-in BTF: YES"
            ls -la /sys/kernel/btf/vmlinux
          else
            echo "  Built-in BTF: NO (external BTF may be required)"
          fi
          echo ""
          echo "eBPF kernel config:"
          if [ -f /boot/config-$(uname -r) ]; then
            grep -E "CONFIG_BPF|CONFIG_DEBUG_INFO_BTF" /boot/config-$(uname -r) 2>/dev/null || echo "  Config not readable"
          else
            echo "  Kernel config not available"
          fi

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.x

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.nuget/packages
            .nuget/packages
          key: ebpf-${{ matrix.distro }}-nuget-${{ hashFiles('src/Signals/**/*.csproj') }}

      - name: Restore
        run: dotnet restore src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj --configfile nuget.config

      - name: Build
        run: dotnet build src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj -c Release --no-restore

      - name: Run all eBPF tests
        run: |
          echo "Running all eBPF tests on kernel ${{ matrix.kernel_version }}..."
          dotnet test src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
            -c Release --no-build \
            --logger "trx;LogFileName=ebpf-tests-${{ matrix.distro }}.trx" \
            --logger "console;verbosity=minimal"

      - name: Record kernel compatibility
        run: |
          echo "Kernel ${{ matrix.kernel_version }} (${{ matrix.distro }}): PASSED" >> $GITHUB_STEP_SUMMARY
          echo "Host kernel: $(uname -r)" >> $GITHUB_STEP_SUMMARY

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: ebpf-test-results-kernel-${{ matrix.kernel_version }}
          path: |
            **/ebpf-tests-${{ matrix.distro }}.trx
          retention-days: 7

  # ============================================================================
  # Docker-based Multi-Kernel Tests (for environments without native runners)
  # ============================================================================
  docker-kernel-tests:
    strategy:
      fail-fast: false
      matrix:
        include:
          # Ubuntu 20.04 (Kernel 5.4 compatible)
          - kernel_version: "5.4"
            distro: "focal"
            base_image: "ubuntu:20.04"
          # Ubuntu 22.04 (Kernel 5.15 compatible)
          - kernel_version: "5.15"
            distro: "jammy"
            base_image: "ubuntu:22.04"
          # Ubuntu 24.04 (Kernel 6.x compatible)
          - kernel_version: "6.x"
            distro: "noble"
            base_image: "ubuntu:24.04"
    runs-on: ubuntu-latest
    name: "Docker: Kernel ${{ matrix.kernel_version }} (${{ matrix.distro }})"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build and test in Docker
        run: |
          chmod +x scripts/ebpf/docker-kernel-test.sh
          scripts/ebpf/docker-kernel-test.sh "${{ matrix.base_image }}" "${{ matrix.kernel_version }}" "${{ matrix.distro }}"

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: docker-test-results-${{ matrix.distro }}
          path: |
            out/ebpf-tests-${{ matrix.distro }}.trx
          retention-days: 7

  # ============================================================================
  # Cross-Distribution Tests (glibc vs musl)
  # ============================================================================
  cross-distro-tests:
    strategy:
      fail-fast: false
      matrix:
        include:
          - image: "mcr.microsoft.com/dotnet/sdk:10.0"
            distro: "ubuntu-glibc"
            libc: "glibc"
          - image: "mcr.microsoft.com/dotnet/sdk:10.0-alpine"
            distro: "alpine-musl"
            libc: "musl"
    runs-on: ubuntu-latest
    container:
      image: ${{ matrix.image }}
    name: "Distro: ${{ matrix.distro }} (${{ matrix.libc }})"
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_UPDATE_FIXTURES: "false"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Report environment
        run: |
          echo "=============================================="
          echo "Cross-distribution test: ${{ matrix.distro }}"
          echo "=============================================="
          uname -a
          head -3 /etc/os-release
          echo "libc: ${{ matrix.libc }}"
          dotnet --version

      - name: Restore
        run: dotnet restore src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj --configfile nuget.config

      - name: Build
        run: dotnet build src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj -c Release --no-restore

      - name: Run all tests
        run: |
          dotnet test src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
            -c Release --no-build \
            --logger "trx;LogFileName=tests-${{ matrix.distro }}.trx" \
            --logger "console;verbosity=minimal"

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: distro-test-results-${{ matrix.distro }}
          path: |
            **/tests-${{ matrix.distro }}.trx
          retention-days: 7

  # ============================================================================
  # Determinism Tests
  # ============================================================================
  determinism-tests:
    runs-on: ubuntu-latest
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_UPDATE_FIXTURES: "false"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.x

      - name: Restore
        run: dotnet restore src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj --configfile nuget.config

      - name: Build
        run: dotnet build src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj -c Release --no-restore

      - name: Run determinism tests
        run: |
          dotnet test src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
            -c Release --no-build \
            --filter "Category=Determinism" \
            --logger "trx;LogFileName=determinism-tests.trx" \
            --logger "console;verbosity=normal"

      - name: Verify golden file integrity
        run: |
          if git diff --exit-code tests/reachability/fixtures/ebpf/golden/; then
            echo "Golden files unchanged - determinism verified"
          else
            echo "ERROR: Golden files were modified during test run!"
            exit 1
          fi

  # ============================================================================
  # Golden File Validation
  # ============================================================================
  golden-file-validation:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate golden files
        run: |
          for file in tests/reachability/fixtures/ebpf/golden/*.ndjson; do
            if [ -f "$file" ]; then
              echo "Checking $file..."
              while IFS= read -r line || [ -n "$line" ]; do
                if [ -n "$line" ]; then
                  echo "$line" | jq -e . > /dev/null 2>&1 || { echo "Invalid JSON in $file"; exit 1; }
                fi
              done < "$file"
            fi
          done
          echo "All golden files valid"
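
      # Hedged aside: jq accepts a stream of JSON values and exits non-zero on
      # a parse error, so a single-pass equivalent of the per-line loop above
      # would be:
      #
      #   jq -e . "$file" > /dev/null || { echo "Invalid JSON in $file"; exit 1; }
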
  # ============================================================================
  # Summary
  # ============================================================================
  summary:
    needs: [multi-kernel-tests, docker-kernel-tests, cross-distro-tests, determinism-tests, golden-file-validation]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: Check results
        run: |
          echo "=============================================="
          echo "eBPF Reachability Test Summary"
          echo "=============================================="
          echo ""
          echo "Multi-kernel tests (native): ${{ needs.multi-kernel-tests.result }}"
          echo "Multi-kernel tests (Docker): ${{ needs.docker-kernel-tests.result }}"
          echo "Cross-distro tests:          ${{ needs.cross-distro-tests.result }}"
          echo "Determinism tests:           ${{ needs.determinism-tests.result }}"
          echo "Golden file validation:      ${{ needs.golden-file-validation.result }}"

          if [[ "${{ needs.multi-kernel-tests.result }}" != "success" ]] || \
             [[ "${{ needs.docker-kernel-tests.result }}" != "success" ]] || \
             [[ "${{ needs.cross-distro-tests.result }}" != "success" ]] || \
             [[ "${{ needs.determinism-tests.result }}" != "success" ]] || \
             [[ "${{ needs.golden-file-validation.result }}" != "success" ]]; then
            echo "ERROR: One or more test jobs failed!"
            exit 1
          fi

          echo "All tests passed across kernel versions 5.4, 5.15, and 6.x!"
@@ -1,99 +0,0 @@
name: EPSS Ingest Perf

# Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
# Tasks: EPSS-3410-013B, EPSS-3410-014
#
# Runs the EPSS ingest perf harness against a Dockerized PostgreSQL instance (Testcontainers).
#
# Runner requirements:
# - Linux runner with Docker Engine available to the runner user (Testcontainers).
# - Label: `ubuntu-22.04` (adjust `runs-on` if your labels differ).
# - >= 4 CPU / >= 8GB RAM recommended for stable baselines.
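#
# Local reproduction (hedged sketch; assumes Docker is running and mirrors the
# invocation in the perf job below, using the default row count and image):
#
#   dotnet run \
#     --project src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
#     -c Release -- \
#     --rows 310000 \
#     --postgres-image 'postgres:16-alpine' \
#     --output /tmp/epss-ingest-perf.json
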
on:
  workflow_dispatch:
    inputs:
      rows:
        description: 'Row count to generate (default: 310000)'
        required: false
        default: '310000'
      postgres_image:
        description: 'PostgreSQL image (default: postgres:16-alpine)'
        required: false
        default: 'postgres:16-alpine'
  schedule:
    # Nightly at 03:00 UTC
    - cron: '0 3 * * *'
  pull_request:
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
      - 'src/Scanner/StellaOps.Scanner.Worker/**'
      - 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
      - '.gitea/workflows/epss-ingest-perf.yml'
  push:
    branches: [ main ]
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
      - 'src/Scanner/StellaOps.Scanner.Worker/**'
      - 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
      - '.gitea/workflows/epss-ingest-perf.yml'

jobs:
  perf:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_OFFLINE: 'true'
      STELLAOPS_DETERMINISTIC: 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore
        run: |
          dotnet restore src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            --configfile nuget.config

      - name: Build
        run: |
          dotnet build src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            -c Release \
            --no-restore

      - name: Run perf harness
        run: |
          mkdir -p bench/results
          dotnet run \
            --project src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            -c Release \
            --no-build \
            -- \
            --rows ${{ inputs.rows || '310000' }} \
            --postgres-image '${{ inputs.postgres_image || 'postgres:16-alpine' }}' \
            --output bench/results/epss-ingest-perf-${{ github.sha }}.json

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: epss-ingest-perf-${{ github.sha }}
          path: |
            bench/results/epss-ingest-perf-${{ github.sha }}.json
          retention-days: 90
@@ -1,86 +0,0 @@
name: evidence-locker
on:
  workflow_dispatch:
    inputs:
      retention_target:
        description: "Retention days target"
        required: false
        default: "180"

jobs:
  check-evidence-locker:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Emit retention summary
        env:
          RETENTION_TARGET: ${{ github.event.inputs.retention_target }}
        run: |
          mkdir -p out/evidence-locker
          echo "target_retention_days=${RETENTION_TARGET}" > out/evidence-locker/summary.txt

      - name: Upload evidence locker summary
        uses: actions/upload-artifact@v4
        with:
          name: evidence-locker
          path: out/evidence-locker/**

  push-zastava-evidence:
    runs-on: ubuntu-latest
    needs: check-evidence-locker
    env:
      STAGED_DIR: evidence-locker/zastava/2025-12-02
      MODULE_ROOT: docs/modules/zastava
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Package staged Zastava artefacts
        run: |
          test -d "$MODULE_ROOT" || { echo "missing $MODULE_ROOT" >&2; exit 1; }
          tmpdir=$(mktemp -d)
          rsync -a --relative \
            "$MODULE_ROOT/SHA256SUMS" \
            "$MODULE_ROOT/schemas/" \
            "$MODULE_ROOT/exports/" \
            "$MODULE_ROOT/thresholds.yaml" \
            "$MODULE_ROOT/thresholds.yaml.dsse" \
            "$MODULE_ROOT/kit/verify.sh" \
            "$MODULE_ROOT/kit/README.md" \
            "$MODULE_ROOT/kit/ed25519.pub" \
            "$MODULE_ROOT/kit/zastava-kit.tzst" \
            "$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
            "$MODULE_ROOT/evidence/README.md" \
            "$tmpdir/"
          (cd "$tmpdir/docs/modules/zastava" && sha256sum --check SHA256SUMS)
          tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
            -cf /tmp/zastava-evidence.tar -C "$tmpdir/docs/modules/zastava" .
          sha256sum /tmp/zastava-evidence.tar
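
      # The tar flags above (--sort=name, fixed --mtime, numeric uid/gid 0)
      # make the archive byte-for-byte reproducible. A hedged local check is
      # to build it twice and compare:
      #
      #   tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
      #     -cf /tmp/a.tar -C "$tmpdir/docs/modules/zastava" .
      #   tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
      #     -cf /tmp/b.tar -C "$tmpdir/docs/modules/zastava" .
      #   cmp /tmp/a.tar /tmp/b.tar && echo "reproducible"
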
      - name: Upload staged artefacts (fallback)
        uses: actions/upload-artifact@v4
        with:
          name: zastava-evidence-locker-2025-12-02
          path: /tmp/zastava-evidence.tar

      - name: Push to Evidence Locker
        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
        env:
          TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
          URL: ${{ env.EVIDENCE_LOCKER_URL }}
        run: |
          curl -f -X PUT "$URL/zastava/2025-12-02/zastava-evidence.tar" \
            -H "Authorization: Bearer $TOKEN" \
            --data-binary @/tmp/zastava-evidence.tar

      - name: Skip push (missing secret or URL)
        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
        run: |
          echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
@@ -1,86 +0,0 @@
name: Export Center CI

on:
  push:
    branches: [ main ]
    paths:
      - 'src/ExportCenter/**'
      - 'devops/export/**'
      - '.gitea/workflows/export-ci.yml'
      - 'docs/modules/devops/export-ci-contract.md'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/ExportCenter/**'
      - 'devops/export/**'
      - '.gitea/workflows/export-ci.yml'
      - 'docs/modules/devops/export-ci-contract.md'

jobs:
  export-ci:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      MINIO_ACCESS_KEY: exportci
      MINIO_SECRET_KEY: exportci123
      BUCKET: export-ci
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: .gitea/scripts/util/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj

      - name: Bring up MinIO
        run: |
          docker compose -f devops/export/minio-compose.yml up -d
          sleep 5
          MINIO_ENDPOINT=http://localhost:9000 devops/export/seed-minio.sh
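
      # Hedged alternative to the fixed sleep above: MinIO exposes a readiness
      # endpoint, so the seed script could instead be gated on it, e.g.
      #
      #   until curl -sf http://localhost:9000/minio/health/ready; do sleep 1; done
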
      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj -c Release /p:ContinuousIntegrationBuild=true

      - name: Test
        run: |
          mkdir -p $ARTIFACT_DIR
          dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj -c Release --logger "trx;LogFileName=export-tests.trx" --results-directory $ARTIFACT_DIR

      - name: Trivy/OCI smoke
        run: devops/export/trivy-smoke.sh

      - name: Schema lint
        run: |
          python -m json.tool docs/modules/export-center/schemas/export-profile.schema.json >/dev/null
          python -m json.tool docs/modules/export-center/schemas/export-manifest.schema.json >/dev/null

      - name: Offline kit verify (fixtures)
        run: bash docs/modules/export-center/operations/verify-export-kit.sh src/ExportCenter/__fixtures/export-kit

      - name: SBOM
        run: syft dir:src/ExportCenter -o spdx-json=$ARTIFACT_DIR/exportcenter.spdx.json

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: export-ci-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Teardown MinIO
        if: always()
        run: docker compose -f devops/export/minio-compose.yml down -v
@@ -1,41 +0,0 @@
name: export-compat
on:
  workflow_dispatch:
    inputs:
      image:
        description: "Exporter image ref"
        required: true
        default: "ghcr.io/stella-ops/exporter:edge"

jobs:
  compat:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Trivy
        uses: aquasecurity/trivy-action@v0.24.0
        with:
          version: latest

      - name: Setup Cosign
        uses: sigstore/cosign-installer@v3.6.0

      - name: Run compatibility checks
        env:
          IMAGE: ${{ github.event.inputs.image }}
        run: |
          chmod +x scripts/export/trivy-compat.sh
          chmod +x scripts/export/oci-verify.sh
          scripts/export/trivy-compat.sh
          scripts/export/oci-verify.sh

      - name: Upload reports
        uses: actions/upload-artifact@v4
        with:
          name: export-compat
          path: out/export-compat/**
@@ -1,46 +0,0 @@
name: exporter-ci

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'src/ExportCenter/**'
      - '.gitea/workflows/exporter-ci.yml'

env:
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_NOLOGO: 1

jobs:
  build-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj --configuration Release --no-restore

      - name: Test
        run: dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj --configuration Release --no-build --verbosity normal

      - name: Publish
        run: |
          dotnet publish src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj \
            --configuration Release \
            --output artifacts/exporter

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: exporter-${{ github.run_id }}
          path: artifacts/
          retention-days: 14
@@ -1,283 +0,0 @@
# Sprint: Testing Enhancement Advisory - Phase 2.2/2.3
# Multi-site federation integration tests
# Tests 3+ site federation scenarios including partitions and latency

name: federation-multisite

on:
  schedule:
    # Run nightly at 02:00 UTC
    - cron: '0 2 * * *'
  push:
    branches: [main]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Federation/**'
      - 'src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/**'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Federation/**'
      - 'src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/**'
  workflow_dispatch:
    inputs:
      run_latency_stress:
        description: 'Run extended latency stress tests'
        type: boolean
        default: false
      run_chaos_scenarios:
        description: 'Run chaos/partition scenarios'
        type: boolean
        default: false

concurrency:
  group: federation-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # ==========================================================================
  # Multi-Site Federation Tests
  # ==========================================================================
  federation-multisite-tests:
    name: Multi-Site Federation Tests
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/StellaOps.Concelier.Federation.Tests.csproj

      - name: Build federation tests
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/StellaOps.Concelier.Federation.Tests.csproj --configuration Release --no-restore

      - name: Run 3-Site Convergence Tests
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Federation&FullyQualifiedName~ThreeSite" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=federation-convergence.trx" \
            --results-directory ./TestResults

      - name: Run Partition Tests
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Federation&FullyQualifiedName~Partition" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=federation-partition.trx" \
            --results-directory ./TestResults

      - name: Run Latency Tests
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Latency" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=federation-latency.trx" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: federation-test-results
          path: TestResults/**/*.trx

      - name: Publish test summary
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: Federation Test Results
          path: TestResults/**/*.trx
          reporter: dotnet-trx

  # ==========================================================================
  # Extended Latency Stress Tests (On-Demand)
  # ==========================================================================
  latency-stress-tests:
    name: Latency Stress Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_latency_stress == 'true'
    timeout-minutes: 60

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Build federation tests
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/StellaOps.Concelier.Federation.Tests.csproj --configuration Release

      - name: Run Extended Latency Scenarios
        run: |
          # Run cross-region tests with various latency configurations.
          # Exporting LATENCY assumes the test host reads it from the
          # environment; without it the loop only varies the log file name.
          for LATENCY in 100 500 1000 2000; do
            echo "Testing with ${LATENCY}ms latency..."
            export LATENCY
            dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
              --filter "Category=Latency&FullyQualifiedName~CrossRegion" \
              --configuration Release \
              --no-build \
              --logger "trx;LogFileName=latency-stress-${LATENCY}ms.trx" \
              --results-directory ./TestResults/latency-stress || true
          done

      - name: Analyze latency results
        run: |
          echo "Latency stress test results:"
          find ./TestResults -name "*.trx" -exec basename {} \;

      - name: Upload stress test results
        uses: actions/upload-artifact@v4
        with:
          name: latency-stress-results
          path: TestResults/**

  # ==========================================================================
  # Chaos Scenario Tests (On-Demand)
  # ==========================================================================
  chaos-scenario-tests:
    name: Chaos Scenario Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_chaos_scenarios == 'true'
    timeout-minutes: 45

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Build federation tests
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/StellaOps.Concelier.Federation.Tests.csproj --configuration Release

      - name: Run Split Brain Scenarios
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Chaos&FullyQualifiedName~SplitBrain" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=chaos-splitbrain.trx" \
            --results-directory ./TestResults

      - name: Run Flapping Network Scenarios
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Chaos&FullyQualifiedName~Flap" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=chaos-flapping.trx" \
            --results-directory ./TestResults

      - name: Run Partition Healing Scenarios
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --filter "Category=Chaos&FullyQualifiedName~Heal" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=chaos-healing.trx" \
            --results-directory ./TestResults

      - name: Upload chaos test results
        uses: actions/upload-artifact@v4
        with:
          name: chaos-test-results
          path: TestResults/**

  # ==========================================================================
  # Nightly Full Federation Suite
  # ==========================================================================
  nightly-full-suite:
    name: Nightly Full Federation Suite
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule'
    timeout-minutes: 90

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Build all federation tests
        run: dotnet build src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests/StellaOps.Concelier.Federation.Tests.csproj --configuration Release

      - name: Run complete federation test suite
        run: |
          dotnet test src/Concelier/__Tests/StellaOps.Concelier.Federation.Tests \
            --configuration Release \
            --no-build \
            --collect:"XPlat Code Coverage" \
            --logger "trx;LogFileName=federation-full.trx" \
            --results-directory ./TestResults
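
      # Hedged aside: --collect:"XPlat Code Coverage" drops Cobertura XML under
      # ./TestResults/<guid>/. A local HTML report can be produced with the
      # reportgenerator dotnet tool, e.g.:
      #
      #   dotnet tool install -g dotnet-reportgenerator-globaltool
      #   reportgenerator -reports:"TestResults/**/coverage.cobertura.xml" \
      #     -targetdir:coveragereport -reporttypes:Html
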
      - name: Generate test report
        run: |
          echo "# Federation Test Report" > ./TestResults/report.md
          echo "" >> ./TestResults/report.md
          echo "Run date: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> ./TestResults/report.md
          echo "" >> ./TestResults/report.md
          echo "## Test Categories" >> ./TestResults/report.md
          echo "- Multi-site convergence" >> ./TestResults/report.md
          echo "- Network partition handling" >> ./TestResults/report.md
          echo "- Cross-region latency" >> ./TestResults/report.md
          echo "- Split-brain recovery" >> ./TestResults/report.md

      - name: Upload nightly results
        uses: actions/upload-artifact@v4
        with:
          name: nightly-federation-results
          path: TestResults/**

      - name: Send notification on failure
        if: failure()
        run: |
          echo "Federation nightly tests failed - notification would be sent here"
          # Could integrate with Slack/Teams/Email notification

  # ==========================================================================
  # Test Result Summary
  # ==========================================================================
  test-summary:
    name: Test Summary
    runs-on: ubuntu-latest
    needs: [federation-multisite-tests]
    if: always()

    steps:
      - name: Download test results
        uses: actions/download-artifact@v4
        with:
          name: federation-test-results
          path: ./TestResults

      - name: Summarize results
        run: |
          echo "## Federation Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Test categories executed:" >> $GITHUB_STEP_SUMMARY
          echo "- Three-site convergence tests" >> $GITHUB_STEP_SUMMARY
          echo "- Partition/split-brain tests" >> $GITHUB_STEP_SUMMARY
          echo "- Cross-region latency tests" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Result files:" >> $GITHUB_STEP_SUMMARY
          find ./TestResults -name "*.trx" -exec basename {} \; | while read f; do
            echo "- $f" >> $GITHUB_STEP_SUMMARY
          done
@@ -1,326 +0,0 @@
# .gitea/workflows/findings-ledger-ci.yml
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)

name: Findings Ledger CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'
      - 'devops/releases/2025.09-stable.yaml'
      - 'devops/releases/2025.09-airgap.yaml'
      - 'devops/downloads/manifest.json'
      - 'devops/release/check_release_manifest.py'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'

env:
  DOTNET_VERSION: '10.0.100'
  POSTGRES_IMAGE: postgres:16-alpine
  BUILD_CONFIGURATION: Release

jobs:
  build-test:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: |
          dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
          dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj

      - name: Build
        run: |
          dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            /p:ContinuousIntegrationBuild=true

      - name: Run unit tests
        run: |
          mkdir -p $TEST_RESULTS_DIR
          dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            --logger "trx;LogFileName=ledger-tests.trx" \
            --results-directory $TEST_RESULTS_DIR

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: ledger-test-results
          path: ${{ env.TEST_RESULTS_DIR }}

  migration-validation:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: ledgertest
          POSTGRES_PASSWORD: ledgertest
          POSTGRES_DB: ledger_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    env:
      PGHOST: localhost
      PGPORT: 5432
      PGUSER: ledgertest
      PGPASSWORD: ledgertest
      PGDATABASE: ledger_test
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install PostgreSQL client
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client

      - name: Wait for PostgreSQL
        run: |
          until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
            echo "Waiting for PostgreSQL..."
            sleep 2
          done

      - name: Apply prerequisite migrations (001-006)
        run: |
          set -euo pipefail
          MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
          for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
                           003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
                           005_risk_fields.sql 006_orchestrator_airgap.sql; do
            if [ -f "$MIGRATION_DIR/$migration" ]; then
              echo "Applying migration: $migration"
              psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
            fi
          done

      - name: Apply RLS migration (007_enable_rls.sql)
        run: |
          set -euo pipefail
          echo "Applying RLS migration..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
- name: Validate RLS configuration
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Validating RLS is enabled on all protected tables..."
|
||||
|
||||
# Check RLS enabled
|
||||
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||
WHERE n.nspname = 'public'
|
||||
AND c.relrowsecurity = true
|
||||
AND c.relname IN (
|
||||
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||
'orchestrator_exports', 'airgap_imports'
|
||||
);
|
||||
")
|
||||
|
||||
if [ "$TABLES_WITH_RLS" -ne 8 ]; then
|
||||
echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ All 8 tables have RLS enabled"
|
||||
|
||||
# Check policies exist
|
||||
POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(DISTINCT tablename)
|
||||
FROM pg_policies
|
||||
WHERE schemaname = 'public'
|
||||
AND policyname LIKE '%_tenant_isolation';
|
||||
")
|
||||
|
||||
if [ "$POLICIES" -ne 8 ]; then
|
||||
echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ All 8 tenant isolation policies created"
|
||||
|
||||
# Check tenant function exists
|
||||
FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_proc p
|
||||
JOIN pg_namespace n ON p.pronamespace = n.oid
|
||||
WHERE p.proname = 'require_current_tenant'
|
||||
AND n.nspname = 'findings_ledger_app';
|
||||
")
|
||||
|
||||
if [ "$FUNC_EXISTS" -ne 1 ]; then
|
||||
echo "::error::Tenant function 'require_current_tenant' not found"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"
|
||||
|
||||
echo ""
|
||||
echo "=== RLS Migration Validation PASSED ==="
|
||||
|
||||
- name: Test rollback migration
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Testing rollback migration..."
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql
|
||||
|
||||
# Verify RLS is disabled
|
||||
TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
|
||||
SELECT COUNT(*)
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||
WHERE n.nspname = 'public'
|
||||
AND c.relrowsecurity = true
|
||||
AND c.relname IN (
|
||||
'ledger_events', 'ledger_merkle_roots', 'findings_projection',
|
||||
'finding_history', 'triage_actions', 'ledger_attestations',
|
||||
'orchestrator_exports', 'airgap_imports'
|
||||
);
|
||||
")
|
||||
|
||||
if [ "$TABLES_WITH_RLS" -ne 0 ]; then
|
||||
echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Rollback successful - RLS disabled on all tables"
|
||||
- name: Validate release manifests (production)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python devops/release/check_release_manifest.py
|
||||
|
||||
- name: Re-apply RLS migration (idempotency check)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Re-applying RLS migration to verify idempotency..."
|
||||
psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
|
||||
-f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
|
||||
echo "✓ Migration is idempotent"
|
||||
|
||||
generate-manifest:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
needs: [build-test, migration-validation]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Generate migration manifest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
|
||||
ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
|
||||
MANIFEST_DIR="out/findings-ledger/migrations"
|
||||
mkdir -p "$MANIFEST_DIR"
|
||||
|
||||
# Compute SHA256 hashes
|
||||
MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
|
||||
ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
|
||||
CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||
|
||||
cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
|
||||
{
|
||||
"\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
|
||||
"schemaVersion": "1.0.0",
|
||||
"migrationId": "007_enable_rls",
|
||||
"module": "findings-ledger",
|
||||
"version": "2025.12.0",
|
||||
"createdAt": "$CREATED_AT",
|
||||
"description": "Enable Row-Level Security for Findings Ledger tenant isolation",
|
||||
"taskId": "LEDGER-TEN-48-001-DEV",
|
||||
"contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
|
||||
"database": {
|
||||
"engine": "postgresql",
|
||||
"minVersion": "16.0"
|
||||
},
|
||||
"files": {
|
||||
"apply": {
|
||||
"path": "007_enable_rls.sql",
|
||||
"sha256": "$MIGRATION_SHA"
|
||||
},
|
||||
"rollback": {
|
||||
"path": "007_enable_rls_rollback.sql",
|
||||
"sha256": "$ROLLBACK_SHA"
|
||||
}
|
||||
},
|
||||
"affects": {
|
||||
"tables": [
|
||||
"ledger_events",
|
||||
"ledger_merkle_roots",
|
||||
"findings_projection",
|
||||
"finding_history",
|
||||
"triage_actions",
|
||||
"ledger_attestations",
|
||||
"orchestrator_exports",
|
||||
"airgap_imports"
|
||||
],
|
||||
"schemas": ["public", "findings_ledger_app"],
|
||||
"roles": ["findings_ledger_admin"]
|
||||
},
|
||||
"prerequisites": [
|
||||
"006_orchestrator_airgap"
|
||||
],
|
||||
"validation": {
|
||||
"type": "rls-check",
|
||||
"expectedTables": 8,
|
||||
"expectedPolicies": 8,
|
||||
"tenantFunction": "findings_ledger_app.require_current_tenant"
|
||||
},
|
||||
"offlineKit": {
|
||||
"includedInBundle": true,
|
||||
"requiresManualApply": true,
|
||||
"applyOrder": 7
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||
cat "$MANIFEST_DIR/007_enable_rls.manifest.json"
|
||||
|
||||
- name: Copy migration files for offline-kit
|
||||
run: |
|
||||
set -euo pipefail
|
||||
OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
|
||||
mkdir -p "$OFFLINE_DIR"
|
||||
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
|
||||
cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
|
||||
cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
|
||||
echo "Offline-kit migration files prepared"
|
||||
ls -la "$OFFLINE_DIR"
|
||||
|
||||
- name: Upload migration artefacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: findings-ledger-migrations
|
||||
path: out/findings-ledger/
|
||||
if-no-files-found: error
|
||||
|
||||
@@ -1,358 +0,0 @@
# -----------------------------------------------------------------------------
# golden-corpus-bench.yaml
# Sprint: SPRINT_20260121_036_BinaryIndex_golden_corpus_bundle_verification
# Task: GCB-005 - Implement CI regression gates for corpus KPIs
# Description: CI workflow for golden corpus benchmark and regression detection.
# -----------------------------------------------------------------------------

name: Golden Corpus Benchmark

on:
  push:
    branches: [main]
    paths:
      - 'src/BinaryIndex/**'
      - 'src/Scanner/**'
      - 'datasets/golden-corpus/**'
      - '.gitea/workflows/golden-corpus-bench.yaml'
  pull_request:
    branches: [main]
    paths:
      - 'src/BinaryIndex/**'
      - 'src/Scanner/**'
      - 'datasets/golden-corpus/**'
  schedule:
    # Nightly at 3 AM UTC
    - cron: '0 3 * * *'
  workflow_dispatch:
    inputs:
      corpus_subset:
        description: 'Corpus subset to validate (seed, extended, full)'
        required: false
        default: 'seed'
      update_baseline:
        description: 'Update baseline after successful run'
        required: false
        default: 'false'
        type: boolean

env:
  DOTNET_NOLOGO: true
  DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true
  CORPUS_ROOT: datasets/golden-corpus
  BASELINE_PATH: bench/baselines/current.json
  RESULTS_DIR: bench/results

jobs:
  validate-corpus:
    name: Validate Golden Corpus
    runs-on: self-hosted
    timeout-minutes: 120
    outputs:
      run_id: ${{ steps.validate.outputs.run_id }}
      precision: ${{ steps.validate.outputs.precision }}
      recall: ${{ steps.validate.outputs.recall }}
      fn_rate: ${{ steps.validate.outputs.fn_rate }}
      determinism: ${{ steps.validate.outputs.determinism }}
      ttfrp_p95: ${{ steps.validate.outputs.ttfrp_p95 }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          lfs: true

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore CLI
        run: dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj

      - name: Build CLI
        run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release --no-restore

      - name: Determine corpus subset
        id: corpus
        run: |
          SUBSET="${{ github.event.inputs.corpus_subset || 'seed' }}"
          if [ "${{ github.event_name }}" == "schedule" ]; then
            # Use extended corpus for nightly, full corpus weekly
            DAY_OF_WEEK=$(date +%u)
            if [ "$DAY_OF_WEEK" == "7" ]; then
              SUBSET="full"
            else
              SUBSET="extended"
            fi
          fi
          echo "subset=$SUBSET" >> $GITHUB_OUTPUT
          echo "path=${{ env.CORPUS_ROOT }}/${SUBSET}/" >> $GITHUB_OUTPUT

      - name: Run corpus validation
        id: validate
        run: |
          RUN_ID=$(date +%Y%m%d%H%M%S)
          RESULTS_FILE="${{ env.RESULTS_DIR }}/${RUN_ID}.json"
          mkdir -p "${{ env.RESULTS_DIR }}"

          echo "Starting validation run: $RUN_ID"
          echo "Corpus: ${{ steps.corpus.outputs.path }}"
          echo "Results: $RESULTS_FILE"

          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release -- \
            groundtruth validate run \
            --matcher semantic-diffing \
            --output "$RESULTS_FILE" \
            --verbose

          # Extract KPIs from results for output
          if [ -f "$RESULTS_FILE" ]; then
            echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
            echo "results_file=$RESULTS_FILE" >> $GITHUB_OUTPUT

            # Parse KPIs from JSON (using jq if available, else defaults)
            PRECISION=$(jq -r '.precision // 0' "$RESULTS_FILE" 2>/dev/null || echo "0.95")
            RECALL=$(jq -r '.recall // 0' "$RESULTS_FILE" 2>/dev/null || echo "0.92")
            FN_RATE=$(jq -r '.falseNegativeRate // 0' "$RESULTS_FILE" 2>/dev/null || echo "0.08")
            DETERMINISM=$(jq -r '.deterministicReplayRate // 0' "$RESULTS_FILE" 2>/dev/null || echo "1.0")
            TTFRP_P95=$(jq -r '.ttfrpP95Ms // 0' "$RESULTS_FILE" 2>/dev/null || echo "150")

            echo "precision=$PRECISION" >> $GITHUB_OUTPUT
            echo "recall=$RECALL" >> $GITHUB_OUTPUT
            echo "fn_rate=$FN_RATE" >> $GITHUB_OUTPUT
            echo "determinism=$DETERMINISM" >> $GITHUB_OUTPUT
            echo "ttfrp_p95=$TTFRP_P95" >> $GITHUB_OUTPUT
          fi

      - name: Upload validation results
        uses: actions/upload-artifact@v4
        with:
          name: validation-results-${{ steps.validate.outputs.run_id }}
          path: ${{ env.RESULTS_DIR }}/*.json
          retention-days: 90

  check-regression:
    name: Check KPI Regression
    runs-on: self-hosted
    needs: validate-corpus
    outputs:
      passed: ${{ steps.check.outputs.passed }}
      exit_code: ${{ steps.check.outputs.exit_code }}
      summary: ${{ steps.check.outputs.summary }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Download validation results
        uses: actions/download-artifact@v4
        with:
          name: validation-results-${{ needs.validate-corpus.outputs.run_id }}
          path: ${{ env.RESULTS_DIR }}

      - name: Build CLI
        run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release

      - name: Check regression gates
        id: check
        run: |
          RESULTS_FILE="${{ env.RESULTS_DIR }}/${{ needs.validate-corpus.outputs.run_id }}.json"
          REPORT_FILE="${{ env.RESULTS_DIR }}/regression-report-${{ needs.validate-corpus.outputs.run_id }}.md"

          echo "Checking regression against baseline: ${{ env.BASELINE_PATH }}"

          # Run regression check
          set +e
          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release -- \
            groundtruth validate check \
            --results "$RESULTS_FILE" \
            --baseline "${{ env.BASELINE_PATH }}" \
            --precision-threshold 0.01 \
            --recall-threshold 0.01 \
            --fn-rate-threshold 0.01 \
            --determinism-threshold 1.0 \
            --ttfrp-threshold 0.20 \
            --output "$REPORT_FILE" \
            --format markdown

          EXIT_CODE=$?
          set -e

          echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT

          if [ $EXIT_CODE -eq 0 ]; then
            echo "passed=true" >> $GITHUB_OUTPUT
            echo "summary=All regression gates passed" >> $GITHUB_OUTPUT
          elif [ $EXIT_CODE -eq 1 ]; then
            echo "passed=false" >> $GITHUB_OUTPUT
            echo "summary=Regression detected - one or more gates failed" >> $GITHUB_OUTPUT
          else
            echo "passed=false" >> $GITHUB_OUTPUT
            echo "summary=Error during regression check (exit code: $EXIT_CODE)" >> $GITHUB_OUTPUT
          fi

      - name: Upload regression report
        uses: actions/upload-artifact@v4
        with:
          name: regression-report-${{ needs.validate-corpus.outputs.run_id }}
          path: ${{ env.RESULTS_DIR }}/regression-report-*.md
          retention-days: 90

      - name: Post PR comment with regression report
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const reportPath = '${{ env.RESULTS_DIR }}/regression-report-${{ needs.validate-corpus.outputs.run_id }}.md';

            let report = '## Golden Corpus KPI Regression Check\n\n';

            if (fs.existsSync(reportPath)) {
              report += fs.readFileSync(reportPath, 'utf8');
            } else {
              report += '> Report file not found\n';
              report += '\n**Status:** ${{ steps.check.outputs.summary }}\n';
            }

            // Find existing comment
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const botComment = comments.find(comment =>
              comment.user.type === 'Bot' &&
              comment.body.includes('Golden Corpus KPI Regression Check')
            );

            if (botComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: botComment.id,
                body: report
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: report
              });
            }

      - name: Fail on regression
        if: steps.check.outputs.passed != 'true'
        run: |
          echo "::error::${{ steps.check.outputs.summary }}"
          exit ${{ steps.check.outputs.exit_code }}

  update-baseline:
    name: Update Baseline
    runs-on: self-hosted
    needs: [validate-corpus, check-regression]
    if: |
      always() &&
      needs.check-regression.outputs.passed == 'true' &&
      (github.event.inputs.update_baseline == 'true' ||
       (github.event_name == 'schedule' && github.ref == 'refs/heads/main'))

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Download validation results
        uses: actions/download-artifact@v4
        with:
          name: validation-results-${{ needs.validate-corpus.outputs.run_id }}
          path: ${{ env.RESULTS_DIR }}

      - name: Build CLI
        run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release

      - name: Archive previous baseline
        # Runs before the update so the archive captures the old baseline,
        # not the freshly written one.
        run: |
          ARCHIVE_DIR="bench/baselines/archive"
          mkdir -p "$ARCHIVE_DIR"

          if [ -f "${{ env.BASELINE_PATH }}" ]; then
            TIMESTAMP=$(date +%Y%m%d%H%M%S)
            cp "${{ env.BASELINE_PATH }}" "$ARCHIVE_DIR/baseline-${TIMESTAMP}.json"
          fi

      - name: Update baseline
        run: |
          RESULTS_FILE="${{ env.RESULTS_DIR }}/${{ needs.validate-corpus.outputs.run_id }}.json"

          echo "Updating baseline from: $RESULTS_FILE"

          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release -- \
            groundtruth baseline update \
            --from-results "$RESULTS_FILE" \
            --output "${{ env.BASELINE_PATH }}" \
            --description "Auto-updated from nightly run ${{ needs.validate-corpus.outputs.run_id }}" \
            --source "${{ github.sha }}"

      - name: Commit baseline update
        run: |
          git config user.name "Stella Ops CI"
          git config user.email "ci@stella-ops.org"

          git add "${{ env.BASELINE_PATH }}"
          git add "bench/baselines/archive/"

          git commit -m "chore(bench): update golden corpus baseline from ${{ needs.validate-corpus.outputs.run_id }}

          Precision: ${{ needs.validate-corpus.outputs.precision }}
          Recall: ${{ needs.validate-corpus.outputs.recall }}
          FN Rate: ${{ needs.validate-corpus.outputs.fn_rate }}
          Determinism: ${{ needs.validate-corpus.outputs.determinism }}
          TTFRP p95: ${{ needs.validate-corpus.outputs.ttfrp_p95 }}ms

          Source: ${{ github.sha }}"

          git push

  summary:
    name: Workflow Summary
    runs-on: self-hosted
    needs: [validate-corpus, check-regression]
    if: always()

    steps:
      - name: Generate summary
        run: |
          echo "## Golden Corpus Benchmark Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Run ID | ${{ needs.validate-corpus.outputs.run_id }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Precision | ${{ needs.validate-corpus.outputs.precision }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Recall | ${{ needs.validate-corpus.outputs.recall }} |" >> $GITHUB_STEP_SUMMARY
          echo "| False Negative Rate | ${{ needs.validate-corpus.outputs.fn_rate }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Deterministic Replay | ${{ needs.validate-corpus.outputs.determinism }} |" >> $GITHUB_STEP_SUMMARY
          echo "| TTFRP p95 | ${{ needs.validate-corpus.outputs.ttfrp_p95 }}ms |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Regression Check" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [ "${{ needs.check-regression.outputs.passed }}" == "true" ]; then
            echo ":white_check_mark: **${{ needs.check-regression.outputs.summary }}**" >> $GITHUB_STEP_SUMMARY
          else
            echo ":x: **${{ needs.check-regression.outputs.summary }}**" >> $GITHUB_STEP_SUMMARY
          fi

@@ -1,140 +0,0 @@
# Licensed under AGPL-3.0-or-later. Copyright (C) 2026 StellaOps Contributors.
# Sprint: SPRINT_20260110_012_010_TEST
# Golden Set Corpus Validation Workflow

name: Golden Set Validation

on:
  push:
    paths:
      - 'src/__Tests/__Datasets/golden-sets/**'
      - 'src/__Tests/Integration/GoldenSetDiff/**'
      - 'src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.GoldenSet/**'
  pull_request:
    paths:
      - 'src/__Tests/__Datasets/golden-sets/**'
      - 'src/__Tests/Integration/GoldenSetDiff/**'
      - 'src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.GoldenSet/**'
  workflow_dispatch:

jobs:
  validate-corpus:
    name: Validate Golden Set Corpus
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/GoldenSetDiff/StellaOps.Integration.GoldenSetDiff.csproj

      - name: Build
        run: dotnet build src/__Tests/Integration/GoldenSetDiff/StellaOps.Integration.GoldenSetDiff.csproj --no-restore

      - name: Run Corpus Validation Tests
        run: |
          dotnet test src/__Tests/Integration/GoldenSetDiff/StellaOps.Integration.GoldenSetDiff.csproj \
            --filter "FullyQualifiedName~CorpusValidationTests" \
            --logger "trx;LogFileName=corpus-validation.trx" \
            --results-directory ./TestResults

      - name: Run Determinism Tests
        run: |
          dotnet test src/__Tests/Integration/GoldenSetDiff/StellaOps.Integration.GoldenSetDiff.csproj \
            --filter "FullyQualifiedName~DeterminismTests" \
            --logger "trx;LogFileName=determinism.trx" \
            --results-directory ./TestResults

      - name: Run Replay Validation Tests
        run: |
          dotnet test src/__Tests/Integration/GoldenSetDiff/StellaOps.Integration.GoldenSetDiff.csproj \
            --filter "FullyQualifiedName~ReplayValidationTests" \
            --logger "trx;LogFileName=replay-validation.trx" \
            --results-directory ./TestResults

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results
          path: ./TestResults/*.trx

  e2e-tests:
    name: E2E Fix Verification Tests
    runs-on: ubuntu-latest
    needs: validate-corpus
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore dependencies
        run: dotnet restore src/__Tests/E2E/GoldenSetDiff/StellaOps.E2E.GoldenSetDiff.csproj

      - name: Build
        run: dotnet build src/__Tests/E2E/GoldenSetDiff/StellaOps.E2E.GoldenSetDiff.csproj --no-restore

      - name: Run E2E Tests
        run: |
          dotnet test src/__Tests/E2E/GoldenSetDiff/StellaOps.E2E.GoldenSetDiff.csproj \
            --logger "trx;LogFileName=e2e.trx" \
            --results-directory ./TestResults

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-test-results
          path: ./TestResults/*.trx

  count-golden-sets:
    name: Count and Report Golden Sets
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Count Golden Sets
        id: count
        run: |
          total=$(find src/__Tests/__Datasets/golden-sets -name "*.golden.yaml" | wc -l)
          openssl=$(find src/__Tests/__Datasets/golden-sets/openssl -name "*.golden.yaml" 2>/dev/null | wc -l)
          glibc=$(find src/__Tests/__Datasets/golden-sets/glibc -name "*.golden.yaml" 2>/dev/null | wc -l)
          curl=$(find src/__Tests/__Datasets/golden-sets/curl -name "*.golden.yaml" 2>/dev/null | wc -l)
          log4j=$(find src/__Tests/__Datasets/golden-sets/log4j -name "*.golden.yaml" 2>/dev/null | wc -l)
          synthetic=$(find src/__Tests/__Datasets/golden-sets/synthetic -name "*.golden.yaml" 2>/dev/null | wc -l)

          echo "Total: $total"
          echo "OpenSSL: $openssl"
          echo "glibc: $glibc"
          echo "curl: $curl"
          echo "Log4j: $log4j"
          echo "Synthetic: $synthetic"

          echo "total=$total" >> $GITHUB_OUTPUT

          if [ "$total" -lt 15 ]; then
            echo "::warning::Golden set corpus has fewer than 15 entries ($total)"
          fi

      - name: Report Summary
        run: |
          echo "## Golden Set Corpus Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Component | Count |" >> $GITHUB_STEP_SUMMARY
          echo "|-----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| OpenSSL | $(find src/__Tests/__Datasets/golden-sets/openssl -name '*.golden.yaml' 2>/dev/null | wc -l) |" >> $GITHUB_STEP_SUMMARY
          echo "| glibc | $(find src/__Tests/__Datasets/golden-sets/glibc -name '*.golden.yaml' 2>/dev/null | wc -l) |" >> $GITHUB_STEP_SUMMARY
          echo "| curl | $(find src/__Tests/__Datasets/golden-sets/curl -name '*.golden.yaml' 2>/dev/null | wc -l) |" >> $GITHUB_STEP_SUMMARY
          echo "| Log4j | $(find src/__Tests/__Datasets/golden-sets/log4j -name '*.golden.yaml' 2>/dev/null | wc -l) |" >> $GITHUB_STEP_SUMMARY
          echo "| Synthetic | $(find src/__Tests/__Datasets/golden-sets/synthetic -name '*.golden.yaml' 2>/dev/null | wc -l) |" >> $GITHUB_STEP_SUMMARY
          echo "| **Total** | **$(find src/__Tests/__Datasets/golden-sets -name '*.golden.yaml' | wc -l)** |" >> $GITHUB_STEP_SUMMARY

@@ -1,42 +0,0 @@
name: graph-load
on:
  workflow_dispatch:
    inputs:
      target:
        description: "Graph API base URL"
        required: true
        default: "http://localhost:5000"
      users:
        description: "Virtual users"
        required: false
        default: "8"
      duration:
        description: "Duration seconds"
        required: false
        default: "60"

jobs:
  load-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Install k6
        run: |
          # k6 is not in Ubuntu's default repositories; register Grafana's apt repo first
          sudo gpg --no-default-keyring --keyring /usr/share/keyrings/k6-archive-keyring.gpg \
            --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys C5AD17C747E3415A3642D57D77C6C491D6AC1D69
          echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" \
            | sudo tee /etc/apt/sources.list.d/k6.list
          sudo apt-get update -qq
          sudo apt-get install -y k6

      - name: Run graph load test
        run: |
          chmod +x scripts/graph/load-test.sh
          TARGET="${{ github.event.inputs.target }}" USERS="${{ github.event.inputs.users }}" DURATION="${{ github.event.inputs.duration }}" scripts/graph/load-test.sh

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: graph-load-summary
          path: out/graph-load/**

@@ -1,57 +0,0 @@
name: graph-ui-sim
on:
  workflow_dispatch:
    inputs:
      graph_api:
        description: "Graph API base URL"
        required: true
        default: "http://localhost:5000"
      graph_ui:
        description: "Graph UI base URL"
        required: true
        default: "http://localhost:4200"
      perf_budget_ms:
        description: "Perf budget in ms"
        required: false
        default: "3000"

jobs:
  ui-and-sim:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: "18"

      - name: Install Playwright deps
        run: npx playwright install --with-deps chromium

      - name: Run UI perf probe
        env:
          GRAPH_UI_BASE: ${{ github.event.inputs.graph_ui }}
          GRAPH_UI_BUDGET_MS: ${{ github.event.inputs.perf_budget_ms }}
          OUT: out/graph-ui-perf
        run: |
          npx ts-node scripts/graph/ui-perf.ts

      - name: Run simulation smoke
        env:
          TARGET: ${{ github.event.inputs.graph_api }}
        run: |
          chmod +x scripts/graph/simulation-smoke.sh
          scripts/graph/simulation-smoke.sh

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: graph-ui-sim
          path: |
            out/graph-ui-perf/**
            out/graph-sim/**

@@ -1,215 +0,0 @@
# HLC Distributed Tests Workflow
# Sprint: Testing Enhancement Advisory - Phase 1.2
# Tests multi-node HLC scenarios with network partition simulation

name: hlc-distributed

on:
  schedule:
    # Run nightly at 2 AM UTC
    - cron: '0 2 * * *'
  push:
    branches: [main]
    paths:
      - 'src/__Libraries/StellaOps.HybridLogicalClock/**'
      - 'src/__Tests/Integration/StellaOps.Integration.HLC/**'
  pull_request:
    paths:
      - 'src/__Libraries/StellaOps.HybridLogicalClock/**'
      - 'src/__Tests/Integration/StellaOps.Integration.HLC/**'
  workflow_dispatch:
    inputs:
      run_extended:
        description: 'Run extended multi-node tests'
        type: boolean
        default: false
      run_chaos:
        description: 'Run chaos/partition tests'
        type: boolean
        default: true

concurrency:
  group: hlc-distributed-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # ==========================================================================
  # Multi-Node HLC Tests
  # ==========================================================================
  hlc-distributed:
    name: Distributed HLC Tests
    runs-on: ubuntu-latest
    timeout-minutes: 20

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.HLC/StellaOps.Integration.HLC.csproj

      - name: Build HLC tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.HLC/StellaOps.Integration.HLC.csproj --configuration Release --no-restore

      - name: Run distributed HLC tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.HLC \
            --configuration Release \
            --no-build \
            --filter "Category=HLC&Category=Integration" \
            --logger "trx;LogFileName=hlc-distributed.trx" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: hlc-distributed-results
          path: TestResults/**

      - name: Publish test summary
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: HLC Distributed Test Results
          path: TestResults/**/*.trx
          reporter: dotnet-trx

  # ==========================================================================
  # Network Partition / Chaos Tests
  # ==========================================================================
  hlc-chaos:
    name: HLC Chaos Tests
    runs-on: ubuntu-latest
    timeout-minutes: 30
    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.HLC/StellaOps.Integration.HLC.csproj

      - name: Build HLC tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.HLC/StellaOps.Integration.HLC.csproj --configuration Release --no-restore

      - name: Run partition tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.HLC \
            --configuration Release \
            --no-build \
            --filter "Category=Chaos" \
            --logger "trx;LogFileName=hlc-chaos.trx" \
            --results-directory ./TestResults

      - name: Run extended multi-node tests
        if: github.event.inputs.run_extended == 'true'
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.HLC \
            --configuration Release \
            --no-build \
            --filter "FullyQualifiedName~LargeCluster|FullyQualifiedName~HighFrequency" \
            --logger "trx;LogFileName=hlc-extended.trx" \
            --results-directory ./TestResults

      - name: Upload chaos test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: hlc-chaos-results
          path: TestResults/**

      - name: Publish test summary
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: HLC Chaos Test Results
          path: TestResults/**/*.trx
          reporter: dotnet-trx

  # ==========================================================================
  # Determinism Verification
  # ==========================================================================
  hlc-determinism:
    name: HLC Determinism Verification
    runs-on: ubuntu-latest
    timeout-minutes: 15

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Libraries/__Tests/StellaOps.HybridLogicalClock.Tests/StellaOps.HybridLogicalClock.Tests.csproj

      - name: Build HLC unit tests
        run: dotnet build src/__Libraries/__Tests/StellaOps.HybridLogicalClock.Tests/StellaOps.HybridLogicalClock.Tests.csproj --configuration Release --no-restore

      - name: Run determinism verification (3 runs)
        run: |
          for i in 1 2 3; do
            echo "=== Run $i ==="
            dotnet test src/__Libraries/__Tests/StellaOps.HybridLogicalClock.Tests \
              --configuration Release \
              --no-build \
              --filter "FullyQualifiedName~Monotonic|FullyQualifiedName~Uniqueness" \
              --logger "trx;LogFileName=hlc-determinism-$i.trx" \
              --results-directory ./TestResults/run-$i
          done

      - name: Compare determinism runs
        run: |
          echo "Comparing test results across runs..."
          # All runs should pass
          for i in 1 2 3; do
            if [ ! -f "./TestResults/run-$i/hlc-determinism-$i.trx" ]; then
              echo "Run $i results not found"
              exit 1
            fi
          done
          echo "All determinism runs completed successfully"

      - name: Upload determinism results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: hlc-determinism-results
          path: TestResults/**

  # ==========================================================================
  # Gate Status
  # ==========================================================================
  gate-status:
    name: HLC Distributed Gate Status
    runs-on: ubuntu-latest
    needs: [hlc-distributed, hlc-determinism]
    if: always()

    steps:
      - name: Check gate status
        run: |
          if [ "${{ needs.hlc-distributed.result }}" == "failure" ]; then
            echo "::error::Distributed HLC tests failed"
            exit 1
          fi
          if [ "${{ needs.hlc-determinism.result }}" == "failure" ]; then
            echo "::error::HLC determinism verification failed"
            exit 1
          fi
          echo "All HLC distributed checks passed!"

@@ -1,69 +0,0 @@
name: ICS/KISA Feed Refresh

on:
  schedule:
    - cron: '0 2 * * MON'
  workflow_dispatch:
    inputs:
      live_fetch:
        description: 'Attempt live RSS fetch (fallback to samples on failure)'
        required: false
        default: true
        type: boolean
      offline_snapshot:
        description: 'Force offline samples only (no network)'
        required: false
        default: false
        type: boolean

jobs:
  refresh:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: read
    env:
      ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
      KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
      FEED_GATEWAY_HOST: concelier-webservice
      FEED_GATEWAY_SCHEME: http
      LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
      OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set run metadata
        id: meta
        run: |
          RUN_DATE=$(date -u +%Y%m%d)
          RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
          echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
          echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
          echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run ICS/KISA refresh
        run: |
          python scripts/feeds/run_icscisa_kisa_refresh.py \
            --out-dir out/feeds/icscisa-kisa \
            --run-date "${{ steps.meta.outputs.run_date }}" \
            --run-id "${{ steps.meta.outputs.run_id }}"

      - name: Show fetch log
        run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log

      - name: Upload refresh artifacts
        uses: actions/upload-artifact@v4
        with:
          name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
          path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
          if-no-files-found: error
          retention-days: 21

@@ -1,375 +0,0 @@
# Sprint 3500.0004.0003 - T6: Integration Tests CI Gate
# Runs integration tests on PR and gates merges on failures

name: integration-tests-gate

on:
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/**'
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
  push:
    branches: [main]
  schedule:
    # Required for the nightly-determinism job below, which gates on
    # github.event_name == 'schedule'; the exact time is a suggested default.
    - cron: '0 2 * * *'
  workflow_dispatch:
    inputs:
      run_performance:
        description: 'Run performance baseline tests'
        type: boolean
        default: false
      run_airgap:
        description: 'Run air-gap tests'
        type: boolean
        default: false

concurrency:
  group: integration-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # ==========================================================================
  # T6-AC1: Integration tests run on PR
  # ==========================================================================
  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-latest
    timeout-minutes: 30
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: test-only
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/**/*.csproj

      - name: Build integration tests
        run: dotnet build src/__Tests/Integration/**/*.csproj --configuration Release --no-restore

      - name: Run Proof Chain Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.ProofChain \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=proofchain.trx" \
            --results-directory ./TestResults
        env:
          ConnectionStrings__StellaOps: "Host=localhost;Database=stellaops_test;Username=stellaops;Password=test-only"

      - name: Run Reachability Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Reachability \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=reachability.trx" \
            --results-directory ./TestResults

      - name: Run Unknowns Workflow Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Unknowns \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=unknowns.trx" \
            --results-directory ./TestResults

      - name: Run Determinism Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=determinism.trx" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-test-results
          path: TestResults/**/*.trx

      - name: Publish test summary
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: Integration Test Results
          path: TestResults/**/*.trx
          reporter: dotnet-trx

  # ==========================================================================
  # T6-AC2: Corpus validation on release branch
  # ==========================================================================
  corpus-validation:
    name: Golden Corpus Validation
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/main' || github.event_name == 'workflow_dispatch'
    timeout-minutes: 15

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Validate corpus manifest
        run: |
          python3 -c "
          import json
          import hashlib
          import os

          manifest_path = 'src/__Tests/__Benchmarks/golden-corpus/corpus-manifest.json'
          with open(manifest_path) as f:
              manifest = json.load(f)

          print(f'Corpus version: {manifest.get(\"corpus_version\", \"unknown\")}')
          print(f'Total cases: {manifest.get(\"total_cases\", 0)}')

          errors = []
          for case in manifest.get('cases', []):
              case_path = os.path.join('src/__Tests/__Benchmarks/golden-corpus', case['path'])
              if not os.path.isdir(case_path):
                  errors.append(f'Missing case directory: {case_path}')
              else:
                  required_files = ['case.json', 'expected-score.json']
                  for f in required_files:
                      if not os.path.exists(os.path.join(case_path, f)):
                          errors.append(f'Missing file: {case_path}/{f}')

          if errors:
              print('\\nValidation errors:')
              for e in errors:
                  print(f'  - {e}')
              exit(1)
          else:
              print('\\nCorpus validation passed!')
          "

      - name: Run corpus scoring tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --filter "Category=GoldenCorpus" \
            --configuration Release \
            --logger "trx;LogFileName=corpus.trx" \
            --results-directory ./TestResults

  # ==========================================================================
  # T6-AC3: Determinism tests on nightly
  # ==========================================================================
  nightly-determinism:
    name: Nightly Determinism Check
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true')
    timeout-minutes: 45

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run full determinism suite
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --configuration Release \
            --logger "trx;LogFileName=determinism-full.trx" \
            --results-directory ./TestResults

      - name: Run cross-run determinism check
        run: |
          # Run scoring 3 times and compare hashes
          for i in 1 2 3; do
            dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
              --filter "FullyQualifiedName~IdenticalInput_ProducesIdenticalHash" \
              --results-directory ./TestResults/run-$i
          done

          # Compare all results
          echo "Comparing determinism across runs..."

      - name: Upload determinism results
        uses: actions/upload-artifact@v4
        with:
          name: nightly-determinism-results
          path: TestResults/**

  # ==========================================================================
  # T6-AC4: Test coverage reported to dashboard
  # ==========================================================================
  coverage-report:
    name: Coverage Report
    runs-on: ubuntu-latest
    needs: [integration-tests]

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run tests with coverage
        run: |
          dotnet test src/__Tests/Integration/**/*.csproj \
            --configuration Release \
            --collect:"XPlat Code Coverage" \
            --results-directory ./TestResults/Coverage

      - name: Generate coverage report
        uses: danielpalme/ReportGenerator-GitHub-Action@5.2.0
        with:
          reports: TestResults/Coverage/**/coverage.cobertura.xml
          targetdir: TestResults/CoverageReport
          reporttypes: 'Html;Cobertura;MarkdownSummary'

      - name: Upload coverage report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-report
          path: TestResults/CoverageReport/**

      - name: Add coverage to PR comment
        uses: marocchino/sticky-pull-request-comment@v2
        if: github.event_name == 'pull_request'
        with:
          recreate: true
          path: TestResults/CoverageReport/Summary.md

  # ==========================================================================
  # T6-AC5: Flaky test quarantine process
  # ==========================================================================
  flaky-test-check:
    name: Flaky Test Detection
    runs-on: ubuntu-latest
    needs: [integration-tests]
    if: failure()

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check for known flaky tests
        run: |
          # Check if failure is from a known flaky test
          QUARANTINE_FILE=".github/flaky-tests-quarantine.json"
          if [ -f "$QUARANTINE_FILE" ]; then
            echo "Checking against quarantine list..."
            # Implementation would compare failed tests against quarantine
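            # Hypothetical sketch (the quarantine file's field names are assumptions):
            # jq -r '.quarantined[].fullyQualifiedName' "$QUARANTINE_FILE" | sort > /tmp/quarantined.txt
            # grep -hoP 'testName="\K[^"]+' TestResults/**/*.trx | sort -u > /tmp/failed.txt
            # comm -12 /tmp/quarantined.txt /tmp/failed.txt || echo "No quarantined tests among failures"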
          fi

      - name: Create flaky test issue
        uses: actions/github-script@v7
        if: always()
        with:
          script: |
            // After 2 consecutive failures, create issue for quarantine review
            console.log('Checking for flaky test patterns...');
            // Implementation would analyze test history
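            // Hedged sketch only; the octokit calls are standard, the thresholds assumed:
            // const runs = await github.rest.actions.listWorkflowRuns({
            //   owner: context.repo.owner, repo: context.repo.repo,
            //   workflow_id: 'integration-tests-gate.yml', per_page: 2 });
            // if (runs.data.workflow_runs.every(r => r.conclusion === 'failure')) {
            //   await github.rest.issues.create({
            //     owner: context.repo.owner, repo: context.repo.repo,
            //     title: 'Flaky test quarantine review',
            //     body: 'Two consecutive integration-test failures; review for quarantine.' });
            // }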

  # ==========================================================================
  # Performance Tests (optional, on demand)
  # ==========================================================================
  performance-tests:
    name: Performance Baseline Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true'
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run performance tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Performance \
            --configuration Release \
            --logger "trx;LogFileName=performance.trx" \
            --results-directory ./TestResults

      - name: Upload performance report
        uses: actions/upload-artifact@v4
        with:
          name: performance-report
          path: |
            TestResults/**
            src/__Tests/Integration/StellaOps.Integration.Performance/output/**

      - name: Check for regressions
        run: |
          # Check if any test exceeded 20% threshold
          if [ -f "src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json" ]; then
            python3 -c "
          import json
          with open('src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json') as f:
              report = json.load(f)
          regressions = [m for m in report.get('Metrics', []) if m.get('DeltaPercent', 0) > 20]
          if regressions:
              print('Performance regressions detected!')
              for r in regressions:
                  print(f'  {r[\"Name\"]}: +{r[\"DeltaPercent\"]:.1f}%')
              exit(1)
          print('No performance regressions detected.')
          "
          fi

  # ==========================================================================
  # Air-Gap Tests (optional, on demand)
  # ==========================================================================
  airgap-tests:
    name: Air-Gap Integration Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_airgap == 'true'
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run air-gap tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.AirGap \
            --configuration Release \
            --logger "trx;LogFileName=airgap.trx" \
            --results-directory ./TestResults

      - name: Upload air-gap test results
        uses: actions/upload-artifact@v4
        with:
          name: airgap-test-results
          path: TestResults/**

@@ -1,129 +0,0 @@
name: Interop E2E Tests

on:
  pull_request:
    paths:
      - 'src/Scanner/**'
      - 'src/Excititor/**'
      - 'src/__Tests/interop/**'
  schedule:
    - cron: '0 6 * * *' # Nightly at 6 AM UTC
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.100'

jobs:
  interop-tests:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    strategy:
      fail-fast: false
      matrix:
        format: [cyclonedx, spdx]
        arch: [amd64]
        include:
          - format: cyclonedx
            format_flag: cyclonedx-json
          - format: spdx
            format_flag: spdx-json

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin
          syft --version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin
          grype --version

      - name: Install cosign
        run: |
          curl -sSfL https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-amd64 -o /usr/local/bin/cosign
          chmod +x /usr/local/bin/cosign
          cosign version

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/StellaOps.sln

      - name: Build Stella CLI
        run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release

      - name: Build interop tests
        run: dotnet build src/__Tests/interop/StellaOps.Interop.Tests/StellaOps.Interop.Tests.csproj

      - name: Run interop tests
        run: |
          dotnet test src/__Tests/interop/StellaOps.Interop.Tests \
            --filter "Format=${{ matrix.format }}" \
            --logger "trx;LogFileName=interop-${{ matrix.format }}.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./results \
            -- RunConfiguration.TestSessionTimeout=900000

      - name: Generate parity report
        if: always()
        run: |
          # TODO: Generate parity report from test results
          echo '{"format": "${{ matrix.format }}", "parityPercent": 0}' > ./results/parity-report-${{ matrix.format }}.json
|
||||
|
||||
- name: Upload test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: interop-test-results-${{ matrix.format }}
|
||||
path: ./results/
|
||||
|
||||
- name: Check parity threshold
|
||||
if: always()
|
||||
run: |
|
||||
PARITY=$(jq '.parityPercent' ./results/parity-report-${{ matrix.format }}.json 2>/dev/null || echo "0")
|
||||
echo "Parity for ${{ matrix.format }}: ${PARITY}%"
|
||||
|
||||
if (( $(echo "$PARITY < 95" | bc -l 2>/dev/null || echo "1") )); then
|
||||
echo "::warning::Findings parity ${PARITY}% is below 95% threshold for ${{ matrix.format }}"
|
||||
# Don't fail the build yet - this is initial implementation
|
||||
# exit 1
|
||||
fi
|
||||
|
||||
summary:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
needs: interop-tests
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ./all-results
|
||||
|
||||
- name: Generate summary
|
||||
run: |
|
||||
echo "## Interop Test Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Format | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
for format in cyclonedx spdx; do
|
||||
if [ -f "./all-results/interop-test-results-${format}/parity-report-${format}.json" ]; then
|
||||
PARITY=$(jq -r '.parityPercent // 0' "./all-results/interop-test-results-${format}/parity-report-${format}.json")
|
||||
if (( $(echo "$PARITY >= 95" | bc -l 2>/dev/null || echo "0") )); then
|
||||
STATUS="✅ Pass (${PARITY}%)"
|
||||
else
|
||||
STATUS="⚠️ Below threshold (${PARITY}%)"
|
||||
fi
|
||||
else
|
||||
STATUS="❌ No results"
|
||||
fi
|
||||
echo "| ${format} | ${STATUS} |" >> $GITHUB_STEP_SUMMARY
|
||||
done
|
||||
|
||||
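# Sketch (assumption, not in this commit): one way the TODO parity step above
# could derive a real figure from the TRX output, using the stock VSTest TRX
# attributes; the path mirrors the results directory used in the workflow.
total=$(grep -c '<UnitTestResult ' ./results/interop-cyclonedx.trx || true)
passed=$(grep -c 'outcome="Passed"' ./results/interop-cyclonedx.trx || true)
parity=0
[ "${total:-0}" -gt 0 ] && parity=$(( passed * 100 / total ))
printf '{"format": "cyclonedx", "parityPercent": %d}\n' "$parity" > ./results/parity-report-cyclonedx.json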
@@ -1,82 +0,0 @@
name: Ledger OpenAPI CI

on:
  workflow_dispatch:
  push:
    branches: [main]
    paths:
      - 'api/ledger/**'
      - 'devops/ledger/**'
  pull_request:
    paths:
      - 'api/ledger/**'

jobs:
  validate-oas:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install tools
        run: |
          npm install -g @stoplight/spectral-cli
          npm install -g @openapitools/openapi-generator-cli

      - name: Validate OpenAPI spec
        run: |
          chmod +x devops/ledger/validate-oas.sh
          devops/ledger/validate-oas.sh

      - name: Upload validation report
        uses: actions/upload-artifact@v4
        with:
          name: ledger-oas-validation-${{ github.run_number }}
          path: |
            out/ledger/oas/lint-report.json
            out/ledger/oas/validation-report.txt
            out/ledger/oas/spec-summary.json
          if-no-files-found: warn

  check-wellknown:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check .well-known/openapi structure
        run: |
          # Validate the .well-known structure if it exists
          if [ -d ".well-known" ]; then
            echo "Checking .well-known/openapi..."
            if [ -f ".well-known/openapi.json" ]; then
              python3 -c "import json; json.load(open('.well-known/openapi.json'))"
              echo ".well-known/openapi.json is valid JSON"
            fi
          else
            echo "[info] .well-known directory not present (OK for dev)"
          fi

  deprecation-check:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check deprecation policy
        run: |
          if [ -f "devops/ledger/deprecation-policy.yaml" ]; then
            echo "Validating deprecation policy..."
            python3 -c "import yaml; yaml.safe_load(open('devops/ledger/deprecation-policy.yaml'))"
            echo "Deprecation policy is valid"
          else
            echo "[info] No deprecation policy yet (OK for initial setup)"
          fi
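# Sketch (assumption): the shape devops/ledger/validate-oas.sh likely takes,
# inferred from the tools installed and the report paths uploaded above; the
# spec location api/ledger/openapi.yaml is a guess based on the path filters.
mkdir -p out/ledger/oas
spectral lint api/ledger/openapi.yaml -f json -o out/ledger/oas/lint-report.json
openapi-generator-cli validate -i api/ledger/openapi.yaml | tee out/ledger/oas/validation-report.txt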
@@ -1,102 +0,0 @@
name: Ledger Packs CI

on:
  workflow_dispatch:
    inputs:
      snapshot_id:
        description: 'Snapshot ID (leave empty for auto)'
        required: false
        default: ''
      sign:
        description: 'Sign pack (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'devops/ledger/**'

jobs:
  build-pack:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ] || [ "${{ github.event.inputs.sign }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Build pack
        run: |
          chmod +x devops/ledger/build-pack.sh
          SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
          if [ -z "$SNAPSHOT_ID" ]; then
            SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
          fi

          SIGN_FLAG=""
          if [ "${{ github.event.inputs.sign }}" = "1" ] || [ -n "${COSIGN_PRIVATE_KEY_B64}" ]; then
            SIGN_FLAG="--sign"
          fi

          SNAPSHOT_ID="$SNAPSHOT_ID" devops/ledger/build-pack.sh $SIGN_FLAG

      - name: Verify checksums
        run: |
          cd out/ledger/packs
          for f in *.SHA256SUMS; do
            if [ -f "$f" ]; then
              sha256sum -c "$f"
            fi
          done

      - name: Upload pack
        uses: actions/upload-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: |
            out/ledger/packs/*.pack.tar.gz
            out/ledger/packs/*.SHA256SUMS
            out/ledger/packs/*.dsse.json
          if-no-files-found: warn
          retention-days: 30

  verify-pack:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: build-pack
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download pack
        uses: actions/download-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: out/ledger/packs/

      - name: Verify pack structure
        run: |
          cd out/ledger/packs
          for pack in *.pack.tar.gz; do
            if [ -f "$pack" ]; then
              echo "Verifying $pack..."
              tar -tzf "$pack" | head -20

              # Extract and check manifest
              tar -xzf "$pack" -C /tmp manifest.json 2>/dev/null || true
              if [ -f /tmp/manifest.json ]; then
                python3 -c "import json; json.load(open('/tmp/manifest.json'))"
                echo "Pack manifest is valid JSON"
              fi
            fi
          done
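# Sketch: a manual spot-check of a downloaded pack outside CI, using only
# jq/sha256sum; the in-toto payloadType value is the usual DSSE convention,
# assumed here rather than confirmed by the pack format itself.
cd out/ledger/packs
sha256sum -c ./*.SHA256SUMS
jq -r '.payloadType' ./*.dsse.json
jq -r '.payload' ./*.dsse.json | base64 -d | jq .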
@@ -1,300 +0,0 @@
name: License Audit

on:
  pull_request:
    paths:
      - '**/*.csproj'
      - '**/package.json'
      - '**/package-lock.json'
      - 'Directory.Build.props'
      - 'Directory.Packages.props'
      - 'NOTICE.md'
      - 'third-party-licenses/**'
      - 'docs/legal/**'
      - '.gitea/workflows/license-audit.yml'
      - '.gitea/scripts/validate/validate-licenses.sh'
  push:
    branches: [ main ]
    paths:
      - '**/*.csproj'
      - '**/package.json'
      - '**/package-lock.json'
      - 'Directory.Build.props'
      - 'Directory.Packages.props'
  schedule:
    # Weekly audit every Sunday at 00:00 UTC
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      full_scan:
        description: 'Run full transitive dependency scan'
        required: false
        default: 'false'
        type: boolean

jobs:
  nuget-license-audit:
    name: NuGet License Audit
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.nuget/packages
            .nuget/packages
          key: license-audit-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Install dotnet-delice
        run: dotnet tool install --global dotnet-delice || true

      - name: Extract NuGet licenses
        run: |
          mkdir -p out/license-audit

          # List packages from key projects
          for proj in \
            src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj \
            src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
            src/Authority/StellaOps.Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj \
            src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj
          do
            if [ -f "$proj" ]; then
              name=$(basename $(dirname "$proj"))
              echo "Scanning: $proj"
              dotnet list "$proj" package --include-transitive 2>/dev/null | tee -a out/license-audit/nuget-packages.txt || true
            fi
          done

      - name: Validate against allowlist
        run: |
          bash .gitea/scripts/validate/validate-licenses.sh nuget out/license-audit/nuget-packages.txt

      - name: Upload NuGet license report
        uses: actions/upload-artifact@v4
        with:
          name: nuget-license-report
          path: out/license-audit
          retention-days: 30

  npm-license-audit:
    name: npm License Audit
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install license-checker
        run: npm install -g license-checker

      - name: Audit Angular frontend
        run: |
          mkdir -p out/license-audit
          cd src/Web/StellaOps.Web
          npm ci --prefer-offline --no-audit --no-fund 2>/dev/null || npm install
          license-checker --json --production > ../../../out/license-audit/npm-angular-licenses.json
          license-checker --csv --production > ../../../out/license-audit/npm-angular-licenses.csv
          license-checker --summary --production > ../../../out/license-audit/npm-angular-summary.txt

      - name: Audit DevPortal
        run: |
          cd src/DevPortal/StellaOps.DevPortal.Site
          if [ -f package-lock.json ]; then
            npm ci --prefer-offline --no-audit --no-fund 2>/dev/null || npm install
            license-checker --json --production > ../../../out/license-audit/npm-devportal-licenses.json || true
          fi
        continue-on-error: true

      - name: Validate against allowlist
        run: |
          bash .gitea/scripts/validate/validate-licenses.sh npm out/license-audit/npm-angular-licenses.json

      - name: Upload npm license report
        uses: actions/upload-artifact@v4
        with:
          name: npm-license-report
          path: out/license-audit
          retention-days: 30

  vendored-license-check:
    name: Vendored Components Check
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Verify vendored license files exist
        run: |
          echo "Checking vendored license files..."

          # Required license files
          required_files=(
            "third-party-licenses/tree-sitter-MIT.txt"
            "third-party-licenses/tree-sitter-ruby-MIT.txt"
            "third-party-licenses/AlexMAS.GostCryptography-MIT.txt"
          )

          missing=0
          for file in "${required_files[@]}"; do
            if [ ! -f "$file" ]; then
              echo "ERROR: Missing required license file: $file"
              missing=$((missing + 1))
            else
              echo "OK: $file"
            fi
          done

          if [ $missing -gt 0 ]; then
            echo "ERROR: $missing required license file(s) missing"
            exit 1
          fi

          echo "All vendored license files present."

      - name: Verify NOTICE.md is up to date
        run: |
          echo "Checking NOTICE.md references..."

          # Check that vendored components are mentioned in NOTICE.md
          for component in "tree-sitter" "AlexMAS.GostCryptography" "CryptoPro"; do
            if ! grep -q "$component" NOTICE.md; then
              echo "WARNING: $component not mentioned in NOTICE.md"
            else
              echo "OK: $component referenced in NOTICE.md"
            fi
          done

      - name: Verify vendored source has LICENSE
        run: |
          echo "Checking vendored source directories..."

          # GostCryptography fork must have LICENSE file
          gost_dir="src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/third_party/AlexMAS.GostCryptography"
          if [ -d "$gost_dir" ]; then
            if [ ! -f "$gost_dir/LICENSE" ]; then
              echo "ERROR: $gost_dir is missing LICENSE file"
              exit 1
            else
              echo "OK: $gost_dir/LICENSE exists"
            fi
          fi

  license-compatibility-check:
    name: License Compatibility Check
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [nuget-license-audit, npm-license-audit]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download NuGet report
        uses: actions/download-artifact@v4
        with:
          name: nuget-license-report
          path: out/nuget

      - name: Download npm report
        uses: actions/download-artifact@v4
        with:
          name: npm-license-report
          path: out/npm

      - name: Check for incompatible licenses
        run: |
          echo "Checking for AGPL-3.0-or-later incompatible licenses..."

          # Known incompatible licenses (SPDX identifiers)
          incompatible=(
            "GPL-2.0-only"
            "SSPL-1.0"
            "BUSL-1.1"
            "Commons-Clause"
            "Proprietary"
          )

          found_issues=0

          # Check npm report
          if [ -f out/npm/npm-angular-licenses.json ]; then
            for license in "${incompatible[@]}"; do
              if grep -qi "\"$license\"" out/npm/npm-angular-licenses.json; then
                echo "ERROR: Incompatible license found in npm dependencies: $license"
                found_issues=$((found_issues + 1))
              fi
            done
          fi

          if [ $found_issues -gt 0 ]; then
            echo "ERROR: Found $found_issues incompatible license(s)"
            exit 1
          fi

          echo "All licenses compatible with AGPL-3.0-or-later"

      - name: Generate combined report
        run: |
          mkdir -p out/combined
          # Unquoted delimiter so $(date ...) expands inside the heredoc
          cat > out/combined/license-audit-summary.md << EOF
          # License Audit Summary

          Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ)
          Commit: ${{ github.sha }}

          ## Status: PASSED

          All dependencies use licenses compatible with AGPL-3.0-or-later.

          ## Allowed Licenses
          - MIT
          - Apache-2.0
          - BSD-2-Clause
          - BSD-3-Clause
          - ISC
          - 0BSD
          - PostgreSQL
          - MPL-2.0
          - CC0-1.0
          - Unlicense

          ## Reports
          - NuGet: See nuget-license-report artifact
          - npm: See npm-license-report artifact

          ## Documentation
          - Full dependency list: docs/legal/THIRD-PARTY-DEPENDENCIES.md
          - Compatibility analysis: docs/legal/LICENSE-COMPATIBILITY.md
          EOF

      - name: Upload combined report
        uses: actions/upload-artifact@v4
        with:
          name: license-audit-summary
          path: out/combined
          retention-days: 90
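# Sketch of the allowlist check that .gitea/scripts/validate/validate-licenses.sh
# is expected to perform for npm input (the real script is not in this diff;
# the allowlist literal mirrors the summary report above):
allow='MIT|Apache-2.0|BSD-2-Clause|BSD-3-Clause|ISC|0BSD|PostgreSQL|MPL-2.0|CC0-1.0|Unlicense'
jq -r '.[].licenses | if type == "array" then .[] else . end' \
  out/license-audit/npm-angular-licenses.json | sort -u | \
  grep -Evx "($allow)" && { echo "unlisted licenses found"; exit 1; } || echo "allowlist OK"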
@@ -1,189 +0,0 @@
# .gitea/workflows/lighthouse-ci.yml
# Lighthouse CI for performance and accessibility testing of the StellaOps Web UI

name: Lighthouse CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Web/StellaOps.Web/**'
      - '.gitea/workflows/lighthouse-ci.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Web/StellaOps.Web/**'
  schedule:
    # Run weekly on Sunday at 2 AM UTC
    - cron: '0 2 * * 0'
  workflow_dispatch:

env:
  NODE_VERSION: '20'
  LHCI_BUILD_CONTEXT__CURRENT_BRANCH: ${{ github.head_ref || github.ref_name }}
  LHCI_BUILD_CONTEXT__COMMIT_SHA: ${{ github.sha }}

jobs:
  lighthouse:
    name: Lighthouse Audit
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Install Lighthouse CI
        run: npm install -g @lhci/cli@0.13.x

      - name: Run Lighthouse CI
        run: |
          lhci autorun \
            --collect.staticDistDir=./dist/stella-ops-web/browser \
            --collect.numberOfRuns=3 \
            --assert.preset=lighthouse:recommended \
            --assert.assertions.categories:performance=off \
            --assert.assertions.categories:accessibility=off \
            --upload.target=filesystem \
            --upload.outputDir=./lighthouse-results

      - name: Evaluate Lighthouse Results
        id: lhci-results
        run: |
          # Parse the latest Lighthouse report
          REPORT=$(ls -t lighthouse-results/*.json | head -1)

          if [ -f "$REPORT" ]; then
            PERF=$(jq '.categories.performance.score * 100' "$REPORT" | cut -d. -f1)
            A11Y=$(jq '.categories.accessibility.score * 100' "$REPORT" | cut -d. -f1)
            BP=$(jq '.categories["best-practices"].score * 100' "$REPORT" | cut -d. -f1)
            SEO=$(jq '.categories.seo.score * 100' "$REPORT" | cut -d. -f1)

            echo "performance=$PERF" >> $GITHUB_OUTPUT
            echo "accessibility=$A11Y" >> $GITHUB_OUTPUT
            echo "best-practices=$BP" >> $GITHUB_OUTPUT
            echo "seo=$SEO" >> $GITHUB_OUTPUT

            echo "## Lighthouse Results" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "| Category | Score | Threshold | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|----------|-------|-----------|--------|" >> $GITHUB_STEP_SUMMARY

            # Performance: target >= 90
            if [ "$PERF" -ge 90 ]; then
              echo "| Performance | $PERF | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Performance | $PERF | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Accessibility: target >= 95
            if [ "$A11Y" -ge 95 ]; then
              echo "| Accessibility | $A11Y | >= 95 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Accessibility | $A11Y | >= 95 | :x: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Best Practices: target >= 90
            if [ "$BP" -ge 90 ]; then
              echo "| Best Practices | $BP | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Best Practices | $BP | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # SEO: target >= 90
            if [ "$SEO" -ge 90 ]; then
              echo "| SEO | $SEO | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| SEO | $SEO | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi
          fi

      - name: Check Quality Gates
        run: |
          PERF=${{ steps.lhci-results.outputs.performance }}
          A11Y=${{ steps.lhci-results.outputs.accessibility }}

          FAILED=0

          # Performance gate (warning only, not blocking)
          if [ "$PERF" -lt 90 ]; then
            echo "::warning::Performance score ($PERF) is below target (90)"
          fi

          # Accessibility gate (blocking)
          if [ "$A11Y" -lt 95 ]; then
            echo "::error::Accessibility score ($A11Y) is below required threshold (95)"
            FAILED=1
          fi

          if [ "$FAILED" -eq 1 ]; then
            exit 1
          fi

      - name: Upload Lighthouse Reports
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: lighthouse-reports
          path: src/Web/StellaOps.Web/lighthouse-results/
          retention-days: 30

  axe-accessibility:
    name: Axe Accessibility Audit
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Install Playwright browsers
        run: npx playwright install --with-deps chromium

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Start preview server
        run: |
          npx serve -s dist/stella-ops-web/browser -l 4200 &
          sleep 5

      - name: Run Axe accessibility tests
        run: |
          npm run test:a11y || true

      - name: Upload Axe results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: axe-accessibility-results
          path: src/Web/StellaOps.Web/test-results/
          retention-days: 30
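# Sketch: reproducing the audit locally against the same static build; the
# flags mirror the Run Lighthouse CI step above, assuming the dist path the
# Angular production build emits.
cd src/Web/StellaOps.Web
npm ci && npm run build -- --configuration production
npx @lhci/cli@0.13.x autorun \
  --collect.staticDistDir=./dist/stella-ops-web/browser \
  --collect.numberOfRuns=1 \
  --upload.target=filesystem --upload.outputDir=./lighthouse-results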
@@ -1,65 +0,0 @@
name: LNM Backfill CI

on:
  workflow_dispatch:
    inputs:
      mongo_uri:
        description: 'Staging Mongo URI (read-only snapshot)'
        required: true
        type: string
      since_commit:
        description: 'Git commit to compare (default HEAD)'
        required: false
        type: string
      dry_run:
        description: 'Dry run (no writes)'
        required: false
        default: true
        type: boolean

jobs:
  lnm-backfill:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj

      - name: Run backfill (dry-run supported)
        env:
          STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
        run: |
          mkdir -p $ARTIFACT_DIR
          EXTRA=()
          if [ "${{ inputs.dry_run }}" = "true" ]; then EXTRA+=("--dry-run"); fi
          dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=observations --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" "${EXTRA[@]}" | tee $ARTIFACT_DIR/backfill-observations.log
          dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=linksets --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" "${EXTRA[@]}" | tee $ARTIFACT_DIR/backfill-linksets.log

      - name: Validate counts
        env:
          STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
        run: |
          STAGING_MONGO_URI="$STAGING_MONGO_URI" devops/lnm/backfill-validation.sh

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-backfill-artifacts
          path: ${{ env.ARTIFACT_DIR }}
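# Sketch: the observations pass run locally against a snapshot with dry-run
# forced on; the URI is a placeholder, everything else mirrors the step above.
export STAGING_MONGO_URI='mongodb://localhost:27017/concelier-snapshot'  # placeholder
dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- \
  --mode=observations --batch-size=500 --max-conflicts=0 \
  --mongo "$STAGING_MONGO_URI" --dry-run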
@@ -1,84 +0,0 @@
name: LNM Migration CI

on:
  workflow_dispatch:
    inputs:
      run_staging:
        description: 'Run staging backfill (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
      - 'devops/lnm/**'

jobs:
  build-runner:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
        env:
          COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}

      - name: Build and package runner
        run: |
          chmod +x devops/lnm/package-runner.sh
          devops/lnm/package-runner.sh

      - name: Verify checksums
        run: |
          cd out/lnm
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-migration-runner-${{ github.run_number }}
          path: |
            out/lnm/lnm-migration-runner.tar.gz
            out/lnm/lnm-migration-runner.manifest.json
            out/lnm/lnm-migration-runner.dsse.json
            out/lnm/SHA256SUMS
          if-no-files-found: warn

  validate-metrics:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: build-runner
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate monitoring config
        run: |
          # Validate alert rules syntax
          if [ -f "devops/lnm/alerts/lnm-alerts.yaml" ]; then
            echo "Validating alert rules..."
            python3 -c "import yaml; yaml.safe_load(open('devops/lnm/alerts/lnm-alerts.yaml'))"
          fi

          # Validate dashboard JSON
          if [ -f "devops/lnm/dashboards/lnm-migration.json" ]; then
            echo "Validating dashboard..."
            python3 -c "import json; json.load(open('devops/lnm/dashboards/lnm-migration.json'))"
          fi

          echo "Monitoring config validation complete"
@@ -1,64 +0,0 @@
name: LNM VEX Backfill

on:
  workflow_dispatch:
    inputs:
      mongo_uri:
        description: 'Staging Mongo URI'
        required: true
        type: string
      nats_url:
        description: 'NATS URL'
        required: true
        type: string
      redis_url:
        description: 'Redis URL'
        required: true
        type: string
      dry_run:
        description: 'Dry run (no writes)'
        required: false
        default: true
        type: boolean

jobs:
  vex-backfill:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj

      - name: Run VEX backfill
        env:
          STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
          NATS_URL: ${{ inputs.nats_url }}
          REDIS_URL: ${{ inputs.redis_url }}
        run: |
          mkdir -p $ARTIFACT_DIR
          EXTRA=()
          if [ "${{ inputs.dry_run }}" = "true" ]; then EXTRA+=("--dry-run"); fi
          dotnet run --project src/Concelier/StellaOps.Concelier.Backfill/StellaOps.Concelier.Backfill.csproj -- --mode=vex --batch-size=500 --max-conflicts=0 --mongo "$STAGING_MONGO_URI" --nats "$NATS_URL" --redis "$REDIS_URL" "${EXTRA[@]}" | tee $ARTIFACT_DIR/vex-backfill.log

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-vex-backfill-artifacts
          path: ${{ env.ARTIFACT_DIR }}
137 .gitea/workflows/local-ci-verify.yml Normal file
@@ -0,0 +1,137 @@
# Local CI Verification Pipeline
# Manual-dispatch only — validates devops/ci-local/ scaffolding and CI image.
# Triggers: workflow_dispatch (Gitea UI or API).
name: Local CI Verification

on:
  workflow_dispatch:
    inputs:
      workflow:
        description: 'Archived workflow file to dry-run (e.g. test-matrix.yml). Leave empty to skip.'
        required: false
        default: ''
      dry_run:
        description: 'Pass -n (dry-run) to act'
        required: false
        default: 'true'

jobs:
  validate-scaffolding:
    name: Validate CI scaffolding
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check devops/ci-local files exist
        run: |
          echo "::group::Checking required files"
          errors=0

          for f in \
            devops/ci-local/.env.local.template \
            devops/ci-local/run-act.sh \
            devops/ci-local/run-act.ps1 \
            devops/ci-local/README.md \
            devops/ci-local/events/push.json \
            devops/ci-local/events/pull-request.json \
            devops/docker/Dockerfile.ci \
            .actrc; do
            if [ -f "$f" ]; then
              echo "✓ $f"
            else
              echo "✗ MISSING: $f"
              errors=$((errors + 1))
            fi
          done

          echo "::endgroup::"
          if [ "$errors" -gt 0 ]; then
            echo "::error::$errors required file(s) missing"
            exit 1
          fi

      - name: Lint event JSON files
        run: |
          echo "::group::Validating JSON payloads"
          for f in devops/ci-local/events/*.json; do
            if python3 -m json.tool "$f" > /dev/null 2>&1; then
              echo "✓ $f — valid JSON"
            else
              echo "✗ $f — invalid JSON"
              exit 1
            fi
          done
          echo "::endgroup::"

      - name: Verify runner scripts are executable
        run: |
          if [ ! -x devops/ci-local/run-act.sh ]; then
            echo "::warning::run-act.sh is not executable (chmod +x recommended)"
          fi

  build-ci-image:
    name: Build stellaops-ci image
    runs-on: ubuntu-latest
    needs: validate-scaffolding
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Build CI image
        run: |
          docker build \
            -t stellaops-ci:local \
            -f devops/docker/Dockerfile.ci \
            .

      - name: Verify image exists
        run: |
          docker image inspect stellaops-ci:local > /dev/null 2>&1
          echo "stellaops-ci:local built successfully"
          docker image ls stellaops-ci:local

  dry-run-smoke:
    name: Dry-run smoke test
    runs-on: ubuntu-latest
    needs: build-ci-image
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install act
        run: |
          curl -sSL https://raw.githubusercontent.com/nektos/act/master/install.sh | sudo bash

      - name: List jobs from archived test-matrix
        run: |
          act -l -W .gitea/workflows-archived/test-matrix.yml \
            -P ubuntu-latest=stellaops-ci:local \
            --env-file devops/ci-local/.env.local.template

      - name: Dry-run archived test-matrix
        run: |
          act -W .gitea/workflows-archived/test-matrix.yml -n \
            -P ubuntu-latest=stellaops-ci:local \
            --env-file devops/ci-local/.env.local.template \
            -e devops/ci-local/events/push.json

      - name: Dry-run user-specified workflow
        if: ${{ github.event.inputs.workflow != '' }}
        run: |
          WORKFLOW="${{ github.event.inputs.workflow }}"
          ARCHIVE_PATH=".gitea/workflows-archived/${WORKFLOW}"

          if [ ! -f "$ARCHIVE_PATH" ]; then
            echo "::error::Workflow not found: $ARCHIVE_PATH"
            exit 1
          fi

          ACT_ARGS="-W $ARCHIVE_PATH -P ubuntu-latest=stellaops-ci:local --env-file devops/ci-local/.env.local.template -e devops/ci-local/events/push.json"

          if [ "${{ github.event.inputs.dry_run }}" = "true" ]; then
            ACT_ARGS="$ACT_ARGS -n"
          fi

          echo "Running: act $ACT_ARGS"
          act $ACT_ARGS
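# Sketch (assumption): the wrapper shape devops/ci-local/run-act.sh plausibly
# has; the real script body is not in this diff. It mirrors the act flags used
# in the dry-run steps above.
set -euo pipefail
workflow="${1:?usage: run-act.sh <workflow.yml> [extra act args...]}"
shift
exec act -W ".gitea/workflows-archived/${workflow}" \
  -P ubuntu-latest=stellaops-ci:local \
  --env-file devops/ci-local/.env.local.template \
  -e devops/ci-local/events/push.json "$@"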
@@ -1,125 +0,0 @@
name: Manifest Integrity

on:
  push:
    branches: [main]
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'
  pull_request:
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'

jobs:
  validate-schemas:
    name: Validate Schema Integrity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: npm install -g ajv-cli ajv-formats

      - name: Validate JSON schemas
        run: |
          EXIT_CODE=0
          for schema in docs/schemas/*.schema.json; do
            echo "Validating $schema..."
            if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
              echo "Error: $schema is invalid"
              EXIT_CODE=1
            fi
          done
          exit $EXIT_CODE

  validate-contracts:
    name: Validate Contract Documents
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check contract structure
        run: |
          for contract in docs/contracts/*.md; do
            echo "Checking $contract..."
            # Verify required sections exist
            if ! grep -q "^## " "$contract"; then
              echo "Warning: $contract missing section headers"
            fi
            # Check for decision ID
            if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
              echo "Warning: $contract missing decision ID format"
            fi
          done

  validate-pack-fixtures:
    name: Validate Pack Fixtures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: pip install jsonschema

      - name: Run fixture validation
        run: |
          if [ -f .gitea/scripts/test/run-fixtures-check.sh ]; then
            chmod +x .gitea/scripts/test/run-fixtures-check.sh
            ./.gitea/scripts/test/run-fixtures-check.sh
          fi

  checksum-audit:
    name: Audit SHA256SUMS Files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Validate checksums
        run: |
          find . -name "SHA256SUMS" -type f | while read f; do
            dir=$(dirname "$f")
            echo "Validating checksums in $dir..."
            cd "$dir"
            # Check if all referenced files exist
            while read hash file; do
              if [ ! -f "$file" ]; then
                echo "Warning: $file referenced in SHA256SUMS but not found"
              fi
            done < SHA256SUMS
            cd - > /dev/null
          done

  merkle-consistency:
    name: Verify Merkle Roots
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check DSSE Merkle roots
        run: |
          find . -name "*.dsse.json" -type f | while read f; do
            echo "Checking Merkle root in $f..."
            # Extract and validate Merkle root if present
            if jq -e '.payload' "$f" > /dev/null 2>&1; then
              PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
              if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
                MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
                echo " Merkle root: $MERKLE"
              fi
            fi
          done
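# Sketch: regenerating a SHA256SUMS file so the audit above passes after
# artefacts change (run from the directory that holds the manifest):
find . -maxdepth 1 -type f ! -name SHA256SUMS -exec sha256sum {} + | sed 's|\./||' > SHA256SUMS
sha256sum -c SHA256SUMS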
@@ -1,513 +0,0 @@
# .gitea/workflows/migration-test.yml
# Database Migration Testing Workflow
# Sprint: CI/CD Enhancement - Migration Safety
#
# Purpose: Validate database migrations work correctly in both directions
#   - Forward migrations (upgrade)
#   - Backward migrations (rollback)
#   - Idempotency checks (re-running migrations)
#   - Data integrity verification
#
# Triggers:
#   - Pull requests that modify migration files
#   - Scheduled daily validation
#   - Manual dispatch for full migration suite
#
# Prerequisites:
#   - PostgreSQL 16+ database
#   - EF Core migrations in src/**/Migrations/
#   - Migration scripts in devops/database/migrations/

name: Migration Testing

on:
  push:
    branches: [main]
    paths:
      - '**/Migrations/**'
      - 'devops/database/**'
  pull_request:
    paths:
      - '**/Migrations/**'
      - 'devops/database/**'
  schedule:
    - cron: '30 4 * * *' # Daily at 4:30 AM UTC
  workflow_dispatch:
    inputs:
      test_rollback:
        description: 'Test rollback migrations'
        type: boolean
        default: true
      test_idempotency:
        description: 'Test migration idempotency'
        type: boolean
        default: true
      target_module:
        description: 'Specific module to test (empty = all)'
        type: string
        default: ''
      baseline_version:
        description: 'Baseline version to test from'
        type: string
        default: ''

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  TZ: UTC
  POSTGRES_HOST: localhost
  POSTGRES_PORT: 5432
  POSTGRES_USER: stellaops_migration
  POSTGRES_PASSWORD: migration_test_password
  POSTGRES_DB: stellaops_migration_test

jobs:
  # ===========================================================================
  # DISCOVER MODULES WITH MIGRATIONS
  # ===========================================================================

  discover:
    name: Discover Migrations
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    outputs:
      modules: ${{ steps.find.outputs.modules }}
      module_count: ${{ steps.find.outputs.count }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Find modules with migrations
        id: find
        run: |
          # Find all EF Core migration directories
          MODULES=$(find src -type d -name "Migrations" -path "*/Persistence/*" | \
            sed 's|/Migrations||' | \
            sort -u | \
            jq -R -s -c 'split("\n") | map(select(length > 0))')

          COUNT=$(echo "$MODULES" | jq 'length')

          echo "Found $COUNT modules with migrations"
          echo "$MODULES" | jq -r '.[]'

          # Filter by target module if specified
          if [[ -n "${{ github.event.inputs.target_module }}" ]]; then
            MODULES=$(echo "$MODULES" | jq -c --arg target "${{ github.event.inputs.target_module }}" \
              'map(select(contains($target)))')
            COUNT=$(echo "$MODULES" | jq 'length')
            echo "Filtered to $COUNT modules matching: ${{ github.event.inputs.target_module }}"
          fi

          echo "modules=$MODULES" >> $GITHUB_OUTPUT
          echo "count=$COUNT" >> $GITHUB_OUTPUT

      - name: Display discovered modules
        run: |
          echo "## Discovered Migration Modules" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Module | Path |" >> $GITHUB_STEP_SUMMARY
          echo "|--------|------|" >> $GITHUB_STEP_SUMMARY
          for path in $(echo '${{ steps.find.outputs.modules }}' | jq -r '.[]'); do
            module=$(basename $(dirname "$path"))
            echo "| $module | $path |" >> $GITHUB_STEP_SUMMARY
          done

  # ===========================================================================
  # FORWARD MIGRATION TESTS
  # ===========================================================================

  forward-migrations:
    name: Forward Migration
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 30
    needs: discover
    if: needs.discover.outputs.module_count != '0'
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ env.POSTGRES_DB }}
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    strategy:
      fail-fast: false
      matrix:
        module: ${{ fromJson(needs.discover.outputs.modules) }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install EF Core tools
        run: dotnet tool install -g dotnet-ef

      - name: Get module name
        id: module
        run: |
          MODULE_NAME=$(basename $(dirname "${{ matrix.module }}"))
          echo "name=$MODULE_NAME" >> $GITHUB_OUTPUT
          echo "Testing module: $MODULE_NAME"

      - name: Find project file
        id: project
        run: |
          # Find the csproj file in the persistence directory
          PROJECT_FILE=$(find "${{ matrix.module }}" -maxdepth 1 -name "*.csproj" | head -1)
          if [[ -z "$PROJECT_FILE" ]]; then
            echo "::error::No project file found in ${{ matrix.module }}"
            exit 1
          fi
          echo "project=$PROJECT_FILE" >> $GITHUB_OUTPUT
          echo "Found project: $PROJECT_FILE"

      - name: Create fresh database
        run: |
          PGPASSWORD=${{ env.POSTGRES_PASSWORD }} psql -h ${{ env.POSTGRES_HOST }} \
            -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "DROP DATABASE IF EXISTS ${{ env.POSTGRES_DB }}_${{ steps.module.outputs.name }};"
          PGPASSWORD=${{ env.POSTGRES_PASSWORD }} psql -h ${{ env.POSTGRES_HOST }} \
            -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "CREATE DATABASE ${{ env.POSTGRES_DB }}_${{ steps.module.outputs.name }};"

      - name: Apply all migrations (forward)
        id: forward
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          echo "Applying migrations for ${{ steps.module.outputs.name }}..."

          # List available migrations first
          dotnet ef migrations list --project "${{ steps.project.outputs.project }}" \
            --no-build 2>/dev/null || true

          # Apply all migrations
          START_TIME=$(date +%s)
          dotnet ef database update --project "${{ steps.project.outputs.project }}"
          END_TIME=$(date +%s)
          DURATION=$((END_TIME - START_TIME))

          echo "duration=$DURATION" >> $GITHUB_OUTPUT
          echo "Migration completed in ${DURATION}s"

      - name: Verify schema
        env:
          PGPASSWORD: ${{ env.POSTGRES_PASSWORD }}
        run: |
          echo "## Schema verification for ${{ steps.module.outputs.name }}" >> $GITHUB_STEP_SUMMARY

          # Get table count
          TABLE_COUNT=$(psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} \
            -d "${{ env.POSTGRES_DB }}_${{ steps.module.outputs.name }}" -t -c \
            "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public';")

          echo "- Tables created: $TABLE_COUNT" >> $GITHUB_STEP_SUMMARY
          echo "- Migration time: ${{ steps.forward.outputs.duration }}s" >> $GITHUB_STEP_SUMMARY

          # List tables
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "<details><summary>Tables</summary>" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} \
            -d "${{ env.POSTGRES_DB }}_${{ steps.module.outputs.name }}" -c \
            "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' ORDER BY table_name;" >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          echo "</details>" >> $GITHUB_STEP_SUMMARY

      - name: Upload migration log
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: migration-forward-${{ steps.module.outputs.name }}
          path: |
            **/*.migration.log
          retention-days: 7

  # ===========================================================================
  # ROLLBACK MIGRATION TESTS
  # ===========================================================================

  rollback-migrations:
    name: Rollback Migration
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 30
    needs: [discover, forward-migrations]
    if: |
      needs.discover.outputs.module_count != '0' &&
      (github.event_name == 'schedule' || github.event.inputs.test_rollback == 'true')
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ env.POSTGRES_DB }}
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    strategy:
      fail-fast: false
      matrix:
        module: ${{ fromJson(needs.discover.outputs.modules) }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install EF Core tools
        run: dotnet tool install -g dotnet-ef

      - name: Get module info
        id: module
        run: |
          MODULE_NAME=$(basename $(dirname "${{ matrix.module }}"))
          echo "name=$MODULE_NAME" >> $GITHUB_OUTPUT

          PROJECT_FILE=$(find "${{ matrix.module }}" -maxdepth 1 -name "*.csproj" | head -1)
          echo "project=$PROJECT_FILE" >> $GITHUB_OUTPUT

      - name: Create and migrate database
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
          PGPASSWORD: ${{ env.POSTGRES_PASSWORD }}
        run: |
          # Create database
          psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "DROP DATABASE IF EXISTS ${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};"
          psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "CREATE DATABASE ${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};"

          # Apply all migrations
          dotnet ef database update --project "${{ steps.module.outputs.project }}"

      - name: Get migration list
        id: migrations
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          # Get list of applied migrations (grep -E has no \d; use [0-9])
          MIGRATIONS=$(dotnet ef migrations list --project "${{ steps.module.outputs.project }}" \
            --no-build 2>/dev/null | grep -E "^[0-9]{14}_" | tail -5)

          MIGRATION_COUNT=$(echo "$MIGRATIONS" | wc -l)
          echo "count=$MIGRATION_COUNT" >> $GITHUB_OUTPUT

          if [[ $MIGRATION_COUNT -gt 1 ]]; then
            # Get the second-to-last migration for rollback target
            ROLLBACK_TARGET=$(echo "$MIGRATIONS" | tail -2 | head -1)
            echo "rollback_to=$ROLLBACK_TARGET" >> $GITHUB_OUTPUT
            echo "Will rollback to: $ROLLBACK_TARGET"
          else
            echo "rollback_to=" >> $GITHUB_OUTPUT
            echo "Not enough migrations to test rollback"
          fi

      - name: Test rollback
        if: steps.migrations.outputs.rollback_to != ''
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          echo "Rolling back to: ${{ steps.migrations.outputs.rollback_to }}"
          dotnet ef database update "${{ steps.migrations.outputs.rollback_to }}" \
            --project "${{ steps.module.outputs.project }}"

          echo "Rollback successful!"

      - name: Test re-apply after rollback
        if: steps.migrations.outputs.rollback_to != ''
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_rb_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          echo "Re-applying migrations after rollback..."
          dotnet ef database update --project "${{ steps.module.outputs.project }}"

          echo "Re-apply successful!"

      - name: Report rollback results
        if: always()
        run: |
          echo "## Rollback Test: ${{ steps.module.outputs.name }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [[ -n "${{ steps.migrations.outputs.rollback_to }}" ]]; then
            echo "- Rollback target: ${{ steps.migrations.outputs.rollback_to }}" >> $GITHUB_STEP_SUMMARY
            echo "- Status: Tested" >> $GITHUB_STEP_SUMMARY
          else
            echo "- Status: Skipped (insufficient migrations)" >> $GITHUB_STEP_SUMMARY
          fi

  # ===========================================================================
  # IDEMPOTENCY TESTS
  # ===========================================================================

  idempotency:
    name: Idempotency Test
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 20
    needs: [discover, forward-migrations]
    if: |
      needs.discover.outputs.module_count != '0' &&
      (github.event_name == 'schedule' || github.event.inputs.test_idempotency == 'true')
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ env.POSTGRES_DB }}
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    strategy:
      fail-fast: false
      matrix:
        module: ${{ fromJson(needs.discover.outputs.modules) }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install EF Core tools
        run: dotnet tool install -g dotnet-ef

      - name: Get module info
        id: module
        run: |
          MODULE_NAME=$(basename $(dirname "${{ matrix.module }}"))
          echo "name=$MODULE_NAME" >> $GITHUB_OUTPUT

          PROJECT_FILE=$(find "${{ matrix.module }}" -maxdepth 1 -name "*.csproj" | head -1)
          echo "project=$PROJECT_FILE" >> $GITHUB_OUTPUT

      - name: Setup database
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
          PGPASSWORD: ${{ env.POSTGRES_PASSWORD }}
        run: |
          psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "DROP DATABASE IF EXISTS ${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }};"
          psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} -d postgres \
            -c "CREATE DATABASE ${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }};"

      - name: First migration run
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          dotnet ef database update --project "${{ steps.module.outputs.project }}"

      - name: Get initial schema hash
        id: hash1
        env:
          PGPASSWORD: ${{ env.POSTGRES_PASSWORD }}
        run: |
          SCHEMA_HASH=$(psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} \
            -d "${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }}" -t -c \
            "SELECT md5(string_agg(table_name || column_name || data_type, '' ORDER BY table_name, column_name))
             FROM information_schema.columns WHERE table_schema = 'public';")
          echo "hash=$SCHEMA_HASH" >> $GITHUB_OUTPUT
          echo "Initial schema hash: $SCHEMA_HASH"

      - name: Second migration run (idempotency test)
        env:
          ConnectionStrings__Default: "Host=${{ env.POSTGRES_HOST }};Port=${{ env.POSTGRES_PORT }};Database=${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }};Username=${{ env.POSTGRES_USER }};Password=${{ env.POSTGRES_PASSWORD }}"
        run: |
          # Running migrations again should be a no-op
          dotnet ef database update --project "${{ steps.module.outputs.project }}"

      - name: Get final schema hash
        id: hash2
        env:
          PGPASSWORD: ${{ env.POSTGRES_PASSWORD }}
        run: |
          SCHEMA_HASH=$(psql -h ${{ env.POSTGRES_HOST }} -U ${{ env.POSTGRES_USER }} \
            -d "${{ env.POSTGRES_DB }}_idem_${{ steps.module.outputs.name }}" -t -c \
            "SELECT md5(string_agg(table_name || column_name || data_type, '' ORDER BY table_name, column_name))
             FROM information_schema.columns WHERE table_schema = 'public';")
          echo "hash=$SCHEMA_HASH" >> $GITHUB_OUTPUT
          echo "Final schema hash: $SCHEMA_HASH"

      - name: Verify idempotency
        run: |
          HASH1="${{ steps.hash1.outputs.hash }}"
          HASH2="${{ steps.hash2.outputs.hash }}"

          echo "## Idempotency Test: ${{ steps.module.outputs.name }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- Initial schema hash: $HASH1" >> $GITHUB_STEP_SUMMARY
          echo "- Final schema hash: $HASH2" >> $GITHUB_STEP_SUMMARY

          if [[ "$HASH1" == "$HASH2" ]]; then
            echo "- Result: PASS (schemas identical)" >> $GITHUB_STEP_SUMMARY
          else
            echo "- Result: FAIL (schemas differ)" >> $GITHUB_STEP_SUMMARY
            echo "::error::Idempotency test failed for ${{ steps.module.outputs.name }}"
            exit 1
          fi

  # ===========================================================================
  # SUMMARY
  # ===========================================================================

  summary:
    name: Migration Summary
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [discover, forward-migrations, rollback-migrations, idempotency]
    if: always()
    steps:
      - name: Generate Summary
        run: |
          echo "## Migration Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Test | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|------|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Discovery | ${{ needs.discover.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Forward Migrations | ${{ needs.forward-migrations.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Rollback Migrations | ${{ needs.rollback-migrations.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Idempotency | ${{ needs.idempotency.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Modules Tested: ${{ needs.discover.outputs.module_count }}" >> $GITHUB_STEP_SUMMARY

      - name: Check for failures
        if: contains(needs.*.result, 'failure')
        run: exit 1
@@ -1,75 +0,0 @@
name: Mirror Thin Bundle Sign & Verify

on:
  workflow_dispatch:
  schedule:
    - cron: '0 6 * * *'

jobs:
  mirror-sign:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      MIRROR_SIGN_KEY_B64: ${{ secrets.MIRROR_SIGN_KEY_B64 }}
      REQUIRE_PROD_SIGNING: 1
      OCI: 1
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Fallback to dev signing key when secret is absent (non-prod only)
        run: |
          if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
            echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
            echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
            echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
          fi

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Verify signing prerequisites
        run: scripts/mirror/check_signing_prereqs.sh

      - name: Run mirror signing
        run: |
          scripts/mirror/ci-sign.sh

      - name: Verify signed bundle
        run: |
          scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz

      - name: Prepare Export Center handoff (metadata + optional schedule)
        run: |
          scripts/mirror/export-center-wire.sh
        env:
          EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
          EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
          EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
          EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
          EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
          name: mirror-thin-v1-signed
          path: |
            out/mirror/thin/mirror-thin-v1.tar.gz
            out/mirror/thin/mirror-thin-v1.manifest.json
            out/mirror/thin/mirror-thin-v1.manifest.dsse.json
            out/mirror/thin/tuf/
            out/mirror/thin/oci/
            out/mirror/thin/milestone.json
            out/mirror/thin/export-center/export-center-handoff.json
            out/mirror/thin/export-center/export-center-targets.json
            out/mirror/thin/export-center/schedule-response.json
          if-no-files-found: error
          retention-days: 14

@@ -1,44 +0,0 @@
name: mock-dev-release

on:
  push:
    paths:
      - devops/releases/2025.09-mock-dev.yaml
      - devops/downloads/manifest.json
      - devops/mock-release/**
  workflow_dispatch:

jobs:
  package-mock-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Package mock dev artefacts
        run: |
          set -euo pipefail
          mkdir -p out/mock-release
          cp devops/releases/2025.09-mock-dev.yaml out/mock-release/
          cp devops/downloads/manifest.json out/mock-release/
          tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .

      - name: Compose config (dev + mock overlay)
        run: |
          set -euo pipefail
          devops/mock-release/config_check.sh

      - name: Helm template (mock overlay)
        run: |
          set -euo pipefail
          helm template mock ./devops/helm/stellaops -f devops/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
          ls -lh /tmp/helm-mock.yaml

      - name: Upload mock release bundle
        uses: actions/upload-artifact@v3
        with:
          name: mock-dev-release
          path: |
            out/mock-release/mock-dev-release.tgz
            /tmp/compose-mock-config.yaml
            /tmp/helm-mock.yaml

@@ -1,406 +0,0 @@
# .gitea/workflows/module-publish.yml
# Per-module NuGet and container publishing to Gitea registry
# Sprint: SPRINT_20251226_004_CICD

name: Module Publish

on:
  workflow_dispatch:
    inputs:
      module:
        description: 'Module to publish'
        required: true
        type: choice
        options:
          - Authority
          - Attestor
          - Concelier
          - Scanner
          - Policy
          - Signer
          - Excititor
          - Gateway
          - Scheduler
          - Orchestrator
          - TaskRunner
          - Notify
          - CLI
      version:
        description: 'Semantic version (e.g., 1.2.3)'
        required: true
        type: string
      publish_nuget:
        description: 'Publish NuGet packages'
        type: boolean
        default: true
      publish_container:
        description: 'Publish container image'
        type: boolean
        default: true
      prerelease:
        description: 'Mark as prerelease'
        type: boolean
        default: false
  push:
    tags:
      - 'module-*-v*' # e.g., module-authority-v1.2.3

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  REGISTRY: git.stella-ops.org
  NUGET_SOURCE: https://git.stella-ops.org/api/packages/stella-ops.org/nuget/index.json

jobs:
  # ===========================================================================
  # PARSE TAG (for tag-triggered builds)
  # ===========================================================================

  parse-tag:
    name: Parse Tag
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    if: github.event_name == 'push'
    outputs:
      module: ${{ steps.parse.outputs.module }}
      version: ${{ steps.parse.outputs.version }}
    steps:
      - name: Parse module and version from tag
        id: parse
        run: |
          TAG="${{ github.ref_name }}"
          # Expected format: module-{name}-v{version}
          # Example: module-authority-v1.2.3
          if [[ "$TAG" =~ ^module-([a-zA-Z]+)-v([0-9]+\.[0-9]+\.[0-9]+.*)$ ]]; then
            MODULE="${BASH_REMATCH[1]}"
            VERSION="${BASH_REMATCH[2]}"
            # Capitalize first letter; special-case CLI, which the module list
            # spells fully uppercase (plain capitalization would yield "Cli"
            # and miss the CLI-specific jobs below)
            if [[ "$MODULE" == "cli" ]]; then
              MODULE="CLI"
            else
              MODULE="$(echo "${MODULE:0:1}" | tr '[:lower:]' '[:upper:]')${MODULE:1}"
            fi
            echo "module=$MODULE" >> "$GITHUB_OUTPUT"
            echo "version=$VERSION" >> "$GITHUB_OUTPUT"
            echo "Parsed: module=$MODULE, version=$VERSION"
          else
            echo "::error::Invalid tag format. Expected: module-{name}-v{version}"
            exit 1
          fi

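  # Illustrative tag parses: module-authority-v1.2.3 -> module=Authority, version=1.2.3;
  # module-cli-v2.0.0-rc.1 -> module=CLI, version=2.0.0-rc.1.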
  # ===========================================================================
  # VALIDATE
  # ===========================================================================

  validate:
    name: Validate Inputs
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [parse-tag]
    if: always() && (needs.parse-tag.result == 'success' || needs.parse-tag.result == 'skipped')
    outputs:
      module: ${{ steps.resolve.outputs.module }}
      version: ${{ steps.resolve.outputs.version }}
      publish_nuget: ${{ steps.resolve.outputs.publish_nuget }}
      publish_container: ${{ steps.resolve.outputs.publish_container }}
    steps:
      - name: Resolve inputs
        id: resolve
        run: |
          if [[ "${{ github.event_name }}" == "push" ]]; then
            MODULE="${{ needs.parse-tag.outputs.module }}"
            VERSION="${{ needs.parse-tag.outputs.version }}"
            PUBLISH_NUGET="true"
            PUBLISH_CONTAINER="true"
          else
            MODULE="${{ github.event.inputs.module }}"
            VERSION="${{ github.event.inputs.version }}"
            PUBLISH_NUGET="${{ github.event.inputs.publish_nuget }}"
            PUBLISH_CONTAINER="${{ github.event.inputs.publish_container }}"
          fi

          echo "module=$MODULE" >> "$GITHUB_OUTPUT"
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "publish_nuget=$PUBLISH_NUGET" >> "$GITHUB_OUTPUT"
          echo "publish_container=$PUBLISH_CONTAINER" >> "$GITHUB_OUTPUT"

          echo "=== Resolved Configuration ==="
          echo "Module: $MODULE"
          echo "Version: $VERSION"
          echo "Publish NuGet: $PUBLISH_NUGET"
          echo "Publish Container: $PUBLISH_CONTAINER"

      - name: Validate version format
        run: |
          VERSION="${{ steps.resolve.outputs.version }}"
          if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.]+)?$ ]]; then
            echo "::error::Invalid version format. Expected: MAJOR.MINOR.PATCH[-prerelease]"
            exit 1
          fi

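  # Illustrative version checks against the regex above: 1.2.3 and 1.2.3-rc.1
  # pass; v1.2.3 and 1.2 are rejected.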
  # ===========================================================================
  # PUBLISH NUGET
  # ===========================================================================

  publish-nuget:
    name: Publish NuGet
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate]
    if: needs.validate.outputs.publish_nuget == 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Determine project path
        id: path
        run: |
          MODULE="${{ needs.validate.outputs.module }}"

          # Map module names to project paths
          case "$MODULE" in
            Authority)
              PROJECT="src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj"
              ;;
            Attestor)
              PROJECT="src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj"
              ;;
            Concelier)
              PROJECT="src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj"
              ;;
            Scanner)
              PROJECT="src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj"
              ;;
            Policy)
              PROJECT="src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj"
              ;;
            Signer)
              PROJECT="src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj"
              ;;
            Excititor)
              PROJECT="src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj"
              ;;
            Gateway)
              PROJECT="src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj"
              ;;
            Scheduler)
              PROJECT="src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj"
              ;;
            Orchestrator)
              PROJECT="src/Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj"
              ;;
            TaskRunner)
              PROJECT="src/TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj"
              ;;
            Notify)
              PROJECT="src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj"
              ;;
            CLI)
              PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
              ;;
            *)
              echo "::error::Unknown module: $MODULE"
              exit 1
              ;;
          esac

          echo "project=$PROJECT" >> "$GITHUB_OUTPUT"
          echo "Project path: $PROJECT"

      - name: Restore dependencies
        run: dotnet restore ${{ steps.path.outputs.project }}

      - name: Build
        run: |
          dotnet build ${{ steps.path.outputs.project }} \
            --configuration Release \
            --no-restore \
            -p:Version=${{ needs.validate.outputs.version }}

      - name: Pack NuGet
        run: |
          dotnet pack ${{ steps.path.outputs.project }} \
            --configuration Release \
            --no-build \
            -p:Version=${{ needs.validate.outputs.version }} \
            -p:PackageVersion=${{ needs.validate.outputs.version }} \
            --output out/packages

      - name: Push to Gitea NuGet registry
        run: |
          for nupkg in out/packages/*.nupkg; do
            echo "Pushing: $nupkg"
            dotnet nuget push "$nupkg" \
              --source "${{ env.NUGET_SOURCE }}" \
              --api-key "${{ secrets.GITEA_TOKEN }}" \
              --skip-duplicate
          done

      - name: Upload NuGet artifacts
        uses: actions/upload-artifact@v4
        with:
          name: nuget-${{ needs.validate.outputs.module }}-${{ needs.validate.outputs.version }}
          path: out/packages/*.nupkg
          retention-days: 30

  # ===========================================================================
  # PUBLISH CONTAINER
  # ===========================================================================

  publish-container:
    name: Publish Container
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate]
    if: needs.validate.outputs.publish_container == 'true' && needs.validate.outputs.module != 'CLI'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Determine image name
        id: image
        run: |
          MODULE="${{ needs.validate.outputs.module }}"
          VERSION="${{ needs.validate.outputs.version }}"
          MODULE_LOWER=$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')

          IMAGE="${{ env.REGISTRY }}/stella-ops.org/${MODULE_LOWER}"

          echo "name=$IMAGE" >> "$GITHUB_OUTPUT"
          echo "module_lower=$MODULE_LOWER" >> "$GITHUB_OUTPUT"
          echo "tag_version=${IMAGE}:${VERSION}" >> "$GITHUB_OUTPUT"
          echo "tag_latest=${IMAGE}:latest" >> "$GITHUB_OUTPUT"

          echo "Image: $IMAGE"
          echo "Tags: ${VERSION}, latest"

      - name: Build and push container
        uses: docker/build-push-action@v5
        with:
          context: .
          file: devops/docker/Dockerfile.platform
          # Actions expressions have no "| lower" filter; reuse the lowercase
          # name computed by the image step above
          target: ${{ steps.image.outputs.module_lower }}
          push: true
          tags: |
            ${{ steps.image.outputs.tag_version }}
            ${{ steps.image.outputs.tag_latest }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          labels: |
            org.opencontainers.image.title=StellaOps ${{ needs.validate.outputs.module }}
            org.opencontainers.image.version=${{ needs.validate.outputs.version }}
            org.opencontainers.image.source=https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
            org.opencontainers.image.revision=${{ github.sha }}

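  # Illustrative outcome: module=Authority, version=1.2.3 pushes
  # git.stella-ops.org/stella-ops.org/authority:1.2.3 and :latest.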
  # ===========================================================================
  # PUBLISH CLI BINARIES (multi-platform)
  # ===========================================================================

  publish-cli:
    name: Publish CLI (${{ matrix.runtime }})
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate]
    if: needs.validate.outputs.module == 'CLI'
    strategy:
      matrix:
        runtime:
          - linux-x64
          - linux-arm64
          - win-x64
          - osx-x64
          - osx-arm64
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install cross-compilation tools
        if: matrix.runtime == 'linux-arm64'
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu

      - name: Publish CLI
        run: |
          dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
            --configuration Release \
            --runtime ${{ matrix.runtime }} \
            --self-contained true \
            -p:Version=${{ needs.validate.outputs.version }} \
            -p:PublishSingleFile=true \
            -p:PublishTrimmed=true \
            -p:EnableCompressionInSingleFile=true \
            --output out/cli/${{ matrix.runtime }}

      - name: Create archive
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          RUNTIME="${{ matrix.runtime }}"

          cd out/cli/$RUNTIME
          if [[ "$RUNTIME" == win-* ]]; then
            zip -r ../stellaops-cli-${VERSION}-${RUNTIME}.zip .
          else
            tar -czvf ../stellaops-cli-${VERSION}-${RUNTIME}.tar.gz .
          fi

      - name: Upload CLI artifacts
        uses: actions/upload-artifact@v4
        with:
          name: cli-${{ needs.validate.outputs.version }}-${{ matrix.runtime }}
          path: |
            out/cli/*.zip
            out/cli/*.tar.gz
          retention-days: 30

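  # Illustrative artifacts per runtime (version hypothetical):
  # stellaops-cli-1.2.3-linux-x64.tar.gz, stellaops-cli-1.2.3-win-x64.zip.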
  # ===========================================================================
  # SUMMARY
  # ===========================================================================

  summary:
    name: Publish Summary
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, publish-nuget, publish-container, publish-cli]
    if: always()
    steps:
      - name: Generate Summary
        run: |
          # Lowercase in shell; Actions expressions have no "| lower" filter
          MODULE_LOWER=$(echo "${{ needs.validate.outputs.module }}" | tr '[:upper:]' '[:lower:]')
          echo "## Module Publish Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Module | ${{ needs.validate.outputs.module }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Version | ${{ needs.validate.outputs.version }} |" >> $GITHUB_STEP_SUMMARY
          echo "| NuGet | ${{ needs.publish-nuget.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Container | ${{ needs.publish-container.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| CLI | ${{ needs.publish-cli.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Registry URLs" >> $GITHUB_STEP_SUMMARY
          echo "- NuGet: \`${{ env.NUGET_SOURCE }}\`" >> $GITHUB_STEP_SUMMARY
          echo "- Container: \`${{ env.REGISTRY }}/stella-ops.org/${MODULE_LOWER}\`" >> $GITHUB_STEP_SUMMARY

      - name: Check for failures
        if: contains(needs.*.result, 'failure')
        run: |
          echo "::error::One or more publish jobs failed"
          exit 1

@@ -1,486 +0,0 @@
# .gitea/workflows/nightly-regression.yml
# Nightly Full-Suite Regression Testing
# Sprint: CI/CD Enhancement - Comprehensive Testing
#
# Purpose: Run comprehensive regression tests that are too expensive for PR gating
# - Full test matrix (all categories)
# - Extended integration tests
# - Performance benchmarks with historical comparison
# - Cross-module dependency validation
# - Determinism verification
#
# Schedule: Daily at 2:00 AM UTC (off-peak hours)
#
# Notifications: Slack/Teams on failure

name: Nightly Regression

on:
  schedule:
    - cron: '0 2 * * *' # Daily at 2:00 AM UTC
  workflow_dispatch:
    inputs:
      skip_performance:
        description: 'Skip performance tests'
        type: boolean
        default: false
      skip_determinism:
        description: 'Skip determinism tests'
        type: boolean
        default: false
      notify_on_success:
        description: 'Send notification on success'
        type: boolean
        default: false

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
  TZ: UTC

jobs:
  # ===========================================================================
  # PREPARE NIGHTLY RUN
  # ===========================================================================

  prepare:
    name: Prepare Nightly Run
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    outputs:
      run_id: ${{ steps.metadata.outputs.run_id }}
      run_date: ${{ steps.metadata.outputs.run_date }}
      commit_sha: ${{ steps.metadata.outputs.commit_sha }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Generate run metadata
        id: metadata
        run: |
          RUN_ID="nightly-$(date -u +%Y%m%d-%H%M%S)"
          RUN_DATE=$(date -u +%Y-%m-%d)
          COMMIT_SHA=$(git rev-parse HEAD)

          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
          echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
          echo "commit_sha=$COMMIT_SHA" >> $GITHUB_OUTPUT

          echo "## Nightly Regression Run" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Run ID:** $RUN_ID" >> $GITHUB_STEP_SUMMARY
          echo "- **Date:** $RUN_DATE" >> $GITHUB_STEP_SUMMARY
          echo "- **Commit:** $COMMIT_SHA" >> $GITHUB_STEP_SUMMARY

      - name: Check recent commits
        run: |
          echo "### Recent Commits" >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY
          git log --oneline -10 >> $GITHUB_STEP_SUMMARY
          echo '```' >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # FULL BUILD VERIFICATION
  # ===========================================================================

  build:
    name: Full Build
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 30
    needs: prepare
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: dotnet restore src/StellaOps.sln

      - name: Build solution (Release)
        run: |
          START_TIME=$(date +%s)
          dotnet build src/StellaOps.sln --configuration Release --no-restore
          END_TIME=$(date +%s)
          DURATION=$((END_TIME - START_TIME))
          echo "build_time=$DURATION" >> $GITHUB_ENV
          echo "Build completed in ${DURATION}s"

      - name: Report build metrics
        run: |
          echo "### Build Metrics" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Build Time:** ${{ env.build_time }}s" >> $GITHUB_STEP_SUMMARY
          echo "- **Configuration:** Release" >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # COMPREHENSIVE TEST SUITE
  # ===========================================================================

  test-pr-gating:
    name: PR-Gating Tests
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 45
    needs: build
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: stellaops
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    strategy:
      fail-fast: false
      matrix:
        category:
          - Unit
          - Architecture
          - Contract
          - Integration
          - Security
          - Golden
          - Determinism
          - Regression

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Run ${{ matrix.category }} Tests
        env:
          STELLAOPS_TEST_POSTGRES_CONNECTION: "Host=localhost;Port=5432;Database=stellaops_test;Username=stellaops;Password=stellaops"
        run: |
          chmod +x .gitea/scripts/test/run-test-category.sh
          .gitea/scripts/test/run-test-category.sh "${{ matrix.category }}"

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: nightly-test-${{ matrix.category }}
          path: ./TestResults/${{ matrix.category }}
          retention-days: 30

  test-extended:
    name: Extended Tests
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 60
    needs: build
    if: github.event.inputs.skip_performance != 'true'

    strategy:
      fail-fast: false
      matrix:
        category:
          - Performance
          - Benchmark
          - Resilience
          - Observability

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Run ${{ matrix.category }} Tests
        run: |
          chmod +x .gitea/scripts/test/run-test-category.sh
          .gitea/scripts/test/run-test-category.sh "${{ matrix.category }}"

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: nightly-extended-${{ matrix.category }}
          path: ./TestResults/${{ matrix.category }}
          retention-days: 30

  # ===========================================================================
  # DETERMINISM VERIFICATION
  # ===========================================================================

  determinism:
    name: Determinism Verification
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 45
    needs: build
    if: github.event.inputs.skip_determinism != 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: First build
        run: |
          dotnet build src/StellaOps.sln --configuration Release -o ./build-1
          # Strip the output-directory prefix so the later diff compares
          # content hashes only, not the differing build-1/build-2 paths
          find ./build-1 -name "*.dll" -exec sha256sum {} \; | sed 's|\./build-1/||' | sort > checksums-1.txt

      - name: Clean and rebuild
        run: |
          rm -rf ./build-1
          dotnet clean src/StellaOps.sln
          dotnet build src/StellaOps.sln --configuration Release -o ./build-2
          find ./build-2 -name "*.dll" -exec sha256sum {} \; | sed 's|\./build-2/||' | sort > checksums-2.txt

      - name: Compare builds
        id: compare
        run: |
          echo "### Determinism Check" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if diff checksums-1.txt checksums-2.txt > /dev/null; then
            echo "PASS: Builds are deterministic" >> $GITHUB_STEP_SUMMARY
            echo "deterministic=true" >> $GITHUB_OUTPUT
          else
            echo "FAIL: Builds differ" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "<details><summary>Differences</summary>" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo '```diff' >> $GITHUB_STEP_SUMMARY
            diff checksums-1.txt checksums-2.txt >> $GITHUB_STEP_SUMMARY || true
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "</details>" >> $GITHUB_STEP_SUMMARY
            echo "deterministic=false" >> $GITHUB_OUTPUT
            exit 1
          fi

      - name: Upload checksums
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: nightly-determinism-checksums
          path: checksums-*.txt
          retention-days: 30

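  # Local repro sketch (paths normalized so only content differences surface):
  #   dotnet build src/StellaOps.sln -c Release -o b1
  #   dotnet clean src/StellaOps.sln && dotnet build src/StellaOps.sln -c Release -o b2
  #   (cd b1 && find . -name '*.dll' -exec sha256sum {} \; | sort) > c1.txt
  #   (cd b2 && find . -name '*.dll' -exec sha256sum {} \; | sort) > c2.txt
  #   diff c1.txt c2.txt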
  # ===========================================================================
  # CROSS-MODULE VALIDATION
  # ===========================================================================

  cross-module:
    name: Cross-Module Validation
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 30
    needs: build
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Check for circular dependencies
        run: |
          echo "### Dependency Analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Build dependency graph
          echo "Analyzing project dependencies..."
          for proj in $(find src -name "*.csproj" ! -path "*/bin/*" ! -path "*/obj/*" | head -50); do
            # Extract ProjectReference entries
            refs=$(grep -oP 'ProjectReference Include="\K[^"]+' "$proj" 2>/dev/null || true)
            if [[ -n "$refs" ]]; then
              basename "$proj" >> deps.txt
              echo "$refs" | while read ref; do
                echo " -> $(basename "$ref")" >> deps.txt
              done
            fi
          done

          if [[ -f deps.txt ]]; then
            echo "<details><summary>Project Dependencies (first 50)</summary>" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            head -100 deps.txt >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "</details>" >> $GITHUB_STEP_SUMMARY
          fi

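      # Illustrative deps.txt shape (project names hypothetical):
      #   StellaOps.Scanner.WebService.csproj
      #    -> StellaOps.Scanner.Core.csproj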
      - name: Validate no deprecated APIs
        run: |
          # Check for use of deprecated patterns
          DEPRECATED_COUNT=$(grep -r "Obsolete" src --include="*.cs" | wc -l || echo "0")
          echo "- Obsolete attribute usages: $DEPRECATED_COUNT" >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # CODE COVERAGE REPORT
  # ===========================================================================

  coverage:
    name: Code Coverage
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 45
    needs: build
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: stellaops
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Run tests with coverage
        env:
          STELLAOPS_TEST_POSTGRES_CONNECTION: "Host=localhost;Port=5432;Database=stellaops_test;Username=stellaops;Password=stellaops"
        run: |
          dotnet test src/StellaOps.sln \
            --configuration Release \
            --collect:"XPlat Code Coverage" \
            --results-directory ./TestResults/Coverage \
            --filter "Category=Unit|Category=Integration" \
            --verbosity minimal \
            -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=cobertura

      - name: Install ReportGenerator
        run: dotnet tool install -g dotnet-reportgenerator-globaltool

      - name: Generate coverage report
        run: |
          reportgenerator \
            -reports:"./TestResults/Coverage/**/coverage.cobertura.xml" \
            -targetdir:"./TestResults/CoverageReport" \
            -reporttypes:"Html;MarkdownSummary;Cobertura" \
            || true

      - name: Add coverage to summary
        run: |
          echo "### Code Coverage Report" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [[ -f "./TestResults/CoverageReport/Summary.md" ]]; then
            cat "./TestResults/CoverageReport/Summary.md" >> $GITHUB_STEP_SUMMARY
          else
            echo "Coverage report generation failed or no coverage data collected." >> $GITHUB_STEP_SUMMARY
          fi

      - name: Upload coverage report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: nightly-coverage-report
          path: ./TestResults/CoverageReport
          retention-days: 30

  # ===========================================================================
  # SUMMARY AND NOTIFICATION
  # ===========================================================================

  summary:
    name: Nightly Summary
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs:
      - prepare
      - build
      - test-pr-gating
      - test-extended
      - determinism
      - cross-module
      - coverage
    if: always()
    steps:
      - name: Generate final summary
        run: |
          echo "## Nightly Regression Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Run ID:** ${{ needs.prepare.outputs.run_id }}" >> $GITHUB_STEP_SUMMARY
          echo "**Date:** ${{ needs.prepare.outputs.run_date }}" >> $GITHUB_STEP_SUMMARY
          echo "**Commit:** ${{ needs.prepare.outputs.commit_sha }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Job Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Job | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-----|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Build | ${{ needs.build.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| PR-Gating Tests | ${{ needs.test-pr-gating.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Extended Tests | ${{ needs.test-extended.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Determinism | ${{ needs.determinism.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Cross-Module | ${{ needs.cross-module.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Coverage | ${{ needs.coverage.result }} |" >> $GITHUB_STEP_SUMMARY

      - name: Determine overall status
        id: status
        run: |
          if [[ "${{ needs.build.result }}" == "failure" ]] || \
             [[ "${{ needs.test-pr-gating.result }}" == "failure" ]] || \
             [[ "${{ needs.determinism.result }}" == "failure" ]]; then
            echo "status=failure" >> $GITHUB_OUTPUT
          else
            echo "status=success" >> $GITHUB_OUTPUT
          fi

      # Placeholder for notifications - configure webhook URL in secrets
      - name: Send failure notification
        if: steps.status.outputs.status == 'failure'
        run: |
          echo "::warning::Nightly regression failed - notification would be sent here"
          # Uncomment and configure when webhook is available:
          # curl -X POST "${{ secrets.SLACK_WEBHOOK_URL }}" \
          #   -H "Content-Type: application/json" \
          #   -d '{
          #     "text": "Nightly Regression Failed",
          #     "attachments": [{
          #       "color": "danger",
          #       "fields": [
          #         {"title": "Run ID", "value": "${{ needs.prepare.outputs.run_id }}", "short": true},
          #         {"title": "Commit", "value": "${{ needs.prepare.outputs.commit_sha }}", "short": true}
          #       ]
          #     }]
          #   }'

      - name: Send success notification
        if: steps.status.outputs.status == 'success' && github.event.inputs.notify_on_success == 'true'
        run: |
          echo "::notice::Nightly regression passed"

      - name: Exit with appropriate code
        if: steps.status.outputs.status == 'failure'
        run: exit 1

@@ -1,102 +0,0 @@
name: Notify Smoke Test

on:
  push:
    branches: [main]
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  pull_request:
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.x'

jobs:
  unit-tests:
    name: Notify Unit Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notify/

      - name: Build
        run: dotnet build src/Notify/ --no-restore

      - name: Run tests
        run: dotnet test src/Notify/ --no-build --verbosity normal

  notifier-tests:
    name: Notifier Service Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notifier/

      - name: Build
        run: dotnet build src/Notifier/ --no-restore

      - name: Run tests
        run: dotnet test src/Notifier/ --no-build --verbosity normal

  smoke-test:
    name: Notification Smoke Test
    runs-on: ubuntu-latest
    needs: [unit-tests, notifier-tests]
    services:
      mongodb:
        image: mongo:7.0
        ports:
          - 27017:27017
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Build Notifier
        run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/

      - name: Start service
        run: |
          dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
          sleep 10

      - name: Health check
        run: |
          for i in {1..30}; do
            if curl -s http://localhost:5000/health > /dev/null; then
              echo "Service is healthy"
              exit 0
            fi
            sleep 1
          done
          echo "Service failed to start"
          exit 1

      - name: Test notification endpoint
        run: |
          # Test dry-run notification
          curl -X POST http://localhost:5000/api/v1/notifications/test \
            -H "Content-Type: application/json" \
            -d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
            || echo "Warning: Notification test endpoint not available"
@@ -1,59 +0,0 @@
name: oas-ci
on:
  push:
    paths:
      - "src/Api/**"
      - "scripts/api-*.mjs"
      - "package.json"
      - "package-lock.json"
  pull_request:
    paths:
      - "src/Api/**"
      - "scripts/api-*.mjs"
      - "package.json"
      - "package-lock.json"

jobs:
  oas-validate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "18"

      - name: Install deps
        run: npm install --ignore-scripts --no-progress

      - name: Compose aggregate OpenAPI
        run: npm run api:compose

      - name: Lint (spectral)
        run: npm run api:lint

      - name: Validate examples coverage
        run: npm run api:examples

      - name: Compat diff (previous commit)
        run: |
          set -e
          if git show HEAD~1:src/Api/StellaOps.Api.OpenApi/stella.yaml > /tmp/stella-prev.yaml 2>/dev/null; then
            node scripts/api-compat-diff.mjs /tmp/stella-prev.yaml src/Api/StellaOps.Api.OpenApi/stella.yaml --output text --fail-on-breaking
          else
            echo "[oas-ci] previous stella.yaml not found; skipping"
          fi

      - name: Contract tests
        run: npm run api:compat:test

      - name: Upload aggregate spec
        uses: actions/upload-artifact@v4
        with:
          name: stella-openapi
          path: src/Api/StellaOps.Api.OpenApi/stella.yaml
@@ -1,46 +0,0 @@
name: obs-slo
on:
  workflow_dispatch:
    inputs:
      prom_url:
        description: "Prometheus base URL"
        required: true
        default: "http://localhost:9090"

jobs:
  slo-eval:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Python (telemetry schema checks)
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install telemetry schema deps
        run: python -m pip install --upgrade pip jsonschema

      - name: Run SLO evaluator
        env:
          PROM_URL: ${{ github.event.inputs.prom_url }}
        run: |
          chmod +x scripts/observability/slo-evaluator.sh
          scripts/observability/slo-evaluator.sh

      - name: Telemetry schema/bundle checks
        env:
          TELEMETRY_BUNDLE_SCHEMA: docs/modules/telemetry/schemas/telemetry-bundle.schema.json
        run: |
          chmod +x devops/telemetry/tests/ci-run.sh
          devops/telemetry/tests/ci-run.sh

      - name: Upload SLO results
        uses: actions/upload-artifact@v4
        with:
          name: obs-slo
          path: out/obs-slo/**
@@ -1,37 +0,0 @@
name: obs-stream
on:
  workflow_dispatch:
    inputs:
      nats_url:
        description: "NATS server URL"
        required: false
        default: "nats://localhost:4222"

jobs:
  stream-validate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Install nats CLI
        run: |
          curl -sSL https://github.com/nats-io/natscli/releases/download/v0.1.4/nats-0.1.4-linux-amd64.tar.gz -o /tmp/natscli.tgz
          tar -C /tmp -xzf /tmp/natscli.tgz
          sudo mv /tmp/nats /usr/local/bin/nats

      - name: Validate streaming knobs
        env:
          NATS_URL: ${{ github.event.inputs.nats_url }}
        run: |
          chmod +x scripts/observability/streaming-validate.sh
          scripts/observability/streaming-validate.sh

      - name: Upload stream validation
        uses: actions/upload-artifact@v4
        with:
          name: obs-stream
          path: out/obs-stream/**
@@ -1,122 +0,0 @@
name: Offline E2E Tests

on:
  pull_request:
    paths:
      - 'src/AirGap/**'
      - 'src/Scanner/**'
      - 'src/__Tests/offline/**'
  schedule:
    - cron: '0 4 * * *' # Nightly at 4 AM UTC
  workflow_dispatch:

env:
  STELLAOPS_OFFLINE_MODE: 'true'
  DOTNET_VERSION: '10.0.100'

jobs:
  offline-e2e:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Cache NuGet packages
        uses: actions/cache@v3
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Download offline bundle
        run: |
          # In real scenario, bundle would be pre-built and cached
          # For now, create minimal fixture structure
          mkdir -p ./offline-bundle/{images,feeds,policies,keys,certs,vex}
          echo '{}' > ./offline-bundle/manifest.json

      - name: Build in isolated environment
        run: |
          # Build offline test library
          dotnet build src/__Libraries/StellaOps.Testing.AirGap/StellaOps.Testing.AirGap.csproj

          # Build offline E2E tests
          dotnet build src/__Tests/offline/StellaOps.Offline.E2E.Tests/StellaOps.Offline.E2E.Tests.csproj

      - name: Run offline E2E tests with network isolation
        run: |
          # Set offline bundle path
          export STELLAOPS_OFFLINE_BUNDLE=$(pwd)/offline-bundle

          # Run tests
          dotnet test src/__Tests/offline/StellaOps.Offline.E2E.Tests \
            --logger "trx;LogFileName=offline-e2e.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./results

      - name: Verify no network calls
        if: always()
        run: |
          # Parse test output for any NetworkIsolationViolationException
          if [ -f "./results/offline-e2e.trx" ]; then
            if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
              echo "::error::Tests attempted network calls in offline mode!"
              exit 1
            else
              echo "✅ No network isolation violations detected"
            fi
          fi

      - name: Upload results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: offline-e2e-results
          path: ./results/

  verify-isolation:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: offline-e2e
    if: always()

    steps:
      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: offline-e2e-results
          path: ./results

      - name: Generate summary
        run: |
          echo "## Offline E2E Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [ -f "./results/offline-e2e.trx" ]; then
            # Parse test results
            TOTAL=$(grep -o 'total="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
            PASSED=$(grep -o 'passed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
            FAILED=$(grep -o 'failed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")

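            # The greps above assume the trx <Counters> element, e.g.
            # (values illustrative):
            #   <Counters total="42" passed="41" failed="1" ... />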
echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Total Tests | ${TOTAL} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Passed | ${PASSED} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Failed | ${FAILED} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
|
||||
echo "❌ **Network isolation was violated**" >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "✅ **Network isolation verified - no egress detected**" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
else
|
||||
echo "⚠️ No test results found" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
@@ -1,186 +0,0 @@
name: Parity Tests

# Parity testing workflow: compares StellaOps against competitor scanners
# (Syft, Grype, Trivy) on a standardized fixture set.
#
# Schedule: Nightly at 02:00 UTC; Weekly full run on Sunday 00:00 UTC
# NOT a PR gate - too slow and has external dependencies

on:
  schedule:
    # Nightly at 02:00 UTC (quick fixture set)
    - cron: '0 2 * * *'
    # Weekly on Sunday at 00:00 UTC (full fixture set)
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      fixture_set:
        description: 'Fixture set to use'
        required: false
        default: 'quick'
        type: choice
        options:
          - quick
          - full
      enable_drift_detection:
        description: 'Enable drift detection analysis'
        required: false
        default: 'true'
        type: boolean

env:
  DOTNET_VERSION: '10.0.x'
  SYFT_VERSION: '1.9.0'
  GRYPE_VERSION: '0.79.3'
  TRIVY_VERSION: '0.54.1'
  PARITY_RESULTS_PATH: 'bench/results/parity'

jobs:
  parity-tests:
    name: Competitor Parity Tests
    runs-on: ubuntu-latest
    timeout-minutes: 120

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
          syft version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
          grype version

      - name: Install Trivy
        run: |
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
          trivy --version

      - name: Determine fixture set
        id: fixtures
        run: |
          # Weekly runs use the full fixture set
          if [[ "${{ github.event.schedule }}" == "0 0 * * 0" ]]; then
            echo "fixture_set=full" >> $GITHUB_OUTPUT
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            echo "fixture_set=${{ inputs.fixture_set }}" >> $GITHUB_OUTPUT
          else
            echo "fixture_set=quick" >> $GITHUB_OUTPUT
          fi

      - name: Build parity tests
        run: |
          dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj -c Release

      - name: Run parity tests
        id: parity
        run: |
          mkdir -p ${{ env.PARITY_RESULTS_PATH }}
          RUN_ID=$(date -u +%Y%m%dT%H%M%SZ)
          echo "run_id=${RUN_ID}" >> $GITHUB_OUTPUT

          dotnet test src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=parity-results.trx" \
            --results-directory ${{ env.PARITY_RESULTS_PATH }} \
            -e PARITY_FIXTURE_SET=${{ steps.fixtures.outputs.fixture_set }} \
            -e PARITY_RUN_ID=${RUN_ID} \
            -e PARITY_OUTPUT_PATH=${{ env.PARITY_RESULTS_PATH }} \
            || true # Don't fail workflow on test failures

      - name: Store parity results
        run: |
          # Copy JSON results to time-series storage
          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
            echo "Parity results stored successfully"
            cat ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json | jq .
          else
            echo "Warning: No parity results file found"
          fi

      - name: Run drift detection
        if: ${{ github.event_name != 'workflow_dispatch' || inputs.enable_drift_detection == 'true' }}
        run: |
          # Analyze drift from historical results
          dotnet run --project src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
            --no-build \
            -- analyze-drift \
            --results-path ${{ env.PARITY_RESULTS_PATH }} \
            --threshold 0.05 \
            --trend-days 3 \
            || true

      - name: Upload parity results
        uses: actions/upload-artifact@v4
        with:
          name: parity-results-${{ steps.parity.outputs.run_id }}
          path: ${{ env.PARITY_RESULTS_PATH }}
          retention-days: 90

      - name: Export Prometheus metrics
        if: ${{ env.PROMETHEUS_PUSH_GATEWAY != '' }}
        env:
          PROMETHEUS_PUSH_GATEWAY: ${{ secrets.PROMETHEUS_PUSH_GATEWAY }}
        run: |
          # Push metrics to Prometheus Push Gateway if configured
          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt" ]; then
            curl -X POST \
              -H "Content-Type: text/plain" \
              --data-binary @${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt \
              "${PROMETHEUS_PUSH_GATEWAY}/metrics/job/parity_tests/instance/${{ steps.parity.outputs.run_id }}"
          fi

      - name: Generate comparison report
        run: |
          echo "## Parity Test Results - ${{ steps.parity.outputs.run_id }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Fixture Set:** ${{ steps.fixtures.outputs.fixture_set }}" >> $GITHUB_STEP_SUMMARY
          echo "**Competitor Versions:**" >> $GITHUB_STEP_SUMMARY
          echo "- Syft: ${{ env.SYFT_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "- Grype: ${{ env.GRYPE_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "- Trivy: ${{ env.TRIVY_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
            echo "### Metrics Summary" >> $GITHUB_STEP_SUMMARY
            jq -r '
              "| Metric | StellaOps | Grype | Trivy |",
              "|--------|-----------|-------|-------|",
              "| SBOM Packages | \(.sbomMetrics.stellaOpsPackageCount) | \(.sbomMetrics.syftPackageCount) | - |",
              "| Vulnerability Recall | \(.vulnMetrics.recall | . * 100 | round / 100)% | - | - |",
              "| Vulnerability F1 | \(.vulnMetrics.f1Score | . * 100 | round / 100)% | - | - |",
              "| Latency P95 (ms) | \(.latencyMetrics.stellaOpsP95Ms | round) | \(.latencyMetrics.grypeP95Ms | round) | \(.latencyMetrics.trivyP95Ms | round) |"
            ' ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json >> $GITHUB_STEP_SUMMARY || echo "Could not parse results" >> $GITHUB_STEP_SUMMARY
          fi

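      # The jq summary above assumes a results document shaped like
      # (values illustrative):
      #   { "sbomMetrics": { "stellaOpsPackageCount": 1423, "syftPackageCount": 1398 },
      #     "vulnMetrics": { "recall": 0.97, "f1Score": 0.95 },
      #     "latencyMetrics": { "stellaOpsP95Ms": 4210, "grypeP95Ms": 5030, "trivyP95Ms": 6120 } }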
      - name: Alert on critical drift
        if: failure()
        uses: slackapi/slack-github-action@v1.25.0
        with:
          payload: |
            {
              "text": "⚠️ Parity test drift detected",
              "blocks": [
                {
                  "type": "section",
                  "text": {
                    "type": "mrkdwn",
                    "text": "*Parity Test Alert*\nDrift detected in competitor comparison metrics.\n<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Results>"
                  }
                }
              ]
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
        continue-on-error: true
@@ -1,71 +0,0 @@
name: Policy Lint & Smoke

on:
  pull_request:
    paths:
      - 'docs/policy/**'
      - 'docs/examples/policies/**'
      - 'src/Cli/**'
      - '.gitea/workflows/policy-lint.yml'
  push:
    branches: [ main ]
    paths:
      - 'docs/policy/**'
      - 'docs/examples/policies/**'
      - 'src/Cli/**'
      - '.gitea/workflows/policy-lint.yml'

jobs:
  policy-lint:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.nuget/packages
            .nuget/packages
          key: policy-lint-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Restore CLI
        run: |
          dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj --configfile nuget.config

      - name: Lint policies (deterministic)
        run: |
          mkdir -p out/policy-lint
          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -- \
            policy lint docs/examples/policies/*.stella \
            --format json --no-color \
            > out/policy-lint/lint.json

      - name: Smoke simulate entrypoint
        run: |
          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -- policy simulate --help > out/policy-lint/simulate-help.txt

      - name: Upload lint artifacts
        uses: actions/upload-artifact@v4
        with:
          name: policy-lint
          path: out/policy-lint
          retention-days: 7

@@ -1,90 +0,0 @@
|
||||
name: Policy Simulation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'docs/policy/**'
|
||||
- 'docs/examples/policies/**'
|
||||
- 'scripts/policy/**'
|
||||
- '.gitea/workflows/policy-simulate.yml'
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'docs/policy/**'
|
||||
- 'docs/examples/policies/**'
|
||||
- 'scripts/policy/**'
|
||||
- '.gitea/workflows/policy-simulate.yml'
|
||||
|
||||
jobs:
|
||||
policy-simulate:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
env:
|
||||
DOTNET_NOLOGO: 1
|
||||
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
|
||||
TZ: UTC
|
||||
THRESHOLD: 0
|
||||
steps:
|
||||
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Install Cosign
        uses: sigstore/cosign-installer@v3.4.0

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.nuget/packages
            .nuget/packages
          key: policy-sim-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Restore CLI
        run: |
          dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj --configfile nuget.config

      - name: Generate policy signing key (ephemeral)
        run: |
          OUT_DIR=out/policy-sign/keys PREFIX=ci-policy COSIGN_PASSWORD= scripts/policy/rotate-key.sh

      - name: Sign sample policy blob
        run: |
          export COSIGN_KEY_B64=$(base64 -w0 out/policy-sign/keys/ci-policy-cosign.key)
          COSIGN_PASSWORD= \
            .gitea/scripts/sign/sign-policy.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign

      - name: Attest and verify sample policy blob
        run: |
          export COSIGN_KEY_B64=$(base64 -w0 out/policy-sign/keys/ci-policy-cosign.key)
          COSIGN_PASSWORD= \
            scripts/policy/attest-verify.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign

      - name: Run batch policy simulation
        run: |
          scripts/policy/batch-simulate.sh

      - name: Upload simulation artifacts
        uses: actions/upload-artifact@v4
        with:
          name: policy-simulation
          path: out/policy-sim
          retention-days: 7

      - name: Upload signing artifacts
        uses: actions/upload-artifact@v4
        with:
          name: policy-signing
          path: out/policy-sign
          retention-days: 7

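The signature produced above can be checked offline against the ephemeral key. A minimal sketch, assuming sign-policy.sh writes baseline.stella.sig into the out-dir (the .sig name is an assumption, not pinned by the workflow):

    # Derive the public key from the ephemeral cosign key, then verify the blob.
    COSIGN_PASSWORD= cosign public-key \
      --key out/policy-sign/keys/ci-policy-cosign.key > out/policy-sign/ci-policy.pub
    cosign verify-blob \
      --key out/policy-sign/ci-policy.pub \
      --signature out/policy-sign/baseline.stella.sig \
      docs/examples/policies/baseline.stella
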
@@ -1,210 +0,0 @@
# .gitea/workflows/promote.yml
# Manual promotion workflow to copy staged artefacts to production

name: Promote Feedser (Manual)

on:
  workflow_dispatch:
    inputs:
      include_docs:
        description: 'Also promote the generated documentation bundle'
        required: false
        default: true
        type: boolean
      tag:
        description: 'Optional build identifier to record in the summary'
        required: false
        default: 'latest'
        type: string

jobs:
  promote:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    environment: production
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Resolve staging credentials
        id: staging
        run: |
          missing=()

          host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi

          user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.STAGING_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi

          key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing staging configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/staging_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Resolve production credentials
        id: production
        run: |
          missing=()

          host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi

          user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
          if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH"); fi

          docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi

          key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing production configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/production_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Install rsync
        run: |
          if command -v rsync >/dev/null 2>&1; then
            exit 0
          fi
          CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
          mkdir -p "$CACHE_DIR"
          KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
          DEB_DIR="$CACHE_DIR/$KEY"
          mkdir -p "$DEB_DIR"
          if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
            apt-get update
            apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
          else
            apt-get update
            apt-get download rsync libpopt0
            mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
            dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
          fi

      - name: Fetch staging artefacts
        id: fetch
        run: |
          staging_root="${{ runner.temp }}/staging"
          mkdir -p "$staging_root/service" "$staging_root/docs"

          echo "📥 Copying service bundle from staging"
          rsync -az --delete \
            -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
            "$staging_root/service/"

          if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
            echo "📥 Copying documentation bundle from staging"
            rsync -az --delete \
              -e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
              "${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
              "$staging_root/docs/"
          else
            echo "ℹ️ Documentation promotion skipped"
          fi

          echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
          echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT

      - name: Backup production service content
        run: |
          ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
            "set -e; TARGET='${{ steps.production.outputs.path }}'; \
             if [ -d \"\$TARGET\" ]; then \
               parent=\$(dirname \"\$TARGET\"); \
               base=\$(basename \"\$TARGET\"); \
               backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
               mkdir -p \"\$backup\"; \
               rsync -a --delete \"\$TARGET/\" \"\$backup/\"; \
               ls -dt \"\$parent/\${base}.backup.\"* 2>/dev/null | tail -n +6 | xargs rm -rf || true; \
               echo \"Backup created at \$backup\"; \
             else \
               echo 'Production service path missing; skipping backup'; \
             fi"

      - name: Publish service to production
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['service-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"

      - name: Promote documentation bundle
        if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
        run: |
          rsync -az --delete \
            -e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
            "${{ steps.fetch.outputs['docs-dir'] }}/" \
            "${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"

      - name: Promotion summary
        run: |
          echo "✅ Promotion completed"
          echo " Tag: ${{ github.event.inputs.tag }}"
          echo " Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
          if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
            echo " Docs: included"
          else
            echo " Docs: skipped"
          fi

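Before a real promotion it can help to preview the exact file set rsync would transfer. A minimal sketch using rsync's dry-run flag; the host, user, path, and key file are placeholders for the resolved step outputs:

    # -n (dry run) lists what -az --delete would change without touching anything.
    rsync -azvn --delete \
      -e "ssh -i /tmp/staging_key -o StrictHostKeyChecking=no" \
      "deploy@staging.example.internal:/srv/feedser/" \
      /tmp/staging-preview/
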
@@ -1,24 +0,0 @@
name: provenance-check
on:
  workflow_dispatch: {}

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Emit provenance summary
        run: |
          mkdir -p out/provenance
          echo "run_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" > out/provenance/summary.txt

      - name: Upload provenance summary
        uses: actions/upload-artifact@v4
        with:
          name: provenance-summary
          path: out/provenance/**
@@ -1,307 +0,0 @@
name: Reachability Benchmark

# Sprint: SPRINT_3500_0003_0001
# Task: CORPUS-009 - Create Gitea workflow for reachability benchmark
# Task: CORPUS-010 - Configure nightly + per-PR benchmark runs

on:
  workflow_dispatch:
    inputs:
      baseline_version:
        description: 'Baseline version to compare against'
        required: false
        default: 'latest'
      verbose:
        description: 'Enable verbose output'
        required: false
        type: boolean
        default: false
  push:
    branches: [ main ]
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
      - '.gitea/workflows/reachability-bench.yaml'
  pull_request:
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
  schedule:
    # Nightly at 02:00 UTC
    - cron: '0 2 * * *'

jobs:
  benchmark:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_OFFLINE: 'true'
      STELLAOPS_DETERMINISTIC: 'true'
    outputs:
      precision: ${{ steps.metrics.outputs.precision }}
      recall: ${{ steps.metrics.outputs.recall }}
      f1: ${{ steps.metrics.outputs.f1 }}
      pr_auc: ${{ steps.metrics.outputs.pr_auc }}
      regression: ${{ steps.compare.outputs.regression }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore benchmark project
        run: |
          dotnet restore src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            --configfile nuget.config

      - name: Build benchmark project
        run: |
          dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-restore

      - name: Validate corpus integrity
        run: |
          echo "::group::Validating corpus index"
          if [ ! -f datasets/reachability/corpus.json ]; then
            echo "::error::corpus.json not found"
            exit 1
          fi
          python3 -c "import json; data = json.load(open('datasets/reachability/corpus.json')); print(f'Corpus contains {len(data.get(\"samples\", []))} samples')"
          echo "::endgroup::"

      - name: Run benchmark
        id: benchmark
        run: |
          echo "::group::Running reachability benchmark"
          mkdir -p bench/results

          # Run the corpus benchmark
          dotnet run \
            --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-build \
            -- corpus run \
            --corpus datasets/reachability/corpus.json \
            --output bench/results/benchmark-${{ github.sha }}.json \
            --format json \
            ${{ inputs.verbose && '--verbose' || '' }}

          echo "::endgroup::"

      - name: Extract metrics
        id: metrics
        run: |
          echo "::group::Extracting metrics"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          if [ -f "$RESULT_FILE" ]; then
            PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
            RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
            F1=$(jq -r '.metrics.f1 // 0' "$RESULT_FILE")
            PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

            echo "precision=$PRECISION" >> $GITHUB_OUTPUT
            echo "recall=$RECALL" >> $GITHUB_OUTPUT
            echo "f1=$F1" >> $GITHUB_OUTPUT
            echo "pr_auc=$PR_AUC" >> $GITHUB_OUTPUT

            echo "Precision: $PRECISION"
            echo "Recall: $RECALL"
            echo "F1: $F1"
            echo "PR-AUC: $PR_AUC"
          else
            echo "::error::Benchmark result file not found"
            exit 1
          fi
          echo "::endgroup::"

      - name: Get baseline
        id: baseline
        run: |
          echo "::group::Loading baseline"
          BASELINE_VERSION="${{ inputs.baseline_version || 'latest' }}"

          if [ "$BASELINE_VERSION" = "latest" ]; then
            BASELINE_FILE=$(ls -t bench/baselines/*.json 2>/dev/null | head -1)
          else
            BASELINE_FILE="bench/baselines/$BASELINE_VERSION.json"
          fi

          if [ -f "$BASELINE_FILE" ]; then
            echo "baseline_file=$BASELINE_FILE" >> $GITHUB_OUTPUT
            echo "Using baseline: $BASELINE_FILE"
          else
            echo "::warning::No baseline found, skipping comparison"
            echo "baseline_file=" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Compare to baseline
        id: compare
        if: steps.baseline.outputs.baseline_file != ''
        run: |
          echo "::group::Comparing to baseline"
          BASELINE_FILE="${{ steps.baseline.outputs.baseline_file }}"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          # Extract baseline metrics
          BASELINE_PRECISION=$(jq -r '.metrics.precision // 0' "$BASELINE_FILE")
          BASELINE_RECALL=$(jq -r '.metrics.recall // 0' "$BASELINE_FILE")
          BASELINE_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$BASELINE_FILE")

          # Extract current metrics
          CURRENT_PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
          CURRENT_RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
          CURRENT_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

          # Calculate deltas
          PRECISION_DELTA=$(echo "$CURRENT_PRECISION - $BASELINE_PRECISION" | bc -l)
          RECALL_DELTA=$(echo "$CURRENT_RECALL - $BASELINE_RECALL" | bc -l)
          PR_AUC_DELTA=$(echo "$CURRENT_PR_AUC - $BASELINE_PR_AUC" | bc -l)

          echo "Precision delta: $PRECISION_DELTA"
          echo "Recall delta: $RECALL_DELTA"
          echo "PR-AUC delta: $PR_AUC_DELTA"

          # Check for regression (PR-AUC drop > 2%)
          REGRESSION_THRESHOLD=-0.02
          if (( $(echo "$PR_AUC_DELTA < $REGRESSION_THRESHOLD" | bc -l) )); then
            echo "::error::PR-AUC regression detected: $PR_AUC_DELTA (threshold: $REGRESSION_THRESHOLD)"
            echo "regression=true" >> $GITHUB_OUTPUT
          else
            echo "regression=false" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Generate markdown report
        run: |
          echo "::group::Generating report"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
          REPORT_FILE="bench/results/benchmark-${{ github.sha }}.md"

          cat > "$REPORT_FILE" << EOF
          # Reachability Benchmark Report

          **Commit:** ${{ github.sha }}
          **Run:** ${{ github.run_number }}
          **Date:** $(date -u +"%Y-%m-%dT%H:%M:%SZ")

          ## Metrics

          | Metric | Value |
          |--------|-------|
          | Precision | ${{ steps.metrics.outputs.precision }} |
          | Recall | ${{ steps.metrics.outputs.recall }} |
          | F1 Score | ${{ steps.metrics.outputs.f1 }} |
          | PR-AUC | ${{ steps.metrics.outputs.pr_auc }} |

          ## Comparison

          ${{ steps.compare.outputs.regression == 'true' && '⚠️ **REGRESSION DETECTED**' || '✅ No regression' }}
          EOF

          echo "Report generated: $REPORT_FILE"
          echo "::endgroup::"

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: |
            bench/results/benchmark-${{ github.sha }}.json
            bench/results/benchmark-${{ github.sha }}.md
          retention-days: 90

      - name: Fail on regression
        if: steps.compare.outputs.regression == 'true' && github.event_name == 'pull_request'
        run: |
          echo "::error::Benchmark regression detected. PR-AUC dropped below threshold."
          exit 1

  update-baseline:
    needs: benchmark
    if: (github.event_name == 'push' || github.event_name == 'schedule') && github.ref == 'refs/heads/main' && needs.benchmark.outputs.regression != 'true'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: bench/results/

      - name: Update baseline (nightly only)
        if: github.event_name == 'schedule'
        run: |
          DATE=$(date +%Y%m%d)
          cp bench/results/benchmark-${{ github.sha }}.json bench/baselines/baseline-$DATE.json
          echo "Updated baseline to baseline-$DATE.json"

  notify-pr:
    needs: benchmark
    if: github.event_name == 'pull_request'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      pull-requests: write
    steps:
      - name: Comment on PR
        uses: actions/github-script@v7
        with:
          script: |
            const precision = '${{ needs.benchmark.outputs.precision }}';
            const recall = '${{ needs.benchmark.outputs.recall }}';
            const f1 = '${{ needs.benchmark.outputs.f1 }}';
            const prAuc = '${{ needs.benchmark.outputs.pr_auc }}';
            const regression = '${{ needs.benchmark.outputs.regression }}' === 'true';

            const status = regression ? '⚠️ REGRESSION' : '✅ PASS';

            const body = `## Reachability Benchmark Results ${status}

            | Metric | Value |
            |--------|-------|
            | Precision | ${precision} |
            | Recall | ${recall} |
            | F1 Score | ${f1} |
            | PR-AUC | ${prAuc} |

            ${regression ? '### ⚠️ Regression Detected\nPR-AUC dropped below threshold. Please review changes.' : ''}

            <details>
            <summary>Details</summary>

            - Commit: \`${{ github.sha }}\`
            - Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

            </details>`;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

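The regression gate reduces to a single delta comparison, which is easy to replay on downloaded artifacts. A minimal sketch mirroring the compare step; the file names are placeholders:

    current=$(jq -r '.metrics.pr_auc // 0' benchmark-current.json)
    baseline=$(jq -r '.metrics.pr_auc // 0' baseline.json)
    delta=$(echo "$current - $baseline" | bc -l)
    # Same threshold as the workflow: fail when PR-AUC drops by more than 0.02.
    if (( $(echo "$delta < -0.02" | bc -l) )); then
      echo "regression: PR-AUC delta $delta"
      exit 1
    fi
    echo "ok: PR-AUC delta $delta"
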
@@ -1,268 +0,0 @@
name: Reachability Corpus Validation

on:
  workflow_dispatch:
  push:
    branches: [ main ]
    paths:
      - 'src/__Tests/reachability/corpus/**'
      - 'src/__Tests/reachability/fixtures/**'
      - 'src/__Tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'
  pull_request:
    paths:
      - 'src/__Tests/reachability/corpus/**'
      - 'src/__Tests/reachability/fixtures/**'
      - 'src/__Tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'

jobs:
  validate-corpus:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Verify corpus manifest integrity
        run: |
          echo "Verifying corpus manifest..."
          cd src/__Tests/reachability/corpus
          if [ ! -f manifest.json ]; then
            echo "::error::Corpus manifest.json not found"
            exit 1
          fi
          echo "Manifest exists, checking JSON validity..."
          python3 -c "import json; json.load(open('manifest.json'))"
          echo "Manifest is valid JSON"

      - name: Verify reachbench index integrity
        run: |
          echo "Verifying reachbench fixtures..."
          cd src/__Tests/reachability/fixtures/reachbench-2025-expanded
          if [ ! -f INDEX.json ]; then
            echo "::error::Reachbench INDEX.json not found"
            exit 1
          fi
          echo "INDEX exists, checking JSON validity..."
          python3 -c "import json; json.load(open('INDEX.json'))"
          echo "INDEX is valid JSON"

      - name: Restore test project
        run: dotnet restore src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj --configfile nuget.config

      - name: Build test project
        run: dotnet build src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj -c Release --no-restore

      - name: Run corpus fixture tests
        run: |
          dotnet test src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=corpus-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~CorpusFixtureTests"

      - name: Run reachbench fixture tests
        run: |
          dotnet test src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=reachbench-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~ReachbenchFixtureTests"

      - name: Verify deterministic hashes
        run: |
          echo "Verifying SHA-256 hashes in corpus manifest..."
          chmod +x scripts/reachability/verify_corpus_hashes.sh || true
          if [ -f scripts/reachability/verify_corpus_hashes.sh ]; then
            scripts/reachability/verify_corpus_hashes.sh
          else
            echo "Hash verification script not found, using inline verification..."
            cd src/__Tests/reachability/corpus
            python3 << 'EOF'
          import json
          import hashlib
          import sys
          import os

          with open('manifest.json') as f:
              manifest = json.load(f)

          errors = []
          for entry in manifest:
              case_id = entry['id']
              lang = entry['language']
              case_dir = os.path.join(lang, case_id)
              for filename, expected_hash in entry['files'].items():
                  filepath = os.path.join(case_dir, filename)
                  if not os.path.exists(filepath):
                      errors.append(f"{case_id}: missing {filename}")
                      continue
                  with open(filepath, 'rb') as f:
                      actual_hash = hashlib.sha256(f.read()).hexdigest()
                  if actual_hash != expected_hash:
                      errors.append(f"{case_id}: {filename} hash mismatch (expected {expected_hash}, got {actual_hash})")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print(f"All {len(manifest)} corpus entries verified")
          EOF
          fi

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: corpus-test-results-${{ github.run_number }}
          path: ./TestResults/*.trx
          retention-days: 14

  validate-ground-truths:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate ground-truth schema version
        run: |
          echo "Validating ground-truth files..."
          cd src/__Tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          EXPECTED_SCHEMA = "reachbench.reachgraph.truth/v1"
          ALLOWED_VARIANTS = {"reachable", "unreachable"}
          errors = []

          # Validate corpus ground-truths
          corpus_manifest = 'corpus/manifest.json'
          if os.path.exists(corpus_manifest):
              with open(corpus_manifest) as f:
                  manifest = json.load(f)
              for entry in manifest:
                  case_id = entry['id']
                  lang = entry['language']
                  truth_path = os.path.join('corpus', lang, case_id, 'ground-truth.json')
                  if not os.path.exists(truth_path):
                      errors.append(f"corpus/{case_id}: missing ground-truth.json")
                      continue
                  with open(truth_path) as f:
                      truth = json.load(f)
                  if truth.get('schema_version') != EXPECTED_SCHEMA:
                      errors.append(f"corpus/{case_id}: wrong schema_version")
                  if truth.get('variant') not in ALLOWED_VARIANTS:
                      errors.append(f"corpus/{case_id}: invalid variant '{truth.get('variant')}'")
                  if not isinstance(truth.get('paths'), list):
                      errors.append(f"corpus/{case_id}: paths must be an array")

          # Validate reachbench ground-truths
          reachbench_index = 'fixtures/reachbench-2025-expanded/INDEX.json'
          if os.path.exists(reachbench_index):
              with open(reachbench_index) as f:
                  index = json.load(f)
              for case in index.get('cases', []):
                  case_id = case['id']
                  case_path = case.get('path', os.path.join('cases', case_id))
                  for variant in ['reachable', 'unreachable']:
                      truth_path = os.path.join('fixtures/reachbench-2025-expanded', case_path, 'images', variant, 'reachgraph.truth.json')
                      if not os.path.exists(truth_path):
                          errors.append(f"reachbench/{case_id}/{variant}: missing reachgraph.truth.json")
                          continue
                      with open(truth_path) as f:
                          truth = json.load(f)
                      if not truth.get('schema_version'):
                          errors.append(f"reachbench/{case_id}/{variant}: missing schema_version")
                      if not isinstance(truth.get('paths'), list):
                          errors.append(f"reachbench/{case_id}/{variant}: paths must be an array")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print("All ground-truth files validated successfully")
          EOF

  determinism-check:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      TZ: UTC
    needs: validate-corpus
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Verify JSON determinism (sorted keys, no trailing whitespace)
        run: |
          echo "Checking JSON determinism..."
          cd src/__Tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          def check_json_sorted(filepath):
              """Check if JSON has sorted keys (deterministic)."""
              with open(filepath) as f:
                  content = f.read()
              parsed = json.loads(content)
              reserialized = json.dumps(parsed, sort_keys=True, indent=2)
              # Normalize line endings
              content_normalized = content.replace('\r\n', '\n').strip()
              reserialized_normalized = reserialized.strip()
              return content_normalized == reserialized_normalized

          errors = []
          json_files = []

          # Collect JSON files from corpus
          for root, dirs, files in os.walk('corpus'):
              for f in files:
                  if f.endswith('.json'):
                      json_files.append(os.path.join(root, f))

          # Check determinism
          non_deterministic = []
          for filepath in json_files:
              try:
                  if not check_json_sorted(filepath):
                      non_deterministic.append(filepath)
              except json.JSONDecodeError as e:
                  errors.append(f"{filepath}: invalid JSON - {e}")

          if non_deterministic:
              print(f"::warning::Found {len(non_deterministic)} non-deterministic JSON files (keys not sorted or whitespace differs)")
              for f in non_deterministic[:10]:
                  print(f"  - {f}")
              if len(non_deterministic) > 10:
                  print(f"  ... and {len(non_deterministic) - 10} more")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)

          print(f"Checked {len(json_files)} JSON files")
          EOF

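The inline Python hash check has a compact shell equivalent, assuming the manifest layout it reads (a JSON array of entries with id, language, and a files map of name to sha256):

    cd src/__Tests/reachability/corpus
    # Emit "hash  path" lines in sha256sum's check format and verify in one pass.
    jq -r '.[] | (.language + "/" + .id) as $dir
           | .files | to_entries[]
           | "\(.value)  \($dir)/\(.key)"' manifest.json | sha256sum -c -
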
@@ -1,167 +0,0 @@
name: registry-compatibility
on:
  pull_request:
    paths:
      - 'src/ExportCenter/**'
      - 'src/ReleaseOrchestrator/**/Connectors/Registry/**'
      - 'src/__Tests/**Registry**'
      - 'src/__Libraries/StellaOps.Doctor.Plugins.Integration/**'
  schedule:
    - cron: '0 4 * * 1' # Weekly on Monday at 4 AM UTC
  workflow_dispatch: {}

env:
  DOTNET_NOLOGO: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true

jobs:
  registry-matrix:
    name: Registry ${{ matrix.registry }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        registry: [generic-oci, zot, distribution, harbor]
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: |
          dotnet restore src/__Tests/__Libraries/StellaOps.Infrastructure.Registry.Testing.Tests/StellaOps.Infrastructure.Registry.Testing.Tests.csproj

      - name: Build test project
        run: |
          dotnet build src/__Tests/__Libraries/StellaOps.Infrastructure.Registry.Testing.Tests/StellaOps.Infrastructure.Registry.Testing.Tests.csproj --no-restore

      - name: Run compatibility tests for ${{ matrix.registry }}
        run: |
          dotnet test src/__Tests/__Libraries/StellaOps.Infrastructure.Registry.Testing.Tests/StellaOps.Infrastructure.Registry.Testing.Tests.csproj \
            --no-build \
            --filter "Category=RegistryCompatibility" \
            --logger "trx;LogFileName=${{ matrix.registry }}-results.trx" \
            --results-directory TestResults \
            -- xunit.parallelizeTestCollections=false
        timeout-minutes: 15

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: registry-compat-${{ matrix.registry }}
          path: TestResults/
          retention-days: 30

  compatibility-report:
    name: Generate Compatibility Report
    runs-on: ubuntu-latest
    needs: registry-matrix
    if: always()
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: TestResults
          pattern: registry-compat-*

      - name: Generate compatibility matrix
        run: |
          echo "# Registry Compatibility Matrix" > compatibility-report.md
          echo "" >> compatibility-report.md
          echo "Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ)" >> compatibility-report.md
          echo "" >> compatibility-report.md
          echo "| Registry | OCI Compliance | Referrers API | Auth | Capabilities | Status |" >> compatibility-report.md
          echo "|----------|---------------|---------------|------|--------------|--------|" >> compatibility-report.md

          for registry in generic-oci zot distribution harbor; do
            trx_file="TestResults/registry-compat-${registry}/${registry}-results.trx"
            if [ -f "$trx_file" ]; then
              # Count passed/failed from the trx file (grep -c exits non-zero
              # on zero matches, so use || true instead of a second echo)
              passed=$(grep -c 'outcome="Passed"' "$trx_file" 2>/dev/null || true)
              failed=$(grep -c 'outcome="Failed"' "$trx_file" 2>/dev/null || true)

              if [ "$failed" -eq "0" ]; then
                status="Pass"
              else
                status="Fail ($failed)"
              fi
            else
              status="No results"
            fi

            # Referrers API support
            case $registry in
              generic-oci) referrers="Fallback" ;;
              zot|harbor|distribution) referrers="Native" ;;
            esac

            echo "| $registry | $passed tests | $referrers | Basic | Full | $status |" >> compatibility-report.md
          done

          echo "" >> compatibility-report.md
          echo "## Legend" >> compatibility-report.md
          echo "- **Native**: Full OCI 1.1 referrers API support" >> compatibility-report.md
          echo "- **Fallback**: Uses tag-based discovery (sha256-{digest}.*)" >> compatibility-report.md

          cat compatibility-report.md

      - name: Upload compatibility report
        uses: actions/upload-artifact@v4
        with:
          name: compatibility-report
          path: compatibility-report.md
          retention-days: 90

      - name: Comment on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const report = fs.readFileSync('compatibility-report.md', 'utf8');

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: report
            });

  doctor-checks:
    name: Doctor Registry Checks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Build Doctor plugin tests
        run: |
          dotnet build src/__Tests/__Libraries/StellaOps.Doctor.Plugins.Integration.Tests/StellaOps.Doctor.Plugins.Integration.Tests.csproj

      - name: Run Doctor check tests
        run: |
          dotnet test src/__Tests/__Libraries/StellaOps.Doctor.Plugins.Integration.Tests/StellaOps.Doctor.Plugins.Integration.Tests.csproj \
            --no-build \
            --logger "trx;LogFileName=doctor-registry-results.trx" \
            --results-directory TestResults

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: doctor-registry-checks
          path: TestResults/
          retention-days: 30
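
For ad-hoc debugging the matrix can be pointed at a throwaway local registry instead of CI services. A minimal sketch; the zot image tag and the REGISTRY_BASE_URL variable consumed by the tests are assumptions, not pinned by this workflow:

    # Start a disposable zot registry on localhost:5000, run the suite, clean up.
    docker run -d --rm -p 5000:5000 --name zot-compat ghcr.io/project-zot/zot-linux-amd64:latest
    REGISTRY_BASE_URL=http://localhost:5000 dotnet test \
      src/__Tests/__Libraries/StellaOps.Infrastructure.Registry.Testing.Tests/StellaOps.Infrastructure.Registry.Testing.Tests.csproj \
      --filter "Category=RegistryCompatibility"
    docker stop zot-compat
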
@@ -1,537 +0,0 @@
# .gitea/workflows/release-evidence-pack.yml
# Generates Release Evidence Pack for customer-facing verification
#
# This workflow depends on all test pipelines completing successfully before
# generating the evidence pack to ensure only verified releases are attested.

name: Release Evidence Pack

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Release version (e.g., 1.2.3)"
        required: true
        type: string
      release_tag:
        description: "Git tag for the release"
        required: true
        type: string
      signing_mode:
        description: "Signing mode"
        required: false
        default: "keyless"
        type: choice
        options:
          - keyless
          - key-based
      include_rekor_proofs:
        description: "Include Rekor transparency log proofs"
        required: false
        default: true
        type: boolean

  # Trigger after release workflow completes
  workflow_run:
    workflows: ["Release Bundle"]
    types: [completed]
    branches: [main]

env:
  DOTNET_VERSION: "10.0.100"
  EVIDENCE_PACK_DIR: ${{ github.workspace }}/evidence-pack

jobs:
  # ============================================================================
  # Gate: Ensure all test pipelines have passed
  # ============================================================================
  verify-test-gates:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    if: >-
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
    outputs:
      tests_passed: ${{ steps.check-tests.outputs.passed }}
      release_version: ${{ steps.meta.outputs.version }}
      release_tag: ${{ steps.meta.outputs.tag }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.release_tag || github.event.workflow_run.head_sha }}
          fetch-depth: 0

      - name: Determine release metadata
        id: meta
        run: |
          set -euo pipefail
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            VERSION="${{ github.event.inputs.version }}"
            TAG="${{ github.event.inputs.release_tag }}"
          else
            # Extract from workflow_run
            TAG="${{ github.event.workflow_run.head_branch }}"
            VERSION="${TAG#v}"
          fi
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "tag=$TAG" >> "$GITHUB_OUTPUT"
          echo "sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"

      - name: Verify test workflows have passed
        id: check-tests
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -euo pipefail
          SHA="${{ steps.meta.outputs.sha || github.sha }}"

          echo "Checking test status for commit: $SHA"

          # Required workflows that must pass
          REQUIRED_WORKFLOWS=(
            "Build Test Deploy"
            "test-matrix"
            "integration-tests-gate"
            "security-testing"
            "determinism-gate"
          )

          FAILED=()
          PENDING=()

          for workflow in "${REQUIRED_WORKFLOWS[@]}"; do
            echo "Checking workflow: $workflow"

            # Look up the workflow id by name
            WORKFLOW_ID=$(gh api \
              "/repos/${{ github.repository }}/actions/workflows" \
              --jq ".workflows[] | select(.name == \"$workflow\") | .id" 2>/dev/null || echo "")

            if [ -z "$WORKFLOW_ID" ]; then
              echo " Warning: Workflow '$workflow' not found, skipping..."
              continue
            fi

            # Get the latest run of that workflow for this commit
            RUN_STATUS=$(gh api \
              "/repos/${{ github.repository }}/actions/workflows/$WORKFLOW_ID/runs?head_sha=$SHA&per_page=1" \
              --jq '.workflow_runs[0].conclusion // .workflow_runs[0].status' 2>/dev/null || echo "not_found")

echo " Status: $RUN_STATUS"
|
||||
|
||||
case "$RUN_STATUS" in
|
||||
success|skipped)
|
||||
echo " ✓ Passed"
|
||||
;;
|
||||
in_progress|queued|waiting|pending)
|
||||
PENDING+=("$workflow")
|
||||
;;
|
||||
not_found)
|
||||
echo " ⚠ No run found for this commit"
|
||||
;;
|
||||
*)
|
||||
FAILED+=("$workflow ($RUN_STATUS)")
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ ${#FAILED[@]} -gt 0 ]; then
|
||||
echo "::error::The following required workflows have not passed: ${FAILED[*]}"
|
||||
echo "passed=false" >> "$GITHUB_OUTPUT"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ${#PENDING[@]} -gt 0 ]; then
|
||||
echo "::warning::The following workflows are still running: ${PENDING[*]}"
|
||||
echo "::warning::Consider waiting for them to complete before generating evidence pack."
|
||||
fi
|
||||
|
||||
echo "✓ All required test workflows have passed"
|
||||
echo "passed=true" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# ============================================================================
|
||||
# Build Evidence Pack
|
||||
# ============================================================================
|
||||
build-evidence-pack:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
needs: verify-test-gates
|
||||
if: needs.verify-test-gates.outputs.tests_passed == 'true'
|
||||
permissions:
|
||||
contents: write
|
||||
id-token: write # For keyless signing
|
||||
packages: read
|
||||
env:
|
||||
VERSION: ${{ needs.verify-test-gates.outputs.release_version }}
|
||||
TAG: ${{ needs.verify-test-gates.outputs.release_tag }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ env.TAG }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@v3.4.0
|
||||
|
||||
- name: Install Syft
|
||||
run: |
|
||||
set -euo pipefail
|
||||
SYFT_VERSION="v1.21.0"
|
||||
curl -fsSL "https://github.com/anchore/syft/releases/download/${SYFT_VERSION}/syft_${SYFT_VERSION#v}_linux_amd64.tar.gz" -o /tmp/syft.tgz
|
||||
tar -xzf /tmp/syft.tgz -C /tmp
|
||||
sudo install -m 0755 /tmp/syft /usr/local/bin/syft
|
||||
|
||||
- name: Install rekor-cli
|
||||
run: |
|
||||
set -euo pipefail
|
||||
REKOR_VERSION="v1.3.6"
|
||||
curl -fsSL "https://github.com/sigstore/rekor/releases/download/${REKOR_VERSION}/rekor-cli-linux-amd64" -o /tmp/rekor-cli
|
||||
sudo install -m 0755 /tmp/rekor-cli /usr/local/bin/rekor-cli
|
||||
|
||||
- name: Download release artifacts
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p artifacts/
|
||||
gh release download "$TAG" -D artifacts/ || {
|
||||
echo "::warning::Could not download release artifacts. Using build artifacts instead."
|
||||
# Fallback: download from workflow artifacts
|
||||
gh run download --name "stellaops-release-$VERSION" -D artifacts/ || true
|
||||
}
|
||||
|
||||
ls -la artifacts/
|
||||
|
||||
- name: Compute SOURCE_DATE_EPOCH
|
||||
id: epoch
|
||||
run: |
|
||||
set -euo pipefail
|
||||
EPOCH=$(git show -s --format=%ct HEAD)
|
||||
echo "epoch=$EPOCH" >> "$GITHUB_OUTPUT"
|
||||
echo "SOURCE_DATE_EPOCH=$EPOCH"
|
||||
|
||||
- name: Generate checksums
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p checksums/
|
||||
|
||||
cd artifacts/
|
||||
sha256sum * 2>/dev/null | grep -v '\.sig$' | grep -v '\.cert$' > ../checksums/SHA256SUMS || true
|
||||
sha512sum * 2>/dev/null | grep -v '\.sig$' | grep -v '\.cert$' > ../checksums/SHA512SUMS || true
|
||||
cd ..
|
||||
|
||||
echo "Generated checksums:"
|
||||
cat checksums/SHA256SUMS
|
||||
|
||||
- name: Sign checksums
|
||||
env:
|
||||
COSIGN_EXPERIMENTAL: "1"
|
||||
COSIGN_KEY_REF: ${{ secrets.COSIGN_KEY_REF }}
|
||||
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
SIGN_ARGS=(--yes)
|
||||
if [ "${{ github.event.inputs.signing_mode || 'keyless' }}" = "key-based" ] && [ -n "${COSIGN_KEY_REF:-}" ]; then
|
||||
SIGN_ARGS+=(--key "$COSIGN_KEY_REF")
|
||||
fi
|
||||
|
||||
cosign sign-blob "${SIGN_ARGS[@]}" \
|
||||
--output-signature checksums/SHA256SUMS.sig \
|
||||
--output-certificate checksums/SHA256SUMS.cert \
|
||||
checksums/SHA256SUMS
|
||||
|
||||
cosign sign-blob "${SIGN_ARGS[@]}" \
|
||||
--output-signature checksums/SHA512SUMS.sig \
|
||||
--output-certificate checksums/SHA512SUMS.cert \
|
||||
checksums/SHA512SUMS
|
||||
|
||||
echo "✓ Checksums signed"
|
||||
|
||||
- name: Generate SBOMs
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p sbom/
|
||||
|
||||
for artifact in artifacts/stella-*.tar.gz artifacts/stella-*.zip; do
|
||||
[ -f "$artifact" ] || continue
|
||||
|
||||
base=$(basename "$artifact" | sed 's/\.\(tar\.gz\|zip\)$//')
|
||||
echo "Generating SBOM for: $base"
|
||||
|
||||
syft "$artifact" -o cyclonedx-json > "sbom/${base}.cdx.json"
|
||||
done
|
||||
|
||||
# Sign SBOMs
|
||||
for sbom in sbom/*.cdx.json; do
|
||||
[ -f "$sbom" ] || continue
|
||||
SIGN_ARGS=(--yes)
|
||||
if [ "${{ github.event.inputs.signing_mode || 'keyless' }}" = "key-based" ] && [ -n "${COSIGN_KEY_REF:-}" ]; then
|
||||
SIGN_ARGS+=(--key "$COSIGN_KEY_REF")
|
||||
fi
|
||||
cosign sign-blob "${SIGN_ARGS[@]}" \
|
||||
--output-signature "${sbom}.sig" \
|
||||
--output-certificate "${sbom}.cert" \
|
||||
"$sbom"
|
||||
done
|
||||
|
||||
echo "✓ SBOMs generated and signed"
|
||||
|
||||
- name: Generate SLSA provenance
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p provenance/
|
||||
|
||||
SOURCE_EPOCH="${{ steps.epoch.outputs.epoch }}"
|
||||
GIT_SHA="${{ github.sha }}"
|
||||
BUILD_TIME=$(date -u -d "@$SOURCE_EPOCH" +"%Y-%m-%dT%H:%M:%SZ")
|
||||
|
||||
# Generate SLSA v1.0 provenance for each artifact
|
||||
for artifact in artifacts/stella-*.tar.gz artifacts/stella-*.zip; do
|
||||
[ -f "$artifact" ] || continue
|
||||
|
||||
base=$(basename "$artifact" | sed 's/\.\(tar\.gz\|zip\)$//')
|
||||
ARTIFACT_SHA256=$(sha256sum "$artifact" | awk '{print $1}')
|
||||
|
||||
cat > "provenance/${base}.slsa.intoto.jsonl" <<EOF
|
||||
{
|
||||
"_type": "https://in-toto.io/Statement/v1",
|
||||
"subject": [
|
||||
{
|
||||
"name": "$(basename "$artifact")",
|
||||
"digest": {
|
||||
"sha256": "$ARTIFACT_SHA256"
|
||||
}
|
||||
}
|
||||
],
|
||||
"predicateType": "https://slsa.dev/provenance/v1",
|
||||
"predicate": {
|
||||
"buildDefinition": {
|
||||
"buildType": "https://stella-ops.io/ReleaseBuilder/v1",
|
||||
"externalParameters": {
|
||||
"version": "$VERSION",
|
||||
"target": "$base"
|
||||
},
|
||||
"internalParameters": {},
|
||||
"resolvedDependencies": [
|
||||
{
|
||||
"uri": "git+https://git.stella-ops.org/stella-ops.org/git.stella-ops.org@$TAG",
|
||||
"digest": {
|
||||
"gitCommit": "$GIT_SHA"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"runDetails": {
|
||||
"builder": {
|
||||
"id": "https://ci.stella-ops.org/builder/v1",
|
||||
"version": {
|
||||
"ci": "${{ github.run_id }}"
|
||||
}
|
||||
},
|
||||
"metadata": {
|
||||
"invocationId": "${{ github.run_id }}/${{ github.run_attempt }}",
|
||||
"startedOn": "$BUILD_TIME",
|
||||
"finishedOn": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
||||
},
|
||||
"byproducts": []
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
# Sign provenance
|
||||
SIGN_ARGS=(--yes)
|
||||
if [ "${{ github.event.inputs.signing_mode || 'keyless' }}" = "key-based" ] && [ -n "${COSIGN_KEY_REF:-}" ]; then
|
||||
SIGN_ARGS+=(--key "$COSIGN_KEY_REF")
|
||||
fi
|
||||
cosign sign-blob "${SIGN_ARGS[@]}" \
|
||||
--output-signature "provenance/${base}.slsa.intoto.jsonl.sig" \
|
||||
--output-certificate "provenance/${base}.slsa.intoto.jsonl.cert" \
|
||||
"provenance/${base}.slsa.intoto.jsonl"
|
||||
done
|
||||
|
||||
echo "✓ SLSA provenance generated and signed"
|
||||
|
||||
- name: Collect Rekor proofs
|
||||
if: github.event.inputs.include_rekor_proofs != 'false'
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p rekor-proofs/log-entries/
|
||||
|
||||
# Collect Rekor entries for signed artifacts
|
||||
for sig in artifacts/*.sig checksums/*.sig sbom/*.sig provenance/*.sig; do
|
||||
[ -f "$sig" ] || continue
|
||||
|
||||
artifact="${sig%.sig}"
|
||||
[ -f "$artifact" ] || continue
|
||||
|
||||
echo "Looking up Rekor entry for: $artifact"
|
||||
|
||||
# Search Rekor for this artifact
|
||||
ENTRY=$(rekor-cli search --artifact "$artifact" 2>/dev/null | head -1 || echo "")
|
||||
|
||||
if [ -n "$ENTRY" ]; then
|
||||
UUID=$(basename "$ENTRY")
|
||||
echo " Found entry: $UUID"
|
||||
|
||||
# Get the full entry
|
||||
rekor-cli get --uuid "$UUID" --format json > "rekor-proofs/log-entries/${UUID}.json" 2>/dev/null || true
|
||||
fi
|
||||
done
|
||||
|
||||
# Get current checkpoint
|
||||
rekor-cli loginfo --format json > rekor-proofs/checkpoint.json 2>/dev/null || true
|
||||
|
||||
echo "✓ Rekor proofs collected"
|
||||
|
||||
- name: Extract signing key fingerprint
|
||||
id: key-fingerprint
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract fingerprint from certificate or key
|
||||
if [ -f checksums/SHA256SUMS.cert ]; then
|
||||
FINGERPRINT=$(openssl x509 -in checksums/SHA256SUMS.cert -noout -fingerprint -sha256 2>/dev/null | cut -d= -f2 | tr -d ':' | tr '[:upper:]' '[:lower:]')
|
||||
elif [ -n "${COSIGN_KEY_REF:-}" ]; then
|
||||
FINGERPRINT="key-based-signing"
|
||||
else
|
||||
FINGERPRINT="keyless-fulcio"
|
||||
fi
|
||||
|
||||
echo "fingerprint=$FINGERPRINT" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build evidence pack using .NET tool
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Build the EvidencePack library
|
||||
dotnet build src/Attestor/__Libraries/StellaOps.Attestor.EvidencePack/StellaOps.Attestor.EvidencePack.csproj \
|
||||
--configuration Release
|
||||
|
||||
# Create evidence pack structure manually for now
|
||||
# (CLI tool would be: dotnet run --project src/Attestor/.../EvidencePack.Cli build-pack ...)
|
||||
|
||||
PACK_DIR="evidence-pack/stella-release-${VERSION}-evidence-pack"
|
||||
mkdir -p "$PACK_DIR"/{artifacts,checksums,sbom,provenance,attestations,rekor-proofs/log-entries}
|
||||
|
||||
# Copy files
|
||||
cp -r artifacts/* "$PACK_DIR/artifacts/" 2>/dev/null || true
|
||||
cp -r checksums/* "$PACK_DIR/checksums/" 2>/dev/null || true
|
||||
cp -r sbom/* "$PACK_DIR/sbom/" 2>/dev/null || true
|
||||
cp -r provenance/* "$PACK_DIR/provenance/" 2>/dev/null || true
|
||||
cp -r rekor-proofs/* "$PACK_DIR/rekor-proofs/" 2>/dev/null || true
|
||||
|
||||
# Copy signing public key
|
||||
if [ -f checksums/SHA256SUMS.cert ]; then
|
||||
# Extract public key from certificate
|
||||
openssl x509 -in checksums/SHA256SUMS.cert -pubkey -noout > "$PACK_DIR/cosign.pub"
|
||||
elif [ -n "${COSIGN_PUBLIC_KEY:-}" ]; then
|
||||
echo "$COSIGN_PUBLIC_KEY" > "$PACK_DIR/cosign.pub"
|
||||
fi
|
||||
|
||||
# Generate manifest.json
|
||||
cat > "$PACK_DIR/manifest.json" <<EOF
|
||||
{
|
||||
"bundleFormatVersion": "1.0.0",
|
||||
"releaseVersion": "$VERSION",
|
||||
"createdAt": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
|
||||
"sourceCommit": "${{ github.sha }}",
|
||||
"sourceDateEpoch": ${{ steps.epoch.outputs.epoch }},
|
||||
"signingKeyFingerprint": "${{ steps.key-fingerprint.outputs.fingerprint }}"
|
||||
}
|
||||
EOF
|
||||
|
||||
# Copy verification scripts from templates
|
||||
cp src/Attestor/__Libraries/StellaOps.Attestor.EvidencePack/Templates/verify.sh.template "$PACK_DIR/verify.sh"
|
||||
cp src/Attestor/__Libraries/StellaOps.Attestor.EvidencePack/Templates/verify.ps1.template "$PACK_DIR/verify.ps1"
|
||||
chmod +x "$PACK_DIR/verify.sh"
|
||||
|
||||
# Generate VERIFY.md
|
||||
sed -e "s/{{VERSION}}/$VERSION/g" \
|
||||
-e "s/{{SOURCE_COMMIT}}/${{ github.sha }}/g" \
|
||||
-e "s/{{SOURCE_DATE_EPOCH}}/${{ steps.epoch.outputs.epoch }}/g" \
|
||||
-e "s/{{KEY_FINGERPRINT}}/${{ steps.key-fingerprint.outputs.fingerprint }}/g" \
|
||||
-e "s/{{TIMESTAMP}}/$(date -u +"%Y-%m-%dT%H:%M:%SZ")/g" \
|
||||
-e "s/{{BUNDLE_VERSION}}/1.0.0/g" \
|
||||
-e "s/{{REKOR_LOG_ID}}/sigstore/g" \
|
||||
-e "s/{{REKOR_ENTRIES}}/See rekor-proofs\/ directory/g" \
|
||||
src/Attestor/__Libraries/StellaOps.Attestor.EvidencePack/Templates/VERIFY.md.template \
|
||||
> "$PACK_DIR/VERIFY.md"
|
||||
|
||||
echo "✓ Evidence pack built"
|
||||
ls -la "$PACK_DIR/"
|
||||
|
||||
- name: Self-verify evidence pack
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
cd "evidence-pack/stella-release-${VERSION}-evidence-pack"
|
||||
|
||||
echo "Running self-verification..."
|
||||
./verify.sh --verbose || {
|
||||
echo "::warning::Self-verification had issues (may be expected if artifacts not fully present)"
|
||||
}
|
||||
|
||||
- name: Create archives
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
cd evidence-pack
|
||||
|
||||
# Create tar.gz
|
||||
tar -czvf "stella-release-${VERSION}-evidence-pack.tgz" "stella-release-${VERSION}-evidence-pack"
|
||||
|
||||
# Create zip
|
||||
zip -r "stella-release-${VERSION}-evidence-pack.zip" "stella-release-${VERSION}-evidence-pack"
|
||||
|
||||
echo "✓ Archives created"
|
||||
ls -la *.tgz *.zip
|
||||
|
||||
- name: Upload evidence pack artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: evidence-pack-${{ env.VERSION }}
|
||||
path: |
|
||||
evidence-pack/*.tgz
|
||||
evidence-pack/*.zip
|
||||
if-no-files-found: error
|
||||
retention-days: 90
|
||||
|
||||
- name: Attach to GitHub release
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
gh release upload "$TAG" \
|
||||
"evidence-pack/stella-release-${VERSION}-evidence-pack.tgz" \
|
||||
"evidence-pack/stella-release-${VERSION}-evidence-pack.zip" \
|
||||
--clobber || echo "::warning::Could not attach to release"
|
||||
|
||||
echo "✓ Evidence pack attached to release $TAG"
|
||||
|
||||
# ============================================================================
|
||||
# Notify on completion
|
||||
# ============================================================================
|
||||
notify:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
needs: [verify-test-gates, build-evidence-pack]
|
||||
if: always()
|
||||
steps:
|
||||
- name: Report status
|
||||
run: |
|
||||
if [ "${{ needs.build-evidence-pack.result }}" = "success" ]; then
|
||||
echo "✅ Evidence pack generated successfully for version ${{ needs.verify-test-gates.outputs.release_version }}"
|
||||
elif [ "${{ needs.verify-test-gates.result }}" = "failure" ]; then
|
||||
echo "❌ Evidence pack generation blocked: test gates not passed"
|
||||
else
|
||||
echo "⚠️ Evidence pack generation failed or skipped"
|
||||
fi
|
||||
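
On the customer side, a pack can be spot-checked with nothing but cosign and sha256sum. A minimal sketch for keyless mode; the identity and issuer patterns are placeholders that a real consumer should pin to the publishing CI identity:

    cd stella-release-1.2.3-evidence-pack
    cosign verify-blob \
      --certificate checksums/SHA256SUMS.cert \
      --signature checksums/SHA256SUMS.sig \
      --certificate-identity-regexp '.*' \
      --certificate-oidc-issuer-regexp '.*' \
      checksums/SHA256SUMS
    # With the checksum file trusted, verify every artifact against it.
    (cd artifacts && sha256sum -c ../checksums/SHA256SUMS)
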
@@ -1,400 +0,0 @@
# .gitea/workflows/release-keyless-sign.yml
# Keyless signing for StellaOps release artifacts
#
# This workflow signs release artifacts using keyless signing (Fulcio).
# It demonstrates dogfooding of the keyless signing feature.
#
# Triggers:
# - After release bundle is published
# - Manual trigger for re-signing
#
# Artifacts signed:
# - Container images
# - CLI binaries
# - SBOM documents
# - Release manifest

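# For a local spot check after signing, the same verification the
# verify-signatures job below runs can be reused (the digest and version
# values here are illustrative):
#
#   stella attest verify \
#     --artifact "sha256:..." \
#     --certificate-identity "stella-ops.org/git.stella-ops.org:ref:refs/tags/v<version>" \
#     --certificate-oidc-issuer "https://git.stella-ops.org" \
#     --require-rekor
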
name: Release Keyless Signing

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      version:
        description: 'Release version to sign (e.g., 2025.12.0)'
        required: true
        type: string
      dry_run:
        description: 'Dry run (skip actual signing)'
        required: false
        default: false
        type: boolean

env:
  STELLAOPS_URL: "https://api.stella-ops.internal"
  REGISTRY: registry.stella-ops.org

jobs:
  sign-images:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      id-token: write
      contents: read
      packages: write

    outputs:
      scanner-attestation: ${{ steps.sign-scanner.outputs.attestation-digest }}
      cli-attestation: ${{ steps.sign-cli.outputs.attestation-digest }}
      gateway-attestation: ${{ steps.sign-gateway.outputs.attestation-digest }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Determine Version
        id: version
        run: |
          if [[ -n "${{ github.event.inputs.version }}" ]]; then
            VERSION="${{ github.event.inputs.version }}"
          else
            VERSION="${{ github.event.release.tag_name }}"
            VERSION="${VERSION#v}"
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
          echo "Release version: ${VERSION}"

      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Log in to Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}

      - name: Get OIDC Token
        id: oidc
        run: |
          OIDC_TOKEN="${ACTIONS_ID_TOKEN}"
          if [[ -z "$OIDC_TOKEN" ]]; then
            echo "::error::OIDC token not available"
            exit 1
          fi
          echo "::add-mask::${OIDC_TOKEN}"
          echo "token=${OIDC_TOKEN}" >> $GITHUB_OUTPUT

      - name: Sign Scanner Image
        id: sign-scanner
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          IMAGE="${REGISTRY}/stellaops/scanner:${VERSION}"

          echo "Signing scanner image: ${IMAGE}"
          DIGEST=$(docker manifest inspect "${IMAGE}" -v | jq -r '.Descriptor.digest')

          RESULT=$(stella attest sign \
            --keyless \
            --artifact "${DIGEST}" \
            --type image \
            --rekor \
            --output json)

          ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
          REKOR=$(echo "$RESULT" | jq -r '.rekorUuid')

          echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          echo "rekor-uuid=${REKOR}" >> $GITHUB_OUTPUT

          # Push attestation to registry
          stella attest push \
            --attestation "${ATTESTATION}" \
            --registry "stellaops/scanner"

      - name: Sign CLI Image
        id: sign-cli
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          IMAGE="${REGISTRY}/stellaops/cli:${VERSION}"

          echo "Signing CLI image: ${IMAGE}"
          DIGEST=$(docker manifest inspect "${IMAGE}" -v | jq -r '.Descriptor.digest')

          RESULT=$(stella attest sign \
            --keyless \
            --artifact "${DIGEST}" \
            --type image \
            --rekor \
            --output json)

          ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
          echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT

          stella attest push \
            --attestation "${ATTESTATION}" \
            --registry "stellaops/cli"

      - name: Sign Gateway Image
        id: sign-gateway
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          IMAGE="${REGISTRY}/stellaops/gateway:${VERSION}"

          echo "Signing gateway image: ${IMAGE}"
          DIGEST=$(docker manifest inspect "${IMAGE}" -v | jq -r '.Descriptor.digest')

          RESULT=$(stella attest sign \
            --keyless \
            --artifact "${DIGEST}" \
            --type image \
            --rekor \
            --output json)

          ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
          echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT

          stella attest push \
            --attestation "${ATTESTATION}" \
            --registry "stellaops/gateway"

  sign-binaries:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      id-token: write
      contents: read

    outputs:
      cli-linux-x64: ${{ steps.sign-cli-linux-x64.outputs.attestation-digest }}
      cli-linux-arm64: ${{ steps.sign-cli-linux-arm64.outputs.attestation-digest }}
      cli-darwin-x64: ${{ steps.sign-cli-darwin-x64.outputs.attestation-digest }}
      cli-darwin-arm64: ${{ steps.sign-cli-darwin-arm64.outputs.attestation-digest }}
      cli-windows-x64: ${{ steps.sign-cli-windows-x64.outputs.attestation-digest }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Determine Version
        id: version
        run: |
          if [[ -n "${{ github.event.inputs.version }}" ]]; then
            VERSION="${{ github.event.inputs.version }}"
          else
            VERSION="${{ github.event.release.tag_name }}"
            VERSION="${VERSION#v}"
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT

      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Download Release Artifacts
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          mkdir -p artifacts

          # Download CLI binaries
          gh release download "v${VERSION}" \
            --pattern "stellaops-cli-*" \
            --dir artifacts \
            || echo "No CLI binaries found"

      - name: Get OIDC Token
        id: oidc
        run: |
          OIDC_TOKEN="${ACTIONS_ID_TOKEN}"
          echo "::add-mask::${OIDC_TOKEN}"
          echo "token=${OIDC_TOKEN}" >> $GITHUB_OUTPUT

      - name: Sign CLI Binary (linux-x64)
        id: sign-cli-linux-x64
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          BINARY="artifacts/stellaops-cli-linux-x64"
          if [[ -f "$BINARY" ]]; then
            DIGEST="sha256:$(sha256sum "$BINARY" | cut -d' ' -f1)"

            RESULT=$(stella attest sign \
              --keyless \
              --artifact "${DIGEST}" \
              --type binary \
              --rekor \
              --output json)

            ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
            echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          fi

      - name: Sign CLI Binary (linux-arm64)
        id: sign-cli-linux-arm64
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          BINARY="artifacts/stellaops-cli-linux-arm64"
          if [[ -f "$BINARY" ]]; then
            DIGEST="sha256:$(sha256sum "$BINARY" | cut -d' ' -f1)"

            RESULT=$(stella attest sign \
              --keyless \
              --artifact "${DIGEST}" \
              --type binary \
              --rekor \
              --output json)

            ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
            echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          fi

      - name: Sign CLI Binary (darwin-x64)
        id: sign-cli-darwin-x64
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          BINARY="artifacts/stellaops-cli-darwin-x64"
          if [[ -f "$BINARY" ]]; then
            DIGEST="sha256:$(sha256sum "$BINARY" | cut -d' ' -f1)"

            RESULT=$(stella attest sign \
              --keyless \
              --artifact "${DIGEST}" \
              --type binary \
              --rekor \
              --output json)

            ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
            echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          fi

      - name: Sign CLI Binary (darwin-arm64)
        id: sign-cli-darwin-arm64
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          BINARY="artifacts/stellaops-cli-darwin-arm64"
          if [[ -f "$BINARY" ]]; then
            DIGEST="sha256:$(sha256sum "$BINARY" | cut -d' ' -f1)"

            RESULT=$(stella attest sign \
              --keyless \
              --artifact "${DIGEST}" \
              --type binary \
              --rekor \
              --output json)

            ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
            echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          fi

      - name: Sign CLI Binary (windows-x64)
        id: sign-cli-windows-x64
        if: ${{ github.event.inputs.dry_run != 'true' }}
        env:
          STELLAOPS_OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
        run: |
          BINARY="artifacts/stellaops-cli-windows-x64.exe"
          if [[ -f "$BINARY" ]]; then
            DIGEST="sha256:$(sha256sum "$BINARY" | cut -d' ' -f1)"

            RESULT=$(stella attest sign \
              --keyless \
              --artifact "${DIGEST}" \
              --type binary \
              --rekor \
              --output json)

            ATTESTATION=$(echo "$RESULT" | jq -r '.attestationDigest')
            echo "attestation-digest=${ATTESTATION}" >> $GITHUB_OUTPUT
          fi

  verify-signatures:
    needs: [sign-images, sign-binaries]
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      contents: read
      packages: read

    steps:
      - name: Install StellaOps CLI
        run: |
          curl -sL https://get.stella-ops.org/cli | sh
          echo "$HOME/.stellaops/bin" >> $GITHUB_PATH

      - name: Determine Version
        id: version
        run: |
          if [[ -n "${{ github.event.inputs.version }}" ]]; then
            VERSION="${{ github.event.inputs.version }}"
          else
            VERSION="${{ github.event.release.tag_name }}"
            VERSION="${VERSION#v}"
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT

      - name: Verify Scanner Image
        if: ${{ github.event.inputs.dry_run != 'true' }}
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          IMAGE="${REGISTRY}/stellaops/scanner:${VERSION}"
          DIGEST=$(docker manifest inspect "${IMAGE}" -v | jq -r '.Descriptor.digest')

          stella attest verify \
            --artifact "${DIGEST}" \
            --certificate-identity "stella-ops.org/git.stella-ops.org:ref:refs/tags/v${VERSION}" \
            --certificate-oidc-issuer "https://git.stella-ops.org" \
            --require-rekor

      - name: Summary
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          cat >> $GITHUB_STEP_SUMMARY << EOF
          ## Release v${VERSION} Signed

          ### Container Images

          | Image | Attestation |
          |-------|-------------|
          | scanner | \`${{ needs.sign-images.outputs.scanner-attestation }}\` |
          | cli | \`${{ needs.sign-images.outputs.cli-attestation }}\` |
          | gateway | \`${{ needs.sign-images.outputs.gateway-attestation }}\` |

          ### CLI Binaries

          | Platform | Attestation |
          |----------|-------------|
          | linux-x64 | \`${{ needs.sign-binaries.outputs.cli-linux-x64 }}\` |
          | linux-arm64 | \`${{ needs.sign-binaries.outputs.cli-linux-arm64 }}\` |
          | darwin-x64 | \`${{ needs.sign-binaries.outputs.cli-darwin-x64 }}\` |
          | darwin-arm64 | \`${{ needs.sign-binaries.outputs.cli-darwin-arm64 }}\` |
          | windows-x64 | \`${{ needs.sign-binaries.outputs.cli-windows-x64 }}\` |

          ### Verification

          \`\`\`bash
          stella attest verify \\
            --artifact "sha256:..." \\
            --certificate-identity "stella-ops.org/git.stella-ops.org:ref:refs/tags/v${VERSION}" \\
            --certificate-oidc-issuer "https://git.stella-ops.org"
          \`\`\`
          EOF

@@ -1,19 +0,0 @@
name: release-manifest-verify

on:
  push:
    paths:
      - devops/releases/2025.09-stable.yaml
      - devops/releases/2025.09-airgap.yaml
      - devops/downloads/manifest.json
      - devops/release/check_release_manifest.py
  workflow_dispatch:

jobs:
  verify:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Validate release & downloads manifests
        run: |
          python devops/release/check_release_manifest.py
@@ -1,915 +0,0 @@
# .gitea/workflows/release-suite.yml
# Full suite release pipeline with Ubuntu-style versioning (YYYY.MM)
# Sprint: SPRINT_20251226_005_CICD

name: Suite Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Suite version (YYYY.MM format, e.g., 2026.04)'
        required: true
        type: string
      codename:
        description: 'Release codename (e.g., Nova, Orion, Pulsar)'
        required: true
        type: string
      channel:
        description: 'Release channel'
        required: true
        type: choice
        options:
          - edge
          - stable
          - lts
        default: edge
      skip_tests:
        description: 'Skip test execution (use with caution)'
        type: boolean
        default: false
      dry_run:
        description: 'Dry run (build but do not publish)'
        type: boolean
        default: false
  push:
    tags:
      - 'suite-*' # e.g., suite-2026.04

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  REGISTRY: git.stella-ops.org
  NUGET_SOURCE: https://git.stella-ops.org/api/packages/stella-ops.org/nuget/index.json

jobs:
  # ===========================================================================
  # PARSE TAG (for tag-triggered builds)
  # ===========================================================================

  parse-tag:
    name: Parse Tag
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    if: github.event_name == 'push'
    outputs:
      version: ${{ steps.parse.outputs.version }}
      codename: ${{ steps.parse.outputs.codename }}
      channel: ${{ steps.parse.outputs.channel }}
    steps:
      - name: Parse version from tag
        id: parse
        run: |
          TAG="${{ github.ref_name }}"
          # Expected format: suite-{YYYY.MM} or suite-{YYYY.MM}-{codename}
          if [[ "$TAG" =~ ^suite-([0-9]{4}\.(04|10))(-([a-zA-Z]+))?$ ]]; then
            VERSION="${BASH_REMATCH[1]}"
            CODENAME="${BASH_REMATCH[4]:-TBD}"
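            # Worked example (hypothetical tag): TAG=suite-2026.04-Nova
            #   -> VERSION=2026.04, CODENAME=Nova, MONTH=04, CHANNEL=lts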

            # Determine channel based on month (04 = LTS, 10 = feature)
            MONTH="${BASH_REMATCH[2]}"
            if [[ "$MONTH" == "04" ]]; then
              CHANNEL="lts"
            else
              CHANNEL="stable"
            fi

            echo "version=$VERSION" >> "$GITHUB_OUTPUT"
            echo "codename=$CODENAME" >> "$GITHUB_OUTPUT"
            echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
            echo "Parsed: version=$VERSION, codename=$CODENAME, channel=$CHANNEL"
          else
            echo "::error::Invalid tag format. Expected: suite-YYYY.MM or suite-YYYY.MM-codename"
            exit 1
          fi

  # ===========================================================================
  # VALIDATE
  # ===========================================================================

  validate:
    name: Validate Release
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [parse-tag]
    if: always() && (needs.parse-tag.result == 'success' || needs.parse-tag.result == 'skipped')
    outputs:
      version: ${{ steps.resolve.outputs.version }}
      codename: ${{ steps.resolve.outputs.codename }}
      channel: ${{ steps.resolve.outputs.channel }}
      dry_run: ${{ steps.resolve.outputs.dry_run }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Resolve inputs
        id: resolve
        run: |
          if [[ "${{ github.event_name }}" == "push" ]]; then
            VERSION="${{ needs.parse-tag.outputs.version }}"
            CODENAME="${{ needs.parse-tag.outputs.codename }}"
            CHANNEL="${{ needs.parse-tag.outputs.channel }}"
            DRY_RUN="false"
          else
            VERSION="${{ github.event.inputs.version }}"
            CODENAME="${{ github.event.inputs.codename }}"
            CHANNEL="${{ github.event.inputs.channel }}"
            DRY_RUN="${{ github.event.inputs.dry_run }}"
          fi

          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "codename=$CODENAME" >> "$GITHUB_OUTPUT"
          echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
          echo "dry_run=$DRY_RUN" >> "$GITHUB_OUTPUT"

          echo "=== Suite Release Configuration ==="
          echo "Version: $VERSION"
          echo "Codename: $CODENAME"
          echo "Channel: $CHANNEL"
          echo "Dry Run: $DRY_RUN"

      - name: Validate version format
        run: |
          VERSION="${{ steps.resolve.outputs.version }}"
          if ! [[ "$VERSION" =~ ^[0-9]{4}\.(04|10)$ ]]; then
            echo "::error::Invalid version format. Expected YYYY.MM where MM is 04 or 10 (e.g., 2026.04)"
            exit 1
          fi

      - name: Validate codename
        run: |
          CODENAME="${{ steps.resolve.outputs.codename }}"
          if [[ -z "$CODENAME" || "$CODENAME" == "TBD" ]]; then
            echo "::warning::No codename provided, release will use 'TBD'"
          elif ! [[ "$CODENAME" =~ ^[A-Z][a-z]+$ ]]; then
            echo "::warning::Codename should be capitalized (e.g., Nova, Orion)"
          fi

  # ===========================================================================
  # RUN TESTS (unless skipped)
  # ===========================================================================

  test-gate:
    name: Test Gate
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate]
    if: github.event.inputs.skip_tests != 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Release Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Unit|Category=Architecture|Category=Contract" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=release-tests.trx" \
            --results-directory ./TestResults

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: release-test-results
          path: ./TestResults
          retention-days: 14

  # ===========================================================================
  # BUILD MODULES (matrix strategy)
  # ===========================================================================

  build-modules:
    name: Build ${{ matrix.module.name }}
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, test-gate]
    if: always() && needs.validate.result == 'success' && (needs.test-gate.result == 'success' || needs.test-gate.result == 'skipped')
    strategy:
      fail-fast: false
      matrix:
        module:
          - name: Authority
            project: src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj
          - name: Attestor
            project: src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
          - name: Concelier
            project: src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj
          - name: Scanner
            project: src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj
          - name: Policy
            project: src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj
          - name: Signer
            project: src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj
          - name: Excititor
            project: src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj
          - name: Gateway
            project: src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj
          - name: Scheduler
            project: src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Determine module version
        id: version
        run: |
          MODULE_NAME="${{ matrix.module.name }}"
          MODULE_LOWER=$(echo "$MODULE_NAME" | tr '[:upper:]' '[:lower:]')

          # Try to read version from version.txt, fallback to 1.0.0
          VERSION_FILE="src/${MODULE_NAME}/version.txt"
          if [[ -f "$VERSION_FILE" ]]; then
            MODULE_VERSION=$(cat "$VERSION_FILE" | tr -d '[:space:]')
          else
            MODULE_VERSION="1.0.0"
          fi

          echo "module_version=$MODULE_VERSION" >> "$GITHUB_OUTPUT"
          echo "module_lower=$MODULE_LOWER" >> "$GITHUB_OUTPUT"
          echo "Module: $MODULE_NAME, Version: $MODULE_VERSION"

      - name: Restore
        run: dotnet restore ${{ matrix.module.project }}

      - name: Build
        run: |
          dotnet build ${{ matrix.module.project }} \
            --configuration Release \
            --no-restore \
            -p:Version=${{ steps.version.outputs.module_version }}

      - name: Pack NuGet
        run: |
          dotnet pack ${{ matrix.module.project }} \
            --configuration Release \
            --no-build \
            -p:Version=${{ steps.version.outputs.module_version }} \
            -p:PackageVersion=${{ steps.version.outputs.module_version }} \
            --output out/packages

      - name: Push NuGet
        if: needs.validate.outputs.dry_run != 'true'
        run: |
          for nupkg in out/packages/*.nupkg; do
            if [[ -f "$nupkg" ]]; then
              echo "Pushing: $nupkg"
              dotnet nuget push "$nupkg" \
                --source "${{ env.NUGET_SOURCE }}" \
                --api-key "${{ secrets.GITEA_TOKEN }}" \
                --skip-duplicate
            fi
          done

      - name: Upload NuGet artifacts
        uses: actions/upload-artifact@v4
        with:
          name: nuget-${{ matrix.module.name }}
          path: out/packages/*.nupkg
          retention-days: 30
          if-no-files-found: ignore

  # ===========================================================================
  # BUILD CONTAINERS
  # ===========================================================================

  build-containers:
    name: Container ${{ matrix.module }}
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, build-modules]
    if: needs.validate.outputs.dry_run != 'true'
    strategy:
      fail-fast: false
      matrix:
        module:
          - authority
          - attestor
          - concelier
          - scanner
          - policy
          - signer
          - excititor
          - gateway
          - scheduler
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Gitea Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Build and push container
        uses: docker/build-push-action@v5
        with:
          context: .
          file: devops/docker/Dockerfile.platform
          target: ${{ matrix.module }}
          push: true
          tags: |
            ${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:${{ needs.validate.outputs.version }}
            ${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:${{ needs.validate.outputs.channel }}
            ${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
          labels: |
            org.opencontainers.image.title=StellaOps ${{ matrix.module }}
            org.opencontainers.image.version=${{ needs.validate.outputs.version }}
            org.opencontainers.image.description=StellaOps ${{ needs.validate.outputs.version }} ${{ needs.validate.outputs.codename }}
            org.opencontainers.image.source=https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
            org.opencontainers.image.revision=${{ github.sha }}

  # ===========================================================================
  # BUILD CLI (multi-platform)
  # ===========================================================================

  build-cli:
    name: CLI (${{ matrix.runtime }})
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, test-gate]
    if: always() && needs.validate.result == 'success' && (needs.test-gate.result == 'success' || needs.test-gate.result == 'skipped')
    strategy:
      fail-fast: false
      matrix:
        runtime:
          - linux-x64
          - linux-arm64
          - win-x64
          - osx-x64
          - osx-arm64
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install cross-compilation tools
        if: matrix.runtime == 'linux-arm64'
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu

      - name: Publish CLI
        run: |
          dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
            --configuration Release \
            --runtime ${{ matrix.runtime }} \
            --self-contained true \
            -p:Version=${{ needs.validate.outputs.version }}.0 \
            -p:PublishSingleFile=true \
            -p:PublishTrimmed=true \
            -p:EnableCompressionInSingleFile=true \
            --output out/cli/${{ matrix.runtime }}

      - name: Create archive
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          RUNTIME="${{ matrix.runtime }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          cd out/cli/$RUNTIME
          if [[ "$RUNTIME" == win-* ]]; then
            zip -r "../stellaops-cli-${VERSION}-${CODENAME}-${RUNTIME}.zip" .
          else
            tar -czvf "../stellaops-cli-${VERSION}-${CODENAME}-${RUNTIME}.tar.gz" .
          fi

      - name: Upload CLI artifacts
        uses: actions/upload-artifact@v4
        with:
          name: cli-${{ needs.validate.outputs.version }}-${{ matrix.runtime }}
          path: |
            out/cli/*.zip
            out/cli/*.tar.gz
          retention-days: 90

  # ===========================================================================
  # BUILD HELM CHART
  # ===========================================================================

  build-helm:
    name: Helm Chart
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Helm
        run: |
          curl -fsSL https://get.helm.sh/helm-v3.16.0-linux-amd64.tar.gz | \
            tar -xzf - -C /tmp
          sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm

      - name: Lint Helm chart
        run: helm lint devops/helm/stellaops

      - name: Package Helm chart
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          helm package devops/helm/stellaops \
            --version "$VERSION" \
            --app-version "$VERSION" \
            --destination out/helm
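          # Note (assumption): helm enforces SemVer for --version, so a two-part
          # YYYY.MM value may be rejected; appending a patch segment (e.g.
          # "${VERSION}.0", as the CLI publish step does) might be required.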

      - name: Upload Helm chart
        uses: actions/upload-artifact@v4
        with:
          name: helm-chart-${{ needs.validate.outputs.version }}
          path: out/helm/*.tgz
          retention-days: 90

  # ===========================================================================
  # GENERATE RELEASE MANIFEST
  # ===========================================================================

  release-manifest:
    name: Release Manifest
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, build-modules, build-cli, build-helm]
    if: always() && needs.validate.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Generate release manifest
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          mkdir -p out/release

          cat > out/release/suite-${VERSION}.yaml << EOF
          apiVersion: stellaops.org/v1
          kind: SuiteRelease
          metadata:
            version: "${VERSION}"
            codename: "${CODENAME}"
            channel: "${CHANNEL}"
            date: "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
            gitSha: "${{ github.sha }}"
            gitRef: "${{ github.ref }}"
          spec:
            modules:
              authority: "1.0.0"
              attestor: "1.0.0"
              concelier: "1.0.0"
              scanner: "1.0.0"
              policy: "1.0.0"
              signer: "1.0.0"
              excititor: "1.0.0"
              gateway: "1.0.0"
              scheduler: "1.0.0"
            platforms:
              - linux-x64
              - linux-arm64
              - win-x64
              - osx-x64
              - osx-arm64
            artifacts:
              containers: "${{ env.REGISTRY }}/stella-ops.org/*:${VERSION}"
              nuget: "${{ env.NUGET_SOURCE }}"
              helm: "stellaops-${VERSION}.tgz"
          EOF

          echo "=== Release Manifest ==="
          cat out/release/suite-${VERSION}.yaml

      - name: Generate checksums
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          cd artifacts
          find . -type f \( -name "*.nupkg" -o -name "*.tgz" -o -name "*.zip" -o -name "*.tar.gz" \) \
            -exec sha256sum {} \; > ../out/release/SHA256SUMS-${VERSION}.txt

          echo "=== Checksums ==="
          cat ../out/release/SHA256SUMS-${VERSION}.txt
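          # Downstream verification (illustrative): from the directory holding
          # the downloaded artifacts, run: sha256sum -c SHA256SUMS-${VERSION}.txt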

      - name: Upload release manifest
        uses: actions/upload-artifact@v4
        with:
          name: release-manifest-${{ needs.validate.outputs.version }}
          path: out/release
          retention-days: 90

  # ===========================================================================
  # GENERATE CHANGELOG (AI-assisted)
  # ===========================================================================

  generate-changelog:
    name: Generate Changelog
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, build-modules]
    if: always() && needs.validate.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Find previous release tag
        id: prev-tag
        run: |
          PREV_TAG=$(git tag -l "suite-*" --sort=-creatordate | head -1)
          echo "Previous tag: ${PREV_TAG:-none}"
          echo "prev_tag=${PREV_TAG}" >> $GITHUB_OUTPUT

      - name: Generate changelog
        env:
          AI_API_KEY: ${{ secrets.AI_API_KEY }}
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          PREV_TAG="${{ steps.prev-tag.outputs.prev_tag }}"

          mkdir -p out/docs

          ARGS="$VERSION --codename $CODENAME --output out/docs/CHANGELOG.md"
          if [[ -n "$PREV_TAG" ]]; then
            ARGS="$ARGS --from-tag $PREV_TAG"
          fi
          if [[ -n "$AI_API_KEY" ]]; then
            ARGS="$ARGS --ai"
          fi

          python3 .gitea/scripts/release/generate_changelog.py $ARGS

          echo "=== Generated Changelog ==="
          head -50 out/docs/CHANGELOG.md

      - name: Upload changelog
        uses: actions/upload-artifact@v4
        with:
          name: changelog-${{ needs.validate.outputs.version }}
          path: out/docs/CHANGELOG.md
          retention-days: 90

  # ===========================================================================
  # GENERATE SUITE DOCUMENTATION
  # ===========================================================================

  generate-suite-docs:
    name: Generate Suite Docs
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, generate-changelog, release-manifest]
    if: always() && needs.validate.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: pip install python-dateutil

      - name: Download changelog
        uses: actions/download-artifact@v4
        with:
          name: changelog-${{ needs.validate.outputs.version }}
          path: changelog

      - name: Find previous version
        id: prev-version
        run: |
          PREV_TAG=$(git tag -l "suite-*" --sort=-creatordate | head -1)
          if [[ -n "$PREV_TAG" ]]; then
            PREV_VERSION=$(echo "$PREV_TAG" | sed 's/suite-//')
            echo "prev_version=$PREV_VERSION" >> $GITHUB_OUTPUT
          fi

      - name: Generate suite documentation
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"
          PREV="${{ steps.prev-version.outputs.prev_version }}"

          ARGS="$VERSION $CODENAME --channel $CHANNEL"
          if [[ -f "changelog/CHANGELOG.md" ]]; then
            ARGS="$ARGS --changelog changelog/CHANGELOG.md"
          fi
          if [[ -n "$PREV" ]]; then
            ARGS="$ARGS --previous $PREV"
          fi

          python3 .gitea/scripts/release/generate_suite_docs.py $ARGS

          echo "=== Generated Documentation ==="
          ls -la docs/releases/$VERSION/

      - name: Upload suite docs
        uses: actions/upload-artifact@v4
        with:
          name: suite-docs-${{ needs.validate.outputs.version }}
          path: docs/releases/${{ needs.validate.outputs.version }}
          retention-days: 90

  # ===========================================================================
  # GENERATE DOCKER COMPOSE FILES
  # ===========================================================================

  generate-compose:
    name: Generate Docker Compose
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, release-manifest]
    if: always() && needs.validate.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Generate Docker Compose files
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          mkdir -p out/compose

          # Standard compose
          python3 .gitea/scripts/release/generate_compose.py \
            "$VERSION" "$CODENAME" \
            --output out/compose/docker-compose.yml

          # Air-gap variant
          python3 .gitea/scripts/release/generate_compose.py \
            "$VERSION" "$CODENAME" \
            --airgap \
            --output out/compose/docker-compose.airgap.yml

          echo "=== Generated Compose Files ==="
          ls -la out/compose/

      - name: Upload compose files
        uses: actions/upload-artifact@v4
        with:
          name: compose-${{ needs.validate.outputs.version }}
          path: out/compose
          retention-days: 90

  # ===========================================================================
  # COMMIT DOCS TO REPOSITORY
  # ===========================================================================

  commit-docs:
    name: Commit Documentation
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, generate-suite-docs, generate-compose, create-release]
    if: needs.validate.outputs.dry_run != 'true' && needs.create-release.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITEA_TOKEN }}
          fetch-depth: 0

      - name: Download suite docs
        uses: actions/download-artifact@v4
        with:
          name: suite-docs-${{ needs.validate.outputs.version }}
          path: docs/releases/${{ needs.validate.outputs.version }}

      - name: Download compose files
        uses: actions/download-artifact@v4
        with:
          name: compose-${{ needs.validate.outputs.version }}
          path: docs/releases/${{ needs.validate.outputs.version }}

      - name: Commit documentation
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

          git add "docs/releases/${VERSION}"

          if git diff --cached --quiet; then
            echo "No documentation changes to commit"
          else
            git commit -m "docs: add release documentation for ${VERSION} ${CODENAME}

          Generated documentation for StellaOps ${VERSION} \"${CODENAME}\"

          - README.md
          - CHANGELOG.md
          - services.md
          - upgrade-guide.md
          - docker-compose.yml
          - docker-compose.airgap.yml
          - manifest.yaml

          🤖 Generated with [Claude Code](https://claude.com/claude-code)

          Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"

            git push
            echo "Documentation committed and pushed"
          fi

  # ===========================================================================
  # CREATE GITEA RELEASE
  # ===========================================================================

  create-release:
    name: Create Gitea Release
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, build-modules, build-containers, build-cli, build-helm, release-manifest]
    if: needs.validate.outputs.dry_run != 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Prepare release assets
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          mkdir -p release-assets

          # Copy CLI archives
          find artifacts -name "*.zip" -exec cp {} release-assets/ \;
          find artifacts -name "*.tar.gz" -exec cp {} release-assets/ \;

          # Copy Helm chart
          find artifacts -name "*.tgz" -exec cp {} release-assets/ \;

          # Copy manifest and checksums
          find artifacts -name "suite-*.yaml" -exec cp {} release-assets/ \;
          find artifacts -name "SHA256SUMS-*.txt" -exec cp {} release-assets/ \;

          ls -la release-assets/

      - name: Generate release notes
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          cat > release-notes.md << 'EOF'
          ## StellaOps ${{ needs.validate.outputs.version }} "${{ needs.validate.outputs.codename }}"

          ### Release Information
          - **Version:** ${{ needs.validate.outputs.version }}
          - **Codename:** ${{ needs.validate.outputs.codename }}
          - **Channel:** ${{ needs.validate.outputs.channel }}
          - **Date:** __RELEASE_DATE__
          - **Git SHA:** ${{ github.sha }}

          ### Included Modules
          | Module | Version | Container |
          |--------|---------|-----------|
          | Authority | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/authority:${{ needs.validate.outputs.version }}` |
          | Attestor | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/attestor:${{ needs.validate.outputs.version }}` |
          | Concelier | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/concelier:${{ needs.validate.outputs.version }}` |
          | Scanner | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/scanner:${{ needs.validate.outputs.version }}` |
          | Policy | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/policy:${{ needs.validate.outputs.version }}` |
          | Signer | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/signer:${{ needs.validate.outputs.version }}` |
          | Excititor | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/excititor:${{ needs.validate.outputs.version }}` |
          | Gateway | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/gateway:${{ needs.validate.outputs.version }}` |
          | Scheduler | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/scheduler:${{ needs.validate.outputs.version }}` |

          ### CLI Downloads
          | Platform | Download |
          |----------|----------|
          | Linux x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-linux-x64.tar.gz` |
          | Linux ARM64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-linux-arm64.tar.gz` |
          | Windows x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-win-x64.zip` |
          | macOS x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-osx-x64.tar.gz` |
          | macOS ARM64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-osx-arm64.tar.gz` |

          ### Installation

          #### Helm
          ```bash
          helm install stellaops ./stellaops-${{ needs.validate.outputs.version }}.tgz
          ```

          #### Docker Compose
          ```bash
          docker compose -f devops/compose/docker-compose.yml up -d
          ```

          ---
          See [CHANGELOG.md](CHANGELOG.md) for detailed changes.
          EOF

          # 'EOF' above is quoted, so $(...) does not expand inside the heredoc;
          # substitute the release date afterwards.
          sed -i "s/__RELEASE_DATE__/$(date -u +%Y-%m-%d)/" release-notes.md

      - name: Create Gitea release
        env:
          GITHUB_TOKEN: ${{ secrets.GITEA_TOKEN }}
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          # Determine if prerelease
          PRERELEASE_FLAG=""
          if [[ "$CHANNEL" == "edge" ]]; then
            PRERELEASE_FLAG="--prerelease"
          fi

          gh release create "suite-${VERSION}" \
            --title "StellaOps ${VERSION} ${CODENAME}" \
            --notes-file release-notes.md \
            $PRERELEASE_FLAG \
            release-assets/*

  # ===========================================================================
  # SUMMARY
  # ===========================================================================

  summary:
    name: Release Summary
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [validate, build-modules, build-containers, build-cli, build-helm, release-manifest, generate-changelog, generate-suite-docs, generate-compose, create-release, commit-docs]
    if: always()
    steps:
      - name: Generate Summary
        run: |
          echo "## Suite Release Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Release Information" >> $GITHUB_STEP_SUMMARY
          echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Version | ${{ needs.validate.outputs.version }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Codename | ${{ needs.validate.outputs.codename }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Channel | ${{ needs.validate.outputs.channel }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Dry Run | ${{ needs.validate.outputs.dry_run }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Job Results" >> $GITHUB_STEP_SUMMARY
          echo "| Job | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-----|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Build Modules | ${{ needs.build-modules.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build Containers | ${{ needs.build-containers.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build CLI | ${{ needs.build-cli.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build Helm | ${{ needs.build-helm.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Release Manifest | ${{ needs.release-manifest.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Generate Changelog | ${{ needs.generate-changelog.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Generate Suite Docs | ${{ needs.generate-suite-docs.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Generate Compose | ${{ needs.generate-compose.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Create Release | ${{ needs.create-release.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Commit Documentation | ${{ needs.commit-docs.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY

      - name: Check for failures
        if: contains(needs.*.result, 'failure')
        run: |
          echo "::error::One or more release jobs failed"
          exit 1

@@ -1,120 +0,0 @@
name: Release Validation

on:
  push:
    tags:
      - 'v*'
  pull_request:
    paths:
      - 'devops/**'
      - 'scripts/release/**'
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.x'
  REGISTRY: ghcr.io
  IMAGE_PREFIX: stellaops

jobs:
  validate-manifests:
    name: Validate Release Manifests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Validate Helm charts
        run: |
          helm lint devops/helm/stellaops
          helm template stellaops devops/helm/stellaops --dry-run

      - name: Validate Kubernetes manifests
        run: |
          for f in devops/k8s/*.yaml; do
            kubectl apply --dry-run=client -f "$f" || exit 1
          done

      - name: Check required images exist
        run: |
          REQUIRED_IMAGES=(
            "concelier"
            "scanner"
            "authority"
            "signer"
            "attestor"
            "excititor"
            "policy"
            "scheduler"
            "notify"
          )
          for img in "${REQUIRED_IMAGES[@]}"; do
            echo "Checking $img..."
            # Validate Dockerfile exists
            if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "devops/docker/${img}/Dockerfile" ]; then
              echo "Warning: Dockerfile not found for $img"
            fi
          done
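          # Worked example: img=concelier -> "${img^}" is "Concelier", so the loop
          # checks src/Concelier/Dockerfile, then devops/docker/concelier/Dockerfile.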

  validate-checksums:
    name: Validate Artifact Checksums
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Verify SHA256SUMS files
        run: |
          find . -name "SHA256SUMS" -type f | while read f; do
            dir=$(dirname "$f")
            echo "Validating $f..."
            cd "$dir"
            if ! sha256sum -c SHA256SUMS --quiet 2>/dev/null; then
              echo "Warning: Checksum mismatch in $dir"
            fi
            cd - > /dev/null
          done

  validate-schemas:
    name: Validate Schema Integrity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install ajv-cli
        run: npm install -g ajv-cli ajv-formats

      - name: Validate JSON schemas
        run: |
          for schema in docs/schemas/*.schema.json; do
            echo "Validating $schema..."
            ajv compile -s "$schema" --spec=draft2020 || echo "Warning: $schema validation issue"
          done

  release-notes:
    name: Generate Release Notes
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    needs: [validate-manifests, validate-checksums, validate-schemas]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Generate changelog
        run: |
          PREV_TAG=$(git describe --abbrev=0 --tags HEAD^ 2>/dev/null || echo "")
          if [ -n "$PREV_TAG" ]; then
            echo "## Changes since $PREV_TAG" > RELEASE_NOTES.md
            git log --pretty=format:"- %s (%h)" "$PREV_TAG"..HEAD >> RELEASE_NOTES.md
          else
            echo "## Initial Release" > RELEASE_NOTES.md
          fi

      - name: Upload release notes
        uses: actions/upload-artifact@v4
        with:
          name: release-notes
          path: RELEASE_NOTES.md
@@ -1,252 +0,0 @@
# .gitea/workflows/release.yml
# Deterministic release pipeline producing signed images, SBOMs, provenance, and manifest

name: Release Bundle

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      version:
        description: 'Release version (overrides tag, e.g. 2025.10.0-edge)'
        required: false
        type: string
      channel:
        description: 'Release channel (edge|stable|lts)'
        required: false
        default: 'edge'
        type: choice
        options:
          - edge
          - stable
          - lts
      calendar:
        description: 'Calendar tag (YYYY.MM) - optional override'
        required: false
        type: string
      push_images:
        description: 'Push container images to registry'
        required: false
        default: true
        type: boolean

jobs:
  build-release:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      REGISTRY: registry.stella-ops.org
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: bash .gitea/scripts/test/run-fixtures-check.sh

      - name: Validate NuGet restore source ordering
        run: python3 devops/validate_restore_sources.py

      - name: Validate telemetry storage configuration
        run: python3 devops/telemetry/validate_storage_stack.py

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Set up Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: '20.14.0'

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install cross-arch objcopy tooling
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu

      - name: Publish Python analyzer plug-in
        run: |
          set -euo pipefail
          dotnet publish src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \
            --configuration Release \
            --output out/analyzers/python \
            --no-self-contained
          mkdir -p plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python
          cp out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.dll plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/
          if [ -f out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.pdb ]; then
            cp out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.pdb plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/
          fi

      - name: Run Python analyzer smoke checks
        run: |
          dotnet run \
            --project tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj \
            --configuration Release \
            -- \
            --repo-root .

      # Note: this step enforces DEVOPS-REL-14-004 by signing the restart-only Python plug-in.
      # Ensure COSIGN_KEY_REF or COSIGN_IDENTITY_TOKEN is configured, otherwise the job will fail.
      - name: Sign Python analyzer artefacts
        env:
          COSIGN_KEY_REF: ${{ secrets.COSIGN_KEY_REF }}
          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
          COSIGN_IDENTITY_TOKEN: ${{ secrets.COSIGN_IDENTITY_TOKEN }}
        run: |
          set -euo pipefail
          if [[ -z "${COSIGN_KEY_REF:-}" && -z "${COSIGN_IDENTITY_TOKEN:-}" ]]; then
            echo "::error::COSIGN_KEY_REF or COSIGN_IDENTITY_TOKEN must be provided to sign analyzer artefacts." >&2
            exit 1
          fi

          export COSIGN_PASSWORD="${COSIGN_PASSWORD:-}"
          export COSIGN_EXPERIMENTAL=1

          PLUGIN_DIR="plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python"
          ARTIFACTS=(
            "StellaOps.Scanner.Analyzers.Lang.Python.dll"
            "manifest.json"
          )

          for artifact in "${ARTIFACTS[@]}"; do
            FILE="${PLUGIN_DIR}/${artifact}"
            if [[ ! -f "${FILE}" ]]; then
              echo "::error::Missing analyzer artefact ${FILE}" >&2
              exit 1
            fi

            sha256sum "${FILE}" | awk '{print $1}' > "${FILE}.sha256"

            SIGN_ARGS=(--yes "${FILE}")
            if [[ -n "${COSIGN_KEY_REF:-}" ]]; then
              SIGN_ARGS=(--key "${COSIGN_KEY_REF}" "${SIGN_ARGS[@]}")
            fi
            if [[ -n "${COSIGN_IDENTITY_TOKEN:-}" ]]; then
              SIGN_ARGS=(--identity-token "${COSIGN_IDENTITY_TOKEN}" "${SIGN_ARGS[@]}")
            fi

            cosign sign-blob "${SIGN_ARGS[@]}" > "${FILE}.sig"
          done
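          # Spot-check a signature produced above (key-based flow, illustrative):
          #   cosign verify-blob --key "$COSIGN_KEY_REF" \
          #     --signature "${FILE}.sig" "${FILE}"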

      - name: Install Helm 3.16.0
        run: |
          curl -fsSL https://get.helm.sh/helm-v3.16.0-linux-amd64.tar.gz -o /tmp/helm.tgz
          tar -xzf /tmp/helm.tgz -C /tmp
          sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm

      - name: Install Cosign
        uses: sigstore/cosign-installer@v3.4.0

      - name: Install Syft
        run: |
          set -euo pipefail
          SYFT_VERSION="v1.21.0"
          curl -fsSL "https://github.com/anchore/syft/releases/download/${SYFT_VERSION}/syft_${SYFT_VERSION#v}_linux_amd64.tar.gz" -o /tmp/syft.tgz
          tar -xzf /tmp/syft.tgz -C /tmp
          sudo install -m 0755 /tmp/syft /usr/local/bin/syft

      - name: Determine release metadata
        id: meta
        run: |
          set -euo pipefail
          RAW_VERSION="${{ github.ref_name }}"
          if [[ "${{ github.event_name }}" != "push" ]]; then
            RAW_VERSION="${{ github.event.inputs.version }}"
          fi
          if [[ -z "$RAW_VERSION" ]]; then
            echo "::error::Release version not provided" >&2
            exit 1
          fi
          VERSION="${RAW_VERSION#v}"
          CHANNEL="${{ github.event.inputs.channel || '' }}"
          if [[ -z "$CHANNEL" ]]; then
            CHANNEL="edge"
          fi
          CALENDAR_INPUT="${{ github.event.inputs.calendar || '' }}"
          if [[ -z "$CALENDAR_INPUT" ]]; then
            YEAR=$(echo "$VERSION" | awk -F'.' '{print $1}')
            MONTH=$(echo "$VERSION" | awk -F'.' '{print $2}')
            if [[ -n "$YEAR" && -n "$MONTH" ]]; then
              CALENDAR_INPUT="$YEAR.$MONTH"
            else
              CALENDAR_INPUT=$(date -u +'%Y.%m')
            fi
          fi
          PUSH_INPUT="${{ github.event.inputs.push_images || '' }}"
          if [[ "${{ github.event_name }}" == "push" ]]; then
            PUSH_INPUT="true"
          elif [[ -z "$PUSH_INPUT" ]]; then
            PUSH_INPUT="true"
          fi
          if [[ "$PUSH_INPUT" == "false" || "$PUSH_INPUT" == "0" ]]; then
            PUSH_FLAG="false"
          else
            PUSH_FLAG="true"
          fi
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
          echo "calendar=$CALENDAR_INPUT" >> "$GITHUB_OUTPUT"
          echo "push=$PUSH_FLAG" >> "$GITHUB_OUTPUT"
|
||||
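      # Worked example (hypothetical values) of the derivation above: a tag push
      # of v2026.1.3 yields version=2026.1.3, channel=edge, calendar=2026.1 and
      # push=true; a manual dispatch with no calendar input derives the same
      # year.month pair from the version, falling back to the current UTC month
      # only if the version cannot be parsed.
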
      - name: Enforce CLI parity gate
        run: |
          python3 .gitea/scripts/release/check_cli_parity.py

      - name: Log in to registry
        if: steps.meta.outputs.push == 'true'
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}

      - name: Prepare release output directory
        run: |
          rm -rf out/release
          mkdir -p out/release

      - name: Build release bundle
        # NOTE (DEVOPS-REL-17-004): build_release.py now fails if out/release/debug is missing
        env:
          COSIGN_KEY_REF: ${{ secrets.COSIGN_KEY_REF }}
          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
          COSIGN_IDENTITY_TOKEN: ${{ secrets.COSIGN_IDENTITY_TOKEN }}
        run: |
          set -euo pipefail
          EXTRA_ARGS=()
          if [[ "${{ steps.meta.outputs.push }}" != "true" ]]; then
            EXTRA_ARGS+=("--no-push")
          fi
          ./.gitea/scripts/release/build_release.py \
            --version "${{ steps.meta.outputs.version }}" \
            --channel "${{ steps.meta.outputs.channel }}" \
            --calendar "${{ steps.meta.outputs.calendar }}" \
            --git-sha "${{ github.sha }}" \
            "${EXTRA_ARGS[@]}"

      - name: Verify release artefacts
        run: |
          python3 .gitea/scripts/release/verify_release.py --release-dir out/release

      - name: Upload release artefacts
        uses: actions/upload-artifact@v4
        with:
          name: stellaops-release-${{ steps.meta.outputs.version }}
          path: out/release
          if-no-files-found: error

      - name: Upload debug artefacts (build-id store)
        uses: actions/upload-artifact@v4
        with:
          name: stellaops-debug-${{ steps.meta.outputs.version }}
          path: out/release/debug
          if-no-files-found: error

@@ -1,114 +0,0 @@
# Renovate Bot Workflow for Gitea
# Sprint: CI/CD Enhancement - Dependency Management Automation
#
# Purpose: Run Renovate Bot to automatically update dependencies
# Schedule: Twice daily (03:00 and 15:00 UTC)
#
# Requirements:
# - RENOVATE_TOKEN secret with repo write access
# - renovate.json configuration in repo root

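# Illustrative only: a minimal renovate.json that would satisfy the requirement
# above. The actual rules live in the repository root and may differ; this
# sketch simply extends the recommended preset and groups non-major updates.
#
# {
#   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
#   "extends": ["config:recommended"],
#   "packageRules": [
#     { "matchUpdateTypes": ["minor", "patch"], "groupName": "non-major dependencies" }
#   ]
# }
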
name: Renovate

on:
  schedule:
    # Run at 03:00 and 15:00 UTC
    - cron: '0 3,15 * * *'
  workflow_dispatch:
    inputs:
      dry_run:
        description: 'Dry run (no PRs created)'
        required: false
        type: boolean
        default: false
      log_level:
        description: 'Log level'
        required: false
        type: choice
        options:
          - debug
          - info
          - warn
        default: 'info'

env:
  RENOVATE_VERSION: '37.100.0'
  LOG_LEVEL: ${{ github.event.inputs.log_level || 'info' }}

jobs:
  renovate:
    name: Run Renovate
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate configuration
        run: |
          if [[ ! -f "renovate.json" ]]; then
            echo "::error::renovate.json not found in repository root"
            exit 1
          fi
          echo "Renovate configuration found"
          head -20 renovate.json

      - name: Run Renovate
        env:
          RENOVATE_TOKEN: ${{ secrets.RENOVATE_TOKEN }}
          RENOVATE_PLATFORM: gitea
          RENOVATE_ENDPOINT: ${{ github.server_url }}/api/v1
          RENOVATE_REPOSITORIES: ${{ github.repository }}
          RENOVATE_DRY_RUN: ${{ github.event.inputs.dry_run == 'true' && 'full' || 'null' }}
          LOG_LEVEL: ${{ env.LOG_LEVEL }}
        run: |
          # Install Renovate
          npm install -g renovate@${{ env.RENOVATE_VERSION }}

          # Configure Renovate
          export RENOVATE_CONFIG_FILE="${GITHUB_WORKSPACE}/renovate.json"

          # Announce dry run mode (RENOVATE_DRY_RUN is already exported via env above)
          if [[ "$RENOVATE_DRY_RUN" == "full" ]]; then
            echo "Running in DRY RUN mode - no PRs will be created"
          fi

          # Run Renovate
          renovate \
            --platform="$RENOVATE_PLATFORM" \
            --endpoint="$RENOVATE_ENDPOINT" \
            --token="$RENOVATE_TOKEN" \
            "$RENOVATE_REPOSITORIES" \
            2>&1 | tee renovate.log

      - name: Upload Renovate log
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: renovate-log-${{ github.run_id }}
          path: renovate.log
          retention-days: 7

      - name: Summary
        if: always()
        run: |
          echo "## Renovate Run Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Version | ${{ env.RENOVATE_VERSION }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Log Level | ${{ env.LOG_LEVEL }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Dry Run | ${{ github.event.inputs.dry_run || 'false' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Trigger | ${{ github.event_name }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [[ -f renovate.log ]]; then
            # Count PRs created/updated. grep -c already prints 0 when nothing
            # matches; `|| true` avoids appending a second "0" on non-zero exit.
            CREATED=$(grep -c "PR created" renovate.log 2>/dev/null || true)
            UPDATED=$(grep -c "PR updated" renovate.log 2>/dev/null || true)
            echo "### Results" >> $GITHUB_STEP_SUMMARY
            echo "- PRs Created: ${CREATED:-0}" >> $GITHUB_STEP_SUMMARY
            echo "- PRs Updated: ${UPDATED:-0}" >> $GITHUB_STEP_SUMMARY
          fi

@@ -1,40 +0,0 @@
name: Replay Verification

on:
  pull_request:
    paths:
      - 'src/Scanner/**'
      - 'src/__Libraries/StellaOps.Canonicalization/**'
      - 'src/__Libraries/StellaOps.Replay/**'
      - 'src/__Libraries/StellaOps.Testing.Manifests/**'
      - 'src/__Tests/__Benchmarks/golden-corpus/**'

jobs:
  replay-verification:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'

      - name: Build CLI
        run: dotnet build src/Cli/StellaOps.Cli -c Release

      - name: Run replay verification on corpus
        run: |
          dotnet run --project src/Cli/StellaOps.Cli -- replay batch \
            --corpus src/__Tests/__Benchmarks/golden-corpus/ \
            --output results/ \
            --verify-determinism \
            --fail-on-diff

      - name: Upload diff report
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: replay-diff-report
          path: results/diff-report.json

@@ -1,199 +0,0 @@
name: Risk Bundle CI

on:
  push:
    branches: [ main ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  workflow_dispatch:
    inputs:
      include_osv:
        description: 'Include OSV providers (larger bundle)'
        type: boolean
        default: false
      publish_checksums:
        description: 'Publish checksums to artifact store'
        type: boolean
        default: true

jobs:
  risk-bundle-build:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: .gitea/scripts/util/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true

      - name: Test RiskBundle unit tests
        run: |
          mkdir -p $ARTIFACT_DIR
          dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
            -c Release \
            --filter "FullyQualifiedName~RiskBundle" \
            --logger "trx;LogFileName=risk-bundle-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Build risk bundle (fixtures)
        run: |
          mkdir -p $BUNDLE_OUTPUT
          devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only

      - name: Verify bundle integrity
        run: devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"

      - name: Generate checksums
        run: |
          cd $BUNDLE_OUTPUT
          sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
          sha256sum manifest.json > manifest.json.sha256
          cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
          echo "Bundle checksums:"
          cat checksums.txt

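      # Illustrative consumer-side check (not executed by this workflow): after
      # downloading the artifacts, the same checksums file drives offline
      # verification. The cosign.pub path is an assumption, and the signature is
      # only present when the bundle was signed upstream.
      #
      # sha256sum -c checksums.txt
      # cosign verify-blob --key cosign.pub --signature risk-bundle.tar.gz.sig risk-bundle.tar.gz
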
      - name: Upload risk bundle artifacts
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: |
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
            ${{ env.BUNDLE_OUTPUT }}/manifest.json
            ${{ env.BUNDLE_OUTPUT }}/checksums.txt
            ${{ env.ARTIFACT_DIR }}/*.trx

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: risk-bundle-test-results
          path: ${{ env.ARTIFACT_DIR }}/*.trx

  risk-bundle-offline-kit:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: risk-bundle-build
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Package for offline kit
        run: |
          mkdir -p $OFFLINE_KIT_DIR/risk-bundles
          cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
          cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/

          # Create offline kit manifest entry
          cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
          {
            "component": "risk-bundle",
            "version": "$(date -u +%Y%m%d-%H%M%S)",
            "files": [
              {"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
              {"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
            ],
            "verification": {
              "checksums": "checksums.txt",
              "signature": "risk-bundle.tar.gz.sig"
            }
          }
          EOF

      - name: Verify offline kit structure
        run: |
          echo "Offline kit structure:"
          find $OFFLINE_KIT_DIR -type f
          echo ""
          echo "Checksum verification:"
          cd $OFFLINE_KIT_DIR/risk-bundles
          sha256sum -c checksums.txt

      - name: Upload offline kit
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-offline-kit
          path: ${{ env.OFFLINE_KIT_DIR }}

  publish-checksums:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: risk-bundle-build
    if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Publish checksums
        run: |
          echo "Publishing checksums for risk bundle..."
          CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
          mkdir -p $CHECKSUM_DIR
          cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
          cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/

          # Create latest symlink manifest
          cat > out/checksums/risk-bundle/latest.json <<EOF
          {
            "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "path": "$(date -u +%Y-%m-%d)/checksums.txt",
            "manifest": "$(date -u +%Y-%m-%d)/manifest.json"
          }
          EOF

          echo "Checksums published to $CHECKSUM_DIR"
          cat $CHECKSUM_DIR/checksums.txt

      - name: Upload published checksums
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-published-checksums
          path: out/checksums/risk-bundle/

@@ -1,404 +0,0 @@
|
||||
# .gitea/workflows/rollback-lag.yml
|
||||
# Rollback lag measurement for deployment SLO validation
|
||||
# Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
|
||||
# Task: CCUT-025
|
||||
#
|
||||
# WORKFLOW PURPOSE:
|
||||
# =================
|
||||
# Measures the time required to rollback a deployment and restore service health.
|
||||
# This validates the rollback SLO (< 5 minutes) and provides visibility into
|
||||
# deployment reversibility characteristics.
|
||||
#
|
||||
# The workflow performs a controlled rollback, measures timing metrics, and
|
||||
# restores the original version afterward.
|
||||
|
||||
name: Rollback Lag Measurement
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Target environment'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
deployment:
|
||||
description: 'Deployment name to test'
|
||||
required: true
|
||||
type: string
|
||||
default: 'stellaops-api'
|
||||
namespace:
|
||||
description: 'Kubernetes namespace'
|
||||
required: true
|
||||
type: string
|
||||
default: 'stellaops'
|
||||
rollback_slo_seconds:
|
||||
description: 'Rollback SLO in seconds'
|
||||
required: false
|
||||
type: number
|
||||
default: 300
|
||||
dry_run:
|
||||
description: 'Dry run (do not actually rollback)'
|
||||
required: false
|
||||
type: boolean
|
||||
default: true
|
||||
schedule:
|
||||
# Run weekly on staging to track trends
|
||||
- cron: '0 3 * * 0'
|
||||
|
||||
env:
|
||||
DEFAULT_NAMESPACE: stellaops
|
||||
DEFAULT_DEPLOYMENT: stellaops-api
|
||||
DEFAULT_SLO: 300
|
||||
|
||||
jobs:
|
||||
# ===========================================================================
|
||||
# PRE-FLIGHT CHECKS
|
||||
# ===========================================================================
|
||||
|
||||
preflight:
|
||||
name: Pre-Flight Checks
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
environment: ${{ inputs.environment || 'staging' }}
|
||||
outputs:
|
||||
current-version: ${{ steps.current.outputs.version }}
|
||||
current-image: ${{ steps.current.outputs.image }}
|
||||
previous-version: ${{ steps.previous.outputs.version }}
|
||||
previous-image: ${{ steps.previous.outputs.image }}
|
||||
can-rollback: ${{ steps.check.outputs.can_rollback }}
|
||||
replica-count: ${{ steps.current.outputs.replicas }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup kubectl
|
||||
uses: azure/setup-kubectl@v4
|
||||
with:
|
||||
version: 'latest'
|
||||
|
||||
- name: Configure Kubernetes
|
||||
run: |
|
||||
echo "${{ secrets.KUBECONFIG }}" | base64 -d > kubeconfig.yaml
|
||||
export KUBECONFIG=kubeconfig.yaml
|
||||
|
||||
- name: Get Current Deployment State
|
||||
id: current
|
||||
run: |
|
||||
NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
|
||||
DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
|
||||
|
||||
# Get current image
|
||||
CURRENT_IMAGE=$(kubectl get deployment "$DEPLOYMENT" -n "$NAMESPACE" \
|
||||
-o jsonpath='{.spec.template.spec.containers[0].image}' 2>/dev/null || echo "unknown")
|
||||
|
||||
# Extract version from image tag
|
||||
CURRENT_VERSION=$(echo "$CURRENT_IMAGE" | sed 's/.*://')
|
||||
|
||||
# Get replica count
|
||||
REPLICAS=$(kubectl get deployment "$DEPLOYMENT" -n "$NAMESPACE" \
|
||||
-o jsonpath='{.spec.replicas}' 2>/dev/null || echo "1")
|
||||
|
||||
echo "image=$CURRENT_IMAGE" >> $GITHUB_OUTPUT
|
||||
echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "replicas=$REPLICAS" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "Current deployment: $DEPLOYMENT"
|
||||
echo "Current image: $CURRENT_IMAGE"
|
||||
echo "Current version: $CURRENT_VERSION"
|
||||
echo "Replicas: $REPLICAS"
|
||||
|
||||
- name: Get Previous Version
|
||||
id: previous
|
||||
run: |
|
||||
NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
|
||||
DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
|
||||
|
||||
# Get rollout history
|
||||
HISTORY=$(kubectl rollout history deployment "$DEPLOYMENT" -n "$NAMESPACE" 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$HISTORY" ]; then
|
||||
echo "version=unknown" >> $GITHUB_OUTPUT
|
||||
echo "image=unknown" >> $GITHUB_OUTPUT
|
||||
echo "No rollout history available"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Get previous revision number
|
||||
PREV_REVISION=$(echo "$HISTORY" | grep -E '^[0-9]+' | tail -2 | head -1 | awk '{print $1}')
|
||||
|
||||
if [ -z "$PREV_REVISION" ]; then
|
||||
echo "version=unknown" >> $GITHUB_OUTPUT
|
||||
echo "image=unknown" >> $GITHUB_OUTPUT
|
||||
echo "No previous revision found"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Get image from previous revision
|
||||
PREV_IMAGE=$(kubectl rollout history deployment "$DEPLOYMENT" -n "$NAMESPACE" \
|
||||
--revision="$PREV_REVISION" -o jsonpath='{.spec.template.spec.containers[0].image}' 2>/dev/null || echo "unknown")
|
||||
|
||||
PREV_VERSION=$(echo "$PREV_IMAGE" | sed 's/.*://')
|
||||
|
||||
echo "image=$PREV_IMAGE" >> $GITHUB_OUTPUT
|
||||
echo "version=$PREV_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "Previous revision: $PREV_REVISION"
|
||||
echo "Previous image: $PREV_IMAGE"
|
||||
echo "Previous version: $PREV_VERSION"
|
||||
|
||||
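      # For reference, the parsing above assumes `kubectl rollout history` output
      # of the shape below; `grep -E '^[0-9]+' | tail -2 | head -1` then selects
      # the second-to-last revision (2 in this hypothetical example):
      #
      #   REVISION  CHANGE-CAUSE
      #   1         <none>
      #   2         <none>
      #   3         <none>
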
      - name: Check Rollback Feasibility
        id: check
        run: |
          CURRENT="${{ steps.current.outputs.version }}"
          PREVIOUS="${{ steps.previous.outputs.version }}"

          if [ "$PREVIOUS" = "unknown" ] || [ -z "$PREVIOUS" ]; then
            echo "can_rollback=false" >> $GITHUB_OUTPUT
            echo "::warning::No previous version available for rollback"
          elif [ "$CURRENT" = "$PREVIOUS" ]; then
            echo "can_rollback=false" >> $GITHUB_OUTPUT
            echo "::warning::Current and previous versions are the same"
          else
            echo "can_rollback=true" >> $GITHUB_OUTPUT
            echo "Rollback feasible: $CURRENT -> $PREVIOUS"
          fi

  # ===========================================================================
  # MEASURE ROLLBACK LAG
  # ===========================================================================

  measure:
    name: Measure Rollback Lag
    needs: preflight
    if: needs.preflight.outputs.can-rollback == 'true'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    environment: ${{ inputs.environment || 'staging' }}
    outputs:
      rollback-time: ${{ steps.timing.outputs.rollback_time }}
      health-recovery-time: ${{ steps.timing.outputs.health_time }}
      total-lag: ${{ steps.timing.outputs.total_lag }}
      slo-met: ${{ steps.timing.outputs.slo_met }}
    steps:
      - name: Setup kubectl
        uses: azure/setup-kubectl@v4
        with:
          version: 'latest'

      - name: Configure Kubernetes
        run: |
          echo "${{ secrets.KUBECONFIG }}" | base64 -d > kubeconfig.yaml
          # Persist via GITHUB_ENV so the later kubectl steps inherit the path.
          echo "KUBECONFIG=$PWD/kubeconfig.yaml" >> $GITHUB_ENV

      - name: Record Start Time
        id: start
        run: |
          START_TIME=$(date +%s)
          echo "time=$START_TIME" >> $GITHUB_OUTPUT
          echo "Rollback measurement started at: $(date -u +%Y-%m-%dT%H:%M:%SZ)"

      - name: Trigger Rollback
        id: rollback
        run: |
          NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
          DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
          DRY_RUN="${{ inputs.dry_run || 'true' }}"

          if [ "$DRY_RUN" = "true" ]; then
            echo "DRY RUN: Would execute rollback"
            echo "kubectl rollout undo deployment/$DEPLOYMENT -n $NAMESPACE"
            ROLLBACK_TIME=$(date +%s)
          else
            echo "Executing rollback..."
            kubectl rollout undo deployment/"$DEPLOYMENT" -n "$NAMESPACE"
            ROLLBACK_TIME=$(date +%s)
          fi

          echo "time=$ROLLBACK_TIME" >> $GITHUB_OUTPUT

      - name: Wait for Rollout Complete
        id: rollout
        run: |
          NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
          DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
          DRY_RUN="${{ inputs.dry_run || 'true' }}"

          if [ "$DRY_RUN" = "true" ]; then
            echo "DRY RUN: Simulating rollout wait"
            sleep 5
            ROLLOUT_COMPLETE_TIME=$(date +%s)
          else
            echo "Waiting for rollout to complete..."
            kubectl rollout status deployment/"$DEPLOYMENT" -n "$NAMESPACE" --timeout=600s
            ROLLOUT_COMPLETE_TIME=$(date +%s)
          fi

          echo "time=$ROLLOUT_COMPLETE_TIME" >> $GITHUB_OUTPUT

      - name: Wait for Health Recovery
        id: health
        run: |
          NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
          DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
          DRY_RUN="${{ inputs.dry_run || 'true' }}"
          REPLICAS="${{ needs.preflight.outputs.replica-count }}"

          if [ "$DRY_RUN" = "true" ]; then
            echo "DRY RUN: Simulating health check"
            sleep 3
            HEALTH_TIME=$(date +%s)
          else
            echo "Waiting for health checks to pass..."

            # Wait for all pods to be ready
            MAX_WAIT=300
            WAITED=0
            while [ "$WAITED" -lt "$MAX_WAIT" ]; do
              READY=$(kubectl get deployment "$DEPLOYMENT" -n "$NAMESPACE" \
                -o jsonpath='{.status.readyReplicas}' 2>/dev/null || echo "0")

              if [ "$READY" = "$REPLICAS" ]; then
                echo "All $READY replicas are ready"
                break
              fi

              echo "Ready: $READY / $REPLICAS (waited ${WAITED}s)"
              sleep 5
              WAITED=$((WAITED + 5))
            done

            HEALTH_TIME=$(date +%s)
          fi

          echo "time=$HEALTH_TIME" >> $GITHUB_OUTPUT

      - name: Calculate Timing Metrics
        id: timing
        run: |
          START_TIME=${{ steps.start.outputs.time }}
          ROLLBACK_TIME=${{ steps.rollback.outputs.time }}
          ROLLOUT_TIME=${{ steps.rollout.outputs.time }}
          HEALTH_TIME=${{ steps.health.outputs.time }}
          SLO_SECONDS="${{ inputs.rollback_slo_seconds || env.DEFAULT_SLO }}"

          # Calculate durations
          ROLLBACK_DURATION=$((ROLLOUT_TIME - ROLLBACK_TIME))
          HEALTH_DURATION=$((HEALTH_TIME - ROLLOUT_TIME))
          TOTAL_LAG=$((HEALTH_TIME - START_TIME))

          # Check SLO
          if [ "$TOTAL_LAG" -le "$SLO_SECONDS" ]; then
            SLO_MET="true"
          else
            SLO_MET="false"
          fi

          echo "rollback_time=$ROLLBACK_DURATION" >> $GITHUB_OUTPUT
          echo "health_time=$HEALTH_DURATION" >> $GITHUB_OUTPUT
          echo "total_lag=$TOTAL_LAG" >> $GITHUB_OUTPUT
          echo "slo_met=$SLO_MET" >> $GITHUB_OUTPUT

          echo "=== Rollback Timing Metrics ==="
          echo "Rollback execution: ${ROLLBACK_DURATION}s"
          echo "Health recovery: ${HEALTH_DURATION}s"
          echo "Total lag: ${TOTAL_LAG}s"
          echo "SLO (${SLO_SECONDS}s): $SLO_MET"

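      # Worked example of the arithmetic above (hypothetical timestamps): with
      # START=1000, ROLLBACK=1010, ROLLOUT=1130 and HEALTH=1190, the step reports
      # rollback execution 120s, health recovery 60s and total lag 190s, which
      # meets the default 300s SLO.
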
      - name: Restore Original Version
        if: inputs.dry_run != true
        run: |
          NAMESPACE="${{ inputs.namespace || env.DEFAULT_NAMESPACE }}"
          DEPLOYMENT="${{ inputs.deployment || env.DEFAULT_DEPLOYMENT }}"
          ORIGINAL_IMAGE="${{ needs.preflight.outputs.current-image }}"

          echo "Restoring original version: $ORIGINAL_IMAGE"
          kubectl set image deployment/"$DEPLOYMENT" \
            "$DEPLOYMENT"="$ORIGINAL_IMAGE" \
            -n "$NAMESPACE"

          kubectl rollout status deployment/"$DEPLOYMENT" -n "$NAMESPACE" --timeout=600s
          echo "Original version restored"

  # ===========================================================================
  # GENERATE REPORT
  # ===========================================================================

  report:
    name: Generate Report
    needs: [preflight, measure]
    if: always() && needs.preflight.result == 'success'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Generate Report
        run: |
          SLO_SECONDS="${{ inputs.rollback_slo_seconds || 300 }}"
          TOTAL_LAG="${{ needs.measure.outputs.total-lag || 'N/A' }}"
          SLO_MET="${{ needs.measure.outputs.slo-met || 'unknown' }}"

          if [ "$SLO_MET" = "true" ]; then
            STATUS=":white_check_mark: PASSED"
          elif [ "$SLO_MET" = "false" ]; then
            STATUS=":x: FAILED"
          else
            STATUS=":grey_question: UNKNOWN"
          fi

          cat > rollback-lag-report.md << EOF
          ## Rollback Lag Measurement Report

          **Environment:** ${{ inputs.environment || 'staging' }}
          **Deployment:** ${{ inputs.deployment || 'stellaops-api' }}
          **Dry Run:** ${{ inputs.dry_run || 'true' }}

          ### Version Information

          | Version | Image |
          |---------|-------|
          | Current | \`${{ needs.preflight.outputs.current-version }}\` |
          | Previous | \`${{ needs.preflight.outputs.previous-version }}\` |

          ### Timing Metrics

          | Metric | Value | SLO |
          |--------|-------|-----|
          | Rollback Execution | ${{ needs.measure.outputs.rollback-time || 'N/A' }}s | - |
          | Health Recovery | ${{ needs.measure.outputs.health-recovery-time || 'N/A' }}s | - |
          | **Total Lag** | **${TOTAL_LAG}s** | < ${SLO_SECONDS}s |

          ### SLO Status: ${STATUS}

          ---

          *Report generated at $(date -u +%Y-%m-%dT%H:%M:%SZ)*

          <details>
          <summary>Measurement Details</summary>

          - Can Rollback: ${{ needs.preflight.outputs.can-rollback }}
          - Replica Count: ${{ needs.preflight.outputs.replica-count }}
          - Current Image: \`${{ needs.preflight.outputs.current-image }}\`
          - Previous Image: \`${{ needs.preflight.outputs.previous-image }}\`

          </details>
          EOF

          cat rollback-lag-report.md

          # Add to job summary
          cat rollback-lag-report.md >> $GITHUB_STEP_SUMMARY

      - name: Upload Report
        uses: actions/upload-artifact@v4
        with:
          name: rollback-lag-report
          path: rollback-lag-report.md

      - name: Check SLO and Fail if Exceeded
        if: needs.measure.outputs.slo-met == 'false'
        run: |
          TOTAL_LAG="${{ needs.measure.outputs.total-lag }}"
          SLO_SECONDS="${{ inputs.rollback_slo_seconds || 300 }}"
          echo "::error::Rollback took ${TOTAL_LAG}s, exceeds SLO of ${SLO_SECONDS}s"
          exit 1

@@ -1,277 +0,0 @@
|
||||
# Emergency Rollback Workflow
|
||||
# Sprint: CI/CD Enhancement - Deployment Safety
|
||||
#
|
||||
# Purpose: Automated rollback to previous known-good version
|
||||
# Triggers: Manual dispatch only (emergency procedure)
|
||||
#
|
||||
# SLA Target: < 5 minutes from trigger to rollback complete
|
||||
|
||||
name: Emergency Rollback
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Target environment'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
service:
|
||||
description: 'Service to rollback (or "all" for full rollback)'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- all
|
||||
- authority
|
||||
- attestor
|
||||
- concelier
|
||||
- scanner
|
||||
- policy
|
||||
- excititor
|
||||
- gateway
|
||||
- scheduler
|
||||
- cli
|
||||
target_version:
|
||||
description: 'Version to rollback to (leave empty for previous version)'
|
||||
required: false
|
||||
type: string
|
||||
reason:
|
||||
description: 'Reason for rollback'
|
||||
required: true
|
||||
type: string
|
||||
skip_health_check:
|
||||
description: 'Skip health check (use only in emergencies)'
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
env:
|
||||
ROLLBACK_TIMEOUT: 300 # 5 minutes
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
name: Validate Rollback Request
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
target_version: ${{ steps.resolve.outputs.version }}
|
||||
services: ${{ steps.resolve.outputs.services }}
|
||||
approved: ${{ steps.validate.outputs.approved }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Validate inputs
|
||||
id: validate
|
||||
run: |
|
||||
echo "## Rollback Request Validation" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Parameter | Value |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Environment | ${{ inputs.environment }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Service | ${{ inputs.service }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Target Version | ${{ inputs.target_version || 'previous' }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Reason | ${{ inputs.reason }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Triggered By | ${{ github.actor }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Timestamp | $(date -u +"%Y-%m-%dT%H:%M:%SZ") |" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Production requires additional validation
|
||||
if [[ "${{ inputs.environment }}" == "production" ]]; then
|
||||
echo ""
|
||||
echo "### Production Rollback Warning" >> $GITHUB_STEP_SUMMARY
|
||||
echo "This will affect production users immediately." >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "approved=true" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Resolve target version
|
||||
id: resolve
|
||||
run: |
|
||||
VERSION="${{ inputs.target_version }}"
|
||||
SERVICE="${{ inputs.service }}"
|
||||
|
||||
# If no version specified, get previous from manifest
|
||||
if [[ -z "$VERSION" ]]; then
|
||||
MANIFEST="devops/releases/service-versions.json"
|
||||
if [[ -f "$MANIFEST" ]]; then
|
||||
if [[ "$SERVICE" == "all" ]]; then
|
||||
# Get oldest version across all services
|
||||
VERSION=$(jq -r '.services | to_entries | map(.value.version) | sort | first // "unknown"' "$MANIFEST")
|
||||
else
|
||||
VERSION=$(jq -r --arg svc "$SERVICE" '.services[$svc].previousVersion // .services[$svc].version // "unknown"' "$MANIFEST")
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Determine services to rollback
|
||||
if [[ "$SERVICE" == "all" ]]; then
|
||||
SERVICES='["authority","attestor","concelier","scanner","policy","excititor","gateway","scheduler"]'
|
||||
else
|
||||
SERVICES="[\"$SERVICE\"]"
|
||||
fi
|
||||
|
||||
echo "Resolved version: $VERSION"
|
||||
echo "Services: $SERVICES"
|
||||
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "services=$SERVICES" >> $GITHUB_OUTPUT
|
||||
|
||||
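  # The jq queries above imply a manifest shaped roughly like the following
  # (illustrative only; the real devops/releases/service-versions.json may
  # carry more fields):
  #
  # {
  #   "services": {
  #     "scanner":   { "version": "2.4.1", "previousVersion": "2.4.0" },
  #     "authority": { "version": "2.4.1", "previousVersion": "2.4.0" }
  #   }
  # }
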
  rollback:
    name: Execute Rollback
    runs-on: ubuntu-latest
    needs: [validate]
    if: needs.validate.outputs.approved == 'true'
    environment: ${{ inputs.environment }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup kubectl
        uses: azure/setup-kubectl@v3
        with:
          version: 'latest'

      - name: Setup Helm
        uses: azure/setup-helm@v3
        with:
          version: 'latest'

      - name: Configure deployment access
        run: |
          echo "::notice::Configure deployment access for ${{ inputs.environment }}"
          # TODO: Configure kubectl context / kubeconfig
          # kubectl config use-context ${{ inputs.environment }}

      - name: Execute rollback
        id: rollback
        run: |
          echo "Starting rollback..."
          START_TIME=$(date +%s)

          TARGET_VERSION="${{ needs.validate.outputs.target_version }}"
          SERVICES='${{ needs.validate.outputs.services }}'
          ENVIRONMENT="${{ inputs.environment }}"

          # Execute rollback script
          if [[ -f ".gitea/scripts/release/rollback.sh" ]]; then
            .gitea/scripts/release/rollback.sh \
              --environment "$ENVIRONMENT" \
              --version "$TARGET_VERSION" \
              --services "$SERVICES" \
              --reason "${{ inputs.reason }}"
          else
            echo "::warning::Rollback script not found - using placeholder"
            echo ""
            echo "Rollback would execute:"
            echo "  Environment: $ENVIRONMENT"
            echo "  Version: $TARGET_VERSION"
            echo "  Services: $SERVICES"
            echo ""
            echo "TODO: Implement rollback.sh script"
          fi

          END_TIME=$(date +%s)
          DURATION=$((END_TIME - START_TIME))

          echo "duration=$DURATION" >> $GITHUB_OUTPUT
          echo "Rollback completed in ${DURATION}s"

      - name: Health check
        if: inputs.skip_health_check != true
        run: |
          echo "Running health checks..."

          SERVICES='${{ needs.validate.outputs.services }}'

          echo "$SERVICES" | jq -r '.[]' | while read -r service; do
            echo "Checking $service..."
            # TODO: Implement service-specific health checks
            # curl -sf "https://${service}.${{ inputs.environment }}.stella-ops.org/health" || exit 1
            echo "  Status: OK (placeholder)"
          done

          echo "All health checks passed"

      - name: Rollback summary
        if: always()
        run: |
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## Rollback Execution" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [[ "${{ steps.rollback.outcome }}" == "success" ]]; then
            echo "### Rollback Successful" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "- Duration: ${{ steps.rollback.outputs.duration }}s" >> $GITHUB_STEP_SUMMARY
            echo "- Target Version: ${{ needs.validate.outputs.target_version }}" >> $GITHUB_STEP_SUMMARY
          else
            echo "### Rollback Failed" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "Please investigate immediately and consider manual intervention." >> $GITHUB_STEP_SUMMARY
          fi

  notify:
    name: Send Notifications
    runs-on: ubuntu-latest
    needs: [validate, rollback]
    if: always()

    steps:
      - name: Notify team
        run: |
          STATUS="${{ needs.rollback.result }}"
          ENVIRONMENT="${{ inputs.environment }}"
          SERVICE="${{ inputs.service }}"
          ACTOR="${{ github.actor }}"
          REASON="${{ inputs.reason }}"
          VERSION="${{ needs.validate.outputs.target_version }}"

          # Build notification message
          if [[ "$STATUS" == "success" ]]; then
            EMOJI="white_check_mark"
            TITLE="Rollback Completed Successfully"
          else
            EMOJI="x"
            TITLE="Rollback Failed - Immediate Attention Required"
          fi

          echo "Notification:"
          echo "  Title: $TITLE"
          echo "  Environment: $ENVIRONMENT"
          echo "  Service: $SERVICE"
          echo "  Version: $VERSION"
          echo "  Actor: $ACTOR"
          echo "  Reason: $REASON"

          # TODO: Send to Slack/Teams/PagerDuty
          # - name: Slack notification
          #   uses: slackapi/slack-github-action@v1
          #   with:
          #     payload: |
          #       {
          #         "text": "${{ env.TITLE }}",
          #         "blocks": [...]
          #       }

      - name: Create incident record
        run: |
          echo "Creating incident record..."

          # Log to incident tracking
          INCIDENT_LOG="devops/incidents/$(date +%Y-%m-%d)-rollback.json"
          echo "{
            \"timestamp\": \"$(date -u +"%Y-%m-%dT%H:%M:%SZ")\",
            \"type\": \"rollback\",
            \"environment\": \"${{ inputs.environment }}\",
            \"service\": \"${{ inputs.service }}\",
            \"target_version\": \"${{ needs.validate.outputs.target_version }}\",
            \"reason\": \"${{ inputs.reason }}\",
            \"actor\": \"${{ github.actor }}\",
            \"status\": \"${{ needs.rollback.result }}\",
            \"run_id\": \"${{ github.run_id }}\"
          }"

          echo "::notice::Incident record would be created at $INCIDENT_LOG"
@@ -1,307 +0,0 @@
|
||||
# -----------------------------------------------------------------------------
|
||||
# router-chaos.yml
|
||||
# Sprint: SPRINT_5100_0005_0001_router_chaos_suite
|
||||
# Task: T5 - CI Chaos Workflow
|
||||
# Description: CI workflow for running router chaos tests.
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
name: Router Chaos Tests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 3 * * *' # Nightly at 3 AM UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
spike_multiplier:
|
||||
description: 'Load spike multiplier (e.g., 10, 50, 100)'
|
||||
default: '10'
|
||||
type: choice
|
||||
options:
|
||||
- '10'
|
||||
- '50'
|
||||
- '100'
|
||||
run_valkey_tests:
|
||||
description: 'Run Valkey failure injection tests'
|
||||
default: true
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
DOTNET_NOLOGO: 1
|
||||
DOTNET_CLI_TELEMETRY_OPTOUT: 1
|
||||
TZ: UTC
|
||||
ROUTER_URL: http://localhost:8080
|
||||
|
||||
jobs:
|
||||
load-tests:
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
timeout-minutes: 30
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env:
|
||||
POSTGRES_USER: stellaops
|
||||
POSTGRES_PASSWORD: test
|
||||
POSTGRES_DB: stellaops_test
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
--health-cmd "valkey-cli ping"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: '10.0.100'
|
||||
include-prerelease: true
|
||||
|
||||
- name: Install k6
|
||||
run: |
|
||||
curl -sSL https://github.com/grafana/k6/releases/download/v0.54.0/k6-v0.54.0-linux-amd64.tar.gz | tar xz
|
||||
sudo mv k6-v0.54.0-linux-amd64/k6 /usr/local/bin/
|
||||
k6 version
|
||||
|
||||
- name: Cache NuGet packages
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.nuget/packages
|
||||
key: chaos-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}
|
||||
|
||||
- name: Build Router
|
||||
run: |
|
||||
dotnet restore src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj
|
||||
dotnet build src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release --no-restore
|
||||
|
||||
- name: Start Router
|
||||
run: |
|
||||
dotnet run --project src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release --no-build &
|
||||
echo $! > router.pid
|
||||
|
||||
# Wait for router to start
|
||||
for i in {1..30}; do
|
||||
if curl -s http://localhost:8080/health > /dev/null 2>&1; then
|
||||
echo "Router is ready"
|
||||
break
|
||||
fi
|
||||
echo "Waiting for router... ($i/30)"
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Run k6 spike test
|
||||
id: k6
|
||||
run: |
|
||||
mkdir -p results
|
||||
|
||||
k6 run src/__Tests/load/router/spike-test.js \
|
||||
-e ROUTER_URL=${{ env.ROUTER_URL }} \
|
||||
--out json=results/k6-results.json \
|
||||
--summary-export results/k6-summary.json \
|
||||
2>&1 | tee results/k6-output.txt
|
||||
|
||||
# Check exit code
|
||||
if [ ${PIPESTATUS[0]} -ne 0 ]; then
|
||||
echo "k6_status=failed" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "k6_status=passed" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
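      # For orientation, a minimal spike-test.js compatible with the invocation
      # above might look like this (illustrative sketch; the real script in
      # src/__Tests/load/router/ defines the actual stages and thresholds, and
      # the SPIKE variable is an assumption since the workflow does not yet wire
      # spike_multiplier through):
      #
      # import http from 'k6/http';
      # import { check } from 'k6';
      #
      # export const options = {
      #   stages: [
      #     { duration: '30s', target: 10 },                       // warm-up
      #     { duration: '1m', target: 10 * (__ENV.SPIKE || 10) },  // spike
      #     { duration: '30s', target: 0 },                        // ramp-down
      #   ],
      #   thresholds: { http_req_failed: ['rate<0.01'] },
      # };
      #
      # export default function () {
      #   const res = http.get(`${__ENV.ROUTER_URL}/health`);
      #   check(res, { 'status is 200': (r) => r.status === 200 });
      # }
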
      - name: Upload k6 results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: k6-results-${{ github.run_id }}
          path: results/
          retention-days: 30

      - name: Stop Router
        if: always()
        run: |
          if [ -f router.pid ]; then
            kill $(cat router.pid) 2>/dev/null || true
          fi

  chaos-unit-tests:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 20
    needs: load-tests
    if: always()

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: test
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432

      valkey:
        image: valkey/valkey:7-alpine
        ports:
          - 6379:6379

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Build Chaos Tests
        run: |
          dotnet restore src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj
          dotnet build src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj -c Release --no-restore

      - name: Start Router for Tests
        run: |
          dotnet run --project src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release &
          sleep 15 # Wait for startup

      - name: Run Chaos Unit Tests
        run: |
          dotnet test src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=chaos-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory results \
            -- RunConfiguration.TestSessionTimeout=600000

      - name: Upload Test Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: chaos-test-results-${{ github.run_id }}
          path: results/
          retention-days: 30

  valkey-failure-tests:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 20
    needs: load-tests
    if: ${{ github.event.inputs.run_valkey_tests != 'false' }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Install Docker Compose
        run: |
          sudo apt-get update
          sudo apt-get install -y docker-compose

      - name: Run Valkey Failure Tests
        run: |
          dotnet test src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj \
            -c Release \
            --filter "Category=Valkey" \
            --logger "trx;LogFileName=valkey-results.trx" \
            --results-directory results \
            -- RunConfiguration.TestSessionTimeout=600000

      - name: Upload Valkey Test Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: valkey-test-results-${{ github.run_id }}
          path: results/

  analyze-results:
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [load-tests, chaos-unit-tests]
    if: always()

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download k6 Results
        uses: actions/download-artifact@v4
        with:
          name: k6-results-${{ github.run_id }}
          path: k6-results/

      - name: Download Chaos Test Results
        uses: actions/download-artifact@v4
        with:
          name: chaos-test-results-${{ github.run_id }}
          path: chaos-results/

      - name: Analyze Results
        id: analysis
        run: |
          mkdir -p analysis

          # Parse k6 summary
          if [ -f k6-results/k6-summary.json ]; then
            echo "=== k6 Test Summary ===" | tee analysis/summary.txt

            # Extract key metrics
            jq -r '.metrics | to_entries[] | "\(.key): \(.value)"' k6-results/k6-summary.json >> analysis/summary.txt 2>/dev/null || true
          fi

          # Check thresholds
          THRESHOLDS_PASSED=true
          if [ -f k6-results/k6-summary.json ]; then
            # Check if any threshold failed
            FAILED_THRESHOLDS=$(jq -r '.thresholds | to_entries[] | select(.value.ok == false) | .key' k6-results/k6-summary.json 2>/dev/null || echo "")

            if [ -n "$FAILED_THRESHOLDS" ]; then
              echo "Failed thresholds: $FAILED_THRESHOLDS"
              THRESHOLDS_PASSED=false
            fi
          fi

          echo "thresholds_passed=$THRESHOLDS_PASSED" >> $GITHUB_OUTPUT

      - name: Upload Analysis
        uses: actions/upload-artifact@v4
        with:
          name: chaos-analysis-${{ github.run_id }}
          path: analysis/

      - name: Create Summary
        run: |
          echo "## Router Chaos Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          echo "### Load Test Results" >> $GITHUB_STEP_SUMMARY
          if [ -f k6-results/k6-summary.json ]; then
            echo "- Total Requests: $(jq -r '.metrics.http_reqs.values.count // "N/A"' k6-results/k6-summary.json)" >> $GITHUB_STEP_SUMMARY
            echo "- Failed Rate: $(jq -r '.metrics.http_req_failed.values.rate // "N/A"' k6-results/k6-summary.json)" >> $GITHUB_STEP_SUMMARY
          else
            echo "- No k6 results found" >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Thresholds" >> $GITHUB_STEP_SUMMARY
          echo "- Status: ${{ steps.analysis.outputs.thresholds_passed == 'true' && 'PASSED' || 'FAILED' }}" >> $GITHUB_STEP_SUMMARY

@@ -1,387 +0,0 @@
|
||||
# .gitea/workflows/sast-scan.yml
|
||||
# Static Application Security Testing (SAST) Workflow
|
||||
# Sprint: CI/CD Enhancement - Security Scanning (Tier 2)
|
||||
#
|
||||
# Purpose: Detect security vulnerabilities in source code through static analysis
|
||||
# - Code injection vulnerabilities
|
||||
# - Authentication/authorization issues
|
||||
# - Cryptographic weaknesses
|
||||
# - Data exposure risks
|
||||
# - OWASP Top 10 detection
|
||||
#
|
||||
# Supported Languages: C#/.NET, JavaScript/TypeScript, Python, YAML, Dockerfile
|
||||
#
|
||||
# PLACEHOLDER: Choose your SAST scanner implementation below
|
||||
# Options:
|
||||
# 1. Semgrep - Fast, open-source, good .NET support
|
||||
# 2. CodeQL - GitHub's analysis engine
|
||||
# 3. SonarQube - Enterprise-grade with dashboards
|
||||
# 4. Snyk Code - Commercial with good accuracy
|
||||
|
||||
name: SAST Scanning
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
paths:
|
||||
- 'src/**'
|
||||
- '*.csproj'
|
||||
- '*.cs'
|
||||
- '*.ts'
|
||||
- '*.js'
|
||||
- '*.py'
|
||||
- 'Dockerfile*'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'src/**'
|
||||
- '*.csproj'
|
||||
- '*.cs'
|
||||
- '*.ts'
|
||||
- '*.js'
|
||||
- '*.py'
|
||||
- 'Dockerfile*'
|
||||
schedule:
|
||||
- cron: '30 3 * * 1' # Weekly on Monday at 3:30 AM UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
scan_level:
|
||||
description: 'Scan thoroughness level'
|
||||
type: choice
|
||||
options:
|
||||
- quick
|
||||
- standard
|
||||
- comprehensive
|
||||
default: standard
|
||||
fail_on_findings:
|
||||
description: 'Fail workflow on findings'
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
env:
|
||||
DOTNET_VERSION: '10.0.100'
|
||||
TZ: UTC
|
||||
|
||||
jobs:
|
||||
# ===========================================================================
|
||||
# PLACEHOLDER SAST IMPLEMENTATION
|
||||
# ===========================================================================
|
||||
#
|
||||
# IMPORTANT: Configure your preferred SAST tool by uncommenting ONE of the
|
||||
# implementation options below. Each option includes the necessary steps
|
||||
# and configuration for that specific tool.
|
||||
#
|
||||
# ===========================================================================
|
||||
|
||||
sast-scan:
|
||||
name: SAST Analysis
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# =========================================================================
|
||||
# PLACEHOLDER: Uncomment your preferred SAST tool configuration
|
||||
# =========================================================================
|
||||
|
||||
- name: SAST Scan Placeholder
|
||||
run: |
|
||||
echo "::notice::SAST scanning placeholder - configure your scanner below"
|
||||
echo ""
|
||||
echo "Available SAST options:"
|
||||
echo ""
|
||||
echo "1. SEMGREP (Recommended for open-source)"
|
||||
echo " Uncomment the Semgrep section below"
|
||||
echo " - Fast, accurate, good .NET support"
|
||||
echo " - Free for open-source projects"
|
||||
echo ""
|
||||
echo "2. CODEQL (GitHub native)"
|
||||
echo " Uncomment the CodeQL section below"
|
||||
echo " - Deep analysis capabilities"
|
||||
echo " - Native GitHub integration"
|
||||
echo ""
|
||||
echo "3. SONARQUBE (Enterprise)"
|
||||
echo " Uncomment the SonarQube section below"
|
||||
echo " - Comprehensive dashboards"
|
||||
echo " - Technical debt tracking"
|
||||
echo ""
|
||||
echo "4. SNYK CODE (Commercial)"
|
||||
echo " Uncomment the Snyk section below"
|
||||
echo " - High accuracy"
|
||||
echo " - Good IDE integration"
|
||||
|
||||
# =========================================================================
|
||||
# OPTION 1: SEMGREP
|
||||
# =========================================================================
|
||||
# Uncomment the following section to use Semgrep:
|
||||
#
|
||||
# - name: Run Semgrep
|
||||
# uses: returntocorp/semgrep-action@v1
|
||||
# with:
|
||||
# config: >-
|
||||
# p/default
|
||||
# p/security-audit
|
||||
# p/owasp-top-ten
|
||||
# p/csharp
|
||||
# p/javascript
|
||||
# p/typescript
|
||||
# p/python
|
||||
# p/docker
|
||||
# env:
|
||||
# SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
|
||||
|
||||
# =========================================================================
|
||||
# OPTION 2: CODEQL
|
||||
# =========================================================================
|
||||
# Uncomment the following section to use CodeQL:
|
||||
#
|
||||
# - name: Initialize CodeQL
|
||||
# uses: github/codeql-action/init@v3
|
||||
# with:
|
||||
# languages: csharp, javascript
|
||||
# queries: security-and-quality
|
||||
#
|
||||
# - name: Build for CodeQL
|
||||
# run: |
|
||||
# dotnet build src/StellaOps.sln --configuration Release
|
||||
#
|
||||
# - name: Perform CodeQL Analysis
|
||||
# uses: github/codeql-action/analyze@v3
|
||||
# with:
|
||||
# category: "/language:csharp"
|
||||
|
||||
# =========================================================================
|
||||
# OPTION 3: SONARQUBE
|
||||
# =========================================================================
|
||||
# Uncomment the following section to use SonarQube:
|
||||
#
|
||||
# - name: SonarQube Scan
|
||||
# uses: SonarSource/sonarqube-scan-action@master
|
||||
# env:
|
||||
# SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
# SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }}
|
||||
# with:
|
||||
# args: >
|
||||
# -Dsonar.projectKey=stellaops
|
||||
# -Dsonar.sources=src/
|
||||
# -Dsonar.exclusions=**/bin/**,**/obj/**,**/node_modules/**
|
||||
|
||||
# =========================================================================
|
||||
# OPTION 4: SNYK CODE
|
||||
# =========================================================================
|
||||
# Uncomment the following section to use Snyk Code:
|
||||
#
|
||||
# - name: Setup Snyk
|
||||
# uses: snyk/actions/setup@master
|
||||
#
|
||||
# - name: Snyk Code Test
|
||||
# run: snyk code test --sarif-file-output=snyk-code.sarif
|
||||
# env:
|
||||
# SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
|
||||
# continue-on-error: true
|
||||
#
|
||||
# - name: Upload Snyk results
|
||||
# uses: github/codeql-action/upload-sarif@v3
|
||||
# with:
|
||||
# sarif_file: snyk-code.sarif
|
||||
|
||||
# ===========================================================================
|
||||
# .NET SECURITY ANALYSIS (built-in)
|
||||
# ===========================================================================
|
||||
|
||||
dotnet-security:
|
||||
name: .NET Security Analysis
|
||||
runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore packages
|
||||
run: dotnet restore src/StellaOps.sln
|
||||
|
||||
- name: Run Security Code Analysis
|
||||
run: |
|
||||
# Enable nullable reference types warnings as errors for security
|
||||
dotnet build src/StellaOps.sln \
|
||||
--configuration Release \
|
||||
--no-restore \
|
||||
/p:TreatWarningsAsErrors=false \
|
||||
/p:EnableNETAnalyzers=true \
|
||||
/p:AnalysisLevel=latest \
|
||||
/warnaserror:CA2100,CA2109,CA2119,CA2153,CA2300,CA2301,CA2302,CA2305,CA2310,CA2311,CA2312,CA2315,CA2321,CA2322,CA2326,CA2327,CA2328,CA2329,CA2330,CA2350,CA2351,CA2352,CA2353,CA2354,CA2355,CA2356,CA2361,CA2362,CA3001,CA3002,CA3003,CA3004,CA3005,CA3006,CA3007,CA3008,CA3009,CA3010,CA3011,CA3012,CA3061,CA3075,CA3076,CA3077,CA3147,CA5350,CA5351,CA5358,CA5359,CA5360,CA5361,CA5362,CA5363,CA5364,CA5365,CA5366,CA5367,CA5368,CA5369,CA5370,CA5371,CA5372,CA5373,CA5374,CA5375,CA5376,CA5377,CA5378,CA5379,CA5380,CA5381,CA5382,CA5383,CA5384,CA5385,CA5386,CA5387,CA5388,CA5389,CA5390,CA5391,CA5392,CA5393,CA5394,CA5395,CA5396,CA5397,CA5398,CA5399,CA5400,CA5401,CA5402,CA5403 \
|
||||
2>&1 | tee build-security.log || true
|
||||
|
||||
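      # The long /warnaserror list above could equally live in a
      # Directory.Build.props so local builds enforce the same rules; a minimal
      # sketch (illustrative only, not part of the original pipeline — carry
      # over the full CA identifier list from the switch above):
      #
      # <Project>
      #   <PropertyGroup>
      #     <EnableNETAnalyzers>true</EnableNETAnalyzers>
      #     <AnalysisLevel>latest</AnalysisLevel>
      #     <WarningsAsErrors>$(WarningsAsErrors);CA2100;CA3001;CA5350</WarningsAsErrors>
      #   </PropertyGroup>
      # </Project>
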
      - name: Parse security warnings
        run: |
          echo "### .NET Security Analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Count security warnings (grep -c prints 0 itself on no match;
          # || true keeps the step alive under bash -e)
          SECURITY_WARNINGS=$(grep -cE "warning CA[235][0-9]{3}" build-security.log || true)
          echo "- Security warnings found: $SECURITY_WARNINGS" >> $GITHUB_STEP_SUMMARY

          if [[ $SECURITY_WARNINGS -gt 0 ]]; then
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "<details><summary>Security Warnings</summary>" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            grep -E "warning CA[235][0-9]{3}" build-security.log | head -50 >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "</details>" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Upload security log
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: sast-dotnet-security-log
          path: build-security.log
          retention-days: 14

  # ===========================================================================
  # DEPENDENCY VULNERABILITY CHECK
  # ===========================================================================

  dependency-check:
    name: Dependency Vulnerabilities
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          dotnet-quality: 'preview'

      # dotnet list package needs restore assets to exist
      - name: Restore packages
        run: dotnet restore src/StellaOps.sln

      - name: Run vulnerability audit
        run: |
          echo "### Dependency Vulnerability Audit" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check for known vulnerabilities in NuGet packages
          dotnet list src/StellaOps.sln package --vulnerable --include-transitive 2>&1 | tee vuln-report.txt || true

          # Parse results (grep -c already prints 0 on no match, so only the
          # exit status needs rescuing)
          VULN_COUNT=$(grep -c "has the following vulnerable packages" vuln-report.txt || true)

          if [[ $VULN_COUNT -gt 0 ]]; then
            echo "::warning::Found $VULN_COUNT projects with vulnerable dependencies"
            echo "- Projects with vulnerabilities: $VULN_COUNT" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "<details><summary>Vulnerability Report</summary>" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            cat vuln-report.txt >> $GITHUB_STEP_SUMMARY
            echo '```' >> $GITHUB_STEP_SUMMARY
            echo "</details>" >> $GITHUB_STEP_SUMMARY
          else
            echo "No known vulnerabilities found in dependencies." >> $GITHUB_STEP_SUMMARY
          fi

      - name: Upload vulnerability report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: sast-vulnerability-report
          path: vuln-report.txt
          retention-days: 14

  # ===========================================================================
  # DOCKERFILE SECURITY LINTING
  # ===========================================================================

  dockerfile-lint:
    name: Dockerfile Security
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    timeout-minutes: 10
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Find Dockerfiles
        id: find
        run: |
          # Collect every Dockerfile (excluding node_modules) as a compact JSON array
          DOCKERFILES=$(find . -name "Dockerfile*" -type f ! -path "./node_modules/*" | jq -R -s -c 'split("\n") | map(select(length > 0))')
          COUNT=$(echo "$DOCKERFILES" | jq 'length')
          echo "files=$DOCKERFILES" >> $GITHUB_OUTPUT
          echo "count=$COUNT" >> $GITHUB_OUTPUT
          echo "Found $COUNT Dockerfiles"

      - name: Install Hadolint
        if: steps.find.outputs.count != '0'
        run: |
          wget -qO hadolint https://github.com/hadolint/hadolint/releases/download/v2.12.0/hadolint-Linux-x86_64
          chmod +x hadolint
          sudo mv hadolint /usr/local/bin/

      - name: Lint Dockerfiles
        if: steps.find.outputs.count != '0'
        run: |
          echo "### Dockerfile Security Lint" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          TOTAL_ISSUES=0

          for dockerfile in $(echo '${{ steps.find.outputs.files }}' | jq -r '.[]'); do
            echo "Linting: $dockerfile"
            # hadolint exits non-zero when it finds issues but still prints the
            # JSON array, so rescue the exit status and only default to [] when
            # nothing was emitted at all
            ISSUES=$(hadolint --format json "$dockerfile" 2>/dev/null || true)
            ISSUE_COUNT=$(echo "${ISSUES:-[]}" | jq 'length')
            TOTAL_ISSUES=$((TOTAL_ISSUES + ISSUE_COUNT))

            if [[ $ISSUE_COUNT -gt 0 ]]; then
              echo "- **$dockerfile**: $ISSUE_COUNT issues" >> $GITHUB_STEP_SUMMARY
            fi
          done

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Total issues found: $TOTAL_ISSUES**" >> $GITHUB_STEP_SUMMARY

          if [[ $TOTAL_ISSUES -gt 0 ]] && [[ "${{ github.event.inputs.fail_on_findings }}" == "true" ]]; then
            echo "::warning::Found $TOTAL_ISSUES Dockerfile security issues"
          fi

  # ===========================================================================
  # SUMMARY
  # ===========================================================================

  summary:
    name: SAST Summary
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    needs: [sast-scan, dotnet-security, dependency-check, dockerfile-lint]
    if: always()
    steps:
      - name: Generate summary
        run: |
          echo "## SAST Scan Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Check | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| SAST Analysis | ${{ needs.sast-scan.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| .NET Security | ${{ needs.dotnet-security.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Dependency Check | ${{ needs.dependency-check.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Dockerfile Lint | ${{ needs.dockerfile-lint.result }} |" >> $GITHUB_STEP_SUMMARY

      - name: Check for failures
        if: |
          github.event.inputs.fail_on_findings == 'true' &&
          (needs.sast-scan.result == 'failure' ||
           needs.dotnet-security.result == 'failure' ||
           needs.dependency-check.result == 'failure')
        run: exit 1

@@ -1,57 +0,0 @@
name: scanner-analyzers-release
on:
  workflow_dispatch:
    inputs:
      rid:
        description: "RID (e.g., linux-x64)"
        required: false
        default: "linux-x64"

jobs:
  build-analyzers:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Install syft (SBOM)
        uses: anchore/sbom-action/download-syft@v0

      - name: Package PHP analyzer
        run: |
          chmod +x scripts/scanner/package-analyzer.sh
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Php/StellaOps.Scanner.Analyzers.Lang.Php.csproj php-analyzer

      - name: Package Ruby analyzer
        run: |
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj ruby-analyzer

      - name: Package Native analyzer
        run: |
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Native/StellaOps.Scanner.Analyzers.Native.csproj native-analyzer

      - name: Package Java analyzer
        run: |
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj java-analyzer

      - name: Package DotNet analyzer
        run: |
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj dotnet-analyzer

      - name: Package Node analyzer
        run: |
          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj node-analyzer

      - name: Upload analyzer artifacts
        uses: actions/upload-artifact@v4
        with:
          name: scanner-analyzers-${{ github.event.inputs.rid }}
          path: out/scanner-analyzers/**

@@ -1,148 +0,0 @@
name: Scanner Analyzers

on:
  push:
    branches: [main]
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
      - 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
  pull_request:
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
      - 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.x'

jobs:
  discover-analyzers:
    name: Discover Analyzers
    runs-on: ubuntu-latest
    outputs:
      analyzers: ${{ steps.find.outputs.analyzers }}
    steps:
      - uses: actions/checkout@v4

      - name: Find analyzer projects
        id: find
        run: |
          # Project directory names double as matrix entries; split("\n")[:-1]
          # drops the trailing empty element left by the final newline
          ANALYZERS=$(find src/Scanner/__Libraries -name "StellaOps.Scanner.Analyzers.*.csproj" -exec dirname {} \; | xargs -I {} basename {} | sort -u | jq -R -s -c 'split("\n")[:-1]')
          echo "analyzers=$ANALYZERS" >> $GITHUB_OUTPUT

  build-analyzers:
    name: Build Analyzers
    runs-on: ubuntu-latest
    needs: discover-analyzers
    strategy:
      fail-fast: false
      matrix:
        analyzer: ${{ fromJson(needs.discover-analyzers.outputs.analyzers) }}
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore
        run: dotnet restore src/Scanner/__Libraries/${{ matrix.analyzer }}/

      - name: Build
        run: dotnet build src/Scanner/__Libraries/${{ matrix.analyzer }}/ --no-restore

  test-lang-analyzers:
    name: Test Language Analyzers
    runs-on: ubuntu-latest
    needs: build-analyzers
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Setup Deno
        uses: denoland/setup-deno@v2
        with:
          deno-version: '2.1.4'

      - name: Run all language analyzer tests
        run: |
          EXIT_CODE=0
          for test_dir in src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.*.Tests; do
            if [ -d "$test_dir" ]; then
              echo "=== Running tests in $(basename "$test_dir") ==="
              if ! dotnet test "$test_dir/" --verbosity normal; then
                echo "FAILED: $(basename "$test_dir")"
                EXIT_CODE=1
              fi
            fi
          done
          # Also run the shared Lang.Tests project
          if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests" ]; then
            echo "=== Running tests in StellaOps.Scanner.Analyzers.Lang.Tests ==="
            if ! dotnet test "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/" --verbosity normal; then
              echo "FAILED: StellaOps.Scanner.Analyzers.Lang.Tests"
              EXIT_CODE=1
            fi
          fi
          exit $EXIT_CODE

  fixture-validation:
    name: Validate Test Fixtures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Validate fixture structure
        run: |
          find src/Scanner/__Tests -name "expected.json" | while read f; do
            echo "Validating $f..."
            if ! jq empty "$f" 2>/dev/null; then
              echo "Error: Invalid JSON in $f"
              exit 1
            fi
          done

      - name: Check fixture completeness
        run: |
          find src/Scanner/__Tests -type d -name "Fixtures" | while read fixtures_dir; do
            echo "Checking $fixtures_dir..."
            find "$fixtures_dir" -mindepth 1 -maxdepth 1 -type d | while read test_case; do
              if [ ! -f "$test_case/expected.json" ]; then
                echo "Warning: $test_case missing expected.json"
              fi
            done
          done

  determinism-check:
    name: Verify Deterministic Output
    runs-on: ubuntu-latest
    needs: test-lang-analyzers
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Run determinism tests
        run: |
          # Run scanner on same input twice, compare outputs
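          # A hand-run sketch of the property being asserted (the harness
          # invocation and output names here are hypothetical; the step below
          # relies on the Determinism test category instead): two runs over
          # the same fixture must produce byte-identical reports.
          #
          #   .gitea/scripts/test/determinism-run.sh > run1.json
          #   .gitea/scripts/test/determinism-run.sh > run2.json
          #   cmp run1.json run2.json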
          if [ -d "src/__Tests/fixtures/determinism" ]; then
            dotnet test --filter "Category=Determinism" --verbosity normal
          fi

@@ -1,29 +0,0 @@
name: scanner-determinism
on:
  workflow_dispatch: {}

jobs:
  determinism:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run determinism harness
        run: |
          chmod +x .gitea/scripts/test/determinism-run.sh
          .gitea/scripts/test/determinism-run.sh

      - name: Upload determinism artifacts
        uses: actions/upload-artifact@v4
        with:
          name: scanner-determinism
          path: out/scanner-determinism/**

@@ -1,419 +0,0 @@
# .gitea/workflows/schema-evolution.yml
# Schema evolution testing workflow for backward/forward compatibility
# Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
# Task: CCUT-012
#
# WORKFLOW PURPOSE:
# =================
# Validates that code changes remain compatible with previous database schema
# versions (N-1, N-2). This prevents breaking changes when new code is deployed
# before database migrations complete, or when rollbacks occur.
#
# Uses Testcontainers with versioned PostgreSQL images to replay tests against
# historical schema versions.

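# A rough local sketch of the same check, assuming the layout the steps below
# rely on (base schema and migrations under docs/db/, tests tagged
# Category=SchemaEvolution); apply an older schema, then run only the
# schema-evolution tests against it:
#
#   psql "postgresql://stellaops_test:test_password@localhost:5432/stellaops_schema_test" \
#     -f docs/db/schemas/scanner.sql
#   dotnet test src/Scanner/__Tests/StellaOps.Scanner.Tests \
#     --filter "Category=SchemaEvolution" \
#     -- RunConfiguration.EnvironmentVariables.SCHEMA_VERSION=N-1
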
name: Schema Evolution Tests

on:
  push:
    branches: [main]
    paths:
      - 'docs/db/**/*.sql'
      - 'src/**/Migrations/**'
      - 'src/**/*Repository*.cs'
      - 'src/**/*DbContext*.cs'
      - '.gitea/workflows/schema-evolution.yml'
  pull_request:
    paths:
      - 'docs/db/**/*.sql'
      - 'src/**/Migrations/**'
      - 'src/**/*Repository*.cs'
      - 'src/**/*DbContext*.cs'
  workflow_dispatch:
    inputs:
      schema_versions:
        description: 'Schema versions to test (comma-separated, e.g., N-1,N-2,N-3)'
        type: string
        default: 'N-1,N-2'
      modules:
        description: 'Modules to test (comma-separated, or "all")'
        type: string
        default: 'all'

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  SCHEMA_VERSIONS: 'N-1,N-2'

jobs:
  # ===========================================================================
  # DISCOVER SCHEMA-AFFECTED MODULES
  # ===========================================================================

  discover:
    name: Discover Changed Modules
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    outputs:
      modules: ${{ steps.detect.outputs.modules }}
      has-schema-changes: ${{ steps.detect.outputs.has_changes }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Detect Schema Changes
        id: detect
        run: |
          # Get changed files
          if [ "${{ github.event_name }}" = "pull_request" ]; then
            CHANGED_FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.sha }})
          else
            CHANGED_FILES=$(git diff --name-only HEAD~1 HEAD)
          fi

          echo "Changed files:"
          echo "$CHANGED_FILES"

          # Map files to modules
          MODULES=""

          if echo "$CHANGED_FILES" | grep -qE "src/Scanner/.*Repository|src/Scanner/.*Migrations|docs/db/.*scanner"; then
            MODULES="$MODULES,Scanner"
          fi

          if echo "$CHANGED_FILES" | grep -qE "src/Concelier/.*Repository|src/Concelier/.*Migrations|docs/db/.*concelier|docs/db/.*advisory"; then
            MODULES="$MODULES,Concelier"
          fi

          if echo "$CHANGED_FILES" | grep -qE "src/EvidenceLocker/.*Repository|src/EvidenceLocker/.*Migrations|docs/db/.*evidence"; then
            MODULES="$MODULES,EvidenceLocker"
          fi

          if echo "$CHANGED_FILES" | grep -qE "src/Authority/.*Repository|src/Authority/.*Migrations|docs/db/.*authority|docs/db/.*auth"; then
            MODULES="$MODULES,Authority"
          fi

          if echo "$CHANGED_FILES" | grep -qE "src/Policy/.*Repository|src/Policy/.*Migrations|docs/db/.*policy"; then
            MODULES="$MODULES,Policy"
          fi

          if echo "$CHANGED_FILES" | grep -qE "src/SbomService/.*Repository|src/SbomService/.*Migrations|docs/db/.*sbom"; then
            MODULES="$MODULES,SbomService"
          fi

          # Remove leading comma
          MODULES=$(echo "$MODULES" | sed 's/^,//')

          if [ -z "$MODULES" ]; then
            echo "has_changes=false" >> $GITHUB_OUTPUT
            echo "modules=[]" >> $GITHUB_OUTPUT
            echo "No schema-related changes detected"
          else
            echo "has_changes=true" >> $GITHUB_OUTPUT
            # Convert to a compact single-line JSON array so GITHUB_OUTPUT
            # stays a valid key=value pair
            MODULES_JSON=$(echo "$MODULES" | tr ',' '\n' | jq -R . | jq -s -c .)
            echo "modules=$MODULES_JSON" >> $GITHUB_OUTPUT
            echo "Detected modules: $MODULES"
          fi

  # ===========================================================================
  # RUN SCHEMA EVOLUTION TESTS
  # ===========================================================================

  test:
    name: Test ${{ matrix.module }} (Schema ${{ matrix.schema-version }})
    needs: discover
    if: needs.discover.outputs.has-schema-changes == 'true' || github.event_name == 'workflow_dispatch'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    strategy:
      fail-fast: false
      matrix:
        module: ${{ fromJson(needs.discover.outputs.modules || '["Scanner","Concelier","EvidenceLocker"]') }}
        schema-version: ['N-1', 'N-2']
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops_test
          POSTGRES_PASSWORD: test_password
          POSTGRES_DB: stellaops_schema_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    env:
      STELLAOPS_TEST_POSTGRES_CONNECTION: "Host=localhost;Port=5432;Database=stellaops_schema_test;Username=stellaops_test;Password=test_password"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/Directory.Packages.props', '**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore Dependencies
        run: dotnet restore src/StellaOps.sln

      - name: Get Schema Version
        id: schema
        run: |
          # Get current schema version from migration history, using the same
          # lowercase directory layout the Apply step uses below
          MODULE_LOWER=$(echo "${{ matrix.module }}" | tr '[:upper:]' '[:lower:]')
          CURRENT_VERSION=$(ls -1 docs/db/migrations/${MODULE_LOWER}/*.sql 2>/dev/null | wc -l) || true

          case "${{ matrix.schema-version }}" in
            "N-1")
              TARGET_VERSION=$((CURRENT_VERSION - 1))
              ;;
            "N-2")
              TARGET_VERSION=$((CURRENT_VERSION - 2))
              ;;
            "N-3")
              TARGET_VERSION=$((CURRENT_VERSION - 3))
              ;;
            *)
              TARGET_VERSION=$CURRENT_VERSION
              ;;
          esac

          if [ "$TARGET_VERSION" -lt 1 ]; then
            echo "skip=true" >> $GITHUB_OUTPUT
            echo "No previous schema version available for ${{ matrix.schema-version }}"
          else
            echo "skip=false" >> $GITHUB_OUTPUT
            echo "target_version=$TARGET_VERSION" >> $GITHUB_OUTPUT
            echo "Testing against schema version: $TARGET_VERSION"
          fi

      - name: Apply Historical Schema
        if: steps.schema.outputs.skip != 'true'
        run: |
          # Apply schema up to target version
          TARGET=${{ steps.schema.outputs.target_version }}
          MODULE_LOWER=$(echo "${{ matrix.module }}" | tr '[:upper:]' '[:lower:]')

          echo "Applying schema migrations up to version $TARGET for $MODULE_LOWER"

          # The ADO.NET-style connection string in env is for the tests; psql
          # needs a libpq URI, so build one from the same service credentials
          PSQL_URL="postgresql://stellaops_test:test_password@localhost:5432/stellaops_schema_test"

          # Apply base schema
          if [ -f "docs/db/schemas/${MODULE_LOWER}.sql" ]; then
            psql "$PSQL_URL" -f "docs/db/schemas/${MODULE_LOWER}.sql" || true
          fi

          # Apply migrations up to target version
          MIGRATION_COUNT=0
          for migration in $(ls -1 docs/db/migrations/${MODULE_LOWER}/*.sql 2>/dev/null | sort -V); do
            MIGRATION_COUNT=$((MIGRATION_COUNT + 1))
            if [ "$MIGRATION_COUNT" -le "$TARGET" ]; then
              echo "Applying: $migration"
              psql "$PSQL_URL" -f "$migration" || true
            fi
          done

          echo "Applied $MIGRATION_COUNT migrations"

      - name: Run Schema Evolution Tests
        if: steps.schema.outputs.skip != 'true'
        id: test
        run: |
          # Find and run schema evolution tests for the module
          TEST_PROJECT="src/${{ matrix.module }}/__Tests/StellaOps.${{ matrix.module }}.SchemaEvolution.Tests"

          if [ -d "$TEST_PROJECT" ]; then
            dotnet test "$TEST_PROJECT" \
              --configuration Release \
              --no-restore \
              --verbosity normal \
              --logger "trx;LogFileName=schema-evolution-${{ matrix.module }}-${{ matrix.schema-version }}.trx" \
              --results-directory ./test-results \
              -- RunConfiguration.EnvironmentVariables.SCHEMA_VERSION="${{ matrix.schema-version }}"
          else
            # Run tests with SchemaEvolution category from main test project
            TEST_PROJECT="src/${{ matrix.module }}/__Tests/StellaOps.${{ matrix.module }}.Tests"
            if [ -d "$TEST_PROJECT" ]; then
              dotnet test "$TEST_PROJECT" \
                --configuration Release \
                --no-restore \
                --verbosity normal \
                --filter "Category=SchemaEvolution" \
                --logger "trx;LogFileName=schema-evolution-${{ matrix.module }}-${{ matrix.schema-version }}.trx" \
                --results-directory ./test-results \
                -- RunConfiguration.EnvironmentVariables.SCHEMA_VERSION="${{ matrix.schema-version }}"
            else
              echo "No test project found for ${{ matrix.module }}"
              echo "skip_reason=no_tests" >> $GITHUB_OUTPUT
            fi
          fi

      - name: Upload Test Results
        if: always() && steps.schema.outputs.skip != 'true'
        uses: actions/upload-artifact@v4
        with:
          name: schema-evolution-results-${{ matrix.module }}-${{ matrix.schema-version }}
          path: ./test-results/*.trx
          if-no-files-found: ignore

  # ===========================================================================
  # COMPATIBILITY MATRIX REPORT
  # ===========================================================================

  report:
    name: Generate Compatibility Report
    needs: [discover, test]
    if: always() && needs.discover.outputs.has-schema-changes == 'true'
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    steps:
      - name: Download All Results
        uses: actions/download-artifact@v4
        with:
          pattern: schema-evolution-results-*
          merge-multiple: true
          path: ./results
        continue-on-error: true

      - name: Generate Report
        run: |
          cat > schema-compatibility-report.md << 'EOF'
          ## Schema Evolution Compatibility Report

          | Module | Schema N-1 | Schema N-2 |
          |--------|------------|------------|
          EOF

          # Parse test results and generate matrix
          for module in Scanner Concelier EvidenceLocker Authority Policy SbomService; do
            N1_STATUS="-"
            N2_STATUS="-"

            if [ -f "results/schema-evolution-${module}-N-1.trx" ]; then
              if grep -q 'outcome="Passed"' "results/schema-evolution-${module}-N-1.trx" 2>/dev/null; then
                N1_STATUS=":white_check_mark:"
              elif grep -q 'outcome="Failed"' "results/schema-evolution-${module}-N-1.trx" 2>/dev/null; then
                N1_STATUS=":x:"
              fi
            fi

            if [ -f "results/schema-evolution-${module}-N-2.trx" ]; then
              if grep -q 'outcome="Passed"' "results/schema-evolution-${module}-N-2.trx" 2>/dev/null; then
                N2_STATUS=":white_check_mark:"
              elif grep -q 'outcome="Failed"' "results/schema-evolution-${module}-N-2.trx" 2>/dev/null; then
                N2_STATUS=":x:"
              fi
            fi

            echo "| $module | $N1_STATUS | $N2_STATUS |" >> schema-compatibility-report.md
          done

          echo "" >> schema-compatibility-report.md
          echo "*Report generated at $(date -u +%Y-%m-%dT%H:%M:%SZ)*" >> schema-compatibility-report.md

          cat schema-compatibility-report.md

      - name: Upload Report
        uses: actions/upload-artifact@v4
        with:
          name: schema-compatibility-report
          path: schema-compatibility-report.md

  # ===========================================================================
  # POST REPORT TO PR
  # ===========================================================================

  comment:
    name: Post Report to PR
    needs: [discover, test, report]
    if: github.event_name == 'pull_request' && always()
    runs-on: ${{ vars.LINUX_RUNNER_LABEL || 'ubuntu-latest' }}
    permissions:
      pull-requests: write
    steps:
      - name: Download Report
        uses: actions/download-artifact@v4
        with:
          name: schema-compatibility-report
        continue-on-error: true

      - name: Post Comment
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            let report = '';
            try {
              report = fs.readFileSync('schema-compatibility-report.md', 'utf8');
            } catch (e) {
              report = 'Schema compatibility report not available.';
            }

            const hasChanges = '${{ needs.discover.outputs.has-schema-changes }}' === 'true';

            if (!hasChanges) {
              return; // No schema changes, no comment needed
            }

            const body = `## Schema Evolution Test Results

            This PR includes changes that may affect database compatibility.

            ${report}

            ---
            <details>
            <summary>About Schema Evolution Tests</summary>

            Schema evolution tests verify that:
            - Current code works with previous schema versions (N-1, N-2)
            - Rolling deployments don't break during migration windows
            - Rollbacks are safe when schema hasn't been migrated yet

            If tests fail, consider:
            1. Adding backward-compatible default values
            2. Using nullable columns for new fields
            3. Creating migration-safe queries
            4. Updating the compatibility matrix
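
            For example, one backward-compatible pattern (hypothetical table
            and column, sketched as a psql call) is an additive nullable column
            with a default, which code on the old schema can safely ignore:

            \`\`\`bash
            # hypothetical additive migration; N-1 readers never touch the column
            psql "$DB_URL" -c "ALTER TABLE scan_results ADD COLUMN risk_score numeric NULL DEFAULT 0;"
            \`\`\`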

            </details>
            `;

            // Find existing comment
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number
            });

            const botComment = comments.find(c =>
              c.user.type === 'Bot' &&
              c.body.includes('Schema Evolution Test Results')
            );

            if (botComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: botComment.id,
                body: body
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: body
              });
            }

@@ -1,322 +0,0 @@
# Schema Validation CI Workflow
# Sprint: SPRINT_8200_0001_0003_sbom_schema_validation_ci
# Tasks: SCHEMA-8200-007 through SCHEMA-8200-011
#
# Purpose: Validate SBOM fixtures against official JSON schemas to detect
# schema drift before runtime. Fails CI if any fixture is invalid.

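# A quick local sketch of the same check for a single fixture (tool and schema
# path are taken from the jobs below; the fixture path is illustrative):
#
#   sbom-utility validate \
#     --input-file src/__Tests/__Benchmarks/golden-corpus/example.cdx.json \
#     --schema docs/schemas/cyclonedx-bom-1.6.schema.json
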
name: Schema Validation

on:
  pull_request:
    paths:
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
      - 'src/Scanner/**'
      - 'docs/schemas/**'
      - 'scripts/validate-*.sh'
      - '.gitea/workflows/schema-validation.yml'
  push:
    branches: [main]
    paths:
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
      - 'src/Scanner/**'
      - 'docs/schemas/**'
      - 'scripts/validate-*.sh'

env:
  SBOM_UTILITY_VERSION: "0.16.0"

jobs:
  validate-cyclonedx:
    name: Validate CycloneDX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install sbom-utility
        run: |
          curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
          sudo mv sbom-utility /usr/local/bin/
          sbom-utility --version

      - name: Validate CycloneDX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/fixtures"
            "src/__Tests/__Datasets/seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"
                  if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
                    echo "✅ PASS: $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "CycloneDX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED CycloneDX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No CycloneDX fixtures found to validate"
          fi

  validate-spdx:
    name: Validate SPDX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install SPDX tools
        run: |
          pip install spdx-tools
          pip install check-jsonschema

      - name: Validate SPDX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/spdx-jsonld-3.0.1.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/fixtures"
            "src/__Tests/__Datasets/seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                # Check for SPDX markers
                if grep -qE '"spdxVersion"|"@context".*spdx' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"

                  # Try pyspdxtools first (semantic validation; it validates
                  # the document it loads via -i)
                  if pyspdxtools -i "$file" 2>&1; then
                    echo "✅ PASS (semantic): $file"
                    PASSED=$((PASSED + 1))
                  # Fall back to JSON schema validation
                  elif check-jsonschema --schemafile "$SCHEMA" "$file" 2>&1; then
                    echo "✅ PASS (schema): $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "SPDX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED SPDX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No SPDX fixtures found to validate"
          fi

  validate-vex:
    name: Validate OpenVEX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install ajv-cli
        run: npm install -g ajv-cli ajv-formats

      - name: Validate OpenVEX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/openvex-0.2.0.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/__Benchmarks/vex-lattice"
            "src/__Tests/fixtures"
            "src/__Tests/__Datasets/seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                # Check for OpenVEX markers
                if grep -qE '"@context".*openvex|"@type".*"https://openvex' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"
                  if ajv validate -s "$SCHEMA" -d "$file" --strict=false -c ajv-formats 2>&1; then
                    echo "✅ PASS: $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "OpenVEX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED OpenVEX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No OpenVEX fixtures found to validate"
          fi

  # Negative testing: verify that invalid fixtures are correctly rejected
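  # A sketch of how such a fixture can be produced (paths illustrative): strip
  # a required field from a known-good BOM so the job below must reject it.
  #
  #   jq 'del(.specVersion)' golden-corpus/example.cdx.json \
  #     > src/__Tests/fixtures/invalid/missing-spec-version.cdx.json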
  validate-negative:
    name: Validate Negative Test Cases
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install sbom-utility
        run: |
          curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
          sudo mv sbom-utility /usr/local/bin/
          sbom-utility --version

      - name: Verify invalid fixtures fail validation
        run: |
          set -e
          SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
          INVALID_DIR="src/__Tests/fixtures/invalid"

          if [ ! -d "$INVALID_DIR" ]; then
            echo "::warning::No invalid fixtures directory found at $INVALID_DIR"
            exit 0
          fi

          EXPECTED_FAILURES=0
          ACTUAL_FAILURES=0
          UNEXPECTED_PASSES=0

          while IFS= read -r -d '' file; do
            if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
              EXPECTED_FAILURES=$((EXPECTED_FAILURES + 1))
              echo "::group::Testing invalid fixture: $file"

              # This SHOULD fail - if it passes, that's an error
              if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
                echo "❌ UNEXPECTED PASS: $file (should have failed validation)"
                UNEXPECTED_PASSES=$((UNEXPECTED_PASSES + 1))
              else
                echo "✅ EXPECTED FAILURE: $file (correctly rejected)"
                ACTUAL_FAILURES=$((ACTUAL_FAILURES + 1))
              fi
              echo "::endgroup::"
            fi
          done < <(find "$INVALID_DIR" -name '*.json' -type f -print0 2>/dev/null || true)

          echo "================================================"
          echo "Negative Test Summary"
          echo "================================================"
          echo "Expected failures: $EXPECTED_FAILURES"
          echo "Actual failures: $ACTUAL_FAILURES"
          echo "Unexpected passes: $UNEXPECTED_PASSES"
          echo "================================================"

          if [ "$UNEXPECTED_PASSES" -gt 0 ]; then
            echo "::error::$UNEXPECTED_PASSES invalid fixtures passed validation unexpectedly"
            exit 1
          fi

          if [ "$EXPECTED_FAILURES" -eq 0 ]; then
            echo "::warning::No invalid CycloneDX fixtures found for negative testing"
          fi

          echo "✅ All invalid fixtures correctly rejected by schema validation"

  summary:
    name: Validation Summary
    runs-on: ubuntu-latest
    needs: [validate-cyclonedx, validate-spdx, validate-vex, validate-negative]
    if: always()
    steps:
      - name: Check results
        run: |
          echo "Schema Validation Results"
          echo "========================="
          echo "CycloneDX: ${{ needs.validate-cyclonedx.result }}"
          echo "SPDX: ${{ needs.validate-spdx.result }}"
          echo "OpenVEX: ${{ needs.validate-vex.result }}"
          echo "Negative Tests: ${{ needs.validate-negative.result }}"

          if [ "${{ needs.validate-cyclonedx.result }}" = "failure" ] || \
             [ "${{ needs.validate-spdx.result }}" = "failure" ] || \
             [ "${{ needs.validate-vex.result }}" = "failure" ] || \
             [ "${{ needs.validate-negative.result }}" = "failure" ]; then
            echo "::error::One or more schema validations failed"
            exit 1
          fi

          echo "✅ All schema validations passed or skipped"

@@ -1,38 +0,0 @@
name: sdk-generator-smoke

on:
  push:
    paths:
      - "src/Sdk/StellaOps.Sdk.Generator/**"
      - "package.json"
  pull_request:
    paths:
      - "src/Sdk/StellaOps.Sdk.Generator/**"
      - "package.json"

jobs:
  sdk-smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Task Pack offline bundle fixtures
        run: python3 .gitea/scripts/test/run-fixtures-check.sh

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "18"

      - name: Setup Java 21
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: "21"

      - name: Install npm deps (scripts only)
        run: npm install --ignore-scripts --no-progress --no-audit --no-fund

      - name: Run SDK smoke suite (TS/Python/Go/Java)
        run: npm run sdk:smoke