# .gitea/workflows/build-test-deploy.yml
# Unified CI/CD workflow for git.stella-ops.org (Feedser monorepo)

name: Build Test Deploy

on:
  push:
    branches: [ main ]
    paths:
      - 'src/**'
      - 'docs/**'
      - 'scripts/**'
      - 'Directory.Build.props'
      - 'Directory.Build.targets'
      - 'global.json'
      - '.gitea/workflows/**'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/**'
      - 'docs/**'
      - 'scripts/**'
      - '.gitea/workflows/**'
  workflow_dispatch:
    inputs:
      force_deploy:
        description: 'Ignore branch checks and run the deploy stage'
        required: false
        default: 'false'
        type: boolean

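# Workflow-wide settings: DOTNET_VERSION pins the .NET 10 RC SDK used by every job,
# and CI_CACHE_ROOT is reused later as a package download cache (see the rsync install step).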
env:
  DOTNET_VERSION: '10.0.100-rc.1.25451.107'
  BUILD_CONFIGURATION: Release
  CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
  RUNNER_TOOL_CACHE: /toolcache

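# Job graph: profile-validation and build-test start immediately; authority-container and
# scanner-perf fan out from build-test; docs renders the documentation bundle; deploy ships
# the service and docs to staging; notify-smoke runs a post-deploy Notify check.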
jobs:
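  # Install Helm and validate the deployment profiles via deploy/tools/validate-profiles.sh.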
  profile-validation:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install Helm
        run: |
          curl -fsSL https://get.helm.sh/helm-v3.16.0-linux-amd64.tar.gz -o /tmp/helm.tgz
          tar -xzf /tmp/helm.tgz -C /tmp
          sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm

      - name: Validate deployment profiles
        run: ./deploy/tools/validate-profiles.sh

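  # Core build job: builds and tests the Feedser and Authority solutions with warnings as
  # errors, exercises the scanner language analyzers, and publishes the web service,
  # Authority, BuildX, and plug-in artifacts used by later jobs.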
  build-test:
    runs-on: ubuntu-22.04
    environment: ${{ github.event_name == 'pull_request' && 'preview' || 'staging' }}
    env:
      PUBLISH_DIR: ${{ github.workspace }}/artifacts/publish/webservice
      AUTHORITY_PUBLISH_DIR: ${{ github.workspace }}/artifacts/publish/authority
      TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: dotnet restore src/StellaOps.Feedser.sln

      - name: Build solution (warnings as errors)
        run: dotnet build src/StellaOps.Feedser.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror

      - name: Run unit and integration tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          dotnet test src/StellaOps.Feedser.sln \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=stellaops-feedser-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR"

      - name: Build scanner language analyzer projects
        run: |
          dotnet restore src/StellaOps.sln
          for project in \
            src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj \
            src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj
          do
            dotnet build "$project" --configuration $BUILD_CONFIGURATION --no-restore -warnaserror
          done

      - name: Run scanner language analyzer tests
        run: |
          dotnet test src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=stellaops-scanner-lang-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR"

      - name: Publish BuildX SBOM generator
        run: |
          dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \
            --configuration $BUILD_CONFIGURATION \
            --output out/buildx

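      # Determinism check: generate the descriptor twice with identical inputs and fail the
      # build if anything other than the generatedAt timestamp differs between the two runs.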
      - name: Verify BuildX descriptor determinism
        run: |
          dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \
            --manifest out/buildx \
            --cas out/cas

          cat <<'JSON' > out/buildx-sbom.cdx.json
          {"bomFormat":"CycloneDX","specVersion":"1.5"}
          JSON

          dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
            --manifest out/buildx \
            --image sha256:5c2c5bfe0d4d77f1a0f9866fd415dd8da5b62af05d7c3d4b53f28de3ebef0101 \
            --sbom out/buildx-sbom.cdx.json \
            --sbom-name buildx-sbom.cdx.json \
            --artifact-type application/vnd.stellaops.sbom.layer+json \
            --sbom-format cyclonedx-json \
            --sbom-kind inventory \
            --repository ${{ github.repository }} \
            --build-ref ${{ github.sha }} \
            > out/buildx-descriptor.json

          dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \
            --manifest out/buildx \
            --image sha256:5c2c5bfe0d4d77f1a0f9866fd415dd8da5b62af05d7c3d4b53f28de3ebef0101 \
            --sbom out/buildx-sbom.cdx.json \
            --sbom-name buildx-sbom.cdx.json \
            --artifact-type application/vnd.stellaops.sbom.layer+json \
            --sbom-format cyclonedx-json \
            --sbom-kind inventory \
            --repository ${{ github.repository }} \
            --build-ref ${{ github.sha }} \
            > out/buildx-descriptor-repeat.json

          python - <<'PY'
          import json, sys
          from pathlib import Path

          def normalize(path: str) -> dict:
              data = json.loads(Path(path).read_text(encoding='utf-8'))
              data.pop('generatedAt', None)
              return data

          baseline = normalize('out/buildx-descriptor.json')
          repeat = normalize('out/buildx-descriptor-repeat.json')

          if baseline != repeat:
              sys.exit('BuildX descriptor output changed between runs.')
          PY

      - name: Upload BuildX determinism artifacts
        uses: actions/upload-artifact@v4
        with:
          name: buildx-determinism
          path: |
            out/buildx-descriptor.json
            out/buildx-descriptor-repeat.json
            out/buildx-sbom.cdx.json
          if-no-files-found: error
          retention-days: 7

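      # Bundle the OS analyzer plug-ins into a tarball and record its SHA-256 checksum.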
      - name: Package OS analyzer plug-ins
        run: |
          if [ ! -d "plugins/scanner/analyzers/os" ]; then
            echo "OS analyzer plug-in directory not found" >&2
            exit 1
          fi

          mkdir -p artifacts/plugins/os
          tar -czf artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz -C plugins/scanner/analyzers/os .
          sha256sum artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz > artifacts/plugins/os/stellaops-scanner-os-analyzers.tar.gz.sha256

      - name: Upload OS analyzer plug-ins
        uses: actions/upload-artifact@v4
        with:
          name: scanner-os-analyzers
          path: artifacts/plugins/os
          if-no-files-found: error
          retention-days: 7

      - name: Publish Feedser web service
        run: |
          mkdir -p "$PUBLISH_DIR"
          dotnet publish src/StellaOps.Feedser.WebService/StellaOps.Feedser.WebService.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --output "$PUBLISH_DIR"

      - name: Upload published artifacts
        uses: actions/upload-artifact@v4
        with:
          name: feedser-publish
          path: ${{ env.PUBLISH_DIR }}
          if-no-files-found: error
          retention-days: 7

      - name: Restore Authority solution
        run: dotnet restore src/StellaOps.Authority/StellaOps.Authority.sln

      - name: Build Authority solution
        run: dotnet build src/StellaOps.Authority/StellaOps.Authority.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror

      - name: Run Authority tests
        run: |
          dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=stellaops-authority-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR"

      - name: Publish Authority web service
        run: |
          mkdir -p "$AUTHORITY_PUBLISH_DIR"
          dotnet publish src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --output "$AUTHORITY_PUBLISH_DIR"

      - name: Upload Authority artifacts
        uses: actions/upload-artifact@v4
        with:
          name: authority-publish
          path: ${{ env.AUTHORITY_PUBLISH_DIR }}
          if-no-files-found: error
          retention-days: 7

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: feedser-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

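  # Validate the Authority docker-compose file and build its container image from ops/authority/.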
  authority-container:
    runs-on: ubuntu-22.04
    needs: build-test
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate Authority compose file
        run: docker compose -f ops/authority/docker-compose.authority.yaml config

      - name: Build Authority container image
        run: docker build -f ops/authority/Dockerfile -t stellaops-authority:ci .

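  # Render the Markdown documentation into a static bundle that the deploy job can publish.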
  docs:
    runs-on: ubuntu-22.04
    env:
      DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-site
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install documentation dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install markdown pygments

      - name: Render documentation bundle
        run: |
          python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean

      - name: Upload documentation artifact
        uses: actions/upload-artifact@v4
        with:
          name: feedser-docs-site
          path: ${{ env.DOCS_OUTPUT_DIR }}
          if-no-files-found: error
          retention-days: 7

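  # Analyzer microbenchmarks: run bench/Scanner.Analyzers and compare against the checked-in baseline.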
  scanner-perf:
    runs-on: ubuntu-22.04
    needs: build-test
    env:
      BENCH_DIR: bench/Scanner.Analyzers
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Run analyzer microbench
        working-directory: ${{ env.BENCH_DIR }}
        run: |
          node run-bench.js \
            --repo-root "${{ github.workspace }}" \
            --out latest.csv \
            --threshold-ms 5000

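      # Fail if any scenario's mean or max exceeds the baseline by more than the 1.20x
      # allowed multiplier, or if a baseline scenario is missing from the latest run.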
      - name: Compare against baseline
        working-directory: ${{ env.BENCH_DIR }}
        run: |
          node - <<'NODE'
          const fs = require('fs');
          const path = require('path');

          function parseCsv(file) {
            const rows = fs.readFileSync(file, 'utf8').trim().split(/\r?\n/);
            rows.shift();
            const data = {};
            for (const row of rows) {
              const [id, iterations, sampleCount, mean, p95, max] = row.split(',');
              data[id] = {
                iterations: Number(iterations),
                sampleCount: Number(sampleCount),
                mean: Number(mean),
                p95: Number(p95),
                max: Number(max),
              };
            }
            return data;
          }

          const baseline = parseCsv('baseline.csv');
          const latest = parseCsv('latest.csv');
          const allowedMultiplier = 1.20;
          const regressions = [];

          for (const [id, baseMetrics] of Object.entries(baseline)) {
            const current = latest[id];
            if (!current) {
              regressions.push(`Scenario ${id} missing from latest run`);
              continue;
            }
            if (current.mean > baseMetrics.mean * allowedMultiplier) {
              regressions.push(`Scenario ${id} mean ${current.mean.toFixed(2)}ms exceeded baseline ${baseMetrics.mean.toFixed(2)}ms by >20%`);
            }
            if (current.max > baseMetrics.max * allowedMultiplier) {
              regressions.push(`Scenario ${id} max ${current.max.toFixed(2)}ms exceeded baseline ${baseMetrics.max.toFixed(2)}ms by >20%`);
            }
          }

          if (regressions.length > 0) {
            console.error('Performance regression detected:');
            for (const msg of regressions) {
              console.error(` - ${msg}`);
            }
            process.exit(1);
          }
          NODE

      - name: Upload bench report
        uses: actions/upload-artifact@v4
        with:
          name: scanner-analyzers-bench
          path: ${{ env.BENCH_DIR }}/latest.csv
          retention-days: 7

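  # Deploy to staging only when build-test, docs, and scanner-perf all succeeded, and only
  # for pushes to main or manual dispatches (the first step below re-checks force_deploy).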
  deploy:
    runs-on: ubuntu-22.04
    needs: [build-test, docs, scanner-perf]
    if: >-
      needs.build-test.result == 'success' &&
      needs.docs.result == 'success' &&
      needs.scanner-perf.result == 'success' &&
      (
        (github.event_name == 'push' && github.ref == 'refs/heads/main') ||
        github.event_name == 'workflow_dispatch'
      )
    environment: staging
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            scripts
            .gitea/workflows
          sparse-checkout-cone-mode: true

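      # Gate the remaining steps: manual dispatches must set force_deploy=true and push
      # builds must target refs/heads/main; the decision is exported as should-deploy.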
      - name: Check if deployment should proceed
        id: check-deploy
        run: |
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            if [ "${{ github.event.inputs.force_deploy }}" = "true" ]; then
              echo "should-deploy=true" >> $GITHUB_OUTPUT
              echo "✅ Manual deployment requested"
            else
              echo "should-deploy=false" >> $GITHUB_OUTPUT
              echo "ℹ️ Manual dispatch without force_deploy=true — skipping"
            fi
          elif [ "${{ github.ref }}" = "refs/heads/main" ]; then
            echo "should-deploy=true" >> $GITHUB_OUTPUT
            echo "✅ Deploying latest main branch build"
          else
            echo "should-deploy=false" >> $GITHUB_OUTPUT
            echo "ℹ️ Deployment restricted to main branch"
          fi

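      # Resolve connection settings with a fallback chain (staging-specific secrets, then
      # vars, then the generic DEPLOYMENT_* values). Host, user, and key are required;
      # the service and docs paths are optional and their deploy steps are skipped when unset.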
      - name: Resolve deployment credentials
        id: params
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          missing=()

          host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
          if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
          if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi

          user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
          if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
          if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi

          path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
          if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi

          docs_path="${{ secrets.STAGING_DOCS_PATH }}"
          if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi

          key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
          if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
          if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi

          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing deployment configuration: ${missing[*]}"
            exit 1
          fi

          key_file="$RUNNER_TEMP/staging_deploy_key"
          printf '%s\n' "$key" > "$key_file"
          chmod 600 "$key_file"

          echo "host=$host" >> $GITHUB_OUTPUT
          echo "user=$user" >> $GITHUB_OUTPUT
          echo "path=$path" >> $GITHUB_OUTPUT
          echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
          echo "key-file=$key_file" >> $GITHUB_OUTPUT

      - name: Download service artifact
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
        uses: actions/download-artifact@v4
        with:
          name: feedser-publish
          path: artifacts/service

      - name: Download documentation artifact
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
        uses: actions/download-artifact@v4
        with:
          name: feedser-docs-site
          path: artifacts/docs

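      # rsync is required by the deploy steps below; downloaded .deb packages are cached
      # under CI_CACHE_ROOT so later runs can install from the cache instead of re-downloading.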
      - name: Install rsync
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          if command -v rsync >/dev/null 2>&1; then
            exit 0
          fi
          CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
          mkdir -p "$CACHE_DIR"
          KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
          DEB_DIR="$CACHE_DIR/$KEY"
          mkdir -p "$DEB_DIR"
          if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
            apt-get update
            apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
          else
            apt-get update
            apt-get download rsync libpopt0
            mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
            dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
          fi

      - name: Deploy service bundle
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
        env:
          HOST: ${{ steps.params.outputs.host }}
          USER: ${{ steps.params.outputs.user }}
          TARGET: ${{ steps.params.outputs.path }}
          KEY_FILE: ${{ steps.params.outputs['key-file'] }}
        run: |
          SERVICE_DIR="artifacts/service/feedser-publish"
          if [ ! -d "$SERVICE_DIR" ]; then
            echo "❌ Service artifact directory missing ($SERVICE_DIR)"
            exit 1
          fi
          echo "🚀 Deploying Feedser web service to $HOST:$TARGET"
          rsync -az --delete \
            -e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
            "$SERVICE_DIR"/ \
            "$USER@$HOST:$TARGET/"

      - name: Deploy documentation bundle
        if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
        env:
          HOST: ${{ steps.params.outputs.host }}
          USER: ${{ steps.params.outputs.user }}
          DOCS_TARGET: ${{ steps.params.outputs['docs-path'] }}
          KEY_FILE: ${{ steps.params.outputs['key-file'] }}
        run: |
          DOCS_DIR="artifacts/docs/feedser-docs-site"
          if [ ! -d "$DOCS_DIR" ]; then
            echo "❌ Documentation artifact directory missing ($DOCS_DIR)"
            exit 1
          fi
          echo "📚 Deploying documentation bundle to $HOST:$DOCS_TARGET"
          rsync -az --delete \
            -e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
            "$DOCS_DIR"/ \
            "$USER@$HOST:$DOCS_TARGET/"

      - name: Deployment summary
        if: steps.check-deploy.outputs.should-deploy == 'true'
        run: |
          echo "✅ Deployment completed"
          echo " Host: ${{ steps.params.outputs.host }}"
          echo " Service path: ${{ steps.params.outputs.path || '(skipped)' }}"
          echo " Docs path: ${{ steps.params.outputs['docs-path'] || '(skipped)' }}"

      - name: Deployment skipped summary
        if: steps.check-deploy.outputs.should-deploy != 'true'
        run: |
          echo "ℹ️ Deployment stage skipped"
          echo " Event: ${{ github.event_name }}"
          echo " Ref: ${{ github.ref }}"

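  # Post-deploy smoke test: NotifySmokeCheck validates the Redis event stream and Notify
  # deliveries using the NOTIFY_SMOKE_* connection, tenant, expected-kinds, and lookback settings.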
  notify-smoke:
    runs-on: ubuntu-22.04
    needs: deploy
    if: needs.deploy.result == 'success'
    env:
      DOTNET_VERSION: ${{ env.DOTNET_VERSION }}
      NOTIFY_SMOKE_REDIS_DSN: ${{ secrets.NOTIFY_SMOKE_REDIS_DSN }}
      NOTIFY_SMOKE_NOTIFY_BASEURL: ${{ secrets.NOTIFY_SMOKE_NOTIFY_BASEURL }}
      NOTIFY_SMOKE_NOTIFY_TOKEN: ${{ secrets.NOTIFY_SMOKE_NOTIFY_TOKEN }}
      NOTIFY_SMOKE_NOTIFY_TENANT: ${{ secrets.NOTIFY_SMOKE_NOTIFY_TENANT }}
      NOTIFY_SMOKE_NOTIFY_TENANT_HEADER: ${{ secrets.NOTIFY_SMOKE_NOTIFY_TENANT_HEADER }}
      NOTIFY_SMOKE_EXPECT_KINDS: ${{ vars.NOTIFY_SMOKE_EXPECT_KINDS || secrets.NOTIFY_SMOKE_EXPECT_KINDS }}
      NOTIFY_SMOKE_LOOKBACK_MINUTES: ${{ vars.NOTIFY_SMOKE_LOOKBACK_MINUTES || secrets.NOTIFY_SMOKE_LOOKBACK_MINUTES }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Validate Notify smoke configuration
        run: |
          missing=()
          for name in NOTIFY_SMOKE_REDIS_DSN NOTIFY_SMOKE_NOTIFY_BASEURL NOTIFY_SMOKE_NOTIFY_TOKEN NOTIFY_SMOKE_NOTIFY_TENANT NOTIFY_SMOKE_EXPECT_KINDS NOTIFY_SMOKE_LOOKBACK_MINUTES
          do
            value="${!name}"
            if [ -z "$value" ]; then
              missing+=("$name")
            fi
          done
          if [ ${#missing[@]} -gt 0 ]; then
            echo "❌ Missing Notify smoke configuration: ${missing[*]}"
            exit 1
          fi

      - name: Restore Notify smoke checker
        run: dotnet restore tools/NotifySmokeCheck/NotifySmokeCheck.csproj

      - name: Run Notify smoke validation
        run: dotnet run --project tools/NotifySmokeCheck/NotifySmokeCheck.csproj --configuration Release