Initial commit (history squashed)
Checks: all cancelled (Build Test Deploy: authority-container, docs, deploy, build-test; Docs CI: lint-and-preview)

Commit b97fc7685a, 2025-10-07 10:14:21 +03:00. 1132 changed files with 117842 additions and 0 deletions.


@@ -0,0 +1,29 @@
name: Feedser CI
on:
push:
branches: ["main", "develop"]
pull_request:
branches: ["main", "develop"]
jobs:
build-and-test:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Setup .NET 10 preview
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.100-rc.1.25451.107
include-prerelease: true
- name: Restore dependencies
run: dotnet restore src/StellaOps.Feedser/StellaOps.Feedser.sln
- name: Build
run: dotnet build src/StellaOps.Feedser/StellaOps.Feedser.sln --configuration Release --no-restore -warnaserror
- name: Test
run: dotnet test src/StellaOps.Feedser/StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --configuration Release --no-restore --logger "trx;LogFileName=feedser-tests.trx"
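
The same sequence can be reproduced locally before pushing (a sketch; it assumes the 10.0.100-rc.1 preview SDK referenced above is already installed):

# restore, build with warnings as errors, then run the test project, mirroring the Feedser CI steps
dotnet restore src/StellaOps.Feedser/StellaOps.Feedser.sln
dotnet build src/StellaOps.Feedser/StellaOps.Feedser.sln --configuration Release --no-restore -warnaserror
dotnet test src/StellaOps.Feedser/StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj \
  --configuration Release --no-restore --logger "trx;LogFileName=feedser-tests.trx"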


@@ -0,0 +1,87 @@
name: Feedser Tests CI
on:
push:
paths:
- 'StellaOps.Feedser/**'
- '.gitea/workflows/feedser-tests.yml'
pull_request:
paths:
- 'StellaOps.Feedser/**'
- '.gitea/workflows/feedser-tests.yml'
jobs:
advisory-store-performance:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up .NET SDK
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.100-rc.1
- name: Restore dependencies
working-directory: StellaOps.Feedser
run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
- name: Run advisory store performance test
working-directory: StellaOps.Feedser
run: |
set -euo pipefail
dotnet test \
StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj \
--filter "FullyQualifiedName~AdvisoryStorePerformanceTests" \
--logger:"console;verbosity=detailed" | tee performance.log
- name: Upload performance log
if: always()
uses: actions/upload-artifact@v4
with:
name: advisory-store-performance-log
path: StellaOps.Feedser/performance.log
full-test-suite:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up .NET SDK
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.100-rc.1
- name: Restore dependencies
working-directory: StellaOps.Feedser
run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
- name: Run full test suite with baseline guard
working-directory: StellaOps.Feedser
env:
BASELINE_SECONDS: "19.8"
TOLERANCE_PERCENT: "25"
run: |
set -euo pipefail
# build once up front so the timed test run below has outputs for --no-build and measures only the tests
dotnet build StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --no-restore
start=$(date +%s)
dotnet test StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --no-build | tee full-tests.log
end=$(date +%s)
duration=$((end-start))
echo "Full test duration: ${duration}s"
export DURATION_SECONDS="$duration"
python - <<'PY'
import os, sys
duration = float(os.environ["DURATION_SECONDS"])
baseline = float(os.environ["BASELINE_SECONDS"])
tolerance = float(os.environ["TOLERANCE_PERCENT"])
threshold = baseline * (1 + tolerance / 100)
print(f"Baseline {baseline:.1f}s, threshold {threshold:.1f}s, observed {duration:.1f}s")
if duration > threshold:
sys.exit(f"Full test duration {duration:.1f}s exceeded threshold {threshold:.1f}s")
PY
- name: Upload full test log
if: always()
uses: actions/upload-artifact@v4
with:
name: full-test-suite-log
path: StellaOps.Feedser/full-tests.log
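
The guard above allows up to baseline * (1 + tolerance/100); with the defaults of 19.8 s and 25 % that is 24.75 s, so a 26 s run would fail the job. The same check can be exercised by hand with a shell one-liner (a sketch outside the workflow; the sample values are hypothetical):

# hypothetical values; in CI the same numbers arrive via environment variables
DURATION_SECONDS=26 BASELINE_SECONDS=19.8 TOLERANCE_PERCENT=25 \
awk 'BEGIN {
  d = ENVIRON["DURATION_SECONDS"] + 0
  b = ENVIRON["BASELINE_SECONDS"] + 0
  t = ENVIRON["TOLERANCE_PERCENT"] + 0
  thr = b * (1 + t / 100)
  printf "Baseline %.1fs, threshold %.1fs, observed %.1fs\n", b, thr, d
  exit (d > thr) ? 1 : 0
}'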


@@ -0,0 +1,341 @@
# .gitea/workflows/build-test-deploy.yml
# Unified CI/CD workflow for git.stella-ops.org (Feedser monorepo)
name: Build Test Deploy
on:
push:
branches: [ main ]
paths:
- 'src/**'
- 'docs/**'
- 'scripts/**'
- 'Directory.Build.props'
- 'Directory.Build.targets'
- 'global.json'
- '.gitea/workflows/**'
pull_request:
branches: [ main, develop ]
paths:
- 'src/**'
- 'docs/**'
- 'scripts/**'
- '.gitea/workflows/**'
workflow_dispatch:
inputs:
force_deploy:
description: 'Ignore branch checks and run the deploy stage'
required: false
default: 'false'
type: boolean
env:
DOTNET_VERSION: '10.0.100-rc.1.25451.107'
BUILD_CONFIGURATION: Release
CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
RUNNER_TOOL_CACHE: /toolcache
jobs:
build-test:
runs-on: ubuntu-22.04
environment: ${{ github.event_name == 'pull_request' && 'preview' || 'staging' }}
env:
PUBLISH_DIR: ${{ github.workspace }}/artifacts/publish/webservice
AUTHORITY_PUBLISH_DIR: ${{ github.workspace }}/artifacts/publish/authority
TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup .NET ${{ env.DOTNET_VERSION }}
uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
include-prerelease: true
- name: Restore dependencies
run: dotnet restore src/StellaOps.Feedser.sln
- name: Build solution (warnings as errors)
run: dotnet build src/StellaOps.Feedser.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror
- name: Run unit and integration tests
run: |
mkdir -p "$TEST_RESULTS_DIR"
dotnet test src/StellaOps.Feedser.sln \
--configuration $BUILD_CONFIGURATION \
--no-build \
--logger "trx;LogFileName=stellaops-feedser-tests.trx" \
--results-directory "$TEST_RESULTS_DIR"
- name: Publish Feedser web service
run: |
mkdir -p "$PUBLISH_DIR"
dotnet publish src/StellaOps.Feedser.WebService/StellaOps.Feedser.WebService.csproj \
--configuration $BUILD_CONFIGURATION \
--no-build \
--output "$PUBLISH_DIR"
- name: Upload published artifacts
uses: actions/upload-artifact@v4
with:
name: feedser-publish
path: ${{ env.PUBLISH_DIR }}
if-no-files-found: error
retention-days: 7
- name: Restore Authority solution
run: dotnet restore src/StellaOps.Authority/StellaOps.Authority.sln
- name: Build Authority solution
run: dotnet build src/StellaOps.Authority/StellaOps.Authority.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror
- name: Run Authority tests
run: |
dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj \
--configuration $BUILD_CONFIGURATION \
--no-build \
--logger "trx;LogFileName=stellaops-authority-tests.trx" \
--results-directory "$TEST_RESULTS_DIR"
- name: Publish Authority web service
run: |
mkdir -p "$AUTHORITY_PUBLISH_DIR"
dotnet publish src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \
--configuration $BUILD_CONFIGURATION \
--no-build \
--output "$AUTHORITY_PUBLISH_DIR"
- name: Upload Authority artifacts
uses: actions/upload-artifact@v4
with:
name: authority-publish
path: ${{ env.AUTHORITY_PUBLISH_DIR }}
if-no-files-found: error
retention-days: 7
- name: Upload test results
if: always()
uses: actions/upload-artifact@v4
with:
name: feedser-test-results
path: ${{ env.TEST_RESULTS_DIR }}
if-no-files-found: ignore
retention-days: 7
authority-container:
runs-on: ubuntu-22.04
needs: build-test
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Validate Authority compose file
run: docker compose -f ops/authority/docker-compose.authority.yaml config
- name: Build Authority container image
run: docker build -f ops/authority/Dockerfile -t stellaops-authority:ci .
docs:
runs-on: ubuntu-22.04
env:
DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-site
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install documentation dependencies
run: |
python -m pip install --upgrade pip
python -m pip install markdown pygments
- name: Render documentation bundle
run: |
python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean
- name: Upload documentation artifact
uses: actions/upload-artifact@v4
with:
name: feedser-docs-site
path: ${{ env.DOCS_OUTPUT_DIR }}
if-no-files-found: error
retention-days: 7
deploy:
runs-on: ubuntu-22.04
needs: [build-test, docs]
if: >-
needs.build-test.result == 'success' &&
needs.docs.result == 'success' &&
(
(github.event_name == 'push' && github.ref == 'refs/heads/main') ||
github.event_name == 'workflow_dispatch'
)
environment: staging
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
sparse-checkout: |
scripts
.gitea/workflows
sparse-checkout-cone-mode: true
- name: Check if deployment should proceed
id: check-deploy
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
if [ "${{ github.event.inputs.force_deploy }}" = "true" ]; then
echo "should-deploy=true" >> $GITHUB_OUTPUT
echo "✅ Manual deployment requested"
else
echo "should-deploy=false" >> $GITHUB_OUTPUT
echo " Manual dispatch without force_deploy=true — skipping"
fi
elif [ "${{ github.ref }}" = "refs/heads/main" ]; then
echo "should-deploy=true" >> $GITHUB_OUTPUT
echo "✅ Deploying latest main branch build"
else
echo "should-deploy=false" >> $GITHUB_OUTPUT
echo " Deployment restricted to main branch"
fi
- name: Resolve deployment credentials
id: params
if: steps.check-deploy.outputs.should-deploy == 'true'
run: |
missing=()
host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi
user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi
path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
docs_path="${{ secrets.STAGING_DOCS_PATH }}"
if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi
key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi
if [ ${#missing[@]} -gt 0 ]; then
echo "❌ Missing deployment configuration: ${missing[*]}"
exit 1
fi
key_file="$RUNNER_TEMP/staging_deploy_key"
printf '%s\n' "$key" > "$key_file"
chmod 600 "$key_file"
echo "host=$host" >> $GITHUB_OUTPUT
echo "user=$user" >> $GITHUB_OUTPUT
echo "path=$path" >> $GITHUB_OUTPUT
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
echo "key-file=$key_file" >> $GITHUB_OUTPUT
- name: Download service artifact
if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
uses: actions/download-artifact@v4
with:
name: feedser-publish
path: artifacts/service
- name: Download documentation artifact
if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
uses: actions/download-artifact@v4
with:
name: feedser-docs-site
path: artifacts/docs
- name: Install rsync
if: steps.check-deploy.outputs.should-deploy == 'true'
run: |
if command -v rsync >/dev/null 2>&1; then
exit 0
fi
CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
mkdir -p "$CACHE_DIR"
KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
DEB_DIR="$CACHE_DIR/$KEY"
mkdir -p "$DEB_DIR"
if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
apt-get update
apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
else
apt-get update
apt-get download rsync libpopt0
mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
fi
- name: Deploy service bundle
if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs.path != ''
env:
HOST: ${{ steps.params.outputs.host }}
USER: ${{ steps.params.outputs.user }}
TARGET: ${{ steps.params.outputs.path }}
KEY_FILE: ${{ steps.params.outputs['key-file'] }}
run: |
SERVICE_DIR="artifacts/service/feedser-publish"
if [ ! -d "$SERVICE_DIR" ]; then
echo "❌ Service artifact directory missing ($SERVICE_DIR)"
exit 1
fi
echo "🚀 Deploying Feedser web service to $HOST:$TARGET"
rsync -az --delete \
-e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
"$SERVICE_DIR"/ \
"$USER@$HOST:$TARGET/"
- name: Deploy documentation bundle
if: steps.check-deploy.outputs.should-deploy == 'true' && steps.params.outputs['docs-path'] != ''
env:
HOST: ${{ steps.params.outputs.host }}
USER: ${{ steps.params.outputs.user }}
DOCS_TARGET: ${{ steps.params.outputs['docs-path'] }}
KEY_FILE: ${{ steps.params.outputs['key-file'] }}
run: |
DOCS_DIR="artifacts/docs/feedser-docs-site"
if [ ! -d "$DOCS_DIR" ]; then
echo "❌ Documentation artifact directory missing ($DOCS_DIR)"
exit 1
fi
echo "📚 Deploying documentation bundle to $HOST:$DOCS_TARGET"
rsync -az --delete \
-e "ssh -i $KEY_FILE -o StrictHostKeyChecking=no" \
"$DOCS_DIR"/ \
"$USER@$HOST:$DOCS_TARGET/"
- name: Deployment summary
if: steps.check-deploy.outputs.should-deploy == 'true'
run: |
echo "✅ Deployment completed"
echo " Host: ${{ steps.params.outputs.host }}"
echo " Service path: ${{ steps.params.outputs.path || '(skipped)' }}"
echo " Docs path: ${{ steps.params.outputs['docs-path'] || '(skipped)' }}"
- name: Deployment skipped summary
if: steps.check-deploy.outputs.should-deploy != 'true'
run: |
echo " Deployment stage skipped"
echo " Event: ${{ github.event_name }}"
echo " Ref: ${{ github.ref }}"
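
Each credential lookup in the deploy job falls back in the same order: environment-specific secret, environment-specific variable, shared secret, shared variable. The workflow keeps the expansion inline because ${{ }} expressions are interpolated before the shell runs, but the pattern itself can be sketched as a small helper (the variable names below are placeholders, not workflow inputs):

# resolve_first: print the first non-empty argument, or fail if all are empty
resolve_first() {
  for value in "$@"; do
    if [ -n "$value" ]; then
      printf '%s\n' "$value"
      return 0
    fi
  done
  return 1
}

# hypothetical usage standing in for the interpolated secrets/vars above
host="$(resolve_first "$STAGING_HOST_SECRET" "$STAGING_HOST_VAR" "$SHARED_HOST_SECRET" "$SHARED_HOST_VAR")" \
  || { echo "❌ Missing deployment configuration: STAGING_DEPLOYMENT_HOST"; exit 1; }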


@@ -0,0 +1,70 @@
# .gitea/workflows/docs.yml
# Documentation quality checks and preview artefacts
name: Docs CI
on:
push:
paths:
- 'docs/**'
- 'scripts/render_docs.py'
- '.gitea/workflows/docs.yml'
pull_request:
paths:
- 'docs/**'
- 'scripts/render_docs.py'
- '.gitea/workflows/docs.yml'
workflow_dispatch: {}
env:
NODE_VERSION: '20'
PYTHON_VERSION: '3.11'
jobs:
lint-and-preview:
runs-on: ubuntu-22.04
env:
DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-preview
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Install markdown linters
run: |
npm install markdown-link-check remark-cli remark-preset-lint-recommended
- name: Link check
run: |
find docs -name '*.md' -print0 | \
xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'
- name: Remark lint
run: |
npx remark docs -qf
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install documentation dependencies
run: |
python -m pip install --upgrade pip
python -m pip install markdown pygments
- name: Render documentation preview bundle
run: |
python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean
- name: Upload documentation preview
if: always()
uses: actions/upload-artifact@v4
with:
name: feedser-docs-preview
path: ${{ env.DOCS_OUTPUT_DIR }}
retention-days: 7
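
The same checks can be run locally with the tools the job installs (a sketch; assumes Node 20 and Python 3.11 are available on the path):

# install the linters used by the workflow and run them against docs/
npm install markdown-link-check remark-cli remark-preset-lint-recommended
find docs -name '*.md' -print0 | xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}'
npx remark docs -qf
# render the same preview bundle the CI job uploads
python -m pip install markdown pygments
python scripts/render_docs.py --source docs --output artifacts/docs-preview --clean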


@@ -0,0 +1,206 @@
# .gitea/workflows/promote.yml
# Manual promotion workflow to copy staged artefacts to production
name: Promote Feedser (Manual)
on:
workflow_dispatch:
inputs:
include_docs:
description: 'Also promote the generated documentation bundle'
required: false
default: 'true'
type: boolean
tag:
description: 'Optional build identifier to record in the summary'
required: false
default: 'latest'
type: string
jobs:
promote:
runs-on: ubuntu-22.04
environment: production
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Resolve staging credentials
id: staging
run: |
missing=()
host="${{ secrets.STAGING_DEPLOYMENT_HOST }}"
if [ -z "$host" ]; then host="${{ vars.STAGING_DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then missing+=("STAGING_DEPLOYMENT_HOST"); fi
user="${{ secrets.STAGING_DEPLOYMENT_USERNAME }}"
if [ -z "$user" ]; then user="${{ vars.STAGING_DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then missing+=("STAGING_DEPLOYMENT_USERNAME"); fi
path="${{ secrets.STAGING_DEPLOYMENT_PATH }}"
if [ -z "$path" ]; then path="${{ vars.STAGING_DEPLOYMENT_PATH }}"; fi
if [ -z "$path" ]; then missing+=("STAGING_DEPLOYMENT_PATH"); fi
docs_path="${{ secrets.STAGING_DOCS_PATH }}"
if [ -z "$docs_path" ]; then docs_path="${{ vars.STAGING_DOCS_PATH }}"; fi
key="${{ secrets.STAGING_DEPLOYMENT_KEY }}"
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.STAGING_DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then missing+=("STAGING_DEPLOYMENT_KEY"); fi
if [ ${#missing[@]} -gt 0 ]; then
echo "❌ Missing staging configuration: ${missing[*]}"
exit 1
fi
key_file="$RUNNER_TEMP/staging_key"
printf '%s\n' "$key" > "$key_file"
chmod 600 "$key_file"
echo "host=$host" >> $GITHUB_OUTPUT
echo "user=$user" >> $GITHUB_OUTPUT
echo "path=$path" >> $GITHUB_OUTPUT
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
echo "key-file=$key_file" >> $GITHUB_OUTPUT
- name: Resolve production credentials
id: production
run: |
missing=()
host="${{ secrets.PRODUCTION_DEPLOYMENT_HOST }}"
if [ -z "$host" ]; then host="${{ vars.PRODUCTION_DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ secrets.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then host="${{ vars.DEPLOYMENT_HOST }}"; fi
if [ -z "$host" ]; then missing+=("PRODUCTION_DEPLOYMENT_HOST"); fi
user="${{ secrets.PRODUCTION_DEPLOYMENT_USERNAME }}"
if [ -z "$user" ]; then user="${{ vars.PRODUCTION_DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ secrets.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then user="${{ vars.DEPLOYMENT_USERNAME }}"; fi
if [ -z "$user" ]; then missing+=("PRODUCTION_DEPLOYMENT_USERNAME"); fi
path="${{ secrets.PRODUCTION_DEPLOYMENT_PATH }}"
if [ -z "$path" ]; then path="${{ vars.PRODUCTION_DEPLOYMENT_PATH }}"; fi
if [ -z "$path" ]; then missing+=("PRODUCTION_DEPLOYMENT_PATH"); fi
docs_path="${{ secrets.PRODUCTION_DOCS_PATH }}"
if [ -z "$docs_path" ]; then docs_path="${{ vars.PRODUCTION_DOCS_PATH }}"; fi
key="${{ secrets.PRODUCTION_DEPLOYMENT_KEY }}"
if [ -z "$key" ]; then key="${{ secrets.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.PRODUCTION_DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then key="${{ vars.DEPLOYMENT_KEY }}"; fi
if [ -z "$key" ]; then missing+=("PRODUCTION_DEPLOYMENT_KEY"); fi
if [ ${#missing[@]} -gt 0 ]; then
echo "❌ Missing production configuration: ${missing[*]}"
exit 1
fi
key_file="$RUNNER_TEMP/production_key"
printf '%s\n' "$key" > "$key_file"
chmod 600 "$key_file"
echo "host=$host" >> $GITHUB_OUTPUT
echo "user=$user" >> $GITHUB_OUTPUT
echo "path=$path" >> $GITHUB_OUTPUT
echo "docs-path=$docs_path" >> $GITHUB_OUTPUT
echo "key-file=$key_file" >> $GITHUB_OUTPUT
- name: Install rsync
run: |
if command -v rsync >/dev/null 2>&1; then
exit 0
fi
CACHE_DIR="${CI_CACHE_ROOT:-/tmp}/apt"
mkdir -p "$CACHE_DIR"
KEY="rsync-$(lsb_release -rs 2>/dev/null || echo unknown)"
DEB_DIR="$CACHE_DIR/$KEY"
mkdir -p "$DEB_DIR"
if ls "$DEB_DIR"/rsync*.deb >/dev/null 2>&1; then
apt-get update
apt-get install -y --no-install-recommends "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb
else
apt-get update
apt-get download rsync libpopt0
mv rsync*.deb libpopt0*.deb "$DEB_DIR"/
dpkg -i "$DEB_DIR"/libpopt0*.deb "$DEB_DIR"/rsync*.deb || apt-get install -f -y
fi
- name: Fetch staging artefacts
id: fetch
run: |
staging_root="${{ runner.temp }}/staging"
mkdir -p "$staging_root/service" "$staging_root/docs"
echo "📥 Copying service bundle from staging"
rsync -az --delete \
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs.path }}/" \
"$staging_root/service/"
if [ "${{ github.event.inputs.include_docs }}" = "true" ] && [ -n "${{ steps.staging.outputs['docs-path'] }}" ]; then
echo "📥 Copying documentation bundle from staging"
rsync -az --delete \
-e "ssh -i ${{ steps.staging.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
"${{ steps.staging.outputs.user }}@${{ steps.staging.outputs.host }}:${{ steps.staging.outputs['docs-path'] }}/" \
"$staging_root/docs/"
else
echo " Documentation promotion skipped"
fi
echo "service-dir=$staging_root/service" >> $GITHUB_OUTPUT
echo "docs-dir=$staging_root/docs" >> $GITHUB_OUTPUT
- name: Backup production service content
run: |
ssh -o StrictHostKeyChecking=no -i "${{ steps.production.outputs['key-file'] }}" \
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}" \
"set -e; TARGET='${{ steps.production.outputs.path }}'; \
if [ -d \"\$TARGET\" ]; then \
parent=\$(dirname \"\$TARGET\"); \
base=\$(basename \"\$TARGET\"); \
backup=\"\$parent/\${base}.backup.\$(date +%Y%m%d_%H%M%S)\"; \
mkdir -p \"\$backup\"; \
rsync -a --delete \"\$TARGET/\" \"\$backup/\"; \
ls -dt \"\$parent/\${base}\".backup.* 2>/dev/null | tail -n +6 | xargs -r rm -rf || true; \
echo \"Backup created at \$backup\"; \
else \
echo 'Production service path missing; skipping backup'; \
fi"
- name: Publish service to production
run: |
rsync -az --delete \
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
"${{ steps.fetch.outputs['service-dir'] }}/" \
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs.path }}/"
- name: Promote documentation bundle
if: github.event.inputs.include_docs == 'true' && steps.production.outputs['docs-path'] != ''
run: |
rsync -az --delete \
-e "ssh -i ${{ steps.production.outputs['key-file'] }} -o StrictHostKeyChecking=no" \
"${{ steps.fetch.outputs['docs-dir'] }}/" \
"${{ steps.production.outputs.user }}@${{ steps.production.outputs.host }}:${{ steps.production.outputs['docs-path'] }}/"
- name: Promotion summary
run: |
echo "✅ Promotion completed"
echo " Tag: ${{ github.event.inputs.tag }}"
echo " Service: ${{ steps.staging.outputs.host }} → ${{ steps.production.outputs.host }}"
if [ "${{ github.event.inputs.include_docs }}" = "true" ]; then
echo " Docs: included"
else
echo " Docs: skipped"
fi
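
For reference, the backup step in this workflow keeps only the five most recent snapshots: it lists the <path>.backup.* directories newest first, skips the first five, and deletes the rest. The retention rule in isolation looks like this (a sketch with a hypothetical production path):

# list backups newest-first, keep the five most recent, delete everything older
ls -dt /srv/feedser.backup.* 2>/dev/null | tail -n +6 | xargs -r rm -rf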