CI/CD consolidation
.gitea/scripts/build/build-airgap-bundle.sh (new file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-CONTAINERS-46-001: build air-gap bundle from existing buildx OCI archive

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <image-tag> [bundle-dir]" >&2
  exit 64
fi

IMAGE_TAG=$1
BUNDLE_DIR=${2:-"out/bundles/$(echo "$IMAGE_TAG" | tr '/:' '__')"}
SRC_DIR="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
OCI_ARCHIVE="${SRC_DIR}/image.oci"

if [[ ! -f "$OCI_ARCHIVE" ]]; then
  echo "[airgap] OCI archive not found at $OCI_ARCHIVE. Run build-multiarch first." >&2
  exit 66
fi

mkdir -p "$BUNDLE_DIR"

SBOM_FILE=""
if [[ -f "${SRC_DIR}/sbom.syft.json" ]]; then
  SBOM_FILE="${SRC_DIR}/sbom.syft.json"
fi

cat > "${BUNDLE_DIR}/bundle-manifest.json" <<EOF
{
  "image": "${IMAGE_TAG}",
  "oci_archive": "image.oci",
  "sbom": "$( [[ -n "$SBOM_FILE" ]] && echo sbom.syft.json || echo null )",
  "created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
}
EOF

cp "$OCI_ARCHIVE" "${BUNDLE_DIR}/image.oci"
[[ -n "$SBOM_FILE" ]] && cp "$SBOM_FILE" "${BUNDLE_DIR}/sbom.syft.json"
[[ -f "${SRC_DIR}/image.sha256" ]] && cp "${SRC_DIR}/image.sha256" "${BUNDLE_DIR}/image.sha256"
[[ -f "${SRC_DIR}/image.sig" ]] && cp "${SRC_DIR}/image.sig" "${BUNDLE_DIR}/image.sig"

tar -C "$BUNDLE_DIR" -czf "${BUNDLE_DIR}.tgz" .
echo "[airgap] bundle created at ${BUNDLE_DIR}.tgz"
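
A minimal invocation sketch (the image tag is illustrative, not taken from this commit); it assumes build-multiarch.sh has already produced out/buildx/<tag>/image.oci:

  ./.gitea/scripts/build/build-airgap-bundle.sh registry.example.internal/stellaops/scanner:1.2.3
  # optional explicit bundle directory:
  ./.gitea/scripts/build/build-airgap-bundle.sh registry.example.internal/stellaops/scanner:1.2.3 out/bundles/scanner-1.2.3
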
.gitea/scripts/build/build-cli.sh (new file, 131 lines)
@@ -0,0 +1,131 @@
#!/usr/bin/env bash
set -euo pipefail

# DEVOPS-CLI-41-001: Build multi-platform CLI binaries with SBOM and checksums.
# Updated: SPRINT_5100_0001_0001 - CLI Consolidation: includes Aoc and Symbols plugins

RIDS="${RIDS:-linux-x64,win-x64,osx-arm64}"
CONFIG="${CONFIG:-Release}"
PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
OUT_ROOT="out/cli"
SBOM_TOOL="${SBOM_TOOL:-syft}" # syft|none
SIGN="${SIGN:-false}"
COSIGN_KEY="${COSIGN_KEY:-}"

# CLI plugins to include in the distribution
# SPRINT_5100_0001_0001: CLI Consolidation - stella aoc and stella symbols
PLUGIN_PROJECTS=(
  "src/Cli/__Libraries/StellaOps.Cli.Plugins.Aoc/StellaOps.Cli.Plugins.Aoc.csproj"
  "src/Cli/__Libraries/StellaOps.Cli.Plugins.Symbols/StellaOps.Cli.Plugins.Symbols.csproj"
)
PLUGIN_MANIFESTS=(
  "src/Cli/plugins/cli/StellaOps.Cli.Plugins.Aoc/stellaops.cli.plugins.aoc.manifest.json"
  "src/Cli/plugins/cli/StellaOps.Cli.Plugins.Symbols/stellaops.cli.plugins.symbols.manifest.json"
)

IFS=',' read -ra TARGETS <<< "$RIDS"

mkdir -p "$OUT_ROOT"

if ! command -v dotnet >/dev/null 2>&1; then
  echo "[cli-build] dotnet CLI not found" >&2
  exit 69
fi

generate_sbom() {
  local dir="$1"
  local sbom="$2"
  if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
    syft "dir:${dir}" -o json > "$sbom"
  fi
}

sign_file() {
  local file="$1"
  if [[ "$SIGN" == "true" && -n "$COSIGN_KEY" && -x "$(command -v cosign || true)" ]]; then
    COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${file}.sig" "$file"
  fi
}

for rid in "${TARGETS[@]}"; do
  echo "[cli-build] publishing for $rid"
  out_dir="${OUT_ROOT}/${rid}"
  publish_dir="${out_dir}/publish"
  plugins_dir="${publish_dir}/plugins/cli"
  mkdir -p "$publish_dir"
  mkdir -p "$plugins_dir"

  # Build main CLI
  dotnet publish "$PROJECT" -c "$CONFIG" -r "$rid" \
    -o "$publish_dir" \
    --self-contained true \
    -p:PublishSingleFile=true \
    -p:PublishTrimmed=false \
    -p:DebugType=None \
    >/dev/null

  # Build and copy plugins
  # SPRINT_5100_0001_0001: CLI Consolidation
  for i in "${!PLUGIN_PROJECTS[@]}"; do
    plugin_project="${PLUGIN_PROJECTS[$i]}"
    manifest_path="${PLUGIN_MANIFESTS[$i]}"

    if [[ ! -f "$plugin_project" ]]; then
      echo "[cli-build] WARNING: Plugin project not found: $plugin_project"
      continue
    fi

    # Get plugin name from project path
    plugin_name=$(basename "$(dirname "$plugin_project")")
    plugin_out="${plugins_dir}/${plugin_name}"
    mkdir -p "$plugin_out"

    echo "[cli-build] building plugin: $plugin_name"
    dotnet publish "$plugin_project" -c "$CONFIG" -r "$rid" \
      -o "$plugin_out" \
      --self-contained false \
      -p:DebugType=None \
      >/dev/null 2>&1 || echo "[cli-build] WARNING: Plugin build failed for $plugin_name (may have pre-existing errors)"

    # Copy manifest file
    if [[ -f "$manifest_path" ]]; then
      cp "$manifest_path" "$plugin_out/"
    else
      echo "[cli-build] WARNING: Manifest not found: $manifest_path"
    fi
  done

  # Package (zip for Windows RIDs, tar.gz otherwise)
  archive_ext="tar.gz"
  if [[ "$rid" == win-* ]]; then
    archive_ext="zip"
  fi

  archive_name="stella-cli-${rid}.${archive_ext}"
  archive_path="${out_dir}/${archive_name}"
  if [[ "$archive_ext" == "zip" ]]; then
    zip -jr "$archive_path" "$publish_dir"
  else
    # Archive the publish directory contents, not the directory path itself.
    tar -czf "$archive_path" -C "$publish_dir" .
  fi

  sha256sum "$archive_path" > "${archive_path}.sha256"
  sign_file "$archive_path"

  # SBOM
  generate_sbom "$publish_dir" "${archive_path}.sbom.json"
done

# Build manifest
manifest="${OUT_ROOT}/manifest.json"
cat > "$manifest" <<EOF
{
  "generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "config": "$CONFIG",
  "rids": [$(printf '"%s",' "${TARGETS[@]}" | sed 's/,$//')],
  "plugins": ["stellaops.cli.plugins.aoc", "stellaops.cli.plugins.symbols"],
  "artifacts_root": "$OUT_ROOT",
  "notes": "CLI Consolidation (SPRINT_5100_0001_0001) - includes aoc and symbols plugins"
}
EOF

echo "[cli-build] artifacts in $OUT_ROOT"
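
A hedged usage sketch; the RID selection and cosign key path below are illustrative, the environment variables are the ones the script reads:

  RIDS=linux-x64 CONFIG=Release ./.gitea/scripts/build/build-cli.sh
  SIGN=true COSIGN_KEY=./keys/cosign.key SBOM_TOOL=syft ./.gitea/scripts/build/build-cli.sh
  # artifacts land under out/cli/<rid>/ plus out/cli/manifest.json
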
.gitea/scripts/build/build-multiarch.sh (new file, 93 lines)
@@ -0,0 +1,93 @@
#!/usr/bin/env bash
set -euo pipefail

# Multi-arch buildx helper for DEVOPS-CONTAINERS-44-001
# Requirements: docker CLI with buildx, optional syft (for SBOM) and cosign (for signing).

usage() {
  echo "Usage: $0 <image-tag> <context-dir> [--platform linux/amd64,linux/arm64] [--push] [--sbom syft|none] [--sign <cosign-key>]" >&2
  exit 64
}

if [[ $# -lt 2 ]]; then
  usage
fi

IMAGE_TAG=$1; shift
CONTEXT_DIR=$1; shift

PLATFORMS="linux/amd64,linux/arm64"
PUSH=false
SBOM_TOOL="syft"
COSIGN_KEY=""

while [[ $# -gt 0 ]]; do
  case "$1" in
    --platform) PLATFORMS="$2"; shift 2;;
    --push) PUSH=true; shift;;
    --sbom) SBOM_TOOL="$2"; shift 2;;
    --sign) COSIGN_KEY="$2"; shift 2;;
    *) echo "Unknown option: $1" >&2; usage;;
  esac
done

if ! command -v docker >/dev/null 2>&1; then
  echo "[buildx] docker CLI not found" >&2
  exit 69
fi

OUT_ROOT="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
mkdir -p "$OUT_ROOT"

BUILDER_NAME="stellaops-multiarch"
if ! docker buildx inspect "$BUILDER_NAME" >/dev/null 2>&1; then
  docker buildx create --name "$BUILDER_NAME" --driver docker-container --use >/dev/null
else
  docker buildx use "$BUILDER_NAME" >/dev/null
fi

BUILD_OPTS=(
  --platform "$PLATFORMS"
  -t "$IMAGE_TAG"
  --provenance=false
  --sbom=false
  --output "type=oci,dest=${OUT_ROOT}/image.oci"
)

if $PUSH; then
  BUILD_OPTS+=("--push")
fi

echo "[buildx] building $IMAGE_TAG for $PLATFORMS"
docker buildx build "${BUILD_OPTS[@]}" "$CONTEXT_DIR"

echo "[buildx] computing digest"
IMAGE_DIGEST=$(sha256sum "${OUT_ROOT}/image.oci" | awk '{print $1}')
echo "$IMAGE_DIGEST image.oci" > "${OUT_ROOT}/image.sha256"

if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
  echo "[buildx] generating SBOM via syft"
  syft "oci-archive:${OUT_ROOT}/image.oci" -o json > "${OUT_ROOT}/sbom.syft.json"
else
  echo "[buildx] skipping SBOM (tool=$SBOM_TOOL, syft available? $(command -v syft >/dev/null && echo yes || echo no))"
fi

if [[ -n "$COSIGN_KEY" ]] && command -v cosign >/dev/null 2>&1; then
  echo "[buildx] signing digest with cosign key"
  COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${OUT_ROOT}/image.sig" --output-certificate "${OUT_ROOT}/image.cert" "${OUT_ROOT}/image.oci"
else
  echo "[buildx] signature skipped (no key provided or cosign missing)"
fi

cat > "${OUT_ROOT}/build-metadata.json" <<EOF
{
  "image": "${IMAGE_TAG}",
  "platforms": "${PLATFORMS}",
  "pushed": ${PUSH},
  "digest_sha256": "${IMAGE_DIGEST}",
  "generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "sbom": "$( [[ -f ${OUT_ROOT}/sbom.syft.json ]] && echo sbom.syft.json || echo null )"
}
EOF

echo "[buildx] artifacts written to ${OUT_ROOT}"
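
Example invocations (image tag, context directory, and key path are illustrative; the flags are the ones parsed above):

  ./.gitea/scripts/build/build-multiarch.sh registry.example.internal/stellaops/web:1.4.0 ./src/Web
  ./.gitea/scripts/build/build-multiarch.sh registry.example.internal/stellaops/web:1.4.0 ./src/Web \
    --platform linux/amd64,linux/arm64 --sbom syft --sign ./keys/cosign.key --push
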
.gitea/scripts/evidence/signals-upload-evidence.sh (new file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail

STAGED_DIR="evidence-locker/signals/2025-12-05"
MODULE_ROOT="docs/modules/signals"
TAR_OUT="/tmp/signals-evidence.tar"

if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
  echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
  exit 1
fi

tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT

rsync -a --relative \
  "$STAGED_DIR/SHA256SUMS" \
  "$STAGED_DIR/confidence_decay_config.sigstore.json" \
  "$STAGED_DIR/unknowns_scoring_manifest.sigstore.json" \
  "$STAGED_DIR/heuristics_catalog.sigstore.json" \
  "$MODULE_ROOT/decay/confidence_decay_config.yaml" \
  "$MODULE_ROOT/unknowns/unknowns_scoring_manifest.json" \
  "$MODULE_ROOT/heuristics/heuristics.catalog.json" \
  "$tmpdir/"

pushd "$tmpdir/$STAGED_DIR" >/dev/null
sha256sum --check SHA256SUMS
popd >/dev/null

# Build deterministic tarball
pushd "$tmpdir" >/dev/null
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
  -cf "$TAR_OUT" .
popd >/dev/null

sha256sum "$TAR_OUT"

curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar" \
  --data-binary "@$TAR_OUT"

echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/signals/2025-12-05/"
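
Typical CI usage is just exporting the locker settings and running the script; the URL and token below are placeholders, not values from the commit:

  export EVIDENCE_LOCKER_URL=https://evidence.example.internal
  export CI_EVIDENCE_LOCKER_TOKEN=********
  ./.gitea/scripts/evidence/signals-upload-evidence.sh
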
.gitea/scripts/evidence/upload-all-evidence.sh (new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env bash
set -euo pipefail

# Upload both Zastava and Signals evidence bundles to the locker.
# Requires EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN.

EVIDENCE_LOCKER_URL=${EVIDENCE_LOCKER_URL:-}
CI_EVIDENCE_LOCKER_TOKEN=${CI_EVIDENCE_LOCKER_TOKEN:-}

if [[ -z "$EVIDENCE_LOCKER_URL" || -z "$CI_EVIDENCE_LOCKER_TOKEN" ]]; then
  echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
  exit 1
fi

# Defaults
ZASTAVA_TAR=${ZASTAVA_TAR:-evidence-locker/zastava/2025-12-02/zastava-evidence.tar}
ZASTAVA_VERIFY=${ZASTAVA_VERIFY:-tools/zastava-verify-evidence-tar.sh}
ZASTAVA_PATH="$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar"

SIGNALS_TAR=${SIGNALS_TAR:-evidence-locker/signals/2025-12-05/signals-evidence.tar}
SIGNALS_VERIFY=${SIGNALS_VERIFY:-tools/signals-verify-evidence-tar.sh}
SIGNALS_PATH="$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar"

# Verify
if [[ -x "$ZASTAVA_VERIFY" ]]; then
  "$ZASTAVA_VERIFY" "$ZASTAVA_TAR"
fi
if [[ -x "$SIGNALS_VERIFY" ]]; then
  "$SIGNALS_VERIFY" "$SIGNALS_TAR"
fi

# Upload Zastava
curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$ZASTAVA_PATH" \
  --data-binary @"$ZASTAVA_TAR"

echo "Uploaded Zastava evidence to $ZASTAVA_PATH"

# Upload Signals
curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$SIGNALS_PATH" \
  --data-binary @"$SIGNALS_TAR"

echo "Uploaded Signals evidence to $SIGNALS_PATH"
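
A sketch of running the combined upload with overridden bundle paths (the paths are illustrative; the variables are the overridable defaults above):

  export EVIDENCE_LOCKER_URL=https://evidence.example.internal
  export CI_EVIDENCE_LOCKER_TOKEN=********
  ZASTAVA_TAR=out/evidence/zastava-evidence.tar SIGNALS_TAR=out/evidence/signals-evidence.tar \
    ./.gitea/scripts/evidence/upload-all-evidence.sh
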
.gitea/scripts/evidence/zastava-upload-evidence.sh (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -euo pipefail

if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
  echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
  exit 1
fi

STAGED_DIR="evidence-locker/zastava/2025-12-02"
TAR_OUT="/tmp/zastava-evidence.tar"
MODULE_ROOT="docs/modules/zastava"

test -d "$MODULE_ROOT" || { echo "missing module root $MODULE_ROOT" >&2; exit 1; }
mkdir -p "$STAGED_DIR"

tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT

rsync -a --relative \
  "$MODULE_ROOT/SHA256SUMS" \
  "$MODULE_ROOT/schemas/" \
  "$MODULE_ROOT/exports/" \
  "$MODULE_ROOT/thresholds.yaml" \
  "$MODULE_ROOT/thresholds.yaml.dsse" \
  "$MODULE_ROOT/kit/verify.sh" \
  "$MODULE_ROOT/kit/README.md" \
  "$MODULE_ROOT/kit/ed25519.pub" \
  "$MODULE_ROOT/kit/zastava-kit.tzst" \
  "$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
  "$MODULE_ROOT/evidence/README.md" \
  "$tmpdir/"

pushd "$tmpdir/docs/modules/zastava" >/dev/null
sha256sum --check SHA256SUMS

# Build deterministic tarball for reproducibility (payloads + DSSE)
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
  -cf "$TAR_OUT" .
popd >/dev/null

sha256sum "$TAR_OUT"

curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" \
  --data-binary "@$TAR_OUT"

echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/zastava/2025-12-02/"
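
After an upload, a consumer-side check might look like this sketch (assumes the locker serves the same path back over GET; URL and token are placeholders):

  curl -fsS -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
    "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" -o zastava-evidence.tar
  sha256sum zastava-evidence.tar   # compare with the digest printed by the upload script
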
.gitea/scripts/metrics/compute-reachability-metrics.sh (new file, 287 lines)
@@ -0,0 +1,287 @@
#!/usr/bin/env bash
# =============================================================================
# compute-reachability-metrics.sh
# Computes reachability metrics against the ground-truth corpus
#
# Usage: ./compute-reachability-metrics.sh [options]
#   --corpus-path PATH   Path to ground-truth corpus (default: src/__Tests/reachability/corpus)
#   --output FILE        Output JSON file (default: stdout)
#   --dry-run            Show what would be computed without running the scanner
#   --strict             Exit non-zero if any threshold is violated
#   --verbose            Enable verbose output
#
# Output: JSON with recall, precision, accuracy metrics per vulnerability class
# =============================================================================

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"

# Default paths
CORPUS_PATH="${REPO_ROOT}/src/__Tests/reachability/corpus"
OUTPUT_FILE=""
DRY_RUN=false
STRICT=false
VERBOSE=false

# Parse arguments
while [[ $# -gt 0 ]]; do
  case "$1" in
    --corpus-path)
      CORPUS_PATH="$2"
      shift 2
      ;;
    --output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --strict)
      STRICT=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    -h|--help)
      head -20 "$0" | tail -15
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

log() {
  if [[ "${VERBOSE}" == "true" ]]; then
    echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
  fi
}

error() {
  echo "[ERROR] $*" >&2
}

# Validate corpus exists
if [[ ! -d "${CORPUS_PATH}" ]]; then
  error "Corpus directory not found: ${CORPUS_PATH}"
  exit 1
fi

MANIFEST_FILE="${CORPUS_PATH}/manifest.json"
if [[ ! -f "${MANIFEST_FILE}" ]]; then
  error "Corpus manifest not found: ${MANIFEST_FILE}"
  exit 1
fi

log "Loading corpus from ${CORPUS_PATH}"
log "Manifest: ${MANIFEST_FILE}"

# Initialize counters for each vulnerability class
declare -A true_positives
declare -A false_positives
declare -A false_negatives
declare -A total_expected

CLASSES=("runtime_dep" "os_pkg" "code" "config")

for class in "${CLASSES[@]}"; do
  true_positives[$class]=0
  false_positives[$class]=0
  false_negatives[$class]=0
  total_expected[$class]=0
done

if [[ "${DRY_RUN}" == "true" ]]; then
  log "[DRY RUN] Would process corpus fixtures..."

  # Generate mock metrics for dry-run
  cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "corpus_path": "${CORPUS_PATH}",
  "dry_run": true,
  "metrics": {
    "runtime_dep": {
      "recall": 0.96,
      "precision": 0.94,
      "f1_score": 0.95,
      "total_expected": 100,
      "true_positives": 96,
      "false_positives": 6,
      "false_negatives": 4
    },
    "os_pkg": {
      "recall": 0.98,
      "precision": 0.97,
      "f1_score": 0.975,
      "total_expected": 50,
      "true_positives": 49,
      "false_positives": 2,
      "false_negatives": 1
    },
    "code": {
      "recall": 0.92,
      "precision": 0.90,
      "f1_score": 0.91,
      "total_expected": 25,
      "true_positives": 23,
      "false_positives": 3,
      "false_negatives": 2
    },
    "config": {
      "recall": 0.88,
      "precision": 0.85,
      "f1_score": 0.865,
      "total_expected": 20,
      "true_positives": 18,
      "false_positives": 3,
      "false_negatives": 2
    }
  },
  "aggregate": {
    "overall_recall": 0.9538,
    "overall_precision": 0.9302,
    "reachability_accuracy": 0.9268
  }
}
EOF
  exit 0
fi

# Process each fixture in the corpus
log "Processing corpus fixtures..."

# Read manifest and iterate fixtures
FIXTURE_COUNT=$(jq -r '.fixtures | length' "${MANIFEST_FILE}")
log "Found ${FIXTURE_COUNT} fixtures"

for i in $(seq 0 $((FIXTURE_COUNT - 1))); do
  FIXTURE_ID=$(jq -r ".fixtures[$i].id" "${MANIFEST_FILE}")
  FIXTURE_PATH="${CORPUS_PATH}/$(jq -r ".fixtures[$i].path" "${MANIFEST_FILE}")"
  FIXTURE_CLASS=$(jq -r ".fixtures[$i].class" "${MANIFEST_FILE}")
  EXPECTED_REACHABLE=$(jq -r ".fixtures[$i].expected_reachable // 0" "${MANIFEST_FILE}")
  EXPECTED_UNREACHABLE=$(jq -r ".fixtures[$i].expected_unreachable // 0" "${MANIFEST_FILE}")

  log "Processing fixture: ${FIXTURE_ID} (class: ${FIXTURE_CLASS})"

  if [[ ! -d "${FIXTURE_PATH}" ]] && [[ ! -f "${FIXTURE_PATH}" ]]; then
    error "Fixture not found: ${FIXTURE_PATH}"
    continue
  fi

  # Update expected counts
  total_expected[$FIXTURE_CLASS]=$((${total_expected[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))

  # Run scanner on fixture (deterministic mode, offline)
  SCAN_RESULT_FILE=$(mktemp)
  trap "rm -f ${SCAN_RESULT_FILE}" EXIT

  if dotnet run --project "${REPO_ROOT}/src/Scanner/StellaOps.Scanner.Cli" -- \
    scan --input "${FIXTURE_PATH}" \
    --output "${SCAN_RESULT_FILE}" \
    --deterministic \
    --offline \
    --format json \
    2>/dev/null; then

    # Parse scanner results
    DETECTED_REACHABLE=$(jq -r '[.findings[] | select(.reachable == true)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")
    DETECTED_UNREACHABLE=$(jq -r '[.findings[] | select(.reachable == false)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")

    # Calculate TP, FP, FN for this fixture
    TP=$((DETECTED_REACHABLE < EXPECTED_REACHABLE ? DETECTED_REACHABLE : EXPECTED_REACHABLE))
    FP=$((DETECTED_REACHABLE > EXPECTED_REACHABLE ? DETECTED_REACHABLE - EXPECTED_REACHABLE : 0))
    FN=$((EXPECTED_REACHABLE - TP))

    true_positives[$FIXTURE_CLASS]=$((${true_positives[$FIXTURE_CLASS]} + TP))
    false_positives[$FIXTURE_CLASS]=$((${false_positives[$FIXTURE_CLASS]} + FP))
    false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + FN))
  else
    error "Scanner failed for fixture: ${FIXTURE_ID}"
    false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))
  fi
done

# Calculate metrics per class
calculate_metrics() {
  local class=$1
  local tp=${true_positives[$class]}
  local fp=${false_positives[$class]}
  local fn=${false_negatives[$class]}
  local total=${total_expected[$class]}

  local recall=0
  local precision=0
  local f1=0

  if [[ $((tp + fn)) -gt 0 ]]; then
    recall=$(echo "scale=4; $tp / ($tp + $fn)" | bc)
  fi

  if [[ $((tp + fp)) -gt 0 ]]; then
    precision=$(echo "scale=4; $tp / ($tp + $fp)" | bc)
  fi

  if (( $(echo "$recall + $precision > 0" | bc -l) )); then
    f1=$(echo "scale=4; 2 * $recall * $precision / ($recall + $precision)" | bc)
  fi

  echo "{\"recall\": $recall, \"precision\": $precision, \"f1_score\": $f1, \"total_expected\": $total, \"true_positives\": $tp, \"false_positives\": $fp, \"false_negatives\": $fn}"
}

# Generate output JSON
OUTPUT=$(cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "corpus_path": "${CORPUS_PATH}",
  "dry_run": false,
  "metrics": {
    "runtime_dep": $(calculate_metrics "runtime_dep"),
    "os_pkg": $(calculate_metrics "os_pkg"),
    "code": $(calculate_metrics "code"),
    "config": $(calculate_metrics "config")
  },
  "aggregate": {
    "overall_recall": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${total_expected[runtime_dep]} + ${total_expected[os_pkg]} + ${total_expected[code]} + ${total_expected[config]} + 0.0001)" | bc),
    "overall_precision": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]} + ${false_positives[runtime_dep]} + ${false_positives[os_pkg]} + ${false_positives[code]} + ${false_positives[config]} + 0.0001)" | bc)
  }
}
EOF
)

# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
  echo "${OUTPUT}" > "${OUTPUT_FILE}"
  log "Results written to ${OUTPUT_FILE}"
else
  echo "${OUTPUT}"
fi

# Check thresholds in strict mode
if [[ "${STRICT}" == "true" ]]; then
  THRESHOLDS_FILE="${SCRIPT_DIR}/reachability-thresholds.yaml"
  if [[ -f "${THRESHOLDS_FILE}" ]]; then
    log "Checking thresholds from ${THRESHOLDS_FILE}"

    # Extract thresholds and check
    MIN_RECALL=$(yq -r '.thresholds.runtime_dependency_recall.min // 0.95' "${THRESHOLDS_FILE}")
    ACTUAL_RECALL=$(echo "${OUTPUT}" | jq -r '.metrics.runtime_dep.recall')

    if (( $(echo "$ACTUAL_RECALL < $MIN_RECALL" | bc -l) )); then
      error "Runtime dependency recall ${ACTUAL_RECALL} below threshold ${MIN_RECALL}"
      exit 1
    fi

    log "All thresholds passed"
  fi
fi

exit 0
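
Example runs (the output path is illustrative); --dry-run prints mock metrics without invoking the scanner, while --strict enforces the thresholds file that sits next to the script:

  ./.gitea/scripts/metrics/compute-reachability-metrics.sh --dry-run
  ./.gitea/scripts/metrics/compute-reachability-metrics.sh --strict --verbose --output out/metrics/reachability.json
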
.gitea/scripts/metrics/compute-ttfs-metrics.sh (new file, 313 lines)
@@ -0,0 +1,313 @@
#!/usr/bin/env bash
# =============================================================================
# compute-ttfs-metrics.sh
# Computes Time-to-First-Signal (TTFS) metrics from test runs
#
# Usage: ./compute-ttfs-metrics.sh [options]
#   --results-path PATH  Path to test results directory
#   --output FILE        Output JSON file (default: stdout)
#   --baseline FILE      Baseline TTFS file for comparison
#   --dry-run            Show what would be computed
#   --strict             Exit non-zero if thresholds are violated
#   --verbose            Enable verbose output
#
# Output: JSON with TTFS p50, p95, p99 metrics and regression status
# =============================================================================

set -euo pipefail
shopt -s globstar nullglob  # allow recursive **/*.json globs; unmatched globs expand to nothing

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"

# Default paths
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
OUTPUT_FILE=""
BASELINE_FILE="${REPO_ROOT}/src/__Tests/__Benchmarks/baselines/ttfs-baseline.json"
DRY_RUN=false
STRICT=false
VERBOSE=false

# Parse arguments
while [[ $# -gt 0 ]]; do
  case "$1" in
    --results-path)
      RESULTS_PATH="$2"
      shift 2
      ;;
    --output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --baseline)
      BASELINE_FILE="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --strict)
      STRICT=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    -h|--help)
      head -20 "$0" | tail -15
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

log() {
  if [[ "${VERBOSE}" == "true" ]]; then
    echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
  fi
}

error() {
  echo "[ERROR] $*" >&2
}

warn() {
  echo "[WARN] $*" >&2
}

# Calculate percentiles from a sorted array
percentile() {
  local -n arr=$1
  local p=$2
  local n=${#arr[@]}

  if [[ $n -eq 0 ]]; then
    echo "0"
    return
  fi

  local idx=$(echo "scale=0; ($n - 1) * $p / 100" | bc)
  echo "${arr[$idx]}"
}

if [[ "${DRY_RUN}" == "true" ]]; then
  log "[DRY RUN] Would process TTFS metrics..."

  cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "dry_run": true,
  "results_path": "${RESULTS_PATH}",
  "metrics": {
    "ttfs_ms": {
      "p50": 1250,
      "p95": 3500,
      "p99": 5200,
      "min": 450,
      "max": 8500,
      "mean": 1850,
      "sample_count": 100
    },
    "by_scan_type": {
      "image_scan": {
        "p50": 2100,
        "p95": 4500,
        "p99": 6800
      },
      "filesystem_scan": {
        "p50": 850,
        "p95": 1800,
        "p99": 2500
      },
      "sbom_scan": {
        "p50": 320,
        "p95": 650,
        "p99": 950
      }
    }
  },
  "baseline_comparison": {
    "baseline_path": "${BASELINE_FILE}",
    "p50_regression_pct": -2.5,
    "p95_regression_pct": 1.2,
    "regression_detected": false
  }
}
EOF
  exit 0
fi

# Validate results directory
if [[ ! -d "${RESULTS_PATH}" ]]; then
  error "Results directory not found: ${RESULTS_PATH}"
  exit 1
fi

log "Processing TTFS results from ${RESULTS_PATH}"

# Collect all TTFS values from result files
declare -a ttfs_values=()
declare -a image_ttfs=()
declare -a fs_ttfs=()
declare -a sbom_ttfs=()

# Find and process all result files
for result_file in "${RESULTS_PATH}"/*.json "${RESULTS_PATH}"/**/*.json; do
  [[ -f "${result_file}" ]] || continue

  log "Processing: ${result_file}"

  # Extract TTFS value if present
  TTFS=$(jq -r '.ttfs_ms // .time_to_first_signal_ms // empty' "${result_file}" 2>/dev/null || true)
  SCAN_TYPE=$(jq -r '.scan_type // "unknown"' "${result_file}" 2>/dev/null || echo "unknown")

  if [[ -n "${TTFS}" ]] && [[ "${TTFS}" != "null" ]]; then
    ttfs_values+=("${TTFS}")

    case "${SCAN_TYPE}" in
      image|image_scan|container)
        image_ttfs+=("${TTFS}")
        ;;
      filesystem|fs|fs_scan)
        fs_ttfs+=("${TTFS}")
        ;;
      sbom|sbom_scan)
        sbom_ttfs+=("${TTFS}")
        ;;
    esac
  fi
done

# Sort arrays for percentile calculation
IFS=$'\n' ttfs_sorted=($(sort -n <<<"${ttfs_values[*]}")); unset IFS
IFS=$'\n' image_sorted=($(sort -n <<<"${image_ttfs[*]}")); unset IFS
IFS=$'\n' fs_sorted=($(sort -n <<<"${fs_ttfs[*]}")); unset IFS
IFS=$'\n' sbom_sorted=($(sort -n <<<"${sbom_ttfs[*]}")); unset IFS

# Calculate overall metrics
SAMPLE_COUNT=${#ttfs_values[@]}
if [[ $SAMPLE_COUNT -eq 0 ]]; then
  warn "No TTFS samples found"
  P50=0
  P95=0
  P99=0
  MIN=0
  MAX=0
  MEAN=0
else
  P50=$(percentile ttfs_sorted 50)
  P95=$(percentile ttfs_sorted 95)
  P99=$(percentile ttfs_sorted 99)
  MIN=${ttfs_sorted[0]}
  MAX=${ttfs_sorted[-1]}

  # Calculate mean
  SUM=0
  for v in "${ttfs_values[@]}"; do
    SUM=$((SUM + v))
  done
  MEAN=$((SUM / SAMPLE_COUNT))
fi

# Calculate per-type metrics
IMAGE_P50=$(percentile image_sorted 50)
IMAGE_P95=$(percentile image_sorted 95)
IMAGE_P99=$(percentile image_sorted 99)

FS_P50=$(percentile fs_sorted 50)
FS_P95=$(percentile fs_sorted 95)
FS_P99=$(percentile fs_sorted 99)

SBOM_P50=$(percentile sbom_sorted 50)
SBOM_P95=$(percentile sbom_sorted 95)
SBOM_P99=$(percentile sbom_sorted 99)

# Compare against baseline if available
REGRESSION_DETECTED=false
P50_REGRESSION_PCT=0
P95_REGRESSION_PCT=0

if [[ -f "${BASELINE_FILE}" ]]; then
  log "Comparing against baseline: ${BASELINE_FILE}"

  BASELINE_P50=$(jq -r '.metrics.ttfs_ms.p50 // 0' "${BASELINE_FILE}")
  BASELINE_P95=$(jq -r '.metrics.ttfs_ms.p95 // 0' "${BASELINE_FILE}")

  if [[ $BASELINE_P50 -gt 0 ]]; then
    P50_REGRESSION_PCT=$(echo "scale=2; (${P50} - ${BASELINE_P50}) * 100 / ${BASELINE_P50}" | bc)
  fi

  if [[ $BASELINE_P95 -gt 0 ]]; then
    P95_REGRESSION_PCT=$(echo "scale=2; (${P95} - ${BASELINE_P95}) * 100 / ${BASELINE_P95}" | bc)
  fi

  # Check for regression (>10% increase)
  if (( $(echo "${P50_REGRESSION_PCT} > 10" | bc -l) )) || (( $(echo "${P95_REGRESSION_PCT} > 10" | bc -l) )); then
    REGRESSION_DETECTED=true
    warn "TTFS regression detected: p50=${P50_REGRESSION_PCT}%, p95=${P95_REGRESSION_PCT}%"
  fi
fi

# Generate output
OUTPUT=$(cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "dry_run": false,
  "results_path": "${RESULTS_PATH}",
  "metrics": {
    "ttfs_ms": {
      "p50": ${P50},
      "p95": ${P95},
      "p99": ${P99},
      "min": ${MIN},
      "max": ${MAX},
      "mean": ${MEAN},
      "sample_count": ${SAMPLE_COUNT}
    },
    "by_scan_type": {
      "image_scan": {
        "p50": ${IMAGE_P50:-0},
        "p95": ${IMAGE_P95:-0},
        "p99": ${IMAGE_P99:-0}
      },
      "filesystem_scan": {
        "p50": ${FS_P50:-0},
        "p95": ${FS_P95:-0},
        "p99": ${FS_P99:-0}
      },
      "sbom_scan": {
        "p50": ${SBOM_P50:-0},
        "p95": ${SBOM_P95:-0},
        "p99": ${SBOM_P99:-0}
      }
    }
  },
  "baseline_comparison": {
    "baseline_path": "${BASELINE_FILE}",
    "p50_regression_pct": ${P50_REGRESSION_PCT},
    "p95_regression_pct": ${P95_REGRESSION_PCT},
    "regression_detected": ${REGRESSION_DETECTED}
  }
}
EOF
)

# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
  echo "${OUTPUT}" > "${OUTPUT_FILE}"
  log "Results written to ${OUTPUT_FILE}"
else
  echo "${OUTPUT}"
fi

# Strict mode: fail on regression
if [[ "${STRICT}" == "true" ]] && [[ "${REGRESSION_DETECTED}" == "true" ]]; then
  error "TTFS regression exceeds threshold"
  exit 1
fi

exit 0
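
Example runs (the results and output paths are illustrative; the flags are the ones parsed above):

  ./.gitea/scripts/metrics/compute-ttfs-metrics.sh --dry-run
  ./.gitea/scripts/metrics/compute-ttfs-metrics.sh --results-path out/bench/results \
    --baseline src/__Tests/__Benchmarks/baselines/ttfs-baseline.json --strict --output out/metrics/ttfs.json
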
.gitea/scripts/metrics/enforce-performance-slos.sh (new file, 326 lines)
@@ -0,0 +1,326 @@
#!/usr/bin/env bash
# =============================================================================
# enforce-performance-slos.sh
# Enforces scan time and compute budget SLOs in CI
#
# Usage: ./enforce-performance-slos.sh [options]
#   --results-path PATH  Path to benchmark results directory
#   --slos-file FILE     Path to SLO definitions (default: performance-slos.yaml next to this script)
#   --output FILE        Output JSON file (default: stdout)
#   --dry-run            Show what would be enforced
#   --strict             Exit non-zero if any SLO is violated
#   --verbose            Enable verbose output
#
# Output: JSON with SLO evaluation results and violations
# =============================================================================

set -euo pipefail
shopt -s globstar nullglob  # allow recursive **/*.json globs; unmatched globs expand to nothing

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"

# Default paths
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
SLOS_FILE="${SCRIPT_DIR}/performance-slos.yaml"
OUTPUT_FILE=""
DRY_RUN=false
STRICT=false
VERBOSE=false

# Parse arguments
while [[ $# -gt 0 ]]; do
  case "$1" in
    --results-path)
      RESULTS_PATH="$2"
      shift 2
      ;;
    --slos-file)
      SLOS_FILE="$2"
      shift 2
      ;;
    --output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --strict)
      STRICT=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    -h|--help)
      head -20 "$0" | tail -15
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

log() {
  if [[ "${VERBOSE}" == "true" ]]; then
    echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
  fi
}

error() {
  echo "[ERROR] $*" >&2
}

warn() {
  echo "[WARN] $*" >&2
}

if [[ "${DRY_RUN}" == "true" ]]; then
  log "[DRY RUN] Would enforce performance SLOs..."

  cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "dry_run": true,
  "results_path": "${RESULTS_PATH}",
  "slos_file": "${SLOS_FILE}",
  "slo_evaluations": {
    "scan_time_p95": {
      "slo_name": "Scan Time P95",
      "threshold_ms": 30000,
      "actual_ms": 25000,
      "passed": true,
      "margin_pct": 16.7
    },
    "memory_peak_mb": {
      "slo_name": "Peak Memory Usage",
      "threshold_mb": 2048,
      "actual_mb": 1650,
      "passed": true,
      "margin_pct": 19.4
    },
    "cpu_time_seconds": {
      "slo_name": "CPU Time",
      "threshold_seconds": 60,
      "actual_seconds": 45,
      "passed": true,
      "margin_pct": 25.0
    }
  },
  "summary": {
    "total_slos": 3,
    "passed": 3,
    "failed": 0,
    "all_passed": true
  }
}
EOF
  exit 0
fi

# Validate paths
if [[ ! -d "${RESULTS_PATH}" ]]; then
  error "Results directory not found: ${RESULTS_PATH}"
  exit 1
fi

if [[ ! -f "${SLOS_FILE}" ]]; then
  warn "SLOs file not found: ${SLOS_FILE}, using defaults"
fi

log "Enforcing SLOs from ${SLOS_FILE}"
log "Results path: ${RESULTS_PATH}"

# Initialize evaluation results
VIOLATIONS=()
TOTAL_SLOS=0
PASSED_SLOS=0

# Define default SLOs
declare -A SLOS
SLOS["scan_time_p95_ms"]=30000
SLOS["scan_time_p99_ms"]=60000
SLOS["memory_peak_mb"]=2048
SLOS["cpu_time_seconds"]=120
SLOS["sbom_gen_time_ms"]=10000
SLOS["policy_eval_time_ms"]=5000

# Load SLOs from file if exists
if [[ -f "${SLOS_FILE}" ]]; then
  while IFS=: read -r key value; do
    key=$(echo "$key" | tr -d ' ')
    value=$(echo "$value" | tr -d ' ')
    if [[ -n "$key" ]] && [[ -n "$value" ]] && [[ "$key" != "#"* ]]; then
      SLOS["$key"]=$value
      log "Loaded SLO: ${key}=${value}"
    fi
  done < <(yq -r 'to_entries | .[] | "\(.key):\(.value.threshold // .value)"' "${SLOS_FILE}" 2>/dev/null || true)
fi

# Collect metrics from results
SCAN_TIMES=()
MEMORY_VALUES=()
CPU_TIMES=()
SBOM_TIMES=()
POLICY_TIMES=()

for result_file in "${RESULTS_PATH}"/*.json "${RESULTS_PATH}"/**/*.json; do
  [[ -f "${result_file}" ]] || continue

  log "Processing: ${result_file}"

  # Extract metrics
  SCAN_TIME=$(jq -r '.duration_ms // .scan_time_ms // empty' "${result_file}" 2>/dev/null || true)
  MEMORY=$(jq -r '.peak_memory_mb // .memory_mb // empty' "${result_file}" 2>/dev/null || true)
  CPU_TIME=$(jq -r '.cpu_time_seconds // .cpu_seconds // empty' "${result_file}" 2>/dev/null || true)
  SBOM_TIME=$(jq -r '.sbom_generation_ms // empty' "${result_file}" 2>/dev/null || true)
  POLICY_TIME=$(jq -r '.policy_evaluation_ms // empty' "${result_file}" 2>/dev/null || true)

  [[ -n "${SCAN_TIME}" ]] && SCAN_TIMES+=("${SCAN_TIME}")
  [[ -n "${MEMORY}" ]] && MEMORY_VALUES+=("${MEMORY}")
  [[ -n "${CPU_TIME}" ]] && CPU_TIMES+=("${CPU_TIME}")
  [[ -n "${SBOM_TIME}" ]] && SBOM_TIMES+=("${SBOM_TIME}")
  [[ -n "${POLICY_TIME}" ]] && POLICY_TIMES+=("${POLICY_TIME}")
done

# Helper: calculate percentile from array
calc_percentile() {
  local -n values=$1
  local pct=$2

  if [[ ${#values[@]} -eq 0 ]]; then
    echo "0"
    return
  fi

  IFS=$'\n' sorted=($(sort -n <<<"${values[*]}")); unset IFS
  local n=${#sorted[@]}
  local idx=$(echo "scale=0; ($n - 1) * $pct / 100" | bc)
  echo "${sorted[$idx]}"
}

# Helper: calculate max from array
calc_max() {
  local -n values=$1

  if [[ ${#values[@]} -eq 0 ]]; then
    echo "0"
    return
  fi

  local max=0
  for v in "${values[@]}"; do
    if (( $(echo "$v > $max" | bc -l) )); then
      max=$v
    fi
  done
  echo "$max"
}

# Evaluate one SLO. The evaluation JSON is stored in SLO_JSON so the counters
# and violations update in the current shell rather than a command-substitution
# subshell (where they would be lost).
evaluate_slo() {
  local name=$1
  local threshold=$2
  local actual=$3
  local unit=$4

  TOTAL_SLOS=$((TOTAL_SLOS + 1))

  local passed=true
  local margin_pct=0

  if (( $(echo "$actual > $threshold" | bc -l) )); then
    passed=false
    margin_pct=$(echo "scale=2; ($actual - $threshold) * 100 / $threshold" | bc)
    VIOLATIONS+=("${name}: ${actual}${unit} exceeds threshold ${threshold}${unit} (+${margin_pct}%)")
    warn "SLO VIOLATION: ${name} = ${actual}${unit} (threshold: ${threshold}${unit})"
  else
    PASSED_SLOS=$((PASSED_SLOS + 1))
    margin_pct=$(echo "scale=2; ($threshold - $actual) * 100 / $threshold" | bc)
    log "SLO PASSED: ${name} = ${actual}${unit} (threshold: ${threshold}${unit}, margin: ${margin_pct}%)"
  fi

  SLO_JSON="{\"slo_name\": \"${name}\", \"threshold\": ${threshold}, \"actual\": ${actual}, \"unit\": \"${unit}\", \"passed\": ${passed}, \"margin_pct\": ${margin_pct}}"
}

# Calculate actuals
SCAN_P95=$(calc_percentile SCAN_TIMES 95)
SCAN_P99=$(calc_percentile SCAN_TIMES 99)
MEMORY_MAX=$(calc_max MEMORY_VALUES)
CPU_MAX=$(calc_max CPU_TIMES)
SBOM_P95=$(calc_percentile SBOM_TIMES 95)
POLICY_P95=$(calc_percentile POLICY_TIMES 95)

# Run evaluations
evaluate_slo "Scan Time P95" "${SLOS[scan_time_p95_ms]}" "${SCAN_P95}" "ms"; SLO_SCAN_P95="${SLO_JSON}"
evaluate_slo "Scan Time P99" "${SLOS[scan_time_p99_ms]}" "${SCAN_P99}" "ms"; SLO_SCAN_P99="${SLO_JSON}"
evaluate_slo "Peak Memory" "${SLOS[memory_peak_mb]}" "${MEMORY_MAX}" "MB"; SLO_MEMORY="${SLO_JSON}"
evaluate_slo "CPU Time" "${SLOS[cpu_time_seconds]}" "${CPU_MAX}" "s"; SLO_CPU="${SLO_JSON}"
evaluate_slo "SBOM Generation P95" "${SLOS[sbom_gen_time_ms]}" "${SBOM_P95}" "ms"; SLO_SBOM="${SLO_JSON}"
evaluate_slo "Policy Evaluation P95" "${SLOS[policy_eval_time_ms]}" "${POLICY_P95}" "ms"; SLO_POLICY="${SLO_JSON}"

# Generate output
ALL_PASSED=true
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
  ALL_PASSED=false
fi

# Build violations JSON array
VIOLATIONS_JSON="[]"
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
  VIOLATIONS_JSON="["
  for i in "${!VIOLATIONS[@]}"; do
    [[ $i -gt 0 ]] && VIOLATIONS_JSON+=","
    VIOLATIONS_JSON+="\"${VIOLATIONS[$i]}\""
  done
  VIOLATIONS_JSON+="]"
fi

OUTPUT=$(cat <<EOF
{
  "timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "dry_run": false,
  "results_path": "${RESULTS_PATH}",
  "slos_file": "${SLOS_FILE}",
  "slo_evaluations": {
    "scan_time_p95": ${SLO_SCAN_P95},
    "scan_time_p99": ${SLO_SCAN_P99},
    "memory_peak_mb": ${SLO_MEMORY},
    "cpu_time_seconds": ${SLO_CPU},
    "sbom_gen_time_ms": ${SLO_SBOM},
    "policy_eval_time_ms": ${SLO_POLICY}
  },
  "summary": {
    "total_slos": ${TOTAL_SLOS},
    "passed": ${PASSED_SLOS},
    "failed": $((TOTAL_SLOS - PASSED_SLOS)),
    "all_passed": ${ALL_PASSED},
    "violations": ${VIOLATIONS_JSON}
  }
}
EOF
)

# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
  echo "${OUTPUT}" > "${OUTPUT_FILE}"
  log "Results written to ${OUTPUT_FILE}"
else
  echo "${OUTPUT}"
fi

# Strict mode: fail on violations
if [[ "${STRICT}" == "true" ]] && [[ "${ALL_PASSED}" == "false" ]]; then
  error "Performance SLO violations detected"
  for v in "${VIOLATIONS[@]}"; do
    error " - ${v}"
  done
  exit 1
fi

exit 0
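
A sketch of wiring the gate into a CI step (the output path is illustrative; the jq check reads the summary field the script emits and fails the job on a non-zero exit):

  ./.gitea/scripts/metrics/enforce-performance-slos.sh --strict --output out/metrics/slos.json
  jq -e '.summary.all_passed' out/metrics/slos.json >/dev/null
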
.gitea/scripts/metrics/performance-slos.yaml (new file, 94 lines)
@@ -0,0 +1,94 @@
# =============================================================================
# Performance SLOs (Service Level Objectives)
# Reference: Testing and Quality Guardrails Technical Reference
#
# These SLOs define the performance budgets for CI quality gates.
# Violations will be flagged and may block releases.
# =============================================================================

# Scan Time SLOs (milliseconds)
scan_time:
  p50:
    threshold: 15000
    description: "50th percentile scan time"
    severity: "info"
  p95:
    threshold: 30000
    description: "95th percentile scan time - primary SLO"
    severity: "warning"
  p99:
    threshold: 60000
    description: "99th percentile scan time - tail latency"
    severity: "critical"

# Memory Usage SLOs (megabytes)
memory:
  peak_mb:
    threshold: 2048
    description: "Peak memory usage during scan"
    severity: "warning"
  average_mb:
    threshold: 1024
    description: "Average memory usage"
    severity: "info"

# CPU Time SLOs (seconds)
cpu:
  max_seconds:
    threshold: 120
    description: "Maximum CPU time per scan"
    severity: "warning"
  average_seconds:
    threshold: 60
    description: "Average CPU time per scan"
    severity: "info"

# Component-Specific SLOs (milliseconds)
components:
  sbom_generation:
    p95:
      threshold: 10000
      description: "SBOM generation time P95"
      severity: "warning"
  policy_evaluation:
    p95:
      threshold: 5000
      description: "Policy evaluation time P95"
      severity: "warning"
  reachability_analysis:
    p95:
      threshold: 20000
      description: "Reachability analysis time P95"
      severity: "warning"
  vulnerability_matching:
    p95:
      threshold: 8000
      description: "Vulnerability matching time P95"
      severity: "warning"

# Resource Budget SLOs
resource_budgets:
  disk_io_mb:
    threshold: 500
    description: "Maximum disk I/O per scan"
  network_calls:
    threshold: 0
    description: "Network calls (should be zero for offline scans)"
  temp_storage_mb:
    threshold: 1024
    description: "Maximum temporary storage usage"

# Regression Thresholds
regression:
  max_degradation_pct: 10
  warning_threshold_pct: 5
  baseline_window_days: 30

# Override Configuration
overrides:
  allowed_labels:
    - "performance-override"
    - "large-scan"
  required_approvers:
    - "platform"
    - "performance"
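
A sketch of reading one threshold from this file with the jq-style yq flavour the enforcement script already uses; the key path follows the structure above:

  yq -r '.scan_time.p95.threshold' .gitea/scripts/metrics/performance-slos.yaml   # expected to print 30000
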
.gitea/scripts/metrics/reachability-thresholds.yaml (new file, 102 lines)
@@ -0,0 +1,102 @@
# =============================================================================
# Reachability Quality Gate Thresholds
# Reference: Testing and Quality Guardrails Technical Reference
#
# These thresholds are enforced by CI quality gates. Violations will block PRs
# unless an override is explicitly approved.
# =============================================================================

thresholds:
  # Runtime dependency recall: percentage of runtime dependency vulns detected
  runtime_dependency_recall:
    min: 0.95
    description: "Percentage of runtime dependency vulnerabilities detected"
    severity: "critical"

  # OS package recall: percentage of OS package vulns detected
  os_package_recall:
    min: 0.97
    description: "Percentage of OS package vulnerabilities detected"
    severity: "critical"

  # Code vulnerability recall: percentage of code-level vulns detected
  code_vulnerability_recall:
    min: 0.90
    description: "Percentage of code vulnerabilities detected"
    severity: "high"

  # Configuration vulnerability recall
  config_vulnerability_recall:
    min: 0.85
    description: "Percentage of configuration vulnerabilities detected"
    severity: "medium"

  # False positive rate for unreachable findings
  unreachable_false_positives:
    max: 0.05
    description: "Rate of false positives for unreachable findings"
    severity: "high"

  # Reachability underreport rate: missed reachable findings
  reachability_underreport:
    max: 0.10
    description: "Rate of reachable findings incorrectly marked unreachable"
    severity: "critical"

  # Overall precision across all classes
  overall_precision:
    min: 0.90
    description: "Overall precision across all vulnerability classes"
    severity: "high"

  # F1 score threshold
  f1_score_min:
    min: 0.90
    description: "Minimum F1 score across vulnerability classes"
    severity: "high"

# Class-specific thresholds
class_thresholds:
  runtime_dep:
    recall_min: 0.95
    precision_min: 0.92
    f1_min: 0.93

  os_pkg:
    recall_min: 0.97
    precision_min: 0.95
    f1_min: 0.96

  code:
    recall_min: 0.90
    precision_min: 0.88
    f1_min: 0.89

  config:
    recall_min: 0.85
    precision_min: 0.80
    f1_min: 0.82

# Regression detection settings
regression:
  # Maximum allowed regression from baseline (percentage points)
  max_recall_regression: 0.02
  max_precision_regression: 0.03

  # Path to baseline metrics file
  baseline_path: "bench/baselines/reachability-baseline.json"

  # How many consecutive failures before blocking
  failure_threshold: 2

# Override configuration
overrides:
  # Allow temporary bypass for specific PR labels
  bypass_labels:
    - "quality-gate-override"
    - "wip"

  # Require explicit approval from these teams
  required_approvers:
    - "platform"
    - "reachability"
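
The matching read for the reachability gate, mirroring what compute-reachability-metrics.sh does in --strict mode:

  yq -r '.thresholds.runtime_dependency_recall.min' .gitea/scripts/metrics/reachability-thresholds.yaml   # expected to print 0.95
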
.gitea/scripts/release/build_release.py (new file, 1103 lines; diff suppressed because it is too large)
.gitea/scripts/release/check_cli_parity.py (new file, 53 lines)
@@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""Ensure CLI parity matrix contains no outstanding blockers before release."""
from __future__ import annotations

import pathlib
import sys

REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
PARITY_DOC = REPO_ROOT / "docs/cli-vs-ui-parity.md"

BLOCKERS = {
    "🟥": "blocking gap",
    "❌": "missing feature",
    "🚫": "unsupported",
}
WARNINGS = {
    "🟡": "partial support",
    "⚠️": "warning",
}


def main() -> int:
    if not PARITY_DOC.exists():
        print(f"❌ Parity matrix not found at {PARITY_DOC}", file=sys.stderr)
        return 1
    text = PARITY_DOC.read_text(encoding="utf-8")
    blockers: list[str] = []
    warnings: list[str] = []
    for line in text.splitlines():
        for symbol, label in BLOCKERS.items():
            if symbol in line:
                blockers.append(f"{label}: {line.strip()}")
        for symbol, label in WARNINGS.items():
            if symbol in line:
                warnings.append(f"{label}: {line.strip()}")
    if blockers:
        print("❌ CLI parity gate failed — blocking items present:", file=sys.stderr)
        for item in blockers:
            print(f"  - {item}", file=sys.stderr)
        return 1
    if warnings:
        print("⚠️ CLI parity gate warnings detected:", file=sys.stderr)
        for item in warnings:
            print(f"  - {item}", file=sys.stderr)
        print("Treat warnings as failures until parity matrix is fully green.", file=sys.stderr)
        return 1
    print("✅ CLI parity matrix has no blocking or warning entries.")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
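
Example invocation; the process exits 0 only when the parity matrix contains neither blocker nor warning symbols:

  python3 .gitea/scripts/release/check_cli_parity.py && echo "parity gate passed"
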
.gitea/scripts/release/verify_release.py (new file, 334 lines; diff truncated below)
@@ -0,0 +1,334 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Verify release artefacts (SBOMs, provenance, signatures, manifest hashes)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
from typing import Any, Mapping, Optional
|
||||
|
||||
from build_release import dump_yaml # type: ignore import-not-found
|
||||
|
||||
|
||||
class VerificationError(Exception):
|
||||
"""Raised when release artefacts fail verification."""
|
||||
|
||||
|
||||
def compute_sha256(path: pathlib.Path) -> str:
|
||||
sha = hashlib.sha256()
|
||||
with path.open("rb") as handle:
|
||||
for chunk in iter(lambda: handle.read(1024 * 1024), b""):
|
||||
sha.update(chunk)
|
||||
return sha.hexdigest()
|
||||
|
||||
|
||||
def parse_sha_file(path: pathlib.Path) -> Optional[str]:
|
||||
if not path.exists():
|
||||
return None
|
||||
content = path.read_text(encoding="utf-8").strip()
|
||||
if not content:
|
||||
return None
|
||||
return content.split()[0]
|
||||
|
||||
|
||||
def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path:
|
||||
candidate = pathlib.Path(path_str.replace("\\", "/"))
|
||||
if candidate.is_absolute():
|
||||
return candidate
|
||||
|
||||
for base in (release_dir, release_dir.parent, release_dir.parent.parent):
|
||||
resolved = (base / candidate).resolve()
|
||||
if resolved.exists():
|
||||
return resolved
|
||||
# Fall back to release_dir joined path even if missing to surface in caller.
|
||||
return (release_dir / candidate).resolve()
|
||||
|
||||
|
||||
def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]:
|
||||
manifest_path = release_dir / "release.json"
|
||||
if not manifest_path.exists():
|
||||
raise VerificationError(f"Release manifest JSON missing at {manifest_path}")
|
||||
try:
|
||||
with manifest_path.open("r", encoding="utf-8") as handle:
|
||||
return json.load(handle, object_pairs_hook=OrderedDict)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc
|
||||
|
||||
|
||||
def verify_manifest_hashes(
|
||||
manifest: Mapping[str, Any],
|
||||
release_dir: pathlib.Path,
|
||||
errors: list[str],
|
||||
) -> None:
|
||||
yaml_path = release_dir / "release.yaml"
|
||||
if not yaml_path.exists():
|
||||
errors.append(f"Missing release.yaml at {yaml_path}")
|
||||
return
|
||||
|
||||
recorded_yaml_sha = parse_sha_file(yaml_path.with_name(yaml_path.name + ".sha256"))
|
||||
actual_yaml_sha = compute_sha256(yaml_path)
|
||||
if recorded_yaml_sha and recorded_yaml_sha != actual_yaml_sha:
|
||||
errors.append(
|
||||
f"release.yaml.sha256 recorded {recorded_yaml_sha} but file hashes to {actual_yaml_sha}"
|
||||
)
|
||||
|
||||
json_path = release_dir / "release.json"
|
||||
recorded_json_sha = parse_sha_file(json_path.with_name(json_path.name + ".sha256"))
|
||||
actual_json_sha = compute_sha256(json_path)
|
||||
if recorded_json_sha and recorded_json_sha != actual_json_sha:
|
||||
errors.append(
|
||||
f"release.json.sha256 recorded {recorded_json_sha} but file hashes to {actual_json_sha}"
|
||||
)
|
||||
|
||||
checksums = manifest.get("checksums")
|
||||
if isinstance(checksums, Mapping):
|
||||
recorded_digest = checksums.get("sha256")
|
||||
base_manifest = OrderedDict(manifest)
|
||||
base_manifest.pop("checksums", None)
|
||||
yaml_without_checksums = dump_yaml(base_manifest)
|
||||
computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest()
|
||||
if recorded_digest != computed_digest:
|
||||
errors.append(
|
||||
"Manifest checksum mismatch: "
|
||||
f"recorded {recorded_digest}, computed {computed_digest}"
|
||||
)
|
||||
|
||||
|
||||
def verify_artifact_entry(
|
||||
entry: Mapping[str, Any],
|
||||
release_dir: pathlib.Path,
|
||||
label: str,
|
||||
component_name: str,
|
||||
errors: list[str],
|
||||
) -> None:
|
||||
path_str = entry.get("path")
|
||||
if not path_str:
|
||||
errors.append(f"{component_name}: {label} missing 'path' field.")
|
||||
return
|
||||
resolved = resolve_path(str(path_str), release_dir)
|
||||
if not resolved.exists():
|
||||
errors.append(f"{component_name}: {label} path does not exist → {resolved}")
|
||||
return
|
||||
recorded_sha = entry.get("sha256")
|
||||
if recorded_sha:
|
||||
actual_sha = compute_sha256(resolved)
|
||||
if actual_sha != recorded_sha:
|
||||
errors.append(
|
||||
f"{component_name}: {label} SHA mismatch for {resolved} "
|
||||
f"(recorded {recorded_sha}, computed {actual_sha})"
|
||||
)
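# Illustrative shape of the manifest entries walked below (field names mirror the checks in
# verify_components / verify_cli_entries / verify_collections; values are placeholders only):
#
#   {
#     "components": [{"name": "scanner", "sbom": {"path": "sboms/scanner.cdx.json", "sha256": "<hex>"}}],
#     "cli": [{"runtime": "linux-x64", "archive": {"path": "cli/stella-linux-x64.tar.gz", "sha256": "<hex>"}}],
#     "charts": [{"path": "charts/stellaops-0.1.0.tgz", "sha256": "<hex>"}],
#     "debugStore": {"manifest": "debug/debug-manifest.json", "sha256": "<hex>", "entries": 2}
#   }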
|
||||
|
||||
|
||||
def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
|
||||
for component in manifest.get("components", []):
|
||||
if not isinstance(component, Mapping):
|
||||
errors.append("Component entry is not a mapping.")
|
||||
continue
|
||||
name = str(component.get("name", "<unknown>"))
|
||||
for key, label in (
|
||||
("sbom", "SBOM"),
|
||||
("provenance", "provenance"),
|
||||
("signature", "signature"),
|
||||
("metadata", "metadata"),
|
||||
):
|
||||
entry = component.get(key)
|
||||
if not entry:
|
||||
continue
|
||||
if not isinstance(entry, Mapping):
|
||||
errors.append(f"{name}: {label} entry must be a mapping.")
|
||||
continue
|
||||
verify_artifact_entry(entry, release_dir, label, name, errors)
|
||||
|
||||
|
||||
def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
|
||||
for collection, label in (
|
||||
("charts", "chart"),
|
||||
("compose", "compose file"),
|
||||
):
|
||||
for item in manifest.get(collection, []):
|
||||
if not isinstance(item, Mapping):
|
||||
errors.append(f"{collection} entry is not a mapping.")
|
||||
continue
|
||||
path_value = item.get("path")
|
||||
if not path_value:
|
||||
errors.append(f"{collection} entry missing path.")
|
||||
continue
|
||||
resolved = resolve_path(str(path_value), release_dir)
|
||||
if not resolved.exists():
|
||||
errors.append(f"{label} missing file → {resolved}")
|
||||
continue
|
||||
recorded_sha = item.get("sha256")
|
||||
if recorded_sha:
|
||||
actual_sha = compute_sha256(resolved)
|
||||
if actual_sha != recorded_sha:
|
||||
errors.append(
|
||||
f"{label} SHA mismatch for {resolved} "
|
||||
f"(recorded {recorded_sha}, computed {actual_sha})"
|
||||
)
|
||||
|
||||
|
||||
def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
|
||||
debug = manifest.get("debugStore")
|
||||
if not isinstance(debug, Mapping):
|
||||
return
|
||||
manifest_path_str = debug.get("manifest")
|
||||
manifest_data: Optional[Mapping[str, Any]] = None
|
||||
if manifest_path_str:
|
||||
manifest_path = resolve_path(str(manifest_path_str), release_dir)
|
||||
if not manifest_path.exists():
|
||||
errors.append(f"Debug manifest missing → {manifest_path}")
|
||||
else:
|
||||
recorded_sha = debug.get("sha256")
|
||||
if recorded_sha:
|
||||
actual_sha = compute_sha256(manifest_path)
|
||||
if actual_sha != recorded_sha:
|
||||
errors.append(
|
||||
f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})"
|
||||
)
|
||||
sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
|
||||
sidecar_sha = parse_sha_file(sha_sidecar)
|
||||
if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha:
|
||||
errors.append(
|
||||
f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}"
|
||||
)
|
||||
try:
|
||||
with manifest_path.open("r", encoding="utf-8") as handle:
|
||||
manifest_data = json.load(handle)
|
||||
except json.JSONDecodeError as exc:
|
||||
errors.append(f"Debug manifest JSON invalid: {exc}")
|
||||
directory = debug.get("directory")
|
||||
if directory:
|
||||
debug_dir = resolve_path(str(directory), release_dir)
|
||||
if not debug_dir.exists():
|
||||
errors.append(f"Debug directory missing → {debug_dir}")
|
||||
|
||||
if manifest_data:
|
||||
artifacts = manifest_data.get("artifacts")
|
||||
if not isinstance(artifacts, list) or not artifacts:
|
||||
errors.append("Debug manifest contains no artefacts.")
|
||||
return
|
||||
|
||||
declared_entries = debug.get("entries")
|
||||
if isinstance(declared_entries, int) and declared_entries != len(artifacts):
|
||||
errors.append(
|
||||
f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts."
|
||||
)
|
||||
|
||||
for artefact in artifacts:
|
||||
if not isinstance(artefact, Mapping):
|
||||
errors.append("Debug manifest artefact entry is not a mapping.")
|
||||
continue
|
||||
debug_path = artefact.get("debugPath")
|
||||
artefact_sha = artefact.get("sha256")
|
||||
if not debug_path or not artefact_sha:
|
||||
errors.append("Debug manifest artefact missing debugPath or sha256.")
|
||||
continue
|
||||
resolved_debug = resolve_path(str(debug_path), release_dir)
|
||||
if not resolved_debug.exists():
|
||||
errors.append(f"Debug artefact missing → {resolved_debug}")
|
||||
continue
|
||||
actual_sha = compute_sha256(resolved_debug)
|
||||
if actual_sha != artefact_sha:
|
||||
errors.append(
|
||||
f"Debug artefact SHA mismatch for {resolved_debug} "
|
||||
f"(recorded {artefact_sha}, computed {actual_sha})"
|
||||
)
|
||||
|
||||
def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
|
||||
sig_path_value = signature.get("path")
|
||||
if not sig_path_value:
|
||||
errors.append(f"{component_name}: {label} signature missing path.")
|
||||
return
|
||||
sig_path = resolve_path(str(sig_path_value), release_dir)
|
||||
if not sig_path.exists():
|
||||
errors.append(f"{component_name}: {label} signature missing → {sig_path}")
|
||||
return
|
||||
recorded_sha = signature.get("sha256")
|
||||
if recorded_sha:
|
||||
actual_sha = compute_sha256(sig_path)
|
||||
if actual_sha != recorded_sha:
|
||||
errors.append(
|
||||
f"{component_name}: {label} signature SHA mismatch for {sig_path} "
|
||||
f"(recorded {recorded_sha}, computed {actual_sha})"
|
||||
)
|
||||
|
||||
|
||||
def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
|
||||
cli_entries = manifest.get("cli")
|
||||
if not cli_entries:
|
||||
return
|
||||
if not isinstance(cli_entries, list):
|
||||
errors.append("CLI manifest section must be a list.")
|
||||
return
|
||||
for entry in cli_entries:
|
||||
if not isinstance(entry, Mapping):
|
||||
errors.append("CLI entry must be a mapping.")
|
||||
continue
|
||||
runtime = entry.get("runtime", "<unknown>")
|
||||
component_name = f"cli[{runtime}]"
|
||||
archive = entry.get("archive")
|
||||
if not isinstance(archive, Mapping):
|
||||
errors.append(f"{component_name}: archive metadata missing or invalid.")
|
||||
else:
|
||||
verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
|
||||
signature = archive.get("signature")
|
||||
if isinstance(signature, Mapping):
|
||||
verify_signature(signature, release_dir, "archive", component_name, errors)
|
||||
elif signature is not None:
|
||||
errors.append(f"{component_name}: archive signature must be an object.")
|
||||
sbom = entry.get("sbom")
|
||||
if sbom:
|
||||
if not isinstance(sbom, Mapping):
|
||||
errors.append(f"{component_name}: sbom entry must be a mapping.")
|
||||
else:
|
||||
verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
|
||||
signature = sbom.get("signature")
|
||||
if isinstance(signature, Mapping):
|
||||
verify_signature(signature, release_dir, "sbom", component_name, errors)
|
||||
elif signature is not None:
|
||||
errors.append(f"{component_name}: sbom signature must be an object.")
|
||||
|
||||
|
||||
def verify_release(release_dir: pathlib.Path) -> None:
|
||||
if not release_dir.exists():
|
||||
raise VerificationError(f"Release directory not found: {release_dir}")
|
||||
manifest = load_manifest(release_dir)
|
||||
errors: list[str] = []
|
||||
verify_manifest_hashes(manifest, release_dir, errors)
|
||||
verify_components(manifest, release_dir, errors)
|
||||
verify_cli_entries(manifest, release_dir, errors)
|
||||
verify_collections(manifest, release_dir, errors)
|
||||
verify_debug_store(manifest, release_dir, errors)
|
||||
if errors:
|
||||
bullet_list = "\n - ".join(errors)
|
||||
raise VerificationError(f"Release verification failed:\n - {bullet_list}")
|
||||
|
||||
|
||||
def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
"--release-dir",
|
||||
type=pathlib.Path,
|
||||
default=pathlib.Path("out/release"),
|
||||
help="Path to the release artefact directory (default: %(default)s)",
|
||||
)
|
||||
return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
|
||||
args = parse_args(argv)
|
||||
try:
|
||||
verify_release(args.release_dir.resolve())
|
||||
except VerificationError as exc:
|
||||
print(str(exc), file=sys.stderr)
|
||||
return 1
|
||||
print(f"✅ Release artefacts verified OK in {args.release_dir}")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
106
.gitea/scripts/sign/sign-authority-gaps.sh
Normal file
106
.gitea/scripts/sign/sign-authority-gaps.sh
Normal file
@@ -0,0 +1,106 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Deterministic DSSE signing helper for Authority gap artefacts (AU1–AU10, RR1–RR10).
|
||||
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
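#
# Example invocation (values are illustrative; the key resolution order is implemented below):
#   COSIGN_PRIVATE_KEY_B64="$(base64 -w0 /path/to/cosign.key)" \
#   COSIGN_PASSWORD="" \
#   OUT_DIR=out/authority-dsse \
#   .gitea/scripts/sign/sign-authority-gaps.sh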
|
||||
|
||||
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
COSIGN_BIN="${COSIGN_BIN:-}"
|
||||
|
||||
# Detect cosign binary
|
||||
if [[ -z "$COSIGN_BIN" ]]; then
|
||||
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
|
||||
COSIGN_BIN="/usr/local/bin/cosign"
|
||||
elif command -v cosign >/dev/null 2>&1; then
|
||||
COSIGN_BIN="$(command -v cosign)"
|
||||
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
|
||||
COSIGN_BIN="$ROOT/tools/cosign/cosign"
|
||||
else
|
||||
echo "cosign not found; install or set COSIGN_BIN" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Resolve key
|
||||
TMP_KEY=""
|
||||
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
|
||||
KEY_FILE="$COSIGN_KEY_FILE"
|
||||
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
|
||||
TMP_KEY="$(mktemp)"
|
||||
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
|
||||
chmod 600 "$TMP_KEY"
|
||||
KEY_FILE="$TMP_KEY"
|
||||
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
|
||||
KEY_FILE="$ROOT/tools/cosign/cosign.key"
|
||||
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
|
||||
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
|
||||
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
|
||||
else
|
||||
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
|
||||
exit 2
|
||||
fi
|
||||
|
||||
OUT_BASE="${OUT_DIR:-$ROOT/docs/modules/authority/gaps/dsse/2025-12-04}"
|
||||
if [[ "$OUT_BASE" != /* ]]; then
|
||||
OUT_BASE="$ROOT/$OUT_BASE"
|
||||
fi
|
||||
mkdir -p "$OUT_BASE"
|
||||
|
||||
ARTEFACTS=(
|
||||
"docs/modules/authority/gaps/artifacts/authority-scope-role-catalog.v1.json|authority-scope-role-catalog"
|
||||
"docs/modules/authority/gaps/artifacts/authority-jwks-metadata.schema.json|authority-jwks-metadata.schema"
|
||||
"docs/modules/authority/gaps/artifacts/crypto-profile-registry.v1.json|crypto-profile-registry"
|
||||
"docs/modules/authority/gaps/artifacts/authority-offline-verifier-bundle.v1.json|authority-offline-verifier-bundle"
|
||||
"docs/modules/authority/gaps/artifacts/authority-abac.schema.json|authority-abac.schema"
|
||||
"docs/modules/authority/gaps/artifacts/rekor-receipt-policy.v1.json|rekor-receipt-policy"
|
||||
"docs/modules/authority/gaps/artifacts/rekor-receipt.schema.json|rekor-receipt.schema"
|
||||
"docs/modules/authority/gaps/artifacts/rekor-receipt-bundle.v1.json|rekor-receipt-bundle"
|
||||
)
|
||||
|
||||
USE_BUNDLE=0
|
||||
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
|
||||
USE_BUNDLE=1
|
||||
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
|
||||
USE_BUNDLE=1
|
||||
fi
|
||||
|
||||
SHA_FILE="$OUT_BASE/SHA256SUMS"
|
||||
: > "$SHA_FILE"
|
||||
|
||||
for entry in "${ARTEFACTS[@]}"; do
|
||||
IFS="|" read -r path stem <<<"$entry"
|
||||
if [[ ! -f "$ROOT/$path" ]]; then
|
||||
echo "Missing artefact: $path" >&2
|
||||
exit 3
|
||||
fi
|
||||
if (( USE_BUNDLE )); then
|
||||
bundle="$OUT_BASE/${stem}.sigstore.json"
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||
"$COSIGN_BIN" sign-blob \
|
||||
--key "$KEY_FILE" \
|
||||
--yes \
|
||||
--tlog-upload=false \
|
||||
--bundle "$bundle" \
|
||||
"$ROOT/$path"
|
||||
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
|
||||
else
|
||||
sig="$OUT_BASE/${stem}.dsse"
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||
"$COSIGN_BIN" sign-blob \
|
||||
--key "$KEY_FILE" \
|
||||
--yes \
|
||||
--tlog-upload=false \
|
||||
--output-signature "$sig" \
|
||||
"$ROOT/$path"
|
||||
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
|
||||
fi
|
||||
|
||||
printf "%s %s\n" "$(sha256sum "$ROOT/$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$ROOT/$path")" >> "$SHA_FILE"
|
||||
echo "Signed $path"
|
||||
done
|
||||
|
||||
echo "Signed artefacts written to $OUT_BASE"
|
||||
|
||||
if [[ -n "$TMP_KEY" ]]; then
|
||||
rm -f "$TMP_KEY"
|
||||
fi
|
||||
50
.gitea/scripts/sign/sign-policy.sh
Normal file
50
.gitea/scripts/sign/sign-policy.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
# Signs a policy file with cosign and verifies it. Intended for CI and offline use.
|
||||
# Requires COSIGN_KEY_B64 (private key PEM base64) or KMS envs; optional COSIGN_PASSWORD.
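#
# Example invocation (illustrative key material; verification uses the derived public key):
#   COSIGN_KEY_B64="$(base64 -w0 cosign.key)" COSIGN_PASSWORD="" \
#     .gitea/scripts/sign/sign-policy.sh --file policy.yaml --out-dir out/policy-sign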
|
||||
|
||||
usage() {
|
||||
cat <<'USAGE'
|
||||
Usage: sign-policy.sh --file <path> [--out-dir out/policy-sign]
|
||||
Env:
|
||||
COSIGN_KEY_B64 base64-encoded PEM private key (if not using KMS)
|
||||
COSIGN_PASSWORD passphrase for the key (can be empty for test keys)
|
||||
COSIGN_PUBLIC_KEY_PATH optional path to write public key for verify step
|
||||
USAGE
|
||||
}
|
||||
|
||||
FILE=""
|
||||
OUT_DIR="out/policy-sign"
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--file) FILE="$2"; shift 2;;
|
||||
--out-dir) OUT_DIR="$2"; shift 2;;
|
||||
-h|--help) usage; exit 0;;
|
||||
*) echo "Unknown arg: $1" >&2; usage; exit 1;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ -z "$FILE" ]]; then echo "--file is required" >&2; exit 1; fi
|
||||
if [[ ! -f "$FILE" ]]; then echo "file not found: $FILE" >&2; exit 1; fi
|
||||
|
||||
mkdir -p "$OUT_DIR"
|
||||
BASENAME=$(basename "$FILE")
|
||||
SIG="$OUT_DIR/${BASENAME}.sig"
|
||||
PUB_OUT="${COSIGN_PUBLIC_KEY_PATH:-$OUT_DIR/cosign.pub}"
|
||||
|
||||
if [[ -n "${COSIGN_KEY_B64:-}" ]]; then
|
||||
KEYFILE="$OUT_DIR/cosign.key"
|
||||
printf "%s" "$COSIGN_KEY_B64" | base64 -d > "$KEYFILE"
|
||||
chmod 600 "$KEYFILE"
|
||||
export COSIGN_KEY="$KEYFILE"
|
||||
fi
|
||||
|
||||
export COSIGN_PASSWORD=${COSIGN_PASSWORD:-}
|
||||
cosign version >/dev/null
|
||||
|
||||
cosign sign-blob "$FILE" --output-signature "$SIG"
|
||||
cosign public-key --key "$COSIGN_KEY" > "$PUB_OUT"
|
||||
cosign verify-blob --key "$PUB_OUT" --signature "$SIG" "$FILE"
|
||||
|
||||
printf "Signed %s -> %s\nPublic key -> %s\n" "$FILE" "$SIG" "$PUB_OUT"
|
||||
106
.gitea/scripts/sign/sign-signals.sh
Normal file
106
.gitea/scripts/sign/sign-signals.sh
Normal file
@@ -0,0 +1,106 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Deterministic DSSE signing helper for Signals artifacts.
|
||||
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
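#
# Example invocation (illustrative; COSIGN_KEY_FILE may point at any cosign private key):
#   COSIGN_KEY_FILE=tools/cosign/cosign.key \
#   OUT_DIR=evidence-locker/signals/2025-12-01 \
#   .gitea/scripts/sign/sign-signals.sh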
|
||||
|
||||
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
COSIGN_BIN="${COSIGN_BIN:-}"
|
||||
|
||||
# Detect cosign binary (v3 preferred).
|
||||
if [[ -z "$COSIGN_BIN" ]]; then
|
||||
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
|
||||
COSIGN_BIN="/usr/local/bin/cosign"
|
||||
elif command -v cosign >/dev/null 2>&1; then
|
||||
COSIGN_BIN="$(command -v cosign)"
|
||||
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
|
||||
COSIGN_BIN="$ROOT/tools/cosign/cosign"
|
||||
else
|
||||
echo "cosign not found; install or set COSIGN_BIN" >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Resolve key
|
||||
TMP_KEY=""
|
||||
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
|
||||
KEY_FILE="$COSIGN_KEY_FILE"
|
||||
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
|
||||
TMP_KEY="$(mktemp)"
|
||||
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
|
||||
chmod 600 "$TMP_KEY"
|
||||
KEY_FILE="$TMP_KEY"
|
||||
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
|
||||
KEY_FILE="$ROOT/tools/cosign/cosign.key"
|
||||
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
|
||||
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
|
||||
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
|
||||
else
|
||||
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
|
||||
exit 2
|
||||
fi
|
||||
|
||||
OUT_BASE="${OUT_DIR:-$ROOT/evidence-locker/signals/2025-12-01}"
|
||||
# Normalize OUT_BASE to absolute to avoid pushd-relative path issues.
|
||||
if [[ "$OUT_BASE" != /* ]]; then
|
||||
OUT_BASE="$ROOT/$OUT_BASE"
|
||||
fi
|
||||
mkdir -p "$OUT_BASE"
|
||||
|
||||
ARTIFACTS=(
|
||||
"decay/confidence_decay_config.yaml|stella.ops/confidenceDecayConfig@v1|confidence_decay_config"
|
||||
"unknowns/unknowns_scoring_manifest.json|stella.ops/unknownsScoringManifest@v1|unknowns_scoring_manifest"
|
||||
"heuristics/heuristics.catalog.json|stella.ops/heuristicCatalog@v1|heuristics_catalog"
|
||||
)
|
||||
|
||||
USE_BUNDLE=0
|
||||
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
|
||||
USE_BUNDLE=1
|
||||
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
|
||||
USE_BUNDLE=1
|
||||
fi
|
||||
|
||||
pushd "$ROOT/docs/modules/signals" >/dev/null
|
||||
|
||||
SHA_FILE="$OUT_BASE/SHA256SUMS"
|
||||
: > "$SHA_FILE"
|
||||
|
||||
for entry in "${ARTIFACTS[@]}"; do
|
||||
IFS="|" read -r path predicate stem <<<"$entry"
|
||||
if [[ ! -f "$path" ]]; then
|
||||
echo "Missing artifact: $path" >&2
|
||||
exit 3
|
||||
fi
|
||||
|
||||
if (( USE_BUNDLE )); then
|
||||
bundle="$OUT_BASE/${stem}.sigstore.json"
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||
"$COSIGN_BIN" sign-blob \
|
||||
--key "$KEY_FILE" \
|
||||
--yes \
|
||||
--tlog-upload=false \
|
||||
--bundle "$bundle" \
|
||||
"$path"
|
||||
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
|
||||
else
|
||||
sig="$OUT_BASE/${stem}.dsse"
|
||||
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
|
||||
"$COSIGN_BIN" sign-blob \
|
||||
--key "$KEY_FILE" \
|
||||
--yes \
|
||||
--tlog-upload=false \
|
||||
--output-signature "$sig" \
|
||||
"$path"
|
||||
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
|
||||
fi
|
||||
|
||||
printf "%s %s\n" "$(sha256sum "$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$path")" >> "$SHA_FILE"
|
||||
done
|
||||
|
||||
popd >/dev/null
|
||||
|
||||
echo "Signed artifacts written to $OUT_BASE"
|
||||
|
||||
if [[ -n "$TMP_KEY" ]]; then
|
||||
rm -f "$TMP_KEY"
|
||||
fi
|
||||
22
.gitea/scripts/test/determinism-run.sh
Normal file
22
.gitea/scripts/test/determinism-run.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# DEVOPS-SCAN-90-004: run determinism harness/tests and collect report
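# Assumes the test project has already been built; dotnet test runs with --no-build below.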
|
||||
|
||||
ROOT="$(git rev-parse --show-toplevel)"
|
||||
OUT="${ROOT}/out/scanner-determinism"
|
||||
mkdir -p "$OUT"
|
||||
|
||||
PROJECT="src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj"
|
||||
|
||||
echo "[determinism] running dotnet test (filter=Determinism)"
|
||||
dotnet test "$PROJECT" --no-build --logger "trx;LogFileName=determinism.trx" --filter Determinism
|
||||
|
||||
find "$(dirname "$PROJECT")" -name "*.trx" -print -exec cp {} "$OUT/" \;
|
||||
|
||||
echo "[determinism] summarizing"
|
||||
printf "project=%s\n" "$PROJECT" > "$OUT/summary.txt"
|
||||
printf "timestamp=%s\n" "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> "$OUT/summary.txt"
|
||||
|
||||
tar -C "$OUT" -czf "$OUT/determinism-artifacts.tgz" .
|
||||
echo "[determinism] artifacts at $OUT"
|
||||
7
.gitea/scripts/test/run-fixtures-check.sh
Normal file
7
.gitea/scripts/test/run-fixtures-check.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
root_dir=$(cd "$(dirname "$0")/.." && pwd)
|
||||
verifier="$root_dir/packs/verify_offline_bundle.py"
|
||||
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/good" --manifest bundle.json --require-dsse
|
||||
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/bad" --manifest bundle-missing-quota.json --require-dsse && exit 1 || true
|
||||
echo "fixture checks completed"
|
||||
16
.gitea/scripts/util/cleanup-runner-space.sh
Normal file
16
.gitea/scripts/util/cleanup-runner-space.sh
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env bash
|
||||
# Safe-ish workspace cleanup when the runner hits “No space left on device”.
|
||||
# Deletes build/test outputs that are regenerated; preserves offline caches and sources.
|
||||
set -euo pipefail
|
||||
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
|
||||
echo "Cleaning workspace outputs under: ${ROOT}"
|
||||
|
||||
rm -rf "${ROOT}/TestResults" || true
|
||||
rm -rf "${ROOT}/out" || true
|
||||
rm -rf "${ROOT}/artifacts" || true
|
||||
|
||||
# Trim common temp locations if they exist in repo workspace
|
||||
[ -d "${ROOT}/tmp" ] && find "${ROOT}/tmp" -mindepth 1 -maxdepth 1 -exec rm -rf {} +
|
||||
|
||||
echo "Done. Consider also clearing any runner-level /tmp outside the workspace if safe."
|
||||
27
.gitea/scripts/util/dotnet-filter.sh
Normal file
27
.gitea/scripts/util/dotnet-filter.sh
Normal file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env bash
|
||||
# Thin wrapper to strip the harness-injected "workdir:" switch that breaks dotnet/msbuild parsing.
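# For example, an injected argument such as "workdir:/repo,-c,Release" (hypothetical) is
# reduced to "-c" "Release" before the real dotnet binary is exec'd.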
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
real_dotnet="$(command -v dotnet || true)"  # || true so the explicit error below is reachable under set -e
|
||||
if [[ -z "${real_dotnet}" ]]; then
|
||||
echo "dotnet executable not found in PATH" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
filtered_args=()
|
||||
for arg in "$@"; do
|
||||
# Drop any argument that exactly matches or contains the injected workdir switch.
|
||||
if [[ "${arg}" == *"workdir:"* ]]; then
|
||||
# If the arg also contains other comma-separated parts, keep the non-workdir pieces.
|
||||
IFS=',' read -r -a parts <<< "${arg}"
|
||||
for part in "${parts[@]}"; do
|
||||
[[ "${part}" == *"workdir:"* || -z "${part}" ]] && continue
|
||||
filtered_args+=("${part}")
|
||||
done
|
||||
continue
|
||||
fi
|
||||
filtered_args+=("${arg}")
|
||||
done
|
||||
|
||||
exec "${real_dotnet}" "${filtered_args[@]}"
|
||||
26
.gitea/scripts/util/enable-openssl11-shim.sh
Normal file
26
.gitea/scripts/util/enable-openssl11-shim.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Ensures OpenSSL 1.1 shim is discoverable for Mongo2Go by exporting LD_LIBRARY_PATH.
|
||||
# Safe for repeated invocation; respects STELLAOPS_OPENSSL11_SHIM override.
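# Typical CI usage (illustrative) is a dedicated step before running tests:
#   - name: Export OpenSSL 1.1 shim for Mongo2Go
#     run: .gitea/scripts/util/enable-openssl11-shim.sh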
|
||||
|
||||
ROOT=${STELLAOPS_REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}
|
||||
SHIM_DIR=${STELLAOPS_OPENSSL11_SHIM:-"${ROOT}/src/__Tests/native/openssl-1.1/linux-x64"}
|
||||
|
||||
if [[ ! -d "${SHIM_DIR}" ]]; then
|
||||
echo "::warning ::OpenSSL 1.1 shim directory not found at ${SHIM_DIR}; Mongo2Go tests may fail" >&2
|
||||
exit 0
|
||||
fi
|
||||
|
||||
export LD_LIBRARY_PATH="${SHIM_DIR}:${LD_LIBRARY_PATH:-}"
|
||||
export STELLAOPS_OPENSSL11_SHIM="${SHIM_DIR}"
|
||||
|
||||
# Persist for subsequent CI steps when available
|
||||
if [[ -n "${GITHUB_ENV:-}" ]]; then
|
||||
{
|
||||
echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}"
|
||||
echo "STELLAOPS_OPENSSL11_SHIM=${STELLAOPS_OPENSSL11_SHIM}"
|
||||
} >> "${GITHUB_ENV}"
|
||||
fi
|
||||
|
||||
echo "OpenSSL 1.1 shim enabled (LD_LIBRARY_PATH=${LD_LIBRARY_PATH})"
|
||||
53
.gitea/scripts/validate/validate-compose.sh
Normal file
53
.gitea/scripts/validate/validate-compose.sh
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
# validate-compose.sh - Validate all Docker Compose profiles
|
||||
# Used by CI/CD pipelines to ensure Compose configurations are valid
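# Example (run from the repo root, no arguments): .gitea/scripts/validate/validate-compose.sh
# Each profile listed in PROFILES below is validated only if devops/compose/docker-compose.<profile>.yml exists.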
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||
COMPOSE_DIR="${REPO_ROOT}/devops/compose"
|
||||
|
||||
# Default profiles to validate
|
||||
PROFILES=(dev stage prod airgap mirror)
|
||||
|
||||
echo "=== Docker Compose Validation ==="
|
||||
echo "Compose directory: $COMPOSE_DIR"
|
||||
|
||||
# Check if compose directory exists
|
||||
if [[ ! -d "$COMPOSE_DIR" ]]; then
|
||||
echo "::warning::Compose directory not found at $COMPOSE_DIR"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Check for base docker-compose.yml
|
||||
BASE_COMPOSE="$COMPOSE_DIR/docker-compose.yml"
|
||||
if [[ ! -f "$BASE_COMPOSE" ]]; then
|
||||
echo "::warning::Base docker-compose.yml not found at $BASE_COMPOSE"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
FAILED=0
|
||||
|
||||
for profile in "${PROFILES[@]}"; do
|
||||
OVERLAY="$COMPOSE_DIR/docker-compose.$profile.yml"
|
||||
|
||||
if [[ -f "$OVERLAY" ]]; then
|
||||
echo "=== Validating docker-compose.$profile.yml ==="
|
||||
if docker compose -f "$BASE_COMPOSE" -f "$OVERLAY" config --quiet 2>&1; then
|
||||
echo "✓ Profile '$profile' is valid"
|
||||
else
|
||||
echo "✗ Profile '$profile' validation failed"
|
||||
FAILED=1
|
||||
fi
|
||||
else
|
||||
echo "⊘ Skipping profile '$profile' (no overlay file)"
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $FAILED -eq 1 ]]; then
|
||||
echo "::error::One or more Compose profiles failed validation"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "=== All Compose profiles valid! ==="
|
||||
59
.gitea/scripts/validate/validate-helm.sh
Normal file
59
.gitea/scripts/validate/validate-helm.sh
Normal file
@@ -0,0 +1,59 @@
|
||||
#!/bin/bash
|
||||
# validate-helm.sh - Validate Helm charts
|
||||
# Used by CI/CD pipelines to ensure Helm charts are valid
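# Example (run from the repo root, no arguments): .gitea/scripts/validate/validate-helm.sh
# Every directory under devops/helm that contains a Chart.yaml is linted and templated.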
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
|
||||
HELM_DIR="${REPO_ROOT}/devops/helm"
|
||||
|
||||
echo "=== Helm Chart Validation ==="
|
||||
echo "Helm directory: $HELM_DIR"
|
||||
|
||||
# Check if helm is installed
|
||||
if ! command -v helm &>/dev/null; then
|
||||
echo "::error::Helm is not installed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if helm directory exists
|
||||
if [[ ! -d "$HELM_DIR" ]]; then
|
||||
echo "::warning::Helm directory not found at $HELM_DIR"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
FAILED=0
|
||||
|
||||
# Find all Chart.yaml files (indicates a Helm chart)
|
||||
while IFS= read -r -d '' chart_file; do
|
||||
chart_dir="$(dirname "$chart_file")"
|
||||
chart_name="$(basename "$chart_dir")"
|
||||
|
||||
echo "=== Validating chart: $chart_name ==="
|
||||
|
||||
# Lint the chart
|
||||
if helm lint "$chart_dir" 2>&1; then
|
||||
echo "✓ Chart '$chart_name' lint passed"
|
||||
else
|
||||
echo "✗ Chart '$chart_name' lint failed"
|
||||
FAILED=1
|
||||
continue
|
||||
fi
|
||||
|
||||
# Template the chart (dry-run)
|
||||
if helm template "$chart_name" "$chart_dir" --debug >/dev/null 2>&1; then
|
||||
echo "✓ Chart '$chart_name' template succeeded"
|
||||
else
|
||||
echo "✗ Chart '$chart_name' template failed"
|
||||
FAILED=1
|
||||
fi
|
||||
|
||||
done < <(find "$HELM_DIR" -name "Chart.yaml" -print0)
|
||||
|
||||
if [[ $FAILED -eq 1 ]]; then
|
||||
echo "::error::One or more Helm charts failed validation"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "=== All Helm charts valid! ==="
|
||||
244
.gitea/scripts/validate/validate-sbom.sh
Normal file
244
.gitea/scripts/validate/validate-sbom.sh
Normal file
@@ -0,0 +1,244 @@
|
||||
#!/bin/bash
|
||||
# .gitea/scripts/validate/validate-sbom.sh
|
||||
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||
# Task: SCHEMA-8200-004 - Create validate-sbom.sh wrapper for sbom-utility
|
||||
#
|
||||
# Validates SBOM files against official CycloneDX JSON schemas.
|
||||
# Uses sbom-utility for CycloneDX validation.
|
||||
#
|
||||
# Usage:
|
||||
# ./.gitea/scripts/validate/validate-sbom.sh <sbom-file> [--schema <schema-path>]
# ./.gitea/scripts/validate/validate-sbom.sh src/__Tests/__Benchmarks/golden-corpus/sample.cyclonedx.json
# ./.gitea/scripts/validate/validate-sbom.sh --all # Validate all CycloneDX fixtures
|
||||
#
|
||||
# Exit codes:
|
||||
# 0 - All validations passed
|
||||
# 1 - Validation failed or error
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||
DEFAULT_SCHEMA="${SCHEMA_DIR}/cyclonedx-bom-1.6.schema.json"
|
||||
SBOM_UTILITY_VERSION="v0.16.0"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
log_info() {
|
||||
echo -e "${GREEN}[INFO]${NC} $*"
|
||||
}
|
||||
|
||||
log_warn() {
|
||||
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo -e "${RED}[ERROR]${NC} $*"
|
||||
}
|
||||
|
||||
check_sbom_utility() {
|
||||
if ! command -v sbom-utility &> /dev/null; then
|
||||
log_warn "sbom-utility not found in PATH"
|
||||
log_info "Installing sbom-utility ${SBOM_UTILITY_VERSION}..."
|
||||
|
||||
# Detect OS and architecture
|
||||
local os arch
|
||||
case "$(uname -s)" in
|
||||
Linux*) os="linux";;
|
||||
Darwin*) os="darwin";;
|
||||
MINGW*|MSYS*|CYGWIN*) os="windows";;
|
||||
*) log_error "Unsupported OS: $(uname -s)"; exit 1;;
|
||||
esac
|
||||
|
||||
case "$(uname -m)" in
|
||||
x86_64|amd64) arch="amd64";;
|
||||
arm64|aarch64) arch="arm64";;
|
||||
*) log_error "Unsupported architecture: $(uname -m)"; exit 1;;
|
||||
esac
|
||||
|
||||
local url="https://github.com/CycloneDX/sbom-utility/releases/download/${SBOM_UTILITY_VERSION}/sbom-utility-${SBOM_UTILITY_VERSION}-${os}-${arch}.tar.gz"
|
||||
local temp_dir
|
||||
temp_dir=$(mktemp -d)
|
||||
|
||||
log_info "Downloading from ${url}..."
|
||||
curl -sSfL "${url}" | tar xz -C "${temp_dir}"
|
||||
|
||||
if [[ "$os" == "windows" ]]; then
|
||||
log_info "Please add ${temp_dir}/sbom-utility.exe to your PATH"
|
||||
export PATH="${temp_dir}:${PATH}"
|
||||
else
|
||||
log_info "Installing to /usr/local/bin (may require sudo)..."
|
||||
if [[ -w /usr/local/bin ]]; then
|
||||
mv "${temp_dir}/sbom-utility" /usr/local/bin/
|
||||
else
|
||||
sudo mv "${temp_dir}/sbom-utility" /usr/local/bin/
|
||||
fi
|
||||
fi
|
||||
|
||||
rm -rf "${temp_dir}"
|
||||
log_info "sbom-utility installed successfully"
|
||||
fi
|
||||
}
|
||||
|
||||
validate_cyclonedx() {
|
||||
local sbom_file="$1"
|
||||
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||
|
||||
if [[ ! -f "$sbom_file" ]]; then
|
||||
log_error "File not found: $sbom_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ ! -f "$schema" ]]; then
|
||||
log_error "Schema not found: $schema"
|
||||
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Detect if it's a CycloneDX file
|
||||
if ! grep -q '"bomFormat"' "$sbom_file" 2>/dev/null; then
|
||||
log_warn "File does not appear to be CycloneDX: $sbom_file"
|
||||
log_info "Skipping (use validate-spdx.sh for SPDX files)"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_info "Validating: $sbom_file"
|
||||
|
||||
# Run sbom-utility validation
|
||||
if sbom-utility validate --input-file "$sbom_file" --format json 2>&1; then
|
||||
log_info "✓ Validation passed: $sbom_file"
|
||||
return 0
|
||||
else
|
||||
log_error "✗ Validation failed: $sbom_file"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
validate_all() {
|
||||
local fixture_dir="${REPO_ROOT}/src/__Tests/__Benchmarks/golden-corpus"
|
||||
local failed=0
|
||||
local passed=0
|
||||
local skipped=0
|
||||
|
||||
log_info "Validating all CycloneDX fixtures in ${fixture_dir}..."
|
||||
|
||||
if [[ ! -d "$fixture_dir" ]]; then
|
||||
log_error "Fixture directory not found: $fixture_dir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
while IFS= read -r -d '' file; do
|
||||
if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
|
||||
if validate_cyclonedx "$file"; then
|
||||
passed=$((passed+1))  # plain assignment; ((passed++)) returns non-zero when passed is 0 and would trip set -e
|
||||
else
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
else
|
||||
log_info "Skipping non-CycloneDX file: $file"
|
||||
skipped=$((skipped+1))
|
||||
fi
|
||||
done < <(find "$fixture_dir" -type f -name '*.json' -print0)
|
||||
|
||||
echo ""
|
||||
log_info "Validation Summary:"
|
||||
log_info " Passed: ${passed}"
|
||||
log_info " Failed: ${failed}"
|
||||
log_info " Skipped: ${skipped}"
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
log_error "Some validations failed!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "All CycloneDX validations passed!"
|
||||
return 0
|
||||
}
|
||||
|
||||
usage() {
|
||||
cat << EOF
|
||||
Usage: $(basename "$0") [OPTIONS] <sbom-file>
|
||||
|
||||
Validates CycloneDX SBOM files against official JSON schemas.
|
||||
|
||||
Options:
|
||||
--all Validate all CycloneDX fixtures in src/__Tests/__Benchmarks/golden-corpus/
|
||||
--schema <path> Use custom schema file (default: docs/schemas/cyclonedx-bom-1.6.schema.json)
|
||||
--help, -h Show this help message
|
||||
|
||||
Examples:
|
||||
$(basename "$0") sample.cyclonedx.json
|
||||
$(basename "$0") --schema custom-schema.json sample.json
|
||||
$(basename "$0") --all
|
||||
|
||||
Exit codes:
|
||||
0 All validations passed
|
||||
1 Validation failed or error
|
||||
EOF
|
||||
}
|
||||
|
||||
main() {
|
||||
local schema="$DEFAULT_SCHEMA"
|
||||
local validate_all_flag=false
|
||||
local files=()
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--all)
|
||||
validate_all_flag=true
|
||||
shift
|
||||
;;
|
||||
--schema)
|
||||
schema="$2"
|
||||
shift 2
|
||||
;;
|
||||
--help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
log_error "Unknown option: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
files+=("$1")
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Ensure sbom-utility is available
|
||||
check_sbom_utility
|
||||
|
||||
if [[ "$validate_all_flag" == "true" ]]; then
|
||||
validate_all
|
||||
exit $?
|
||||
fi
|
||||
|
||||
if [[ ${#files[@]} -eq 0 ]]; then
|
||||
log_error "No SBOM file specified"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local failed=0
|
||||
for file in "${files[@]}"; do
|
||||
if ! validate_cyclonedx "$file" "$schema"; then
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
||||
}
|
||||
|
||||
main "$@"
|
||||
277
.gitea/scripts/validate/validate-spdx.sh
Normal file
277
.gitea/scripts/validate/validate-spdx.sh
Normal file
@@ -0,0 +1,277 @@
|
||||
#!/bin/bash
|
||||
# .gitea/scripts/validate/validate-spdx.sh
|
||||
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||
# Task: SCHEMA-8200-005 - Create validate-spdx.sh wrapper for SPDX validation
|
||||
#
|
||||
# Validates SPDX files against SPDX 3.0.1 JSON schema.
|
||||
# Uses pyspdxtools (spdx-tools) for SPDX validation.
|
||||
#
|
||||
# Usage:
|
||||
# ./.gitea/scripts/validate/validate-spdx.sh <spdx-file>
# ./.gitea/scripts/validate/validate-spdx.sh bench/golden-corpus/sample.spdx.json
# ./.gitea/scripts/validate/validate-spdx.sh --all # Validate all SPDX fixtures
|
||||
#
|
||||
# Exit codes:
|
||||
# 0 - All validations passed
|
||||
# 1 - Validation failed or error
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||
DEFAULT_SCHEMA="${SCHEMA_DIR}/spdx-jsonld-3.0.1.schema.json"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
log_info() {
|
||||
echo -e "${GREEN}[INFO]${NC} $*"
|
||||
}
|
||||
|
||||
log_warn() {
|
||||
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo -e "${RED}[ERROR]${NC} $*"
|
||||
}
|
||||
|
||||
check_spdx_tools() {
|
||||
if ! command -v pyspdxtools &> /dev/null; then
|
||||
log_warn "pyspdxtools not found in PATH"
|
||||
log_info "Installing spdx-tools via pip..."
|
||||
|
||||
if command -v pip3 &> /dev/null; then
|
||||
pip3 install --user spdx-tools
|
||||
elif command -v pip &> /dev/null; then
|
||||
pip install --user spdx-tools
|
||||
else
|
||||
log_error "pip not found. Please install Python and pip first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_info "spdx-tools installed successfully"
|
||||
|
||||
# Refresh PATH for newly installed tools
|
||||
if [[ -d "${HOME}/.local/bin" ]]; then
|
||||
export PATH="${HOME}/.local/bin:${PATH}"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
check_ajv() {
|
||||
if ! command -v ajv &> /dev/null; then
|
||||
log_warn "ajv-cli not found in PATH"
|
||||
log_info "Installing ajv-cli via npm..."
|
||||
|
||||
if command -v npm &> /dev/null; then
|
||||
npm install -g ajv-cli ajv-formats
|
||||
else
|
||||
log_warn "npm not found. JSON schema validation will be skipped."
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "ajv-cli installed successfully"
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
validate_spdx_schema() {
|
||||
local spdx_file="$1"
|
||||
local schema="$2"
|
||||
|
||||
if check_ajv; then
|
||||
log_info "Validating against JSON schema: $schema"
|
||||
if ajv validate -s "$schema" -d "$spdx_file" --spec=draft2020 2>&1; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
log_warn "Skipping JSON schema validation (ajv not available)"
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
validate_spdx() {
|
||||
local spdx_file="$1"
|
||||
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||
|
||||
if [[ ! -f "$spdx_file" ]]; then
|
||||
log_error "File not found: $spdx_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Detect if it's an SPDX file (JSON-LD format)
|
||||
if ! grep -qE '"@context"|"spdxId"|"spdxVersion"' "$spdx_file" 2>/dev/null; then
|
||||
log_warn "File does not appear to be SPDX: $spdx_file"
|
||||
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_info "Validating: $spdx_file"
|
||||
|
||||
local validation_passed=true
|
||||
|
||||
# Try pyspdxtools validation first (semantic validation)
|
||||
if command -v pyspdxtools &> /dev/null; then
|
||||
log_info "Running SPDX semantic validation..."
|
||||
if pyspdxtools validate "$spdx_file" 2>&1; then
|
||||
log_info "✓ SPDX semantic validation passed"
|
||||
else
|
||||
# pyspdxtools may not support SPDX 3.0 yet
|
||||
log_warn "pyspdxtools validation failed or not supported for this format"
|
||||
log_info "Falling back to JSON schema validation only"
|
||||
fi
|
||||
fi
|
||||
|
||||
# JSON schema validation (syntax validation)
|
||||
if [[ -f "$schema" ]]; then
|
||||
if validate_spdx_schema "$spdx_file" "$schema"; then
|
||||
log_info "✓ JSON schema validation passed"
|
||||
else
|
||||
log_error "✗ JSON schema validation failed"
|
||||
validation_passed=false
|
||||
fi
|
||||
else
|
||||
log_warn "Schema file not found: $schema"
|
||||
log_info "Skipping schema validation"
|
||||
fi
|
||||
|
||||
if [[ "$validation_passed" == "true" ]]; then
|
||||
log_info "✓ Validation passed: $spdx_file"
|
||||
return 0
|
||||
else
|
||||
log_error "✗ Validation failed: $spdx_file"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
validate_all() {
|
||||
local fixture_dir="${REPO_ROOT}/bench/golden-corpus"
|
||||
local failed=0
|
||||
local passed=0
|
||||
local skipped=0
|
||||
|
||||
log_info "Validating all SPDX fixtures in ${fixture_dir}..."
|
||||
|
||||
if [[ ! -d "$fixture_dir" ]]; then
|
||||
log_error "Fixture directory not found: $fixture_dir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
while IFS= read -r -d '' file; do
|
||||
# Check if it's an SPDX file
|
||||
if grep -qE '"@context"|"spdxVersion"' "$file" 2>/dev/null; then
|
||||
if validate_spdx "$file"; then
|
||||
passed=$((passed+1))  # plain assignment keeps set -e from exiting on a zero pre-increment value
|
||||
else
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
else
|
||||
log_info "Skipping non-SPDX file: $file"
|
||||
skipped=$((skipped+1))
|
||||
fi
|
||||
done < <(find "$fixture_dir" -type f \( -name '*spdx*.json' -o -name '*.spdx.json' \) -print0)
|
||||
|
||||
echo ""
|
||||
log_info "Validation Summary:"
|
||||
log_info " Passed: ${passed}"
|
||||
log_info " Failed: ${failed}"
|
||||
log_info " Skipped: ${skipped}"
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
log_error "Some validations failed!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "All SPDX validations passed!"
|
||||
return 0
|
||||
}
|
||||
|
||||
usage() {
|
||||
cat << EOF
|
||||
Usage: $(basename "$0") [OPTIONS] <spdx-file>
|
||||
|
||||
Validates SPDX files against SPDX 3.0.1 JSON schema.
|
||||
|
||||
Options:
|
||||
--all Validate all SPDX fixtures in bench/golden-corpus/
|
||||
--schema <path> Use custom schema file (default: docs/schemas/spdx-jsonld-3.0.1.schema.json)
|
||||
--help, -h Show this help message
|
||||
|
||||
Examples:
|
||||
$(basename "$0") sample.spdx.json
|
||||
$(basename "$0") --schema custom-schema.json sample.json
|
||||
$(basename "$0") --all
|
||||
|
||||
Exit codes:
|
||||
0 All validations passed
|
||||
1 Validation failed or error
|
||||
EOF
|
||||
}
|
||||
|
||||
main() {
|
||||
local schema="$DEFAULT_SCHEMA"
|
||||
local validate_all_flag=false
|
||||
local files=()
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--all)
|
||||
validate_all_flag=true
|
||||
shift
|
||||
;;
|
||||
--schema)
|
||||
schema="$2"
|
||||
shift 2
|
||||
;;
|
||||
--help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
log_error "Unknown option: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
files+=("$1")
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Ensure tools are available
|
||||
check_spdx_tools || true # Continue even if pyspdxtools install fails
|
||||
|
||||
if [[ "$validate_all_flag" == "true" ]]; then
|
||||
validate_all
|
||||
exit $?
|
||||
fi
|
||||
|
||||
if [[ ${#files[@]} -eq 0 ]]; then
|
||||
log_error "No SPDX file specified"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local failed=0
|
||||
for file in "${files[@]}"; do
|
||||
if ! validate_spdx "$file" "$schema"; then
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
||||
}
|
||||
|
||||
main "$@"
|
||||
261
.gitea/scripts/validate/validate-vex.sh
Normal file
261
.gitea/scripts/validate/validate-vex.sh
Normal file
@@ -0,0 +1,261 @@
|
||||
#!/bin/bash
|
||||
# .gitea/scripts/validate/validate-vex.sh
|
||||
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
|
||||
# Task: SCHEMA-8200-006 - Create validate-vex.sh wrapper for OpenVEX validation
|
||||
#
|
||||
# Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
|
||||
# Uses ajv-cli for JSON schema validation.
|
||||
#
|
||||
# Usage:
|
||||
# ./.gitea/scripts/validate/validate-vex.sh <vex-file>
# ./.gitea/scripts/validate/validate-vex.sh bench/golden-corpus/sample.vex.json
# ./.gitea/scripts/validate/validate-vex.sh --all # Validate all VEX fixtures
|
||||
#
|
||||
# Exit codes:
|
||||
# 0 - All validations passed
|
||||
# 1 - Validation failed or error
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
|
||||
DEFAULT_SCHEMA="${SCHEMA_DIR}/openvex-0.2.0.schema.json"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
log_info() {
|
||||
echo -e "${GREEN}[INFO]${NC} $*"
|
||||
}
|
||||
|
||||
log_warn() {
|
||||
echo -e "${YELLOW}[WARN]${NC} $*"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo -e "${RED}[ERROR]${NC} $*"
|
||||
}
|
||||
|
||||
check_ajv() {
|
||||
if ! command -v ajv &> /dev/null; then
|
||||
log_warn "ajv-cli not found in PATH"
|
||||
log_info "Installing ajv-cli via npm..."
|
||||
|
||||
if command -v npm &> /dev/null; then
|
||||
npm install -g ajv-cli ajv-formats
|
||||
elif command -v npx &> /dev/null; then
|
||||
log_info "Using npx for ajv (no global install)"
|
||||
return 0
|
||||
else
|
||||
log_error "npm/npx not found. Please install Node.js first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log_info "ajv-cli installed successfully"
|
||||
fi
|
||||
}
|
||||
|
||||
run_ajv() {
|
||||
local schema="$1"
|
||||
local data="$2"
|
||||
|
||||
if command -v ajv &> /dev/null; then
|
||||
ajv validate -s "$schema" -d "$data" --spec=draft2020 2>&1
|
||||
elif command -v npx &> /dev/null; then
|
||||
npx ajv-cli validate -s "$schema" -d "$data" --spec=draft2020 2>&1
|
||||
else
|
||||
log_error "No ajv available"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
validate_openvex() {
|
||||
local vex_file="$1"
|
||||
local schema="${2:-$DEFAULT_SCHEMA}"
|
||||
|
||||
if [[ ! -f "$vex_file" ]]; then
|
||||
log_error "File not found: $vex_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ ! -f "$schema" ]]; then
|
||||
log_error "Schema not found: $schema"
|
||||
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
|
||||
log_info "Download from: https://raw.githubusercontent.com/openvex/spec/main/openvex_json_schema.json"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Detect if it's an OpenVEX file
|
||||
if ! grep -qE '"@context".*"https://openvex.dev/ns"|"openvex"' "$vex_file" 2>/dev/null; then
|
||||
log_warn "File does not appear to be OpenVEX: $vex_file"
|
||||
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
|
||||
return 0
|
||||
fi
|
||||
|
||||
log_info "Validating: $vex_file"
|
||||
|
||||
# Run ajv validation
|
||||
if run_ajv "$schema" "$vex_file"; then
|
||||
log_info "✓ Validation passed: $vex_file"
|
||||
return 0
|
||||
else
|
||||
log_error "✗ Validation failed: $vex_file"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
validate_all() {
|
||||
local failed=0
|
||||
local passed=0
|
||||
local skipped=0
|
||||
|
||||
# Search multiple directories for VEX files
|
||||
local search_dirs=(
|
||||
"${REPO_ROOT}/bench/golden-corpus"
|
||||
"${REPO_ROOT}/bench/vex-lattice"
|
||||
"${REPO_ROOT}/datasets"
|
||||
)
|
||||
|
||||
log_info "Validating all OpenVEX fixtures..."
|
||||
|
||||
for fixture_dir in "${search_dirs[@]}"; do
|
||||
if [[ ! -d "$fixture_dir" ]]; then
|
||||
log_warn "Directory not found, skipping: $fixture_dir"
|
||||
continue
|
||||
fi
|
||||
|
||||
log_info "Searching in: $fixture_dir"
|
||||
|
||||
while IFS= read -r -d '' file; do
|
||||
# Check if it's an OpenVEX file
|
||||
if grep -qE '"@context".*"https://openvex.dev/ns"|"openvex"' "$file" 2>/dev/null; then
|
||||
if validate_openvex "$file"; then
|
||||
passed=$((passed+1))  # plain assignment keeps set -e from exiting on a zero pre-increment value
|
||||
else
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
elif grep -q '"vex"' "$file" 2>/dev/null || [[ "$file" == *vex* ]]; then
|
||||
# Might be VEX-related but not OpenVEX format
|
||||
log_info "Checking potential VEX file: $file"
|
||||
if grep -qE '"@context"' "$file" 2>/dev/null; then
|
||||
if validate_openvex "$file"; then
|
||||
passed=$((passed+1))
|
||||
else
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
else
|
||||
log_info "Skipping non-OpenVEX file: $file"
|
||||
skipped=$((skipped+1))
|
||||
fi
|
||||
else
|
||||
skipped=$((skipped+1))
|
||||
fi
|
||||
done < <(find "$fixture_dir" -type f \( -name '*vex*.json' -o -name '*.vex.json' -o -name '*openvex*.json' \) -print0 2>/dev/null || true)
|
||||
done
|
||||
|
||||
echo ""
|
||||
log_info "Validation Summary:"
|
||||
log_info " Passed: ${passed}"
|
||||
log_info " Failed: ${failed}"
|
||||
log_info " Skipped: ${skipped}"
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
log_error "Some validations failed!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ $passed -eq 0 ]] && [[ $skipped -eq 0 ]]; then
|
||||
log_warn "No OpenVEX files found to validate"
|
||||
else
|
||||
log_info "All OpenVEX validations passed!"
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
usage() {
|
||||
cat << EOF
|
||||
Usage: $(basename "$0") [OPTIONS] <vex-file>
|
||||
|
||||
Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
|
||||
|
||||
Options:
|
||||
--all Validate all OpenVEX fixtures in bench/ and datasets/
|
||||
--schema <path> Use custom schema file (default: docs/schemas/openvex-0.2.0.schema.json)
|
||||
--help, -h Show this help message
|
||||
|
||||
Examples:
|
||||
$(basename "$0") sample.vex.json
|
||||
$(basename "$0") --schema custom-schema.json sample.json
|
||||
$(basename "$0") --all
|
||||
|
||||
Exit codes:
|
||||
0 All validations passed
|
||||
1 Validation failed or error
|
||||
EOF
|
||||
}
|
||||
|
||||
main() {
|
||||
local schema="$DEFAULT_SCHEMA"
|
||||
local validate_all_flag=false
|
||||
local files=()
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--all)
|
||||
validate_all_flag=true
|
||||
shift
|
||||
;;
|
||||
--schema)
|
||||
schema="$2"
|
||||
shift 2
|
||||
;;
|
||||
--help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
-*)
|
||||
log_error "Unknown option: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
files+=("$1")
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Ensure ajv is available
|
||||
check_ajv
|
||||
|
||||
if [[ "$validate_all_flag" == "true" ]]; then
|
||||
validate_all
|
||||
exit $?
|
||||
fi
|
||||
|
||||
if [[ ${#files[@]} -eq 0 ]]; then
|
||||
log_error "No VEX file specified"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local failed=0
|
||||
for file in "${files[@]}"; do
|
||||
if ! validate_openvex "$file" "$schema"; then
|
||||
failed=$((failed+1))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $failed -gt 0 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
||||
}
|
||||
|
||||
main "$@"
|
||||
25
.gitea/scripts/validate/verify-binaries.sh
Normal file
25
.gitea/scripts/validate/verify-binaries.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Verifies binary artefacts live only in approved locations.
|
||||
# Allowed roots: .nuget/packages (curated feed + cache), vendor (pinned binaries),
|
||||
# offline (air-gap bundles/templates), plugins/tools/deploy/ops (module-owned binaries).
|
||||
|
||||
repo_root="$(git rev-parse --show-toplevel)"
|
||||
cd "$repo_root"
|
||||
|
||||
# Extensions considered binary artefacts.
|
||||
binary_ext="(nupkg|dll|exe|so|dylib|a|lib|tar|tar.gz|tgz|zip|jar|deb|rpm|bin)"
|
||||
# Locations allowed to contain binaries.
|
||||
allowed_prefix="^(.nuget/packages|.nuget/packages/packages|vendor|offline|plugins|tools|deploy|ops|third_party|docs/artifacts|samples|src/.*/Fixtures|src/.*/fixtures)/"
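# Example: "src/Scanner/bin/tool.dll" would be reported as a violation, while
# "vendor/tool.dll" or ".nuget/packages/foo/1.0.0/foo.nupkg" match an allowed prefix and pass.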
|
||||
|
||||
# Only consider files that currently exist in the working tree (skip deleted placeholders).
|
||||
violations=$(git ls-files | while read -r f; do [[ -f "$f" ]] && echo "$f"; done | grep -E "\\.${binary_ext}$" | grep -Ev "$allowed_prefix" || true)
|
||||
|
||||
if [[ -n "$violations" ]]; then
|
||||
echo "Binary artefacts found outside approved directories:" >&2
|
||||
echo "$violations" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
printf "Binary layout OK (allowed roots: %s)\n" "$allowed_prefix"
|
||||
@@ -4,12 +4,12 @@ on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'ops/devops/airgap/**'
|
||||
- 'devops/airgap/**'
|
||||
- '.gitea/workflows/airgap-sealed-ci.yml'
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
paths:
|
||||
- 'ops/devops/airgap/**'
|
||||
- 'devops/airgap/**'
|
||||
- '.gitea/workflows/airgap-sealed-ci.yml'
|
||||
|
||||
jobs:
|
||||
@@ -21,8 +21,8 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
- name: Install dnslib
|
||||
run: pip install dnslib
|
||||
- name: Run sealed-mode smoke
|
||||
run: sudo ops/devops/airgap/sealed-ci-smoke.sh
|
||||
run: sudo devops/airgap/sealed-ci-smoke.sh
|
||||
|
||||
@@ -50,9 +50,9 @@ jobs:
|
||||
|
||||
- name: Package AOC backfill release
|
||||
run: |
|
||||
chmod +x ops/devops/aoc/package-backfill-release.sh
|
||||
chmod +x devops/aoc/package-backfill-release.sh
|
||||
DATASET_HASH="${{ github.event.inputs.dataset_hash }}" \
|
||||
ops/devops/aoc/package-backfill-release.sh
|
||||
devops/aoc/package-backfill-release.sh
|
||||
env:
|
||||
DATASET_HASH: ${{ github.event.inputs.dataset_hash }}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ on:
|
||||
- 'src/Concelier/**'
|
||||
- 'src/Authority/**'
|
||||
- 'src/Excititor/**'
|
||||
- 'ops/devops/aoc/**'
|
||||
- 'devops/aoc/**'
|
||||
- '.gitea/workflows/aoc-guard.yml'
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
@@ -17,7 +17,7 @@ on:
|
||||
- 'src/Concelier/**'
|
||||
- 'src/Authority/**'
|
||||
- 'src/Excititor/**'
|
||||
- 'ops/devops/aoc/**'
|
||||
- 'devops/aoc/**'
|
||||
- '.gitea/workflows/aoc-guard.yml'
|
||||
|
||||
jobs:
|
||||
@@ -33,10 +33,10 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
@@ -113,10 +113,10 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
|
||||
@@ -18,7 +18,7 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Build bundle
|
||||
run: |
|
||||
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Resolve Authority configuration
|
||||
id: config
|
||||
|
||||
@@ -9,7 +9,7 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: bash .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
|
||||
@@ -58,7 +58,7 @@ jobs:
- name: Validate Helm chart rendering
run: |
set -euo pipefail
CHART_PATH="deploy/helm/stellaops"
CHART_PATH="devops/helm/stellaops"
helm lint "$CHART_PATH"
for values in values.yaml values-dev.yaml values-stage.yaml values-prod.yaml values-airgap.yaml values-mirror.yaml; do
release="stellaops-${values%.*}"
@@ -68,7 +68,7 @@ jobs:
done

- name: Validate deployment profiles
run: ./deploy/tools/validate-profiles.sh
run: ./devops/tools/validate-profiles.sh

build-test:
runs-on: ubuntu-22.04
@@ -85,10 +85,10 @@ jobs:
fetch-depth: 0

- name: Export OpenSSL 1.1 shim for Mongo2Go
run: scripts/enable-openssl11-shim.sh
run: .gitea/scripts/util/enable-openssl11-shim.sh

- name: Verify binary layout
run: scripts/verify-binaries.sh
run: .gitea/scripts/validate/verify-binaries.sh

- name: Ensure binary manifests are up to date
run: |
@@ -106,22 +106,22 @@ jobs:
run: python3 scripts/verify-policy-scopes.py

- name: Validate NuGet restore source ordering
run: python3 ops/devops/validate_restore_sources.py
run: python3 devops/validate_restore_sources.py

- name: Validate telemetry storage configuration
run: python3 ops/devops/telemetry/validate_storage_stack.py
run: python3 devops/telemetry/validate_storage_stack.py

- name: Task Pack offline bundle fixtures
run: |
python3 scripts/packs/run-fixtures-check.sh
python3 .gitea/scripts/test/run-fixtures-check.sh

- name: Telemetry tenant isolation smoke
env:
COMPOSE_DIR: ${GITHUB_WORKSPACE}/deploy/compose
COMPOSE_DIR: ${GITHUB_WORKSPACE}/devops/compose
run: |
set -euo pipefail
./ops/devops/telemetry/generate_dev_tls.sh
COMPOSE_DIR="${COMPOSE_DIR:-${GITHUB_WORKSPACE}/deploy/compose}"
./devops/telemetry/generate_dev_tls.sh
COMPOSE_DIR="${COMPOSE_DIR:-${GITHUB_WORKSPACE}/devops/compose}"
cleanup() {
set +e
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml down -v --remove-orphans >/dev/null 2>&1)
@@ -131,8 +131,8 @@ jobs:
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry-storage.yaml up -d)
(cd "$COMPOSE_DIR" && docker compose -f docker-compose.telemetry.yaml up -d)
sleep 5
python3 ops/devops/telemetry/smoke_otel_collector.py --host localhost
python3 ops/devops/telemetry/tenant_isolation_smoke.py \
python3 devops/telemetry/smoke_otel_collector.py --host localhost
python3 devops/telemetry/tenant_isolation_smoke.py \
--collector https://localhost:4318/v1 \
--tempo https://localhost:3200 \
--loki https://localhost:3100
@@ -320,7 +320,7 @@ PY
|
||||
|
||||
curl -sSf -X POST -H 'Content-type: application/json' --data "$payload" "$SLACK_WEBHOOK"
|
||||
- name: Run release tooling tests
|
||||
run: python ops/devops/release/test_verify_release.py
|
||||
run: python devops/release/test_verify_release.py
|
||||
|
||||
- name: Build scanner language analyzer projects
|
||||
run: |
|
||||
@@ -592,9 +592,9 @@ PY
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Computing reachability metrics"
|
||||
if [ -f scripts/ci/compute-reachability-metrics.sh ]; then
|
||||
chmod +x scripts/ci/compute-reachability-metrics.sh
|
||||
METRICS=$(./scripts/ci/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||
if [ -f .gitea/scripts/metrics/compute-reachability-metrics.sh ]; then
|
||||
chmod +x .gitea/scripts/metrics/compute-reachability-metrics.sh
|
||||
METRICS=$(./.gitea/scripts/metrics/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||
echo "metrics=$METRICS" >> $GITHUB_OUTPUT
|
||||
echo "Reachability metrics: $METRICS"
|
||||
else
|
||||
@@ -607,9 +607,9 @@ PY
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Computing TTFS metrics"
|
||||
if [ -f scripts/ci/compute-ttfs-metrics.sh ]; then
|
||||
chmod +x scripts/ci/compute-ttfs-metrics.sh
|
||||
METRICS=$(./scripts/ci/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||
if [ -f .gitea/scripts/metrics/compute-ttfs-metrics.sh ]; then
|
||||
chmod +x .gitea/scripts/metrics/compute-ttfs-metrics.sh
|
||||
METRICS=$(./.gitea/scripts/metrics/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
|
||||
echo "metrics=$METRICS" >> $GITHUB_OUTPUT
|
||||
echo "TTFS metrics: $METRICS"
|
||||
else
|
||||
@@ -622,9 +622,9 @@ PY
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Enforcing performance SLOs"
|
||||
if [ -f scripts/ci/enforce-performance-slos.sh ]; then
|
||||
chmod +x scripts/ci/enforce-performance-slos.sh
|
||||
./scripts/ci/enforce-performance-slos.sh --warn-only || true
|
||||
if [ -f .gitea/scripts/metrics/enforce-performance-slos.sh ]; then
|
||||
chmod +x .gitea/scripts/metrics/enforce-performance-slos.sh
|
||||
./.gitea/scripts/metrics/enforce-performance-slos.sh --warn-only || true
|
||||
else
|
||||
echo "Performance SLO script not found, skipping"
|
||||
fi
|
||||
@@ -635,7 +635,7 @@ PY
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "::group::Validating RLS policies"
|
||||
if [ -f deploy/postgres-validation/001_validate_rls.sql ]; then
|
||||
if [ -f devops/database/postgres/validation/001_validate_rls.sql ]; then
|
||||
echo "RLS validation script found"
|
||||
# Check that all tenant-scoped schemas have RLS enabled
|
||||
SCHEMAS=("scheduler" "vex" "authority" "notify" "policy" "findings_ledger")
|
||||
@@ -801,7 +801,7 @@ PY
|
||||
password: ${{ secrets.REGISTRY_PASSWORD }}
|
||||
|
||||
- name: Run sealed-mode CI harness
|
||||
working-directory: ops/devops/sealed-mode-ci
|
||||
working-directory: devops/sealed-mode-ci
|
||||
env:
|
||||
COMPOSE_PROJECT_NAME: sealedmode
|
||||
run: |
|
||||
@@ -812,7 +812,7 @@ PY
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: sealed-mode-ci
|
||||
path: ops/devops/sealed-mode-ci/artifacts/sealed-mode-ci
|
||||
path: devops/sealed-mode-ci/artifacts/sealed-mode-ci
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
@@ -35,8 +35,8 @@ jobs:
|
||||
|
||||
- name: Build CLI artifacts
|
||||
run: |
|
||||
chmod +x scripts/cli/build-cli.sh
|
||||
RIDS="${{ github.event.inputs.rids }}" CONFIG="${{ github.event.inputs.config }}" SBOM_TOOL=syft SIGN="${{ github.event.inputs.sign }}" COSIGN_KEY="${{ secrets.COSIGN_KEY }}" scripts/cli/build-cli.sh
|
||||
chmod +x .gitea/scripts/build/build-cli.sh
|
||||
RIDS="${{ github.event.inputs.rids }}" CONFIG="${{ github.event.inputs.config }}" SBOM_TOOL=syft SIGN="${{ github.event.inputs.sign }}" COSIGN_KEY="${{ secrets.COSIGN_KEY }}" .gitea/scripts/build/build-cli.sh
|
||||
|
||||
- name: List artifacts
|
||||
run: find out/cli -maxdepth 3 -type f -print
|
||||
|
||||
@@ -19,7 +19,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
|
||||
@@ -18,7 +18,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup .NET 10 preview
|
||||
uses: actions/setup-dotnet@v4
|
||||
|
||||
@@ -6,7 +6,7 @@ on:
|
||||
paths:
|
||||
- 'src/Web/**'
|
||||
- '.gitea/workflows/console-ci.yml'
|
||||
- 'ops/devops/console/**'
|
||||
- 'devops/console/**'
|
||||
|
||||
jobs:
|
||||
lint-test-build:
|
||||
|
||||
@@ -4,7 +4,7 @@ on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- 'ops/devops/console/**'
|
||||
- 'devops/console/**'
|
||||
- '.gitea/workflows/console-runner-image.yml'
|
||||
|
||||
jobs:
|
||||
@@ -21,12 +21,12 @@ jobs:
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
|
||||
ops/devops/console/build-runner-image-ci.sh
|
||||
chmod +x devops/console/build-runner-image.sh devops/console/build-runner-image-ci.sh
|
||||
devops/console/build-runner-image-ci.sh
|
||||
|
||||
- name: Upload runner image artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: console-runner-image-${{ github.run_id }}
|
||||
path: ops/devops/artifacts/console-runner/
|
||||
path: devops/artifacts/console-runner/
|
||||
retention-days: 14
|
||||
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
@@ -51,10 +51,10 @@ jobs:
|
||||
env:
|
||||
COSIGN_EXPERIMENTAL: "1"
|
||||
run: |
|
||||
chmod +x scripts/buildx/build-multiarch.sh
|
||||
chmod +x .gitea/scripts/build/build-multiarch.sh
|
||||
extra=""
|
||||
if [[ "${{ github.event.inputs.push }}" == "true" ]]; then extra="--push"; fi
|
||||
scripts/buildx/build-multiarch.sh \
|
||||
.gitea/scripts/build/build-multiarch.sh \
|
||||
"${{ github.event.inputs.image }}" \
|
||||
"${{ github.event.inputs.context }}" \
|
||||
--platform "${{ github.event.inputs.platforms }}" \
|
||||
@@ -62,8 +62,8 @@ jobs:
|
||||
|
||||
- name: Build air-gap bundle
|
||||
run: |
|
||||
chmod +x scripts/buildx/build-airgap-bundle.sh
|
||||
scripts/buildx/build-airgap-bundle.sh "${{ github.event.inputs.image }}"
|
||||
chmod +x .gitea/scripts/build/build-airgap-bundle.sh
|
||||
.gitea/scripts/build/build-airgap-bundle.sh "${{ github.event.inputs.image }}"
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
@@ -20,7 +20,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup .NET 10 (preview)
|
||||
uses: actions/setup-dotnet@v4
|
||||
|
||||
@@ -12,7 +12,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Node (corepack/pnpm)
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
@@ -5,16 +5,16 @@ on:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'deploy/docker/**'
|
||||
- 'deploy/compose/docker-compose.*.yml'
|
||||
- 'devops/docker/**'
|
||||
- 'devops/compose/docker-compose.*.yml'
|
||||
- 'etc/appsettings.crypto.*.yaml'
|
||||
- 'etc/crypto-plugins-manifest.json'
|
||||
- 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
|
||||
- '.gitea/workflows/docker-regional-builds.yml'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'deploy/docker/**'
|
||||
- 'deploy/compose/docker-compose.*.yml'
|
||||
- 'devops/docker/**'
|
||||
- 'devops/compose/docker-compose.*.yml'
|
||||
- 'etc/appsettings.crypto.*.yaml'
|
||||
- 'etc/crypto-plugins-manifest.json'
|
||||
- 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
|
||||
@@ -65,7 +65,7 @@ jobs:
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./deploy/docker/Dockerfile.platform
|
||||
file: ./devops/docker/Dockerfile.platform
|
||||
target: runtime-base
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./deploy/docker/Dockerfile.crypto-profile
|
||||
file: ./devops/docker/Dockerfile.crypto-profile
|
||||
target: ${{ matrix.service }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
@@ -176,7 +176,7 @@ jobs:
|
||||
|
||||
- name: Validate docker-compose file
|
||||
run: |
|
||||
docker compose -f deploy/compose/docker-compose.${{ matrix.profile }}.yml config --quiet
|
||||
docker compose -f devops/compose/docker-compose.${{ matrix.profile }}.yml config --quiet
|
||||
|
||||
- name: Check required crypto configuration fields
|
||||
run: |
|
||||
|
||||
@@ -30,10 +30,10 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Emit retention summary
|
||||
env:
|
||||
@@ -40,7 +40,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Package staged Zastava artefacts
|
||||
run: |
|
||||
|
||||
@@ -5,14 +5,14 @@ on:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- 'src/ExportCenter/**'
|
||||
- 'ops/devops/export/**'
|
||||
- 'devops/export/**'
|
||||
- '.gitea/workflows/export-ci.yml'
|
||||
- 'docs/modules/devops/export-ci-contract.md'
|
||||
pull_request:
|
||||
branches: [ main, develop ]
|
||||
paths:
|
||||
- 'src/ExportCenter/**'
|
||||
- 'ops/devops/export/**'
|
||||
- 'devops/export/**'
|
||||
- '.gitea/workflows/export-ci.yml'
|
||||
- 'docs/modules/devops/export-ci-contract.md'
|
||||
|
||||
@@ -30,12 +30,12 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Export OpenSSL 1.1 shim for Mongo2Go
|
||||
run: scripts/enable-openssl11-shim.sh
|
||||
run: .gitea/scripts/util/enable-openssl11-shim.sh
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
@@ -48,9 +48,9 @@ jobs:
|
||||
|
||||
- name: Bring up MinIO
|
||||
run: |
|
||||
docker compose -f ops/devops/export/minio-compose.yml up -d
|
||||
docker compose -f devops/export/minio-compose.yml up -d
|
||||
sleep 5
|
||||
MINIO_ENDPOINT=http://localhost:9000 ops/devops/export/seed-minio.sh
|
||||
MINIO_ENDPOINT=http://localhost:9000 devops/export/seed-minio.sh
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj -c Release /p:ContinuousIntegrationBuild=true
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj -c Release --logger "trx;LogFileName=export-tests.trx" --results-directory $ARTIFACT_DIR
|
||||
|
||||
- name: Trivy/OCI smoke
|
||||
run: ops/devops/export/trivy-smoke.sh
|
||||
run: devops/export/trivy-smoke.sh
|
||||
|
||||
- name: Schema lint
|
||||
run: |
|
||||
@@ -82,4 +82,4 @@ jobs:
|
||||
|
||||
- name: Teardown MinIO
|
||||
if: always()
|
||||
run: docker compose -f ops/devops/export/minio-compose.yml down -v
|
||||
run: docker compose -f devops/export/minio-compose.yml down -v
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Trivy
|
||||
uses: aquasecurity/trivy-action@v0.24.0
|
||||
|
||||
@@ -9,10 +9,10 @@ on:
|
||||
paths:
|
||||
- 'src/Findings/**'
|
||||
- '.gitea/workflows/findings-ledger-ci.yml'
|
||||
- 'deploy/releases/2025.09-stable.yaml'
|
||||
- 'deploy/releases/2025.09-airgap.yaml'
|
||||
- 'deploy/downloads/manifest.json'
|
||||
- 'ops/devops/release/check_release_manifest.py'
|
||||
- 'devops/releases/2025.09-stable.yaml'
|
||||
- 'devops/releases/2025.09-airgap.yaml'
|
||||
- 'devops/downloads/manifest.json'
|
||||
- 'devops/release/check_release_manifest.py'
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
paths:
|
||||
@@ -217,7 +217,7 @@ jobs:
|
||||
- name: Validate release manifests (production)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python ops/devops/release/check_release_manifest.py
|
||||
python devops/release/check_release_manifest.py
|
||||
|
||||
- name: Re-apply RLS migration (idempotency check)
|
||||
run: |
|
||||
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Install k6
|
||||
run: |
|
||||
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
@@ -6,7 +6,7 @@ on:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'api/ledger/**'
|
||||
- 'ops/devops/ledger/**'
|
||||
- 'devops/ledger/**'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'api/ledger/**'
|
||||
@@ -30,8 +30,8 @@ jobs:
|
||||
|
||||
- name: Validate OpenAPI spec
|
||||
run: |
|
||||
chmod +x ops/devops/ledger/validate-oas.sh
|
||||
ops/devops/ledger/validate-oas.sh
|
||||
chmod +x devops/ledger/validate-oas.sh
|
||||
devops/ledger/validate-oas.sh
|
||||
|
||||
- name: Upload validation report
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -72,9 +72,9 @@ jobs:
|
||||
|
||||
- name: Check deprecation policy
|
||||
run: |
|
||||
if [ -f "ops/devops/ledger/deprecation-policy.yaml" ]; then
|
||||
if [ -f "devops/ledger/deprecation-policy.yaml" ]; then
|
||||
echo "Validating deprecation policy..."
|
||||
python3 -c "import yaml; yaml.safe_load(open('ops/devops/ledger/deprecation-policy.yaml'))"
|
||||
python3 -c "import yaml; yaml.safe_load(open('devops/ledger/deprecation-policy.yaml'))"
|
||||
echo "Deprecation policy is valid"
|
||||
else
|
||||
echo "[info] No deprecation policy yet (OK for initial setup)"
|
||||
|
||||
@@ -14,7 +14,7 @@ on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'ops/devops/ledger/**'
|
||||
- 'devops/ledger/**'
|
||||
|
||||
jobs:
|
||||
build-pack:
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
|
||||
- name: Build pack
|
||||
run: |
|
||||
chmod +x ops/devops/ledger/build-pack.sh
|
||||
chmod +x devops/ledger/build-pack.sh
|
||||
SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
|
||||
if [ -z "$SNAPSHOT_ID" ]; then
|
||||
SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
|
||||
@@ -48,7 +48,7 @@ jobs:
|
||||
SIGN_FLAG="--sign"
|
||||
fi
|
||||
|
||||
SNAPSHOT_ID="$SNAPSHOT_ID" ops/devops/ledger/build-pack.sh $SIGN_FLAG
|
||||
SNAPSHOT_ID="$SNAPSHOT_ID" devops/ledger/build-pack.sh $SIGN_FLAG
|
||||
|
||||
- name: Verify checksums
|
||||
run: |
|
||||
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
env:
|
||||
STAGING_MONGO_URI: ${{ inputs.mongo_uri }}
|
||||
run: |
|
||||
STAGING_MONGO_URI="$STAGING_MONGO_URI" ops/devops/lnm/backfill-validation.sh
|
||||
STAGING_MONGO_URI="$STAGING_MONGO_URI" devops/lnm/backfill-validation.sh
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
@@ -11,7 +11,7 @@ on:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
|
||||
- 'ops/devops/lnm/**'
|
||||
- 'devops/lnm/**'
|
||||
|
||||
jobs:
|
||||
build-runner:
|
||||
@@ -40,8 +40,8 @@ jobs:
|
||||
|
||||
- name: Build and package runner
|
||||
run: |
|
||||
chmod +x ops/devops/lnm/package-runner.sh
|
||||
ops/devops/lnm/package-runner.sh
|
||||
chmod +x devops/lnm/package-runner.sh
|
||||
devops/lnm/package-runner.sh
|
||||
|
||||
- name: Verify checksums
|
||||
run: |
|
||||
@@ -69,15 +69,15 @@ jobs:
|
||||
- name: Validate monitoring config
|
||||
run: |
|
||||
# Validate alert rules syntax
|
||||
if [ -f "ops/devops/lnm/alerts/lnm-alerts.yaml" ]; then
|
||||
if [ -f "devops/lnm/alerts/lnm-alerts.yaml" ]; then
|
||||
echo "Validating alert rules..."
|
||||
python3 -c "import yaml; yaml.safe_load(open('ops/devops/lnm/alerts/lnm-alerts.yaml'))"
|
||||
python3 -c "import yaml; yaml.safe_load(open('devops/lnm/alerts/lnm-alerts.yaml'))"
|
||||
fi
|
||||
|
||||
# Validate dashboard JSON
|
||||
if [ -f "ops/devops/lnm/dashboards/lnm-migration.json" ]; then
|
||||
if [ -f "devops/lnm/dashboards/lnm-migration.json" ]; then
|
||||
echo "Validating dashboard..."
|
||||
python3 -c "import json; json.load(open('ops/devops/lnm/dashboards/lnm-migration.json'))"
|
||||
python3 -c "import json; json.load(open('devops/lnm/dashboards/lnm-migration.json'))"
|
||||
fi
|
||||
|
||||
echo "Monitoring config validation complete"
|
||||
|
||||
@@ -32,7 +32,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
@@ -78,9 +78,9 @@ jobs:
|
||||
|
||||
- name: Run fixture validation
|
||||
run: |
|
||||
if [ -f scripts/packs/run-fixtures-check.sh ]; then
|
||||
chmod +x scripts/packs/run-fixtures-check.sh
|
||||
./scripts/packs/run-fixtures-check.sh
|
||||
if [ -f .gitea/scripts/test/run-fixtures-check.sh ]; then
|
||||
chmod +x .gitea/scripts/test/run-fixtures-check.sh
|
||||
./.gitea/scripts/test/run-fixtures-check.sh
|
||||
fi
|
||||
|
||||
checksum-audit:
|
||||
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
include-prerelease: true
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Verify signing prerequisites
|
||||
run: scripts/mirror/check_signing_prereqs.sh
|
||||
|
||||
@@ -3,9 +3,9 @@ name: mock-dev-release
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- deploy/releases/2025.09-mock-dev.yaml
|
||||
- deploy/downloads/manifest.json
|
||||
- ops/devops/mock-release/**
|
||||
- devops/releases/2025.09-mock-dev.yaml
|
||||
- devops/downloads/manifest.json
|
||||
- devops/mock-release/**
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
@@ -19,19 +19,19 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p out/mock-release
|
||||
cp deploy/releases/2025.09-mock-dev.yaml out/mock-release/
|
||||
cp deploy/downloads/manifest.json out/mock-release/
|
||||
cp devops/releases/2025.09-mock-dev.yaml out/mock-release/
|
||||
cp devops/downloads/manifest.json out/mock-release/
|
||||
tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
|
||||
|
||||
- name: Compose config (dev + mock overlay)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ops/devops/mock-release/config_check.sh
|
||||
devops/mock-release/config_check.sh
|
||||
|
||||
- name: Helm template (mock overlay)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
|
||||
helm template mock ./devops/helm/stellaops -f devops/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
|
||||
ls -lh /tmp/helm-mock.yaml
|
||||
|
||||
- name: Upload mock release bundle
|
||||
|
||||
405
.gitea/workflows/module-publish.yml
Normal file
@@ -0,0 +1,405 @@
# .gitea/workflows/module-publish.yml
# Per-module NuGet and container publishing to Gitea registry
# Sprint: SPRINT_20251226_004_CICD

name: Module Publish

on:
  workflow_dispatch:
    inputs:
      module:
        description: 'Module to publish'
        required: true
        type: choice
        options:
          - Authority
          - Attestor
          - Concelier
          - Scanner
          - Policy
          - Signer
          - Excititor
          - Gateway
          - Scheduler
          - Orchestrator
          - TaskRunner
          - Notify
          - CLI
      version:
        description: 'Semantic version (e.g., 1.2.3)'
        required: true
        type: string
      publish_nuget:
        description: 'Publish NuGet packages'
        type: boolean
        default: true
      publish_container:
        description: 'Publish container image'
        type: boolean
        default: true
      prerelease:
        description: 'Mark as prerelease'
        type: boolean
        default: false
  push:
    tags:
      - 'module-*-v*' # e.g., module-authority-v1.2.3

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  REGISTRY: git.stella-ops.org
  NUGET_SOURCE: https://git.stella-ops.org/api/packages/stella-ops.org/nuget/index.json

jobs:
  # ===========================================================================
  # PARSE TAG (for tag-triggered builds)
  # ===========================================================================

  parse-tag:
    name: Parse Tag
    runs-on: ubuntu-22.04
    if: github.event_name == 'push'
    outputs:
      module: ${{ steps.parse.outputs.module }}
      version: ${{ steps.parse.outputs.version }}
    steps:
      - name: Parse module and version from tag
        id: parse
        run: |
          TAG="${{ github.ref_name }}"
          # Expected format: module-{name}-v{version}
          # Example: module-authority-v1.2.3
          if [[ "$TAG" =~ ^module-([a-zA-Z]+)-v([0-9]+\.[0-9]+\.[0-9]+.*)$ ]]; then
            MODULE="${BASH_REMATCH[1]}"
            VERSION="${BASH_REMATCH[2]}"
            # Capitalize first letter
            MODULE="$(echo "${MODULE:0:1}" | tr '[:lower:]' '[:upper:]')${MODULE:1}"
            echo "module=$MODULE" >> "$GITHUB_OUTPUT"
            echo "version=$VERSION" >> "$GITHUB_OUTPUT"
            echo "Parsed: module=$MODULE, version=$VERSION"
          else
            echo "::error::Invalid tag format. Expected: module-{name}-v{version}"
            exit 1
          fi
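For reference, a minimal sketch of how a tag-triggered publish would be cut; the tag name is illustrative, following the module-{name}-v{version} convention documented above:

# Illustrative only: tag and push to trigger a publish of Authority 1.2.3.
git tag module-authority-v1.2.3
git push origin module-authority-v1.2.3
# parse-tag resolves this to module=Authority, version=1.2.3.
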
# ===========================================================================
|
||||
# VALIDATE
|
||||
# ===========================================================================
|
||||
|
||||
validate:
|
||||
name: Validate Inputs
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [parse-tag]
|
||||
if: always() && (needs.parse-tag.result == 'success' || needs.parse-tag.result == 'skipped')
|
||||
outputs:
|
||||
module: ${{ steps.resolve.outputs.module }}
|
||||
version: ${{ steps.resolve.outputs.version }}
|
||||
publish_nuget: ${{ steps.resolve.outputs.publish_nuget }}
|
||||
publish_container: ${{ steps.resolve.outputs.publish_container }}
|
||||
steps:
|
||||
- name: Resolve inputs
|
||||
id: resolve
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "push" ]]; then
|
||||
MODULE="${{ needs.parse-tag.outputs.module }}"
|
||||
VERSION="${{ needs.parse-tag.outputs.version }}"
|
||||
PUBLISH_NUGET="true"
|
||||
PUBLISH_CONTAINER="true"
|
||||
else
|
||||
MODULE="${{ github.event.inputs.module }}"
|
||||
VERSION="${{ github.event.inputs.version }}"
|
||||
PUBLISH_NUGET="${{ github.event.inputs.publish_nuget }}"
|
||||
PUBLISH_CONTAINER="${{ github.event.inputs.publish_container }}"
|
||||
fi
|
||||
|
||||
echo "module=$MODULE" >> "$GITHUB_OUTPUT"
|
||||
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
|
||||
echo "publish_nuget=$PUBLISH_NUGET" >> "$GITHUB_OUTPUT"
|
||||
echo "publish_container=$PUBLISH_CONTAINER" >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "=== Resolved Configuration ==="
|
||||
echo "Module: $MODULE"
|
||||
echo "Version: $VERSION"
|
||||
echo "Publish NuGet: $PUBLISH_NUGET"
|
||||
echo "Publish Container: $PUBLISH_CONTAINER"
|
||||
|
||||
- name: Validate version format
|
||||
run: |
|
||||
VERSION="${{ steps.resolve.outputs.version }}"
|
||||
if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.]+)?$ ]]; then
|
||||
echo "::error::Invalid version format. Expected: MAJOR.MINOR.PATCH[-prerelease]"
|
||||
exit 1
|
||||
fi
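A few sample values against the MAJOR.MINOR.PATCH[-prerelease] check above (illustrative only):

#   1.2.3        -> accepted
#   1.2.3-rc.1   -> accepted (prerelease suffix)
#   1.2          -> rejected (missing patch component)
#   v1.2.3       -> rejected (leading 'v' is not allowed)
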
|
||||
|
||||
# ===========================================================================
|
||||
# PUBLISH NUGET
|
||||
# ===========================================================================
|
||||
|
||||
publish-nuget:
|
||||
name: Publish NuGet
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate]
|
||||
if: needs.validate.outputs.publish_nuget == 'true'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Determine project path
|
||||
id: path
|
||||
run: |
|
||||
MODULE="${{ needs.validate.outputs.module }}"
|
||||
|
||||
# Map module names to project paths
|
||||
case "$MODULE" in
|
||||
Authority)
|
||||
PROJECT="src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj"
|
||||
;;
|
||||
Attestor)
|
||||
PROJECT="src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj"
|
||||
;;
|
||||
Concelier)
|
||||
PROJECT="src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj"
|
||||
;;
|
||||
Scanner)
|
||||
PROJECT="src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj"
|
||||
;;
|
||||
Policy)
|
||||
PROJECT="src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj"
|
||||
;;
|
||||
Signer)
|
||||
PROJECT="src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj"
|
||||
;;
|
||||
Excititor)
|
||||
PROJECT="src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj"
|
||||
;;
|
||||
Gateway)
|
||||
PROJECT="src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj"
|
||||
;;
|
||||
Scheduler)
|
||||
PROJECT="src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj"
|
||||
;;
|
||||
Orchestrator)
|
||||
PROJECT="src/Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj"
|
||||
;;
|
||||
TaskRunner)
|
||||
PROJECT="src/TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj"
|
||||
;;
|
||||
Notify)
|
||||
PROJECT="src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj"
|
||||
;;
|
||||
CLI)
|
||||
PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
|
||||
;;
|
||||
*)
|
||||
echo "::error::Unknown module: $MODULE"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "project=$PROJECT" >> "$GITHUB_OUTPUT"
|
||||
echo "Project path: $PROJECT"
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore ${{ steps.path.outputs.project }}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
dotnet build ${{ steps.path.outputs.project }} \
|
||||
--configuration Release \
|
||||
--no-restore \
|
||||
-p:Version=${{ needs.validate.outputs.version }}
|
||||
|
||||
- name: Pack NuGet
|
||||
run: |
|
||||
dotnet pack ${{ steps.path.outputs.project }} \
|
||||
--configuration Release \
|
||||
--no-build \
|
||||
-p:Version=${{ needs.validate.outputs.version }} \
|
||||
-p:PackageVersion=${{ needs.validate.outputs.version }} \
|
||||
--output out/packages
|
||||
|
||||
- name: Push to Gitea NuGet registry
|
||||
run: |
|
||||
for nupkg in out/packages/*.nupkg; do
|
||||
echo "Pushing: $nupkg"
|
||||
dotnet nuget push "$nupkg" \
|
||||
--source "${{ env.NUGET_SOURCE }}" \
|
||||
--api-key "${{ secrets.GITEA_TOKEN }}" \
|
||||
--skip-duplicate
|
||||
done
|
||||
|
||||
- name: Upload NuGet artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: nuget-${{ needs.validate.outputs.module }}-${{ needs.validate.outputs.version }}
|
||||
path: out/packages/*.nupkg
|
||||
retention-days: 30
|
||||
|
||||
# ===========================================================================
|
||||
# PUBLISH CONTAINER
|
||||
# ===========================================================================
|
||||
|
||||
publish-container:
|
||||
name: Publish Container
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate]
|
||||
if: needs.validate.outputs.publish_container == 'true' && needs.validate.outputs.module != 'CLI'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Gitea Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITEA_TOKEN }}
|
||||
|
||||
      - name: Determine image name
        id: image
        run: |
          MODULE="${{ needs.validate.outputs.module }}"
          VERSION="${{ needs.validate.outputs.version }}"
          MODULE_LOWER=$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')

          IMAGE="${{ env.REGISTRY }}/stella-ops.org/${MODULE_LOWER}"

          echo "name=$IMAGE" >> "$GITHUB_OUTPUT"
          echo "module_lower=$MODULE_LOWER" >> "$GITHUB_OUTPUT"
          echo "tag_version=${IMAGE}:${VERSION}" >> "$GITHUB_OUTPUT"
          echo "tag_latest=${IMAGE}:latest" >> "$GITHUB_OUTPUT"

          echo "Image: $IMAGE"
          echo "Tags: ${VERSION}, latest"

      - name: Build and push container
        uses: docker/build-push-action@v5
        with:
          context: .
          file: devops/docker/Dockerfile.platform
          # Actions expressions have no "| lower" filter; reuse the lowercased value computed above.
          target: ${{ steps.image.outputs.module_lower }}
          push: true
          tags: |
            ${{ steps.image.outputs.tag_version }}
            ${{ steps.image.outputs.tag_latest }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          labels: |
            org.opencontainers.image.title=StellaOps ${{ needs.validate.outputs.module }}
            org.opencontainers.image.version=${{ needs.validate.outputs.version }}
            org.opencontainers.image.source=https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
            org.opencontainers.image.revision=${{ github.sha }}
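Assuming the push succeeds, the image can then be pulled by version or by the floating latest tag; Authority is used here purely as an example:

docker pull git.stella-ops.org/stella-ops.org/authority:1.2.3
docker pull git.stella-ops.org/stella-ops.org/authority:latest
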
|
||||
# ===========================================================================
|
||||
# PUBLISH CLI BINARIES (multi-platform)
|
||||
# ===========================================================================
|
||||
|
||||
publish-cli:
|
||||
name: Publish CLI (${{ matrix.runtime }})
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate]
|
||||
if: needs.validate.outputs.module == 'CLI'
|
||||
strategy:
|
||||
matrix:
|
||||
runtime:
|
||||
- linux-x64
|
||||
- linux-arm64
|
||||
- win-x64
|
||||
- osx-x64
|
||||
- osx-arm64
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Install cross-compilation tools
|
||||
if: matrix.runtime == 'linux-arm64'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu
|
||||
|
||||
- name: Publish CLI
|
||||
run: |
|
||||
dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
|
||||
--configuration Release \
|
||||
--runtime ${{ matrix.runtime }} \
|
||||
--self-contained true \
|
||||
-p:Version=${{ needs.validate.outputs.version }} \
|
||||
-p:PublishSingleFile=true \
|
||||
-p:PublishTrimmed=true \
|
||||
-p:EnableCompressionInSingleFile=true \
|
||||
--output out/cli/${{ matrix.runtime }}
|
||||
|
||||
- name: Create archive
|
||||
run: |
|
||||
VERSION="${{ needs.validate.outputs.version }}"
|
||||
RUNTIME="${{ matrix.runtime }}"
|
||||
|
||||
cd out/cli/$RUNTIME
|
||||
if [[ "$RUNTIME" == win-* ]]; then
|
||||
zip -r ../stellaops-cli-${VERSION}-${RUNTIME}.zip .
|
||||
else
|
||||
tar -czvf ../stellaops-cli-${VERSION}-${RUNTIME}.tar.gz .
|
||||
fi
|
||||
|
||||
- name: Upload CLI artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cli-${{ needs.validate.outputs.version }}-${{ matrix.runtime }}
|
||||
path: |
|
||||
out/cli/*.zip
|
||||
out/cli/*.tar.gz
|
||||
retention-days: 30
|
||||
|
||||
# ===========================================================================
|
||||
# SUMMARY
|
||||
# ===========================================================================
|
||||
|
||||
summary:
|
||||
name: Publish Summary
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate, publish-nuget, publish-container, publish-cli]
|
||||
if: always()
|
||||
steps:
|
||||
- name: Generate Summary
|
||||
run: |
|
||||
echo "## Module Publish Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Module | ${{ needs.validate.outputs.module }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Version | ${{ needs.validate.outputs.version }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| NuGet | ${{ needs.publish-nuget.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Container | ${{ needs.publish-container.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| CLI | ${{ needs.publish-cli.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "### Registry URLs" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- NuGet: \`${{ env.NUGET_SOURCE }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Container: \`${{ env.REGISTRY }}/stella-ops.org/${{ needs.validate.outputs.module | lower }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Check for failures
|
||||
if: contains(needs.*.result, 'failure')
|
||||
run: |
|
||||
echo "::error::One or more publish jobs failed"
|
||||
exit 1
|
||||
@@ -21,7 +21,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Setup Python (telemetry schema checks)
|
||||
uses: actions/setup-python@v5
|
||||
@@ -36,8 +36,8 @@ jobs:
|
||||
env:
|
||||
TELEMETRY_BUNDLE_SCHEMA: docs/modules/telemetry/schemas/telemetry-bundle.schema.json
|
||||
run: |
|
||||
chmod +x ops/devops/telemetry/tests/ci-run.sh
|
||||
ops/devops/telemetry/tests/ci-run.sh
|
||||
chmod +x devops/telemetry/tests/ci-run.sh
|
||||
devops/telemetry/tests/ci-run.sh
|
||||
|
||||
- name: Upload SLO results
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Install nats CLI
|
||||
run: |
|
||||
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
run: |
|
||||
export COSIGN_KEY_B64=$(base64 -w0 out/policy-sign/keys/ci-policy-cosign.key)
|
||||
COSIGN_PASSWORD= \
|
||||
scripts/policy/sign-policy.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign
|
||||
.gitea/scripts/sign/sign-policy.sh --file docs/examples/policies/baseline.stella --out-dir out/policy-sign
|
||||
|
||||
- name: Attest and verify sample policy blob
|
||||
run: |
|
||||
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Resolve staging credentials
|
||||
id: staging
|
||||
|
||||
@@ -10,7 +10,7 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Task Pack offline bundle fixtures
|
||||
run: python3 scripts/packs/run-fixtures-check.sh
|
||||
run: python3 .gitea/scripts/test/run-fixtures-check.sh
|
||||
|
||||
- name: Emit provenance summary
|
||||
run: |
|
||||
|
||||
@@ -3,10 +3,10 @@ name: release-manifest-verify
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- deploy/releases/2025.09-stable.yaml
|
||||
- deploy/releases/2025.09-airgap.yaml
|
||||
- deploy/downloads/manifest.json
|
||||
- ops/devops/release/check_release_manifest.py
|
||||
- devops/releases/2025.09-stable.yaml
|
||||
- devops/releases/2025.09-airgap.yaml
|
||||
- devops/downloads/manifest.json
|
||||
- devops/release/check_release_manifest.py
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
@@ -16,4 +16,4 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Validate release & downloads manifests
|
||||
run: |
|
||||
python ops/devops/release/check_release_manifest.py
|
||||
python devops/release/check_release_manifest.py
|
||||
|
||||
683
.gitea/workflows/release-suite.yml
Normal file
@@ -0,0 +1,683 @@
# .gitea/workflows/release-suite.yml
# Full suite release pipeline with Ubuntu-style versioning (YYYY.MM)
# Sprint: SPRINT_20251226_005_CICD

name: Suite Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Suite version (YYYY.MM format, e.g., 2026.04)'
        required: true
        type: string
      codename:
        description: 'Release codename (e.g., Nova, Orion, Pulsar)'
        required: true
        type: string
      channel:
        description: 'Release channel'
        required: true
        type: choice
        options:
          - edge
          - stable
          - lts
        default: edge
      skip_tests:
        description: 'Skip test execution (use with caution)'
        type: boolean
        default: false
      dry_run:
        description: 'Dry run (build but do not publish)'
        type: boolean
        default: false
  push:
    tags:
      - 'suite-*' # e.g., suite-2026.04

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  REGISTRY: git.stella-ops.org
  NUGET_SOURCE: https://git.stella-ops.org/api/packages/stella-ops.org/nuget/index.json

jobs:
  # ===========================================================================
  # PARSE TAG (for tag-triggered builds)
  # ===========================================================================

  parse-tag:
    name: Parse Tag
    runs-on: ubuntu-22.04
    if: github.event_name == 'push'
    outputs:
      version: ${{ steps.parse.outputs.version }}
      codename: ${{ steps.parse.outputs.codename }}
      channel: ${{ steps.parse.outputs.channel }}
    steps:
      - name: Parse version from tag
        id: parse
        run: |
          TAG="${{ github.ref_name }}"
          # Expected format: suite-{YYYY.MM} or suite-{YYYY.MM}-{codename}
          if [[ "$TAG" =~ ^suite-([0-9]{4}\.(04|10))(-([a-zA-Z]+))?$ ]]; then
            VERSION="${BASH_REMATCH[1]}"
            CODENAME="${BASH_REMATCH[4]:-TBD}"

            # Determine channel based on month (04 = LTS, 10 = feature)
            MONTH="${BASH_REMATCH[2]}"
            if [[ "$MONTH" == "04" ]]; then
              CHANNEL="lts"
            else
              CHANNEL="stable"
            fi

            echo "version=$VERSION" >> "$GITHUB_OUTPUT"
            echo "codename=$CODENAME" >> "$GITHUB_OUTPUT"
            echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
            echo "Parsed: version=$VERSION, codename=$CODENAME, channel=$CHANNEL"
          else
            echo "::error::Invalid tag format. Expected: suite-YYYY.MM or suite-YYYY.MM-codename"
            exit 1
          fi
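Illustrative tag-to-release mappings under the parsing rules above:

# suite-2026.04        -> version=2026.04, codename=TBD,   channel=lts
# suite-2026.10-Orion  -> version=2026.10, codename=Orion, channel=stable
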
|
||||
# ===========================================================================
|
||||
# VALIDATE
|
||||
# ===========================================================================
|
||||
|
||||
validate:
|
||||
name: Validate Release
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [parse-tag]
|
||||
if: always() && (needs.parse-tag.result == 'success' || needs.parse-tag.result == 'skipped')
|
||||
outputs:
|
||||
version: ${{ steps.resolve.outputs.version }}
|
||||
codename: ${{ steps.resolve.outputs.codename }}
|
||||
channel: ${{ steps.resolve.outputs.channel }}
|
||||
dry_run: ${{ steps.resolve.outputs.dry_run }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Resolve inputs
|
||||
id: resolve
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "push" ]]; then
|
||||
VERSION="${{ needs.parse-tag.outputs.version }}"
|
||||
CODENAME="${{ needs.parse-tag.outputs.codename }}"
|
||||
CHANNEL="${{ needs.parse-tag.outputs.channel }}"
|
||||
DRY_RUN="false"
|
||||
else
|
||||
VERSION="${{ github.event.inputs.version }}"
|
||||
CODENAME="${{ github.event.inputs.codename }}"
|
||||
CHANNEL="${{ github.event.inputs.channel }}"
|
||||
DRY_RUN="${{ github.event.inputs.dry_run }}"
|
||||
fi
|
||||
|
||||
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
|
||||
echo "codename=$CODENAME" >> "$GITHUB_OUTPUT"
|
||||
echo "channel=$CHANNEL" >> "$GITHUB_OUTPUT"
|
||||
echo "dry_run=$DRY_RUN" >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "=== Suite Release Configuration ==="
|
||||
echo "Version: $VERSION"
|
||||
echo "Codename: $CODENAME"
|
||||
echo "Channel: $CHANNEL"
|
||||
echo "Dry Run: $DRY_RUN"
|
||||
|
||||
- name: Validate version format
|
||||
run: |
|
||||
VERSION="${{ steps.resolve.outputs.version }}"
|
||||
if ! [[ "$VERSION" =~ ^[0-9]{4}\.(04|10)$ ]]; then
|
||||
echo "::error::Invalid version format. Expected YYYY.MM where MM is 04 or 10 (e.g., 2026.04)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate codename
|
||||
run: |
|
||||
CODENAME="${{ steps.resolve.outputs.codename }}"
|
||||
if [[ -z "$CODENAME" || "$CODENAME" == "TBD" ]]; then
|
||||
echo "::warning::No codename provided, release will use 'TBD'"
|
||||
elif ! [[ "$CODENAME" =~ ^[A-Z][a-z]+$ ]]; then
|
||||
echo "::warning::Codename should be capitalized (e.g., Nova, Orion)"
|
||||
fi
|
||||
|
||||
# ===========================================================================
|
||||
# RUN TESTS (unless skipped)
|
||||
# ===========================================================================
|
||||
|
||||
test-gate:
|
||||
name: Test Gate
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate]
|
||||
if: github.event.inputs.skip_tests != 'true'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore src/StellaOps.sln
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/StellaOps.sln -c Release --no-restore
|
||||
|
||||
- name: Run Release Tests
|
||||
run: |
|
||||
dotnet test src/StellaOps.sln \
|
||||
--filter "Category=Unit|Category=Architecture|Category=Contract" \
|
||||
--configuration Release \
|
||||
--no-build \
|
||||
--logger "trx;LogFileName=release-tests.trx" \
|
||||
--results-directory ./TestResults
|
||||
|
||||
- name: Upload Test Results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: release-test-results
|
||||
path: ./TestResults
|
||||
retention-days: 14
|
||||
|
||||
# ===========================================================================
|
||||
# BUILD MODULES (matrix strategy)
|
||||
# ===========================================================================
|
||||
|
||||
build-modules:
|
||||
name: Build ${{ matrix.module }}
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate, test-gate]
|
||||
if: always() && needs.validate.result == 'success' && (needs.test-gate.result == 'success' || needs.test-gate.result == 'skipped')
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
module:
|
||||
- name: Authority
|
||||
project: src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj
|
||||
- name: Attestor
|
||||
project: src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
|
||||
- name: Concelier
|
||||
project: src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj
|
||||
- name: Scanner
|
||||
project: src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj
|
||||
- name: Policy
|
||||
project: src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj
|
||||
- name: Signer
|
||||
project: src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj
|
||||
- name: Excititor
|
||||
project: src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj
|
||||
- name: Gateway
|
||||
project: src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj
|
||||
- name: Scheduler
|
||||
project: src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||
include-prerelease: true
|
||||
|
||||
- name: Determine module version
|
||||
id: version
|
||||
run: |
|
||||
MODULE_NAME="${{ matrix.module.name }}"
|
||||
MODULE_LOWER=$(echo "$MODULE_NAME" | tr '[:upper:]' '[:lower:]')
|
||||
|
||||
# Try to read version from version.txt, fallback to 1.0.0
|
||||
VERSION_FILE="src/${MODULE_NAME}/version.txt"
|
||||
if [[ -f "$VERSION_FILE" ]]; then
|
||||
MODULE_VERSION=$(cat "$VERSION_FILE" | tr -d '[:space:]')
|
||||
else
|
||||
MODULE_VERSION="1.0.0"
|
||||
fi
|
||||
|
||||
echo "module_version=$MODULE_VERSION" >> "$GITHUB_OUTPUT"
|
||||
echo "module_lower=$MODULE_LOWER" >> "$GITHUB_OUTPUT"
|
||||
echo "Module: $MODULE_NAME, Version: $MODULE_VERSION"
|
||||
|
||||
- name: Restore
|
||||
run: dotnet restore ${{ matrix.module.project }}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
dotnet build ${{ matrix.module.project }} \
|
||||
--configuration Release \
|
||||
--no-restore \
|
||||
-p:Version=${{ steps.version.outputs.module_version }}
|
||||
|
||||
- name: Pack NuGet
|
||||
run: |
|
||||
dotnet pack ${{ matrix.module.project }} \
|
||||
--configuration Release \
|
||||
--no-build \
|
||||
-p:Version=${{ steps.version.outputs.module_version }} \
|
||||
-p:PackageVersion=${{ steps.version.outputs.module_version }} \
|
||||
--output out/packages
|
||||
|
||||
- name: Push NuGet
|
||||
if: needs.validate.outputs.dry_run != 'true'
|
||||
run: |
|
||||
for nupkg in out/packages/*.nupkg; do
|
||||
if [[ -f "$nupkg" ]]; then
|
||||
echo "Pushing: $nupkg"
|
||||
dotnet nuget push "$nupkg" \
|
||||
--source "${{ env.NUGET_SOURCE }}" \
|
||||
--api-key "${{ secrets.GITEA_TOKEN }}" \
|
||||
--skip-duplicate
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Upload NuGet artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: nuget-${{ matrix.module.name }}
|
||||
path: out/packages/*.nupkg
|
||||
retention-days: 30
|
||||
if-no-files-found: ignore
|
||||
|
||||
# ===========================================================================
|
||||
# BUILD CONTAINERS
|
||||
# ===========================================================================
|
||||
|
||||
build-containers:
|
||||
name: Container ${{ matrix.module }}
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [validate, build-modules]
|
||||
if: needs.validate.outputs.dry_run != 'true'
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
module:
|
||||
- authority
|
||||
- attestor
|
||||
- concelier
|
||||
- scanner
|
||||
- policy
|
||||
- signer
|
||||
- excititor
|
||||
- gateway
|
||||
- scheduler
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Gitea Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITEA_TOKEN }}
|
||||
|
||||
- name: Build and push container
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: devops/docker/Dockerfile.platform
|
||||
target: ${{ matrix.module }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:${{ needs.validate.outputs.version }}
|
||||
${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:${{ needs.validate.outputs.channel }}
|
||||
${{ env.REGISTRY }}/stella-ops.org/${{ matrix.module }}:latest
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
labels: |
|
||||
org.opencontainers.image.title=StellaOps ${{ matrix.module }}
|
||||
org.opencontainers.image.version=${{ needs.validate.outputs.version }}
|
||||
org.opencontainers.image.description=StellaOps ${{ needs.validate.outputs.version }} ${{ needs.validate.outputs.codename }}
|
||||
org.opencontainers.image.source=https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
|
  # ===========================================================================
  # BUILD CLI (multi-platform)
  # ===========================================================================

  build-cli:
    name: CLI (${{ matrix.runtime }})
    runs-on: ubuntu-22.04
    needs: [validate, test-gate]
    if: always() && needs.validate.result == 'success' && (needs.test-gate.result == 'success' || needs.test-gate.result == 'skipped')
    strategy:
      fail-fast: false
      matrix:
        runtime:
          - linux-x64
          - linux-arm64
          - win-x64
          - osx-x64
          - osx-arm64
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install cross-compilation tools
        if: matrix.runtime == 'linux-arm64'
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends binutils-aarch64-linux-gnu

      - name: Publish CLI
        run: |
          dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
            --configuration Release \
            --runtime ${{ matrix.runtime }} \
            --self-contained true \
            -p:Version=${{ needs.validate.outputs.version }}.0 \
            -p:PublishSingleFile=true \
            -p:PublishTrimmed=true \
            -p:EnableCompressionInSingleFile=true \
            --output out/cli/${{ matrix.runtime }}

      - name: Create archive
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          RUNTIME="${{ matrix.runtime }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          cd out/cli/$RUNTIME
          if [[ "$RUNTIME" == win-* ]]; then
            zip -r "../stellaops-cli-${VERSION}-${CODENAME}-${RUNTIME}.zip" .
          else
            tar -czvf "../stellaops-cli-${VERSION}-${CODENAME}-${RUNTIME}.tar.gz" .
          fi

      - name: Upload CLI artifacts
        uses: actions/upload-artifact@v4
        with:
          name: cli-${{ needs.validate.outputs.version }}-${{ matrix.runtime }}
          path: |
            out/cli/*.zip
            out/cli/*.tar.gz
          retention-days: 90
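When debugging packaging issues, the publish and archive steps can be reproduced locally for a single RID. A minimal sketch under the same project path; VERSION and CODENAME are placeholders for the `validate` job outputs.

```bash
# Sketch: build one self-contained CLI archive locally (linux-x64 shown).
VERSION=1.0.0      # placeholder for needs.validate.outputs.version
CODENAME=example   # placeholder for needs.validate.outputs.codename
RUNTIME=linux-x64

dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
  --configuration Release \
  --runtime "$RUNTIME" \
  --self-contained true \
  -p:PublishSingleFile=true \
  --output "out/cli/$RUNTIME"

tar -C "out/cli/$RUNTIME" -czf "out/cli/stellaops-cli-${VERSION}-${CODENAME}-${RUNTIME}.tar.gz" .
```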
  # ===========================================================================
  # BUILD HELM CHART
  # ===========================================================================

  build-helm:
    name: Helm Chart
    runs-on: ubuntu-22.04
    needs: [validate]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Helm
        run: |
          curl -fsSL https://get.helm.sh/helm-v3.16.0-linux-amd64.tar.gz | \
            tar -xzf - -C /tmp
          sudo install -m 0755 /tmp/linux-amd64/helm /usr/local/bin/helm

      - name: Lint Helm chart
        run: helm lint devops/helm/stellaops

      - name: Package Helm chart
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          helm package devops/helm/stellaops \
            --version "$VERSION" \
            --app-version "$VERSION" \
            --destination out/helm

      - name: Upload Helm chart
        uses: actions/upload-artifact@v4
        with:
          name: helm-chart-${{ needs.validate.outputs.version }}
          path: out/helm/*.tgz
          retention-days: 90
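Before the chart is attached to a release, it can be inspected the same way a consumer would. A hedged sketch; the version is a placeholder for the `validate` output.

```bash
# Sketch: inspect and render the packaged chart produced under out/helm.
VERSION=1.0.0   # placeholder for needs.validate.outputs.version
helm show chart "out/helm/stellaops-${VERSION}.tgz"
helm template stellaops "out/helm/stellaops-${VERSION}.tgz" > /tmp/stellaops-rendered.yaml
```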
  # ===========================================================================
  # GENERATE RELEASE MANIFEST
  # ===========================================================================

  release-manifest:
    name: Release Manifest
    runs-on: ubuntu-22.04
    needs: [validate, build-modules, build-cli, build-helm]
    if: always() && needs.validate.result == 'success'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Generate release manifest
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          mkdir -p out/release

          cat > out/release/suite-${VERSION}.yaml << EOF
          apiVersion: stellaops.org/v1
          kind: SuiteRelease
          metadata:
            version: "${VERSION}"
            codename: "${CODENAME}"
            channel: "${CHANNEL}"
            date: "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
            gitSha: "${{ github.sha }}"
            gitRef: "${{ github.ref }}"
          spec:
            modules:
              authority: "1.0.0"
              attestor: "1.0.0"
              concelier: "1.0.0"
              scanner: "1.0.0"
              policy: "1.0.0"
              signer: "1.0.0"
              excititor: "1.0.0"
              gateway: "1.0.0"
              scheduler: "1.0.0"
            platforms:
              - linux-x64
              - linux-arm64
              - win-x64
              - osx-x64
              - osx-arm64
            artifacts:
              containers: "${{ env.REGISTRY }}/stella-ops.org/*:${VERSION}"
              nuget: "${{ env.NUGET_SOURCE }}"
              helm: "stellaops-${VERSION}.tgz"
          EOF

          echo "=== Release Manifest ==="
          cat out/release/suite-${VERSION}.yaml

      - name: Generate checksums
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          cd artifacts
          find . -type f \( -name "*.nupkg" -o -name "*.tgz" -o -name "*.zip" -o -name "*.tar.gz" \) \
            -exec sha256sum {} \; > ../out/release/SHA256SUMS-${VERSION}.txt

          echo "=== Checksums ==="
          cat ../out/release/SHA256SUMS-${VERSION}.txt

      - name: Upload release manifest
        uses: actions/upload-artifact@v4
        with:
          name: release-manifest-${{ needs.validate.outputs.version }}
          path: out/release
          retention-days: 90
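A later job, or a consumer working offline, can verify downloaded artefacts against this checksum file with the standard coreutils check mode. A minimal sketch assuming the same artifacts directory layout; VERSION is a placeholder.

```bash
# Sketch: verify release artefacts against the generated SHA256SUMS file.
VERSION=1.0.0   # placeholder for needs.validate.outputs.version
cd artifacts
sha256sum -c "../out/release/SHA256SUMS-${VERSION}.txt"
```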
  # ===========================================================================
  # CREATE GITEA RELEASE
  # ===========================================================================

  create-release:
    name: Create Gitea Release
    runs-on: ubuntu-22.04
    needs: [validate, build-modules, build-containers, build-cli, build-helm, release-manifest]
    if: needs.validate.outputs.dry_run != 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Prepare release assets
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"

          mkdir -p release-assets

          # Copy CLI archives
          find artifacts -name "*.zip" -exec cp {} release-assets/ \;
          find artifacts -name "*.tar.gz" -exec cp {} release-assets/ \;

          # Copy Helm chart
          find artifacts -name "*.tgz" -exec cp {} release-assets/ \;

          # Copy manifest and checksums
          find artifacts -name "suite-*.yaml" -exec cp {} release-assets/ \;
          find artifacts -name "SHA256SUMS-*.txt" -exec cp {} release-assets/ \;

          ls -la release-assets/

      - name: Generate release notes
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          # The heredoc is quoted so the backticks below stay literal; the
          # release date is substituted afterwards because $(...) does not
          # expand inside a quoted heredoc.
          cat > release-notes.md << 'EOF'
          ## StellaOps ${{ needs.validate.outputs.version }} "${{ needs.validate.outputs.codename }}"

          ### Release Information
          - **Version:** ${{ needs.validate.outputs.version }}
          - **Codename:** ${{ needs.validate.outputs.codename }}
          - **Channel:** ${{ needs.validate.outputs.channel }}
          - **Date:** __RELEASE_DATE__
          - **Git SHA:** ${{ github.sha }}

          ### Included Modules
          | Module | Version | Container |
          |--------|---------|-----------|
          | Authority | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/authority:${{ needs.validate.outputs.version }}` |
          | Attestor | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/attestor:${{ needs.validate.outputs.version }}` |
          | Concelier | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/concelier:${{ needs.validate.outputs.version }}` |
          | Scanner | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/scanner:${{ needs.validate.outputs.version }}` |
          | Policy | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/policy:${{ needs.validate.outputs.version }}` |
          | Signer | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/signer:${{ needs.validate.outputs.version }}` |
          | Excititor | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/excititor:${{ needs.validate.outputs.version }}` |
          | Gateway | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/gateway:${{ needs.validate.outputs.version }}` |
          | Scheduler | 1.0.0 | `${{ env.REGISTRY }}/stella-ops.org/scheduler:${{ needs.validate.outputs.version }}` |

          ### CLI Downloads
          | Platform | Download |
          |----------|----------|
          | Linux x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-linux-x64.tar.gz` |
          | Linux ARM64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-linux-arm64.tar.gz` |
          | Windows x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-win-x64.zip` |
          | macOS x64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-osx-x64.tar.gz` |
          | macOS ARM64 | `stellaops-cli-${{ needs.validate.outputs.version }}-${{ needs.validate.outputs.codename }}-osx-arm64.tar.gz` |

          ### Installation

          #### Helm
          ```bash
          helm install stellaops ./stellaops-${{ needs.validate.outputs.version }}.tgz
          ```

          #### Docker Compose
          ```bash
          docker compose -f devops/compose/docker-compose.yml up -d
          ```

          ---
          See [CHANGELOG.md](CHANGELOG.md) for detailed changes.
          EOF

          sed -i "s/__RELEASE_DATE__/$(date -u +%Y-%m-%d)/" release-notes.md

      - name: Create Gitea release
        env:
          GITHUB_TOKEN: ${{ secrets.GITEA_TOKEN }}
        run: |
          VERSION="${{ needs.validate.outputs.version }}"
          CODENAME="${{ needs.validate.outputs.codename }}"
          CHANNEL="${{ needs.validate.outputs.channel }}"

          # Determine if prerelease
          PRERELEASE_FLAG=""
          if [[ "$CHANNEL" == "edge" ]]; then
            PRERELEASE_FLAG="--prerelease"
          fi

          gh release create "suite-${VERSION}" \
            --title "StellaOps ${VERSION} ${CODENAME}" \
            --notes-file release-notes.md \
            $PRERELEASE_FLAG \
            release-assets/*
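After the release is created, its metadata and attached assets can be listed for a quick sanity check. A hedged sketch assuming the same gh CLI and token configuration the workflow uses; the tag follows the `suite-${VERSION}` convention above.

```bash
# Sketch: confirm the release exists and carries the expected assets.
VERSION=1.0.0   # placeholder for needs.validate.outputs.version
gh release view "suite-${VERSION}" --json name,tagName,isPrerelease,assets
```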
  # ===========================================================================
  # SUMMARY
  # ===========================================================================

  summary:
    name: Release Summary
    runs-on: ubuntu-22.04
    needs: [validate, build-modules, build-containers, build-cli, build-helm, release-manifest, create-release]
    if: always()
    steps:
      - name: Generate Summary
        run: |
          echo "## Suite Release Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Release Information" >> $GITHUB_STEP_SUMMARY
          echo "| Property | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Version | ${{ needs.validate.outputs.version }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Codename | ${{ needs.validate.outputs.codename }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Channel | ${{ needs.validate.outputs.channel }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Dry Run | ${{ needs.validate.outputs.dry_run }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Job Results" >> $GITHUB_STEP_SUMMARY
          echo "| Job | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-----|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Build Modules | ${{ needs.build-modules.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build Containers | ${{ needs.build-containers.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build CLI | ${{ needs.build-cli.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Build Helm | ${{ needs.build-helm.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Release Manifest | ${{ needs.release-manifest.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Create Release | ${{ needs.create-release.result || 'skipped' }} |" >> $GITHUB_STEP_SUMMARY

      - name: Check for failures
        if: contains(needs.*.result, 'failure')
        run: |
          echo "::error::One or more release jobs failed"
          exit 1

@@ -6,7 +6,7 @@ on:
       - 'v*'
   pull_request:
     paths:
-      - 'deploy/**'
+      - 'devops/**'
       - 'scripts/release/**'
   workflow_dispatch:

@@ -24,12 +24,12 @@ jobs:

       - name: Validate Helm charts
         run: |
-          helm lint deploy/helm/stellaops
-          helm template stellaops deploy/helm/stellaops --dry-run
+          helm lint devops/helm/stellaops
+          helm template stellaops devops/helm/stellaops --dry-run

       - name: Validate Kubernetes manifests
         run: |
-          for f in deploy/k8s/*.yaml; do
+          for f in devops/k8s/*.yaml; do
             kubectl apply --dry-run=client -f "$f" || exit 1
           done

@@ -49,7 +49,7 @@ jobs:
           for img in "${REQUIRED_IMAGES[@]}"; do
             echo "Checking $img..."
             # Validate Dockerfile exists
-            if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "deploy/docker/${img}/Dockerfile" ]; then
+            if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "devops/docker/${img}/Dockerfile" ]; then
              echo "Warning: Dockerfile not found for $img"
             fi
           done

@@ -45,13 +45,13 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Validate NuGet restore source ordering
-        run: python3 ops/devops/validate_restore_sources.py
+        run: python3 devops/validate_restore_sources.py

       - name: Validate telemetry storage configuration
-        run: python3 ops/devops/telemetry/validate_storage_stack.py
+        run: python3 devops/telemetry/validate_storage_stack.py

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

@@ -198,7 +198,7 @@ jobs:

       - name: Enforce CLI parity gate
         run: |
-          python3 ops/devops/check_cli_parity.py
+          python3 .gitea/scripts/release/check_cli_parity.py

       - name: Log in to registry
         if: steps.meta.outputs.push == 'true'

@@ -225,7 +225,7 @@ jobs:
           if [[ "${{ steps.meta.outputs.push }}" != "true" ]]; then
             EXTRA_ARGS+=("--no-push")
           fi
-          ./ops/devops/release/build_release.py \
+          ./.gitea/scripts/release/build_release.py \
            --version "${{ steps.meta.outputs.version }}" \
            --channel "${{ steps.meta.outputs.channel }}" \
            --calendar "${{ steps.meta.outputs.calendar }}" \

@@ -234,7 +234,7 @@ jobs:

       - name: Verify release artefacts
         run: |
-          python ops/devops/release/verify_release.py --release-dir out/release
+          python .gitea/scripts/release/verify_release.py --release-dir out/release

       - name: Upload release artefacts
         uses: actions/upload-artifact@v4

@@ -6,7 +6,7 @@ on:
     paths:
       - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
       - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
-      - 'ops/devops/risk-bundle/**'
+      - 'devops/risk-bundle/**'
       - '.gitea/workflows/risk-bundle-ci.yml'
       - 'docs/modules/export-center/operations/risk-bundle-*.md'
   pull_request:

@@ -14,7 +14,7 @@ on:
     paths:
       - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
       - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
-      - 'ops/devops/risk-bundle/**'
+      - 'devops/risk-bundle/**'
       - '.gitea/workflows/risk-bundle-ci.yml'
       - 'docs/modules/export-center/operations/risk-bundle-*.md'
   workflow_dispatch:

@@ -42,7 +42,7 @@ jobs:
           fetch-depth: 0

       - name: Export OpenSSL 1.1 shim for Mongo2Go
-        run: scripts/enable-openssl11-shim.sh
+        run: .gitea/scripts/util/enable-openssl11-shim.sh

       - name: Set up .NET SDK
         uses: actions/setup-dotnet@v4

@@ -68,10 +68,10 @@ jobs:
       - name: Build risk bundle (fixtures)
         run: |
           mkdir -p $BUNDLE_OUTPUT
-          ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only
+          devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only

       - name: Verify bundle integrity
-        run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"
+        run: devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"

       - name: Generate checksums
         run: |

@@ -15,7 +15,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup .NET
         uses: actions/setup-dotnet@v4

@@ -10,7 +10,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup .NET
         uses: actions/setup-dotnet@v4

@@ -19,8 +19,8 @@ jobs:

       - name: Run determinism harness
         run: |
-          chmod +x scripts/scanner/determinism-run.sh
-          scripts/scanner/determinism-run.sh
+          chmod +x .gitea/scripts/test/determinism-run.sh
+          .gitea/scripts/test/determinism-run.sh

       - name: Upload determinism artifacts
         uses: actions/upload-artifact@v4

@@ -18,7 +18,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup Node.js
         uses: actions/setup-node@v4

@@ -4,14 +4,14 @@ on:
   pull_request:
     paths:
       - 'src/Sdk/**'
-      - 'ops/devops/sdk/**'
+      - 'devops/sdk/**'
       - 'scripts/sdk/**'
       - '.gitea/workflows/sdk-publish.yml'
   push:
     branches: [ main ]
     paths:
       - 'src/Sdk/**'
-      - 'ops/devops/sdk/**'
+      - 'devops/sdk/**'
       - 'scripts/sdk/**'
       - '.gitea/workflows/sdk-publish.yml'

@@ -34,7 +34,7 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup .NET 10 RC
         uses: actions/setup-dotnet@v4

@@ -5,7 +5,7 @@ on:
     paths:
       - 'src/Signals/**'
       - '.gitea/workflows/signals-ci.yml'
-      - 'ops/devops/signals/**'
+      - 'devops/signals/**'
       - 'helm/signals/**'
       - 'scripts/signals/**'
   push:

@@ -13,7 +13,7 @@ on:
     paths:
       - 'src/Signals/**'
      - '.gitea/workflows/signals-ci.yml'
-      - 'ops/devops/signals/**'
+      - 'devops/signals/**'
       - 'helm/signals/**'
       - 'scripts/signals/**'

@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup .NET 10 RC
         uses: actions/setup-dotnet@v4

@@ -18,7 +18,7 @@ on:
       - 'docs/modules/signals/unknowns/**'
       - 'docs/modules/signals/heuristics/**'
       - 'docs/modules/signals/SHA256SUMS'
-      - 'tools/cosign/sign-signals.sh'
+      - '.gitea/scripts/sign/sign-signals.sh'

 jobs:
   sign-signals-artifacts:

@@ -37,7 +37,7 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Install cosign
         uses: sigstore/cosign-installer@v3

@@ -77,8 +77,8 @@ jobs:

       - name: Sign signals artifacts
         run: |
-          chmod +x tools/cosign/sign-signals.sh
-          OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
+          chmod +x .gitea/scripts/sign/sign-signals.sh
+          OUT_DIR="${OUT_DIR}" .gitea/scripts/sign/sign-signals.sh

       - name: Verify signatures
         run: |

@@ -31,7 +31,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Install cosign
         uses: sigstore/cosign-installer@v3

@@ -55,8 +55,8 @@ jobs:

       - name: Sign signals artifacts
         run: |
-          chmod +x tools/cosign/sign-signals.sh
-          OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
+          chmod +x .gitea/scripts/sign/sign-signals.sh
+          OUT_DIR="${OUT_DIR}" .gitea/scripts/sign/sign-signals.sh

       - name: Build deterministic signals evidence tar
         run: |

@@ -17,7 +17,7 @@ on:
       - 'src/Signals/**'
       - 'scripts/signals/reachability-smoke.sh'
       - '.gitea/workflows/signals-reachability.yml'
-      - 'tools/cosign/sign-signals.sh'
+      - '.gitea/scripts/sign/sign-signals.sh'

 jobs:
   reachability-smoke:

@@ -34,7 +34,7 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup .NET 10 RC
         uses: actions/setup-dotnet@v4

@@ -70,7 +70,7 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Install cosign
         uses: sigstore/cosign-installer@v3

@@ -94,8 +94,8 @@ jobs:

       - name: Sign signals artifacts
         run: |
-          chmod +x tools/cosign/sign-signals.sh
-          OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
+          chmod +x .gitea/scripts/sign/sign-signals.sh
+          OUT_DIR="${OUT_DIR}" .gitea/scripts/sign/sign-signals.sh

       - name: Upload signed artifacts
         uses: actions/upload-artifact@v4

@@ -4,13 +4,13 @@ on:
   push:
     branches: [ main ]
     paths:
-      - 'ops/devops/symbols/**'
+      - 'devops/symbols/**'
       - 'scripts/symbols/**'
       - '.gitea/workflows/symbols-ci.yml'
   pull_request:
     branches: [ main, develop ]
     paths:
-      - 'ops/devops/symbols/**'
+      - 'devops/symbols/**'
       - 'scripts/symbols/**'
       - '.gitea/workflows/symbols-ci.yml'
   workflow_dispatch: {}

@@ -27,10 +27,10 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Export OpenSSL 1.1 shim for Mongo2Go
-        run: scripts/enable-openssl11-shim.sh
+        run: .gitea/scripts/util/enable-openssl11-shim.sh

       - name: Run Symbols.Server smoke
         run: |

@@ -18,10 +18,10 @@ jobs:
           fetch-depth: 0

       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Export OpenSSL 1.1 shim for Mongo2Go
-        run: scripts/enable-openssl11-shim.sh
+        run: .gitea/scripts/util/enable-openssl11-shim.sh

       - name: Run Symbols.Server smoke
         env:

510
.gitea/workflows/test-matrix.yml
Normal file
@@ -0,0 +1,510 @@
# .gitea/workflows/test-matrix.yml
# Unified test matrix pipeline with TRX reporting for all test categories
# Sprint: SPRINT_20251226_003_CICD

name: Test Matrix

on:
  push:
    branches: [main]
    paths-ignore:
      - 'docs/**'
      - '*.md'
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '*.md'
  schedule:
    - cron: '0 5 * * *' # Daily at 5 AM UTC
  workflow_dispatch:
    inputs:
      include_performance:
        description: 'Include performance tests'
        type: boolean
        default: false
      include_benchmark:
        description: 'Include benchmark tests'
        type: boolean
        default: false
      include_airgap:
        description: 'Include airgap tests'
        type: boolean
        default: false
      include_chaos:
        description: 'Include chaos tests'
        type: boolean
        default: false

env:
  DOTNET_VERSION: '10.0.100'
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
  TZ: UTC
jobs:
  # ===========================================================================
  # PR-GATING TESTS (run on every push/PR)
  # ===========================================================================

  unit:
    name: Unit Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Unit Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Unit" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=unit-tests.trx" \
            --results-directory ./TestResults/Unit \
            --collect:"XPlat Code Coverage"

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-unit
          path: ./TestResults/Unit
          retention-days: 14
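Each category job below runs the same solution with a different `--filter`, so a failing category can be reproduced locally with the same invocation. A minimal sketch (the category name is whichever trait the failing job used):

```bash
# Sketch: reproduce one category of the matrix locally with TRX output.
CATEGORY=Unit
dotnet restore src/StellaOps.sln
dotnet build src/StellaOps.sln -c Release --no-restore
dotnet test src/StellaOps.sln \
  --filter "Category=${CATEGORY}" \
  --configuration Release --no-build \
  --logger "trx;LogFileName=local-${CATEGORY}.trx" \
  --results-directory "./TestResults/${CATEGORY}"
```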
  architecture:
    name: Architecture Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Architecture Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Architecture" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=architecture-tests.trx" \
            --results-directory ./TestResults/Architecture

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-architecture
          path: ./TestResults/Architecture
          retention-days: 14

  contract:
    name: Contract Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Contract Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Contract" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=contract-tests.trx" \
            --results-directory ./TestResults/Contract

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-contract
          path: ./TestResults/Contract
          retention-days: 14
  integration:
    name: Integration Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: stellaops
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Integration Tests
        env:
          STELLAOPS_TEST_POSTGRES_CONNECTION: "Host=localhost;Port=5432;Database=stellaops_test;Username=stellaops;Password=stellaops"
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Integration" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=integration-tests.trx" \
            --results-directory ./TestResults/Integration

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-integration
          path: ./TestResults/Integration
          retention-days: 14
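The integration job's PostgreSQL service container can be approximated locally before running the same filter. A hedged sketch mirroring the service definition above:

```bash
# Sketch: stand up the same throwaway PostgreSQL 16 instance locally.
docker run -d --name stellaops-test-pg \
  -e POSTGRES_USER=stellaops \
  -e POSTGRES_PASSWORD=stellaops \
  -e POSTGRES_DB=stellaops_test \
  -p 5432:5432 postgres:16

export STELLAOPS_TEST_POSTGRES_CONNECTION="Host=localhost;Port=5432;Database=stellaops_test;Username=stellaops;Password=stellaops"
dotnet test src/StellaOps.sln --filter "Category=Integration" -c Release
```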
  security:
    name: Security Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Security Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Security" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=security-tests.trx" \
            --results-directory ./TestResults/Security

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-security
          path: ./TestResults/Security
          retention-days: 14

  golden:
    name: Golden Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Golden Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Golden" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=golden-tests.trx" \
            --results-directory ./TestResults/Golden

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-golden
          path: ./TestResults/Golden
          retention-days: 14
  # ===========================================================================
  # SCHEDULED/ON-DEMAND TESTS
  # ===========================================================================

  performance:
    name: Performance Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    if: github.event_name == 'schedule' || github.event.inputs.include_performance == 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Performance Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Performance" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=performance-tests.trx" \
            --results-directory ./TestResults/Performance

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-performance
          path: ./TestResults/Performance
          retention-days: 14
  benchmark:
    name: Benchmark Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 45
    if: github.event_name == 'schedule' || github.event.inputs.include_benchmark == 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Benchmark Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Benchmark" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=benchmark-tests.trx" \
            --results-directory ./TestResults/Benchmark

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-benchmark
          path: ./TestResults/Benchmark
          retention-days: 14

  airgap:
    name: AirGap Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    if: github.event.inputs.include_airgap == 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run AirGap Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=AirGap" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=airgap-tests.trx" \
            --results-directory ./TestResults/AirGap

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-airgap
          path: ./TestResults/AirGap
          retention-days: 14

  chaos:
    name: Chaos Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    if: github.event.inputs.include_chaos == 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/StellaOps.sln

      - name: Build
        run: dotnet build src/StellaOps.sln -c Release --no-restore

      - name: Run Chaos Tests
        run: |
          dotnet test src/StellaOps.sln \
            --filter "Category=Chaos" \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=chaos-tests.trx" \
            --results-directory ./TestResults/Chaos

      - name: Upload Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: test-results-chaos
          path: ./TestResults/Chaos
          retention-days: 14
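The four scheduled/on-demand jobs above only run from the nightly cron or when the matching `workflow_dispatch` input is set. Triggering them by hand depends on the forge's dispatch API; the endpoint below is an assumption to check against the Gitea Actions documentation, with host, owner, and repository as placeholders.

```bash
# Sketch (assumed API path): dispatch the workflow with the chaos toggle enabled.
GITEA_HOST=https://git.example.org   # placeholder
OWNER=example-org                    # placeholder
REPO=example-repo                    # placeholder
curl -sS -X POST \
  -H "Authorization: token ${GITEA_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"ref":"main","inputs":{"include_chaos":"true"}}' \
  "${GITEA_HOST}/api/v1/repos/${OWNER}/${REPO}/actions/workflows/test-matrix.yml/dispatches"
```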
  # ===========================================================================
  # SUMMARY JOB
  # ===========================================================================

  summary:
    name: Test Summary
    runs-on: ubuntu-22.04
    needs: [unit, architecture, contract, integration, security, golden]
    if: always()
    steps:
      - name: Download all test results
        uses: actions/download-artifact@v4
        with:
          pattern: test-results-*
          path: ./TestResults

      - name: Install trx2junit
        run: dotnet tool install -g trx2junit

      - name: Convert TRX to JUnit
        run: |
          find ./TestResults -name "*.trx" -exec trx2junit {} \; || true

      - name: Generate Summary
        run: |
          echo "## Test Results Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Category | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|----------|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Unit | ${{ needs.unit.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Architecture | ${{ needs.architecture.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Contract | ${{ needs.contract.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Integration | ${{ needs.integration.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Security | ${{ needs.security.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Golden | ${{ needs.golden.result }} |" >> $GITHUB_STEP_SUMMARY

      - name: Upload Combined Results
        uses: actions/upload-artifact@v4
        with:
          name: test-results-combined
          path: ./TestResults
          retention-days: 14

      - name: Check for failures
        if: contains(needs.*.result, 'failure')
        run: exit 1
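The same TRX-to-JUnit conversion can be applied locally to any downloaded results folder, which is useful when a CI viewer only understands JUnit XML. A minimal sketch using the same trx2junit tool:

```bash
# Sketch: convert downloaded TRX files to JUnit XML next to each input file.
dotnet tool install -g trx2junit
find ./TestResults -name '*.trx' -exec trx2junit {} \;
```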
@@ -22,7 +22,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4
       - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
+        run: python3 .gitea/scripts/test/run-fixtures-check.sh

       - name: Setup Python
         uses: actions/setup-python@v5