Consolidate devops folders

This commit is contained in:
master
2026-01-25 23:27:41 +02:00
parent 6e687b523a
commit a50bbb38ef
334 changed files with 35079 additions and 5569 deletions


@@ -0,0 +1,170 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# bootstrap-trust-offline.sh
# Sprint: SPRINT_20260125_003_Attestor_trust_workflows_conformance
# Task: WORKFLOW-001 - Create bootstrap workflow script
# Description: Initialize trust for air-gapped StellaOps deployment
# -----------------------------------------------------------------------------
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
usage() {
echo "Usage: $0 <trust-bundle> [options]"
echo ""
echo "Initialize trust for an air-gapped StellaOps deployment."
echo ""
echo "Arguments:"
echo " trust-bundle Path to trust bundle (tar.zst or directory)"
echo ""
echo "Options:"
echo " --key-dir DIR Directory for signing keys (default: /etc/stellaops/keys)"
echo " --reject-if-stale D Reject bundle if older than D (e.g., 7d, 24h)"
echo " --skip-keygen Skip signing key generation"
echo " --force Force import even if validation fails"
echo " -h, --help Show this help message"
echo ""
echo "Example:"
echo " $0 /media/usb/trust-bundle-2026-01-25.tar.zst"
exit 1
}
BUNDLE_PATH=""
KEY_DIR="/etc/stellaops/keys"
REJECT_STALE=""
SKIP_KEYGEN=false
FORCE=false
while [[ $# -gt 0 ]]; do
case $1 in
--key-dir) KEY_DIR="$2"; shift 2 ;;
--reject-if-stale) REJECT_STALE="$2"; shift 2 ;;
--skip-keygen) SKIP_KEYGEN=true; shift ;;
--force) FORCE=true; shift ;;
-h|--help) usage ;;
-*) log_error "Unknown option: $1"; usage ;;
*)
if [[ -z "$BUNDLE_PATH" ]]; then
BUNDLE_PATH="$1"
else
log_error "Unexpected argument: $1"
usage
fi
shift
;;
esac
done
if [[ -z "$BUNDLE_PATH" ]]; then
log_error "Trust bundle path is required"
usage
fi
if [[ ! -e "$BUNDLE_PATH" ]]; then
log_error "Trust bundle not found: $BUNDLE_PATH"
exit 1
fi
echo ""
echo "================================================"
echo " StellaOps Offline Trust Bootstrap"
echo "================================================"
echo ""
log_info "Trust Bundle: $BUNDLE_PATH"
log_info "Key Directory: $KEY_DIR"
if [[ -n "$REJECT_STALE" ]]; then
log_info "Staleness Threshold: $REJECT_STALE"
fi
echo ""
# Step 1: Generate signing keys (if using local keys)
if [[ "$SKIP_KEYGEN" != "true" ]]; then
log_step "Step 1: Generating signing keys..."
mkdir -p "$KEY_DIR"
chmod 700 "$KEY_DIR"
if [[ ! -f "$KEY_DIR/signing-key.pem" ]]; then
openssl ecparam -name prime256v1 -genkey -noout -out "$KEY_DIR/signing-key.pem"
chmod 600 "$KEY_DIR/signing-key.pem"
log_info "Generated signing key: $KEY_DIR/signing-key.pem"
else
log_info "Signing key already exists: $KEY_DIR/signing-key.pem"
fi
else
log_step "Step 1: Skipping key generation (--skip-keygen)"
fi
# Step 2: Import trust bundle
log_step "Step 2: Importing trust bundle..."
IMPORT_ARGS=(--verify-manifest)
if [[ -n "$REJECT_STALE" ]]; then
    IMPORT_ARGS+=(--reject-if-stale "$REJECT_STALE")
fi
if [[ "$FORCE" == "true" ]]; then
    IMPORT_ARGS+=(--force)
fi
# Test the command directly: under set -e a failing command aborts the script
# before a separate $? check could ever run.
if ! stella trust import "$BUNDLE_PATH" "${IMPORT_ARGS[@]}"; then
    log_error "Failed to import trust bundle"
    exit 1
fi
log_info "Trust bundle imported successfully"
# Step 3: Verify trust state
log_step "Step 3: Verifying trust state..."
if ! stella trust status --show-keys; then
    log_error "Failed to verify trust status"
    exit 1
fi
# Step 4: Test offline verification
log_step "Step 4: Testing offline verification capability..."
# Check that we have TUF metadata
CACHE_DIR="${HOME}/.local/share/StellaOps/TufCache"
if [[ -f "$CACHE_DIR/root.json" ]] && [[ -f "$CACHE_DIR/timestamp.json" ]]; then
log_info "TUF metadata present"
else
log_warn "TUF metadata may be incomplete"
fi
# Check for tiles (if snapshot included them)
if [[ -d "$CACHE_DIR/tiles" ]]; then
TILE_COUNT=$(find "$CACHE_DIR/tiles" -name "*.tile" 2>/dev/null | wc -l)
log_info "Tiles cached: $TILE_COUNT"
fi
echo ""
echo "================================================"
echo -e "${GREEN} Offline Bootstrap Complete!${NC}"
echo "================================================"
echo ""
log_info "Trust state imported to: $CACHE_DIR"
log_info "Signing key (if generated): $KEY_DIR/signing-key.pem"
echo ""
log_info "This system can now verify attestations offline using the imported trust state."
log_warn "Remember to periodically update the trust bundle to maintain freshness."
echo ""
log_info "To update trust state:"
echo " 1. On connected system: stella trust snapshot export --out bundle.tar.zst"
echo " 2. Transfer bundle to this system"
echo " 3. Run: $0 bundle.tar.zst"
echo ""
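# Example update round trip for the steps printed above (a sketch; the export
# command mirrors step 1, and the USB mount point and date are hypothetical):
#   connected$  stella trust snapshot export --out /media/usb/trust-bundle-2026-02-01.tar.zst
#   airgapped$  ./bootstrap-trust-offline.sh /media/usb/trust-bundle-2026-02-01.tar.zst --reject-if-stale 7d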


@@ -0,0 +1,196 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# bootstrap-trust.sh
# Sprint: SPRINT_20260125_003_Attestor_trust_workflows_conformance
# Task: WORKFLOW-001 - Create bootstrap workflow script
# Description: Initialize trust for new StellaOps deployment
# -----------------------------------------------------------------------------
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
usage() {
echo "Usage: $0 [options]"
echo ""
echo "Initialize trust for a new StellaOps deployment."
echo ""
echo "Options:"
echo " --tuf-url URL TUF repository URL (required)"
echo " --service-map NAME Service map target name (default: sigstore-services-v1)"
echo " --pin KEY Rekor key to pin (can specify multiple)"
echo " --key-dir DIR Directory for signing keys (default: /etc/stellaops/keys)"
echo " --skip-keygen Skip signing key generation"
echo " --skip-test Skip sign/verify test"
echo " --offline Initialize in offline mode"
echo " -h, --help Show this help message"
echo ""
echo "Example:"
echo " $0 --tuf-url https://trust.example.com/tuf/ --pin rekor-key-v1"
exit 1
}
TUF_URL=""
SERVICE_MAP="sigstore-services-v1"
PIN_KEYS=()
KEY_DIR="/etc/stellaops/keys"
SKIP_KEYGEN=false
SKIP_TEST=false
OFFLINE=false
while [[ $# -gt 0 ]]; do
case $1 in
--tuf-url) TUF_URL="$2"; shift 2 ;;
--service-map) SERVICE_MAP="$2"; shift 2 ;;
--pin) PIN_KEYS+=("$2"); shift 2 ;;
--key-dir) KEY_DIR="$2"; shift 2 ;;
--skip-keygen) SKIP_KEYGEN=true; shift ;;
--skip-test) SKIP_TEST=true; shift ;;
--offline) OFFLINE=true; shift ;;
-h|--help) usage ;;
*) log_error "Unknown option: $1"; usage ;;
esac
done
if [[ -z "$TUF_URL" ]]; then
log_error "TUF URL is required"
usage
fi
if [[ ${#PIN_KEYS[@]} -eq 0 ]]; then
PIN_KEYS=("rekor-key-v1")
fi
echo ""
echo "================================================"
echo " StellaOps Trust Bootstrap"
echo "================================================"
echo ""
log_info "TUF URL: $TUF_URL"
log_info "Service Map: $SERVICE_MAP"
log_info "Pinned Keys: ${PIN_KEYS[*]}"
log_info "Key Directory: $KEY_DIR"
echo ""
# Step 1: Generate signing keys (if using local keys)
if [[ "$SKIP_KEYGEN" != "true" ]]; then
log_step "Step 1: Generating signing keys..."
mkdir -p "$KEY_DIR"
chmod 700 "$KEY_DIR"
if [[ ! -f "$KEY_DIR/signing-key.pem" ]]; then
stella keys generate --type ecdsa-p256 --out "$KEY_DIR/signing-key.pem" 2>/dev/null || \
openssl ecparam -name prime256v1 -genkey -noout -out "$KEY_DIR/signing-key.pem"
chmod 600 "$KEY_DIR/signing-key.pem"
log_info "Generated signing key: $KEY_DIR/signing-key.pem"
else
log_info "Signing key already exists: $KEY_DIR/signing-key.pem"
fi
else
log_step "Step 1: Skipping key generation (--skip-keygen)"
fi
# Step 2: Initialize TUF client
log_step "Step 2: Initializing TUF client..."
INIT_ARGS=(--tuf-url "$TUF_URL" --service-map "$SERVICE_MAP")
for key in "${PIN_KEYS[@]}"; do
    INIT_ARGS+=(--pin "$key")
done
if [[ "$OFFLINE" == "true" ]]; then
    INIT_ARGS+=(--offline)
fi
INIT_ARGS+=(--force)
# Test the command directly: under set -e a separate $? check is dead code.
if ! stella trust init "${INIT_ARGS[@]}"; then
    log_error "Failed to initialize TUF client"
    exit 1
fi
log_info "TUF client initialized successfully"
# Step 3: Verify TUF metadata loaded
log_step "Step 3: Verifying TUF metadata..."
if ! stella trust status --show-keys --show-endpoints; then
    log_error "Failed to verify TUF status"
    exit 1
fi
# Step 4: Test sign/verify cycle
if [[ "$SKIP_TEST" != "true" ]] && [[ "$SKIP_KEYGEN" != "true" ]]; then
log_step "Step 4: Testing sign/verify cycle..."
TEST_FILE=$(mktemp)
TEST_SIG=$(mktemp)
echo "StellaOps bootstrap test $(date -u +%Y-%m-%dT%H:%M:%SZ)" > "$TEST_FILE"
stella sign "$TEST_FILE" --key "$KEY_DIR/signing-key.pem" --out "$TEST_SIG" 2>/dev/null || {
# Fallback to openssl if stella sign not available
openssl dgst -sha256 -sign "$KEY_DIR/signing-key.pem" -out "$TEST_SIG" "$TEST_FILE"
}
if [[ -f "$TEST_SIG" ]] && [[ -s "$TEST_SIG" ]]; then
log_info "Sign/verify test passed"
else
log_warn "Sign test could not be verified (this may be expected)"
fi
rm -f "$TEST_FILE" "$TEST_SIG"
else
log_step "Step 4: Skipping sign/verify test"
fi
# Step 5: Test Rekor connectivity (if online)
if [[ "$OFFLINE" != "true" ]]; then
log_step "Step 5: Testing Rekor connectivity..."
REKOR_URL=$(stella trust status --output json 2>/dev/null | grep -o '"rekor_url"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | cut -d'"' -f4 || echo "")
if [[ -n "$REKOR_URL" ]]; then
if curl -sf "${REKOR_URL}/api/v1/log" >/dev/null 2>&1; then
log_info "Rekor connectivity: OK"
else
log_warn "Rekor connectivity check failed (service may be unavailable)"
fi
else
log_warn "Could not determine Rekor URL from trust status"
fi
else
log_step "Step 5: Skipping Rekor test (offline mode)"
fi
echo ""
echo "================================================"
echo -e "${GREEN} Bootstrap Complete!${NC}"
echo "================================================"
echo ""
log_info "Trust repository initialized at: ~/.local/share/StellaOps/TufCache"
log_info "Signing key (if generated): $KEY_DIR/signing-key.pem"
echo ""
log_info "Next steps:"
echo " 1. Configure your CI/CD to use the signing key"
echo " 2. Set up periodic 'stella trust sync' for metadata freshness"
echo " 3. For air-gap deployments, run 'stella trust export' to create bundles"
echo ""
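# Example of next-step 2 as a cron entry (a sketch; the schedule and log path
# are assumptions, not project defaults):
#   0 */6 * * * stella trust sync >> /var/log/stellaops/trust-sync.log 2>&1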


@@ -0,0 +1,195 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# disaster-swap-endpoint.sh
# Sprint: SPRINT_20260125_003_Attestor_trust_workflows_conformance
# Task: WORKFLOW-003 - Create disaster endpoint swap script
# Description: Emergency endpoint swap via TUF (no client reconfiguration)
# -----------------------------------------------------------------------------
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
usage() {
echo "Usage: $0 --repo <dir> --new-rekor-url <url> [options]"
echo ""
echo "Emergency endpoint swap via TUF update."
echo "Clients will auto-discover new endpoints without reconfiguration."
echo ""
echo "Options:"
echo " --repo DIR TUF repository directory (required)"
echo " --new-rekor-url URL New Rekor URL (required)"
echo " --new-fulcio-url URL New Fulcio URL (optional)"
echo " --note TEXT Note explaining the change"
echo " --version N New service map version (auto-increment if not specified)"
echo " -h, --help Show this help message"
echo ""
echo "Example:"
echo " $0 --repo /path/to/tuf \\"
echo " --new-rekor-url https://rekor-mirror.internal:8080 \\"
echo " --note 'Emergency: Production Rekor outage'"
echo ""
echo "IMPORTANT: This changes where ALL clients send requests!"
exit 1
}
REPO_DIR=""
NEW_REKOR_URL=""
NEW_FULCIO_URL=""
NOTE=""
VERSION=""
while [[ $# -gt 0 ]]; do
case $1 in
--repo) REPO_DIR="$2"; shift 2 ;;
--new-rekor-url) NEW_REKOR_URL="$2"; shift 2 ;;
--new-fulcio-url) NEW_FULCIO_URL="$2"; shift 2 ;;
--note) NOTE="$2"; shift 2 ;;
--version) VERSION="$2"; shift 2 ;;
-h|--help) usage ;;
*) log_error "Unknown argument: $1"; usage ;;
esac
done
if [[ -z "$REPO_DIR" ]] || [[ -z "$NEW_REKOR_URL" ]]; then
log_error "--repo and --new-rekor-url are required"
usage
fi
if [[ ! -d "$REPO_DIR" ]]; then
log_error "TUF repository not found: $REPO_DIR"
exit 1
fi
echo ""
echo "================================================"
echo -e "${RED} EMERGENCY ENDPOINT SWAP${NC}"
echo "================================================"
echo ""
log_warn "This will redirect ALL clients to new endpoints!"
echo ""
log_info "TUF Repository: $REPO_DIR"
log_info "New Rekor URL: $NEW_REKOR_URL"
if [[ -n "$NEW_FULCIO_URL" ]]; then
log_info "New Fulcio URL: $NEW_FULCIO_URL"
fi
if [[ -n "$NOTE" ]]; then
log_info "Note: $NOTE"
fi
echo ""
read -rp "Type 'SWAP' to confirm endpoint change: " CONFIRM
if [[ "$CONFIRM" != "SWAP" ]]; then
log_error "Aborted"
exit 1
fi
# Find current service map
CURRENT_MAP=$(ls "$REPO_DIR/targets/" 2>/dev/null | grep -E '^sigstore-services-v[0-9]+\.json$' | sort -V | tail -1 || echo "")
if [[ -z "$CURRENT_MAP" ]]; then
log_error "No service map found in $REPO_DIR/targets/"
exit 1
fi
CURRENT_PATH="$REPO_DIR/targets/$CURRENT_MAP"
log_info "Current service map: $CURRENT_MAP"
# Determine new version
if [[ -z "$VERSION" ]]; then
CURRENT_VERSION=$(echo "$CURRENT_MAP" | grep -oE '[0-9]+' | tail -1)
VERSION=$((CURRENT_VERSION + 1))
fi
NEW_MAP="sigstore-services-v${VERSION}.json"
NEW_PATH="$REPO_DIR/targets/$NEW_MAP"
log_step "Creating new service map: $NEW_MAP"
# Read current map and update
if command -v python3 &>/dev/null; then
python3 - "$CURRENT_PATH" "$NEW_PATH" "$NEW_REKOR_URL" "$NEW_FULCIO_URL" "$NOTE" "$VERSION" << 'PYTHON_SCRIPT'
import json
import sys
from datetime import datetime, timezone
current_path = sys.argv[1]
new_path = sys.argv[2]
new_rekor_url = sys.argv[3]
new_fulcio_url = sys.argv[4] if len(sys.argv) > 4 and sys.argv[4] else None
note = sys.argv[5] if len(sys.argv) > 5 and sys.argv[5] else None
version = int(sys.argv[6]) if len(sys.argv) > 6 else 1
with open(current_path) as f:
data = json.load(f)
# Update endpoints
data['version'] = version
data.setdefault('rekor', {})['url'] = new_rekor_url
if new_fulcio_url and 'fulcio' in data:
data['fulcio']['url'] = new_fulcio_url
# Update metadata
if 'metadata' not in data:
data['metadata'] = {}
data['metadata']['updated_at'] = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
if note:
data['metadata']['note'] = note
with open(new_path, 'w') as f:
json.dump(data, f, indent=2)
print(f"Created: {new_path}")
PYTHON_SCRIPT
else
# Fallback: simple JSON creation
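# NOTE and the URL are interpolated verbatim here; double quotes or backslashes
# in --note would produce invalid JSON (the python3 path above escapes them).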
cat > "$NEW_PATH" << EOF
{
"version": $VERSION,
"rekor": {
"url": "$NEW_REKOR_URL"
},
"metadata": {
"updated_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"note": "$NOTE"
}
}
EOF
fi
log_info "New service map created: $NEW_PATH"
# Add to targets
log_step "Adding new service map to TUF targets..."
if [[ -x "$REPO_DIR/scripts/add-target.sh" ]]; then
    "$REPO_DIR/scripts/add-target.sh" "$NEW_PATH" "$NEW_MAP" --repo "$REPO_DIR"
else
    log_warn "add-target.sh not found; register $NEW_MAP in targets.json manually"
fi
echo ""
echo "================================================"
echo -e "${GREEN} Endpoint Swap Prepared${NC}"
echo "================================================"
echo ""
log_warn "NEXT STEPS (REQUIRED):"
echo " 1. Review the new service map: cat $NEW_PATH"
echo " 2. Sign the updated targets.json with targets key"
echo " 3. Update snapshot.json and sign with snapshot key"
echo " 4. Update timestamp.json and sign with timestamp key"
echo " 5. Deploy updated metadata to TUF server"
echo ""
log_info "Clients will auto-discover the new endpoint within their refresh interval."
log_info "For immediate effect, clients can run: stella trust sync --force"
echo ""
log_warn "Monitor client traffic to ensure failover is working!"
echo ""
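# Client-side spot check once the re-signed metadata is published (a sketch;
# both commands are the ones referenced in the next-steps output above):
#   stella trust sync --force
#   stella trust status --output json | grep '"rekor_url"'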


@@ -0,0 +1,221 @@
#!/usr/bin/env bash
#
# Initialize StellaOps configuration from sample files
#
# Usage:
# ./devops/scripts/init-config.sh [profile]
#
# Profiles:
# dev - Development environment (default)
# stage - Staging environment
# prod - Production environment
# airgap - Air-gapped deployment
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
ETC_DIR="${ROOT_DIR}/etc"
PROFILE="${1:-dev}"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok() { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
# Validate profile
case "${PROFILE}" in
dev|stage|prod|airgap)
log_info "Initializing configuration for profile: ${PROFILE}"
;;
*)
log_error "Unknown profile: ${PROFILE}"
echo "Valid profiles: dev, stage, prod, airgap"
exit 1
;;
esac
# Create directory structure
create_directories() {
log_info "Creating directory structure..."
local dirs=(
"etc/authority/plugins"
"etc/certificates/trust-roots"
"etc/certificates/signing"
"etc/concelier/sources"
"etc/crypto/profiles/cn"
"etc/crypto/profiles/eu"
"etc/crypto/profiles/kr"
"etc/crypto/profiles/ru"
"etc/crypto/profiles/us-fips"
"etc/env"
"etc/llm-providers"
"etc/notify/templates"
"etc/plugins/notify"
"etc/plugins/scanner/lang"
"etc/plugins/scanner/os"
"etc/policy/packs"
"etc/policy/schemas"
"etc/router"
"etc/scanner"
"etc/scheduler"
"etc/scm-connectors"
"etc/secrets"
"etc/signals"
"etc/vex"
)
for dir in "${dirs[@]}"; do
mkdir -p "${ROOT_DIR}/${dir}"
done
log_ok "Directory structure created"
}
# Copy sample files to active configs
copy_sample_files() {
log_info "Copying sample files..."
local count=0
# Find all .sample files
while IFS= read -r -d '' sample_file; do
# Determine target file (remove .sample extension)
local target_file="${sample_file%.sample}"
# Skip if target already exists
if [[ -f "${target_file}" ]]; then
    log_warn "Skipping (exists): ${target_file#"${ROOT_DIR}"/}"
    continue
fi
cp "${sample_file}" "${target_file}"
log_ok "Created: ${target_file#"${ROOT_DIR}"/}"
count=$((count + 1))  # ((count++)) returns 1 when count is 0, aborting under set -e
done < <(find "${ETC_DIR}" -name "*.sample" -type f -print0 2>/dev/null)
log_info "Copied ${count} sample files"
}
# Copy environment-specific profile
copy_env_profile() {
log_info "Setting up environment profile: ${PROFILE}"
local env_sample="${ETC_DIR}/env/${PROFILE}.env.sample"
local env_target="${ROOT_DIR}/.env"
if [[ -f "${env_sample}" ]]; then
if [[ -f "${env_target}" ]]; then
log_warn ".env already exists, not overwriting"
else
cp "${env_sample}" "${env_target}"
log_ok "Created .env from ${PROFILE} profile"
fi
else
log_warn "No environment sample found for profile: ${PROFILE}"
fi
}
# Create .gitignore entries for active configs
update_gitignore() {
log_info "Updating .gitignore..."
local gitignore="${ROOT_DIR}/.gitignore"
local entries=(
"# Active configuration files (not samples)"
"etc/**/*.yaml"
"!etc/**/*.yaml.sample"
"etc/**/*.json"
"!etc/**/*.json.sample"
"etc/**/env"
"!etc/**/env.sample"
"etc/secrets/*"
"!etc/secrets/*.sample"
"!etc/secrets/README.md"
)
# Check if entries already exist
if grep -q "# Active configuration files" "${gitignore}" 2>/dev/null; then
log_warn ".gitignore already contains config entries"
return
fi
echo "" >> "${gitignore}"
for entry in "${entries[@]}"; do
echo "${entry}" >> "${gitignore}"
done
log_ok "Updated .gitignore"
}
# Validate the configuration
validate_config() {
log_info "Validating configuration..."
local errors=0
# Check for required directories
local required_dirs=(
"etc/scanner"
"etc/authority"
"etc/policy"
)
for dir in "${required_dirs[@]}"; do
if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
log_error "Missing required directory: ${dir}"
errors=$((errors + 1))  # ((errors++)) returns 1 when errors is 0, aborting under set -e
fi
done
if [[ ${errors} -gt 0 ]]; then
log_error "Validation failed with ${errors} errors"
exit 1
fi
log_ok "Configuration validated"
}
# Print summary
print_summary() {
echo ""
echo "========================================"
echo " Configuration Initialized"
echo "========================================"
echo ""
echo "Profile: ${PROFILE}"
echo ""
echo "Next steps:"
echo " 1. Review and customize configurations in etc/"
echo " 2. Set sensitive values via environment variables"
echo " 3. For crypto compliance, set STELLAOPS_CRYPTO_PROFILE"
echo ""
echo "Quick start:"
echo " docker compose up -d"
echo ""
echo "Documentation:"
echo " docs/operations/configuration-guide.md"
echo ""
}
# Main
main() {
create_directories
copy_sample_files
copy_env_profile
update_gitignore
validate_config
print_summary
}
main "$@"
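# Example invocations (profiles listed in the header comment):
#   ./devops/scripts/init-config.sh          # defaults to dev
#   ./devops/scripts/init-config.sh airgap   # air-gapped deployment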


@@ -0,0 +1,406 @@
#!/usr/bin/env bash
# =============================================================================
# CI COMMON FUNCTIONS
# =============================================================================
# Shared utility functions for local CI testing scripts.
#
# Usage:
# source "$SCRIPT_DIR/lib/ci-common.sh"
#
# =============================================================================
# Prevent multiple sourcing
[[ -n "${_CI_COMMON_LOADED:-}" ]] && return
_CI_COMMON_LOADED=1
# =============================================================================
# COLOR DEFINITIONS
# =============================================================================
if [[ -t 1 ]] && [[ -n "${TERM:-}" ]] && [[ "${TERM}" != "dumb" ]]; then
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
MAGENTA='\033[0;35m'
CYAN='\033[0;36m'
WHITE='\033[0;37m'
BOLD='\033[1m'
DIM='\033[2m'
RESET='\033[0m'
else
RED=''
GREEN=''
YELLOW=''
BLUE=''
MAGENTA=''
CYAN=''
WHITE=''
BOLD=''
DIM=''
RESET=''
fi
# =============================================================================
# LOGGING FUNCTIONS
# =============================================================================
# Log an info message
log_info() {
echo -e "${BLUE}[INFO]${RESET} $*"
}
# Log a success message
log_success() {
echo -e "${GREEN}[OK]${RESET} $*"
}
# Log a warning message
log_warn() {
echo -e "${YELLOW}[WARN]${RESET} $*" >&2
}
# Log an error message
log_error() {
echo -e "${RED}[ERROR]${RESET} $*" >&2
}
# Log a debug message (only if VERBOSE is true)
log_debug() {
if [[ "${VERBOSE:-false}" == "true" ]]; then
echo -e "${DIM}[DEBUG]${RESET} $*"
fi
}
# Log a step in a process
log_step() {
local step_num="$1"
local total_steps="$2"
local message="$3"
echo -e "${CYAN}[${step_num}/${total_steps}]${RESET} ${BOLD}${message}${RESET}"
}
# Log a section header
log_section() {
echo ""
echo -e "${BOLD}${MAGENTA}=== $* ===${RESET}"
echo ""
}
# Log a subsection header
log_subsection() {
echo -e "${CYAN}--- $* ---${RESET}"
}
# =============================================================================
# ERROR HANDLING
# =============================================================================
# Exit with error message
die() {
log_error "$@"
exit 1
}
# Check if a command exists
require_command() {
local cmd="$1"
local install_hint="${2:-}"
if ! command -v "$cmd" &>/dev/null; then
log_error "Required command not found: $cmd"
if [[ -n "$install_hint" ]]; then
log_info "Install with: $install_hint"
fi
return 1
fi
return 0
}
# Check if a file exists
require_file() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "Required file not found: $file"
return 1
fi
return 0
}
# Check if a directory exists
require_dir() {
local dir="$1"
if [[ ! -d "$dir" ]]; then
log_error "Required directory not found: $dir"
return 1
fi
return 0
}
# =============================================================================
# TIMING FUNCTIONS
# =============================================================================
# Get current timestamp in seconds
get_timestamp() {
date +%s
}
# Format duration in human-readable format
format_duration() {
local seconds="$1"
local minutes=$((seconds / 60))
local remaining_seconds=$((seconds % 60))
if [[ $minutes -gt 0 ]]; then
echo "${minutes}m ${remaining_seconds}s"
else
echo "${remaining_seconds}s"
fi
}
# Start a timer and return the start time
start_timer() {
get_timestamp
}
# Stop a timer and print the duration
stop_timer() {
local start_time="$1"
local label="${2:-Operation}"
local end_time
end_time=$(get_timestamp)
local duration=$((end_time - start_time))
log_info "$label completed in $(format_duration $duration)"
}
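# Typical pairing (a sketch; run_build_step is a stand-in for real work):
#   t0=$(start_timer)
#   run_build_step
#   stop_timer "$t0" "Build"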
# =============================================================================
# STRING FUNCTIONS
# =============================================================================
# Convert string to lowercase
to_lower() {
echo "$1" | tr '[:upper:]' '[:lower:]'
}
# Convert string to uppercase
to_upper() {
echo "$1" | tr '[:lower:]' '[:upper:]'
}
# Trim whitespace from string
trim() {
local var="$*"
var="${var#"${var%%[![:space:]]*}"}"
var="${var%"${var##*[![:space:]]}"}"
echo -n "$var"
}
# Join array elements with delimiter
join_by() {
local delimiter="$1"
shift
local first="$1"
shift
printf '%s' "$first" "${@/#/$delimiter}"
}
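# Example: join_by , a b c  ->  "a,b,c"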
# =============================================================================
# ARRAY FUNCTIONS
# =============================================================================
# Check if array contains element
array_contains() {
local needle="$1"
shift
local element
for element in "$@"; do
[[ "$element" == "$needle" ]] && return 0
done
return 1
}
# =============================================================================
# FILE FUNCTIONS
# =============================================================================
# Create directory if it doesn't exist
ensure_dir() {
local dir="$1"
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
log_debug "Created directory: $dir"
fi
}
# Get absolute path
get_absolute_path() {
local path="$1"
if [[ -d "$path" ]]; then
(cd "$path" && pwd)
elif [[ -f "$path" ]]; then
local dir
dir=$(dirname "$path")
echo "$(cd "$dir" && pwd)/$(basename "$path")"
else
echo "$path"
fi
}
# =============================================================================
# GIT FUNCTIONS
# =============================================================================
# Get the repository root directory
get_repo_root() {
git rev-parse --show-toplevel 2>/dev/null
}
# Get current branch name
get_current_branch() {
git rev-parse --abbrev-ref HEAD 2>/dev/null
}
# Get current commit SHA
get_current_sha() {
git rev-parse HEAD 2>/dev/null
}
# Get short commit SHA
get_short_sha() {
git rev-parse --short HEAD 2>/dev/null
}
# Check if working directory is clean
is_git_clean() {
[[ -z "$(git status --porcelain 2>/dev/null)" ]]
}
# Get list of changed files compared to main branch
get_changed_files() {
local base_branch="${1:-main}"
git diff --name-only "$base_branch"...HEAD 2>/dev/null
}
# =============================================================================
# MODULE DETECTION
# =============================================================================
# Map of module names to source paths
declare -A MODULE_PATHS=(
["Scanner"]="src/Scanner src/BinaryIndex"
["Concelier"]="src/Concelier src/Excititor"
["Authority"]="src/Authority"
["Policy"]="src/Policy src/RiskEngine"
["Attestor"]="src/Attestor src/Provenance"
["EvidenceLocker"]="src/EvidenceLocker"
["ExportCenter"]="src/ExportCenter"
["Findings"]="src/Findings"
["SbomService"]="src/SbomService"
["Notify"]="src/Notify src/Notifier"
["Router"]="src/Router src/Gateway"
["Cryptography"]="src/Cryptography"
["AirGap"]="src/AirGap"
["Cli"]="src/Cli"
["AdvisoryAI"]="src/AdvisoryAI"
["ReachGraph"]="src/ReachGraph"
["Orchestrator"]="src/Orchestrator"
["PacksRegistry"]="src/PacksRegistry"
["Replay"]="src/Replay"
["Aoc"]="src/Aoc"
["IssuerDirectory"]="src/IssuerDirectory"
["Telemetry"]="src/Telemetry"
["Signals"]="src/Signals"
["Web"]="src/Web"
["DevPortal"]="src/DevPortal"
)
# Modules that use Node.js/npm instead of .NET
declare -a NODE_MODULES=("Web" "DevPortal")
# Detect which modules have changed based on git diff
detect_changed_modules() {
local base_branch="${1:-main}"
local changed_files
changed_files=$(get_changed_files "$base_branch")
local changed_modules=()
local module
local paths
# Infrastructure changes (root build props) affect all modules - check these
# first so the per-module scan below can be skipped entirely.
if echo "$changed_files" | grep -qE "^(Directory\.Build\.props|Directory\.Packages\.props|nuget\.config)"; then
    echo "ALL"
    return
fi
# Shared library changes also affect all modules
if echo "$changed_files" | grep -q "^src/__Libraries/"; then
    echo "ALL"
    return
fi
for module in "${!MODULE_PATHS[@]}"; do
    paths="${MODULE_PATHS[$module]}"
    for path in $paths; do
        if echo "$changed_files" | grep -q "^${path}/"; then
            # ${changed_modules[@]:-} keeps 'set -u' callers happy while empty
            if ! array_contains "$module" "${changed_modules[@]:-}"; then
                changed_modules+=("$module")
            fi
            break
        fi
    done
done
if [[ ${#changed_modules[@]} -eq 0 ]]; then
echo "NONE"
else
echo "${changed_modules[*]}"
fi
}
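# Callers are expected to branch on the three output shapes (a sketch; the
# run_* handlers are illustrative, not defined in this library):
#   modules=$(detect_changed_modules main)
#   case "$modules" in
#     ALL)  run_all_module_tests ;;
#     NONE) log_info "No module changes detected" ;;
#     *)    for m in $modules; do run_module_tests "$m"; done ;;
#   esac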
# =============================================================================
# RESULT REPORTING
# =============================================================================
# Print a summary table row
print_table_row() {
local col1="$1"
local col2="$2"
local col3="${3:-}"
printf " %-30s %-15s %s\n" "$col1" "$col2" "$col3"
}
# Print pass/fail status
print_status() {
local name="$1"
local passed="$2"
local duration="${3:-}"
if [[ "$passed" == "true" ]]; then
print_table_row "$name" "${GREEN}PASSED${RESET}" "$duration"
else
print_table_row "$name" "${RED}FAILED${RESET}" "$duration"
fi
}
# =============================================================================
# ENVIRONMENT LOADING
# =============================================================================
# Load environment file if it exists
load_env_file() {
local env_file="$1"
if [[ -f "$env_file" ]]; then
log_debug "Loading environment from: $env_file"
set -a
# shellcheck source=/dev/null
source "$env_file"
set +a
return 0
fi
return 1
}


@@ -0,0 +1,342 @@
#!/usr/bin/env bash
# =============================================================================
# CI DOCKER UTILITIES
# =============================================================================
# Docker-related utility functions for local CI testing.
#
# Usage:
# source "$SCRIPT_DIR/lib/ci-docker.sh"
#
# =============================================================================
# Prevent multiple sourcing
[[ -n "${_CI_DOCKER_LOADED:-}" ]] && return
_CI_DOCKER_LOADED=1
# =============================================================================
# CONFIGURATION
# =============================================================================
# REPO_ROOT must point at the repository checkout; default to the git toplevel
# (same pattern as ci-web.sh) so this file also works when sourced directly.
REPO_ROOT="${REPO_ROOT:-$(git rev-parse --show-toplevel)}"
CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.testing.yml}"
CI_IMAGE="${CI_IMAGE:-stellaops-ci:local}"
CI_DOCKERFILE="${CI_DOCKERFILE:-devops/docker/Dockerfile.ci}"
CI_PROJECT_NAME="${CI_PROJECT_NAME:-stellaops-ci}"
# Service names from docker-compose.testing.yml
CI_SERVICES=(postgres-test valkey-test rustfs-test mock-registry)
# =============================================================================
# DOCKER CHECK
# =============================================================================
# Check if Docker is available and running
check_docker() {
if ! command -v docker &>/dev/null; then
log_error "Docker is not installed or not in PATH"
log_info "Install Docker: https://docs.docker.com/get-docker/"
return 1
fi
if ! docker info &>/dev/null; then
log_error "Docker daemon is not running"
log_info "Start Docker Desktop or run: sudo systemctl start docker"
return 1
fi
log_debug "Docker is available and running"
return 0
}
# Check if Docker Compose is available
check_docker_compose() {
if docker compose version &>/dev/null; then
DOCKER_COMPOSE="docker compose"
log_debug "Using Docker Compose plugin"
return 0
elif command -v docker-compose &>/dev/null; then
DOCKER_COMPOSE="docker-compose"
log_debug "Using standalone docker-compose"
return 0
else
log_error "Docker Compose is not installed"
log_info "Install with: docker compose plugin or standalone docker-compose"
return 1
fi
}
# =============================================================================
# CI SERVICES MANAGEMENT
# =============================================================================
# Start CI services
start_ci_services() {
local services=("$@")
local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
if [[ ! -f "$compose_file" ]]; then
log_error "Compose file not found: $compose_file"
return 1
fi
check_docker || return 1
check_docker_compose || return 1
log_section "Starting CI Services"
if [[ ${#services[@]} -eq 0 ]]; then
# Start all services
log_info "Starting all CI services..."
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d
else
# Start specific services
log_info "Starting services: ${services[*]}"
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d "${services[@]}"
fi
local result=$?
if [[ $result -ne 0 ]]; then
log_error "Failed to start CI services"
return $result
fi
# Wait for services to be healthy
wait_for_services "${services[@]}"
}
# Stop CI services
stop_ci_services() {
local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
if [[ ! -f "$compose_file" ]]; then
log_debug "Compose file not found, nothing to stop"
return 0
fi
check_docker_compose || return 1
log_section "Stopping CI Services"
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down
}
# Stop CI services and remove volumes
cleanup_ci_services() {
local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
if [[ ! -f "$compose_file" ]]; then
return 0
fi
check_docker_compose || return 1
log_section "Cleaning Up CI Services"
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down -v --remove-orphans
}
# Check status of CI services
check_ci_services_status() {
local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
check_docker_compose || return 1
log_subsection "CI Services Status"
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" ps
}
# =============================================================================
# HEALTH CHECKS
# =============================================================================
# Wait for a specific service to be healthy
wait_for_service() {
local service="$1"
local timeout="${2:-60}"
local interval="${3:-2}"
log_info "Waiting for $service to be healthy..."
local elapsed=0
while [[ $elapsed -lt $timeout ]]; do
local status
status=$(docker inspect --format='{{.State.Health.Status}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "not found")
if [[ "$status" == "healthy" ]]; then
log_success "$service is healthy"
return 0
elif [[ "$status" == "not found" ]]; then
# Container might not have health check, check if running
local running
running=$(docker inspect --format='{{.State.Running}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "false")
if [[ "$running" == "true" ]]; then
log_success "$service is running (no health check)"
return 0
fi
fi
sleep "$interval"
elapsed=$((elapsed + interval))
done
log_error "$service did not become healthy within ${timeout}s"
return 1
}
# Wait for multiple services to be healthy
wait_for_services() {
local services=("$@")
local failed=0
if [[ ${#services[@]} -eq 0 ]]; then
services=("${CI_SERVICES[@]}")
fi
log_info "Waiting for services to be ready..."
for service in "${services[@]}"; do
if ! wait_for_service "$service" 60 2; then
failed=1
fi
done
return $failed
}
# Check if PostgreSQL is accepting connections
check_postgres_ready() {
local host="${1:-localhost}"
local port="${2:-5433}"
local user="${3:-stellaops_ci}"
local db="${4:-stellaops_test}"
if command -v pg_isready &>/dev/null; then
pg_isready -h "$host" -p "$port" -U "$user" -d "$db" &>/dev/null
else
# Fallback to nc if pg_isready not available
nc -z "$host" "$port" &>/dev/null
fi
}
# Check if Valkey/Redis is accepting connections
check_valkey_ready() {
local host="${1:-localhost}"
local port="${2:-6380}"
if command -v valkey-cli &>/dev/null; then
valkey-cli -h "$host" -p "$port" ping &>/dev/null
elif command -v redis-cli &>/dev/null; then
redis-cli -h "$host" -p "$port" ping &>/dev/null
else
nc -z "$host" "$port" &>/dev/null
fi
}
# =============================================================================
# CI DOCKER IMAGE MANAGEMENT
# =============================================================================
# Check if CI image exists
ci_image_exists() {
docker image inspect "$CI_IMAGE" &>/dev/null
}
# Build CI Docker image
build_ci_image() {
local force_rebuild="${1:-false}"
local dockerfile="$REPO_ROOT/$CI_DOCKERFILE"
if [[ ! -f "$dockerfile" ]]; then
log_error "Dockerfile not found: $dockerfile"
return 1
fi
check_docker || return 1
if ci_image_exists && [[ "$force_rebuild" != "true" ]]; then
log_info "CI image already exists: $CI_IMAGE"
log_info "Use --rebuild to force rebuild"
return 0
fi
log_section "Building CI Docker Image"
log_info "Dockerfile: $dockerfile"
log_info "Image: $CI_IMAGE"
if ! docker build -t "$CI_IMAGE" -f "$dockerfile" "$REPO_ROOT"; then
    log_error "Failed to build CI image"
    return 1
fi
log_success "CI image built successfully: $CI_IMAGE"
}
# =============================================================================
# CONTAINER EXECUTION
# =============================================================================
# Run a command inside the CI container
run_in_ci_container() {
local command="$*"
check_docker || return 1
if ! ci_image_exists; then
log_info "CI image not found, building..."
build_ci_image || return 1
fi
local docker_args=(
--rm
-v "$REPO_ROOT:/src"
-v "$REPO_ROOT/TestResults:/src/TestResults"
-e DOTNET_NOLOGO=1
-e DOTNET_CLI_TELEMETRY_OPTOUT=1
-e DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
-e TZ=UTC
-w /src
)
# Mount Docker socket for Testcontainers
if [[ -S /var/run/docker.sock ]]; then
docker_args+=(-v /var/run/docker.sock:/var/run/docker.sock)
fi
# Load environment file if exists
local env_file="$REPO_ROOT/devops/ci-local/.env.local"
if [[ -f "$env_file" ]]; then
docker_args+=(--env-file "$env_file")
fi
# Connect to CI network if services are running
if docker network inspect stellaops-ci-net &>/dev/null; then
docker_args+=(--network stellaops-ci-net)
fi
log_debug "Running in CI container: $command"
docker run "${docker_args[@]}" "$CI_IMAGE" bash -c "$command"
}
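# Example (a sketch; the dotnet command is illustrative):
#   run_in_ci_container "dotnet test --filter Category=Unit"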
# =============================================================================
# DOCKER NETWORK UTILITIES
# =============================================================================
# Get the IP address of a running container
get_container_ip() {
local container="$1"
docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$container" 2>/dev/null
}
# Check if container is running
is_container_running() {
local container="$1"
[[ "$(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null)" == "true" ]]
}
# Get container logs
get_container_logs() {
local container="$1"
local lines="${2:-100}"
docker logs --tail "$lines" "$container" 2>&1
}


@@ -0,0 +1,475 @@
#!/usr/bin/env bash
# =============================================================================
# CI-WEB.SH - Angular Web Testing Utilities
# =============================================================================
# Functions for running Angular/Web frontend tests locally.
#
# Test Types:
# - Unit Tests (Karma/Jasmine)
# - E2E Tests (Playwright)
# - Accessibility Tests (Axe-core)
# - Lighthouse Audits
# - Storybook Build
#
# =============================================================================
# Prevent direct execution
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
echo "This script should be sourced, not executed directly."
exit 1
fi
# =============================================================================
# CONSTANTS
# =============================================================================
WEB_DIR="${REPO_ROOT:-$(git rev-parse --show-toplevel)}/src/Web/StellaOps.Web"
WEB_NODE_VERSION="20"
# Test categories for Web
WEB_TEST_CATEGORIES=(
"web:unit" # Karma unit tests
"web:e2e" # Playwright E2E
"web:a11y" # Accessibility
"web:lighthouse" # Performance/a11y audit
"web:build" # Production build
"web:storybook" # Storybook build
)
# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================
check_node_version() {
if ! command -v node &>/dev/null; then
log_error "Node.js not found"
log_info "Install Node.js $WEB_NODE_VERSION+: https://nodejs.org"
return 1
fi
local version
version=$(node --version | sed 's/v//' | cut -d. -f1)
if [[ "$version" -lt "$WEB_NODE_VERSION" ]]; then
log_warn "Node.js version $version is below recommended $WEB_NODE_VERSION"
else
log_debug "Node.js version: $(node --version)"
fi
return 0
}
check_npm() {
if ! command -v npm &>/dev/null; then
log_error "npm not found"
return 1
fi
log_debug "npm version: $(npm --version)"
return 0
}
check_web_dependencies() {
log_subsection "Checking Web Dependencies"
check_node_version || return 1
check_npm || return 1
# Check if node_modules exists
if [[ ! -d "$WEB_DIR/node_modules" ]]; then
log_warn "node_modules not found - will install dependencies"
fi
return 0
}
# =============================================================================
# SETUP
# =============================================================================
install_web_dependencies() {
log_subsection "Installing Web Dependencies"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
pushd "$WEB_DIR" > /dev/null || return 1
# Check if package-lock.json exists
if [[ -f "package-lock.json" ]]; then
log_info "Running npm ci (clean install)..."
npm ci --prefer-offline --no-audit --no-fund || {
log_error "npm ci failed"
popd > /dev/null
return 1
}
else
log_info "Running npm install..."
npm install --no-audit --no-fund || {
log_error "npm install failed"
popd > /dev/null
return 1
}
fi
popd > /dev/null
log_success "Web dependencies installed"
return 0
}
ensure_web_dependencies() {
if [[ ! -d "$WEB_DIR/node_modules" ]]; then
install_web_dependencies || return 1
fi
return 0
}
# =============================================================================
# TEST RUNNERS
# =============================================================================
run_web_unit_tests() {
log_subsection "Running Web Unit Tests (Karma/Jasmine)"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
ensure_web_dependencies || return 1
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: npm run test:ci"
popd > /dev/null
return 0
fi
# Run tests
npm run test:ci
local result=$?
stop_timer "$start_time" "Web unit tests"
popd > /dev/null
if [[ $result -eq 0 ]]; then
log_success "Web unit tests passed"
else
log_error "Web unit tests failed"
fi
return $result
}
run_web_e2e_tests() {
log_subsection "Running Web E2E Tests (Playwright)"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
ensure_web_dependencies || return 1
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
# Install Playwright browsers if needed
if [[ ! -d "$HOME/.cache/ms-playwright" ]] && [[ ! -d "node_modules/.cache/ms-playwright" ]]; then
log_info "Installing Playwright browsers..."
npx playwright install --with-deps chromium || {
log_warn "Playwright browser installation failed - E2E tests may fail"
}
fi
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: npm run test:e2e"
popd > /dev/null
return 0
fi
# Run E2E tests
npm run test:e2e
local result=$?
stop_timer "$start_time" "Web E2E tests"
popd > /dev/null
if [[ $result -eq 0 ]]; then
log_success "Web E2E tests passed"
else
log_error "Web E2E tests failed"
fi
return $result
}
run_web_a11y_tests() {
log_subsection "Running Web Accessibility Tests (Axe)"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
ensure_web_dependencies || return 1
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: npm run test:a11y"
popd > /dev/null
return 0
fi
# Run accessibility tests
npm run test:a11y
local result=$?
stop_timer "$start_time" "Web accessibility tests"
popd > /dev/null
if [[ $result -eq 0 ]]; then
log_success "Web accessibility tests passed"
else
log_warn "Web accessibility tests had issues (non-blocking)"
fi
# A11y tests are non-blocking by default
return 0
}
run_web_build() {
log_subsection "Building Web Application"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
ensure_web_dependencies || return 1
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: npm run build -- --configuration production"
popd > /dev/null
return 0
fi
# Build production bundle
npm run build -- --configuration production --progress=false
local result=$?
stop_timer "$start_time" "Web build"
popd > /dev/null
if [[ $result -eq 0 ]]; then
log_success "Web build completed"
# Check bundle size
if [[ -d "$WEB_DIR/dist" ]]; then
local size
size=$(du -sh "$WEB_DIR/dist" 2>/dev/null | cut -f1)
log_info "Bundle size: $size"
fi
else
log_error "Web build failed"
fi
return $result
}
run_web_storybook_build() {
log_subsection "Building Storybook"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
ensure_web_dependencies || return 1
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: npm run storybook:build"
popd > /dev/null
return 0
fi
# Build Storybook
npm run storybook:build
local result=$?
stop_timer "$start_time" "Storybook build"
popd > /dev/null
if [[ $result -eq 0 ]]; then
log_success "Storybook build completed"
else
log_error "Storybook build failed"
fi
return $result
}
run_web_lighthouse() {
log_subsection "Running Lighthouse Audit"
if [[ ! -d "$WEB_DIR" ]]; then
log_error "Web directory not found: $WEB_DIR"
return 1
fi
# Check if lighthouse is available
if ! command -v lhci &>/dev/null && ! npx lhci --version &>/dev/null; then
log_warn "Lighthouse CI not installed - skipping audit"
log_info "Install with: npm install -g @lhci/cli"
return 0
fi
ensure_web_dependencies || return 1
# Build first if not already built
if [[ ! -d "$WEB_DIR/dist" ]]; then
run_web_build || return 1
fi
pushd "$WEB_DIR" > /dev/null || return 1
local start_time
start_time=$(start_timer)
if [[ "${DRY_RUN:-false}" == "true" ]]; then
log_info "[DRY-RUN] Would run: lhci autorun"
popd > /dev/null
return 0
fi
# Run Lighthouse
npx lhci autorun \
--collect.staticDistDir=./dist/stellaops-web/browser \
--collect.numberOfRuns=1 \
--upload.target=filesystem \
--upload.outputDir=./lighthouse-results 2>/dev/null || {
log_warn "Lighthouse audit had issues"
}
stop_timer "$start_time" "Lighthouse audit"
popd > /dev/null
log_success "Lighthouse audit completed"
return 0
}
# =============================================================================
# COMPOSITE RUNNERS
# =============================================================================
run_web_smoke() {
log_section "Web Smoke Tests"
log_info "Running quick web validation"
local failed=0
run_web_build || failed=1
if [[ $failed -eq 0 ]]; then
run_web_unit_tests || failed=1
fi
return $failed
}
run_web_pr_gating() {
log_section "Web PR-Gating Tests"
log_info "Running full web PR-gating suite"
local failed=0
local results=()
# Build
run_web_build
results+=("Build:$?")
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
# Unit tests
if [[ $failed -eq 0 ]]; then
run_web_unit_tests
results+=("Unit:$?")
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
fi
# E2E tests
if [[ $failed -eq 0 ]]; then
run_web_e2e_tests
results+=("E2E:$?")
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
fi
# A11y tests (non-blocking)
run_web_a11y_tests
results+=("A11y:$?")
# Print summary
log_section "Web Test Results"
for result in "${results[@]}"; do
local name="${result%%:*}"
local status="${result##*:}"
if [[ "$status" == "0" ]]; then
print_status "Web $name" "true"
else
print_status "Web $name" "false"
fi
done
return $failed
}
run_web_full() {
log_section "Full Web Test Suite"
log_info "Running all web tests including extended categories"
local failed=0
# PR-gating tests
run_web_pr_gating || failed=1
# Extended tests
run_web_storybook_build || log_warn "Storybook build failed (non-blocking)"
run_web_lighthouse || log_warn "Lighthouse audit failed (non-blocking)"
return $failed
}
# =============================================================================
# EXPORTS
# =============================================================================
export -f check_web_dependencies
export -f install_web_dependencies
export -f ensure_web_dependencies
export -f run_web_unit_tests
export -f run_web_e2e_tests
export -f run_web_a11y_tests
export -f run_web_build
export -f run_web_storybook_build
export -f run_web_lighthouse
export -f run_web_smoke
export -f run_web_pr_gating
export -f run_web_full
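# Example local run after sourcing (a sketch; the lib/ paths follow the
# convention in the sibling scripts' headers):
#   source devops/scripts/lib/ci-common.sh   # logging helpers used here
#   source devops/scripts/lib/ci-web.sh
#   run_web_pr_gating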


@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1
# ============================================================================
# Standard Exit Codes
# ============================================================================
# Success
export EXIT_SUCCESS=0
# General errors (1-9)
export EXIT_ERROR=1 # Generic error
export EXIT_USAGE=2 # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3 # Configuration error
export EXIT_NOT_FOUND=4 # File/resource not found
export EXIT_PERMISSION=5 # Permission denied
export EXIT_IO_ERROR=6 # I/O error
export EXIT_NETWORK_ERROR=7 # Network error
export EXIT_TIMEOUT=8 # Operation timed out
export EXIT_INTERRUPTED=9 # User interrupted (Ctrl+C)
# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10 # Required tool not installed
export EXIT_TOOL_ERROR=11 # Tool execution failed
export EXIT_VERSION_MISMATCH=12 # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13 # Dependency resolution failed
# Build errors (20-29)
export EXIT_BUILD_FAILED=20 # Build compilation failed
export EXIT_RESTORE_FAILED=21 # Package restore failed
export EXIT_PUBLISH_FAILED=22 # Publish failed
export EXIT_PACKAGING_FAILED=23 # Packaging failed
# Test errors (30-39)
export EXIT_TEST_FAILED=30 # Tests failed
export EXIT_TEST_TIMEOUT=31 # Test timed out
export EXIT_FIXTURE_ERROR=32 # Test fixture error
export EXIT_DETERMINISM_FAIL=33 # Determinism check failed
# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40 # Deployment failed
export EXIT_ROLLBACK_FAILED=41 # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43 # Container registry error
# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51 # Schema validation failed
export EXIT_LINT_ERROR=52 # Lint check failed
export EXIT_FORMAT_ERROR=53 # Format check failed
export EXIT_LICENSE_ERROR=54 # License compliance failed
# Security errors (60-69)
export EXIT_SECURITY_ERROR=60 # Security check failed
export EXIT_SECRETS_FOUND=61 # Secrets detected in code
export EXIT_VULN_FOUND=62 # Vulnerabilities found
export EXIT_SIGN_FAILED=63 # Signing failed
export EXIT_VERIFY_FAILED=64 # Verification failed
# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70 # Git operation failed
export EXIT_DIRTY_WORKTREE=71 # Uncommitted changes
export EXIT_MERGE_CONFLICT=72 # Merge conflict
export EXIT_BRANCH_ERROR=73 # Branch operation failed
# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80 # .NET specific error
export EXIT_DOCKER_ERROR=81 # Docker specific error
export EXIT_HELM_ERROR=82 # Helm specific error
export EXIT_KUBECTL_ERROR=83 # kubectl specific error
export EXIT_NPM_ERROR=84 # npm specific error
export EXIT_PYTHON_ERROR=85 # Python specific error
# Legacy compatibility
export EXIT_TOOLCHAIN=69 # Tool not found (legacy, use EXIT_MISSING_TOOL)
# ============================================================================
# Helper Functions
# ============================================================================
# Get exit code name from number
exit_code_name() {
local code="${1:-}"
case "$code" in
0) echo "SUCCESS" ;;
1) echo "ERROR" ;;
2) echo "USAGE" ;;
3) echo "CONFIG_ERROR" ;;
4) echo "NOT_FOUND" ;;
5) echo "PERMISSION" ;;
6) echo "IO_ERROR" ;;
7) echo "NETWORK_ERROR" ;;
8) echo "TIMEOUT" ;;
9) echo "INTERRUPTED" ;;
10) echo "MISSING_TOOL" ;;
11) echo "TOOL_ERROR" ;;
12) echo "VERSION_MISMATCH" ;;
13) echo "DEPENDENCY_ERROR" ;;
20) echo "BUILD_FAILED" ;;
21) echo "RESTORE_FAILED" ;;
22) echo "PUBLISH_FAILED" ;;
23) echo "PACKAGING_FAILED" ;;
30) echo "TEST_FAILED" ;;
31) echo "TEST_TIMEOUT" ;;
32) echo "FIXTURE_ERROR" ;;
33) echo "DETERMINISM_FAIL" ;;
40) echo "DEPLOY_FAILED" ;;
41) echo "ROLLBACK_FAILED" ;;
42) echo "HEALTH_CHECK_FAIL" ;;
43) echo "REGISTRY_ERROR" ;;
50) echo "VALIDATION_FAILED" ;;
51) echo "SCHEMA_ERROR" ;;
52) echo "LINT_ERROR" ;;
53) echo "FORMAT_ERROR" ;;
54) echo "LICENSE_ERROR" ;;
60) echo "SECURITY_ERROR" ;;
61) echo "SECRETS_FOUND" ;;
62) echo "VULN_FOUND" ;;
63) echo "SIGN_FAILED" ;;
64) echo "VERIFY_FAILED" ;;
69) echo "TOOLCHAIN (legacy)" ;;
70) echo "GIT_ERROR" ;;
71) echo "DIRTY_WORKTREE" ;;
72) echo "MERGE_CONFLICT" ;;
73) echo "BRANCH_ERROR" ;;
80) echo "DOTNET_ERROR" ;;
81) echo "DOCKER_ERROR" ;;
82) echo "HELM_ERROR" ;;
83) echo "KUBECTL_ERROR" ;;
84) echo "NPM_ERROR" ;;
85) echo "PYTHON_ERROR" ;;
126) echo "COMMAND_NOT_EXECUTABLE" ;;
127) echo "COMMAND_NOT_FOUND" ;;
*)
if [[ $code -ge 128 ]] && [[ $code -le 255 ]]; then
local signal=$((code - 128))
echo "SIGNAL_${signal}"
else
echo "UNKNOWN_${code}"
fi
;;
esac
}
# Check if exit code indicates success
is_success() {
[[ "${1:-1}" -eq 0 ]]
}
# Check if exit code indicates error
is_error() {
[[ "${1:-0}" -ne 0 ]]
}
# Exit with message and code
exit_with() {
local code="${1:-1}"
shift
if [[ $# -gt 0 ]]; then
echo "$@" >&2
fi
exit "$code"
}
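# Example (a sketch):
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#   command -v docker >/dev/null || exit_with "$EXIT_MISSING_TOOL" "docker is required"
#   exit_code_name 30   # -> TEST_FAILED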


@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Repository Information
# ============================================================================
# Get repository root directory
git_root() {
git rev-parse --show-toplevel 2>/dev/null || echo "."
}
# Check if current directory is a git repository
is_git_repo() {
git rev-parse --git-dir >/dev/null 2>&1
}
# Get current commit SHA (full)
git_sha() {
git rev-parse HEAD 2>/dev/null
}
# Get current commit SHA (short)
git_sha_short() {
git rev-parse --short HEAD 2>/dev/null
}
# Get current branch name
git_branch() {
git rev-parse --abbrev-ref HEAD 2>/dev/null
}
# Get current tag (if HEAD is tagged)
git_tag() {
git describe --tags --exact-match HEAD 2>/dev/null || echo ""
}
# Get latest tag
git_latest_tag() {
git describe --tags --abbrev=0 2>/dev/null || echo ""
}
# Get remote URL
git_remote_url() {
local remote="${1:-origin}"
git remote get-url "$remote" 2>/dev/null
}
# Get repository name from remote URL
git_repo_name() {
local url
url=$(git_remote_url "${1:-origin}")
basename "$url" .git
}
# ============================================================================
# Commit Information
# ============================================================================
# Get commit message
git_commit_message() {
local sha="${1:-HEAD}"
git log -1 --format="%s" "$sha" 2>/dev/null
}
# Get commit author
git_commit_author() {
local sha="${1:-HEAD}"
git log -1 --format="%an" "$sha" 2>/dev/null
}
# Get commit author email
git_commit_author_email() {
local sha="${1:-HEAD}"
git log -1 --format="%ae" "$sha" 2>/dev/null
}
# Get commit timestamp (ISO 8601)
git_commit_timestamp() {
local sha="${1:-HEAD}"
git log -1 --format="%aI" "$sha" 2>/dev/null
}
# Get commit timestamp (Unix epoch)
git_commit_epoch() {
local sha="${1:-HEAD}"
git log -1 --format="%at" "$sha" 2>/dev/null
}
# ============================================================================
# Working Tree State
# ============================================================================
# Check if working tree is clean
git_is_clean() {
[[ -z "$(git status --porcelain 2>/dev/null)" ]]
}
# Check if working tree is dirty
git_is_dirty() {
! git_is_clean
}
# Get list of changed files
git_changed_files() {
git status --porcelain 2>/dev/null | cut -c4- # porcelain is "XY path"; cut keeps paths containing spaces
}
# Get list of staged files
git_staged_files() {
git diff --cached --name-only 2>/dev/null
}
# Get list of untracked files
git_untracked_files() {
git ls-files --others --exclude-standard 2>/dev/null
}
# ============================================================================
# Diff and History
# ============================================================================
# Get files changed between two refs
git_diff_files() {
local from="${1:-HEAD~1}"
local to="${2:-HEAD}"
git diff --name-only "$from" "$to" 2>/dev/null
}
# Get files changed in last N commits
git_recent_files() {
local count="${1:-1}"
git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}
# Check if file was changed between two refs
git_file_changed() {
local file="$1"
local from="${2:-HEAD~1}"
local to="${3:-HEAD}"
# The pathspec already limits output to "$file"; just test for any output
git diff --name-only "$from" "$to" -- "$file" 2>/dev/null | grep -q .
}
# Get commits between two refs
git_commits_between() {
local from="${1:-HEAD~10}"
local to="${2:-HEAD}"
git log --oneline "$from".."$to" 2>/dev/null
}
# ============================================================================
# Tag Operations
# ============================================================================
# Create a tag
git_create_tag() {
local tag="$1"
local message="${2:-}"
if [[ -n "$message" ]]; then
git tag -a "$tag" -m "$message"
else
git tag "$tag"
fi
}
# Delete a tag
git_delete_tag() {
local tag="$1"
git tag -d "$tag" 2>/dev/null
}
# Push tag to remote
git_push_tag() {
local tag="$1"
local remote="${2:-origin}"
git push "$remote" "$tag"
}
# List tags matching pattern
git_list_tags() {
local pattern="${1:-*}"
git tag -l "$pattern" 2>/dev/null
}
# ============================================================================
# Branch Operations
# ============================================================================
# Check if branch exists
git_branch_exists() {
local branch="$1"
git show-ref --verify --quiet "refs/heads/$branch" 2>/dev/null
}
# Check if remote branch exists
git_remote_branch_exists() {
local branch="$1"
local remote="${2:-origin}"
git show-ref --verify --quiet "refs/remotes/$remote/$branch" 2>/dev/null
}
# Get default branch
git_default_branch() {
local remote="${1:-origin}"
git remote show "$remote" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get version string for CI builds
git_ci_version() {
local tag
tag=$(git_tag)
if [[ -n "$tag" ]]; then
echo "$tag"
else
local branch sha
branch=$(git_branch | tr '/' '-')
sha=$(git_sha_short)
echo "${branch}-${sha}"
fi
}
# Check if current commit is on default branch
git_is_default_branch() {
local current default
current=$(git_branch)
default=$(git_default_branch)
[[ "$current" == "$default" ]]
}
# Check if running in CI environment
git_is_ci() {
[[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]] || [[ -n "${GITLAB_CI:-}" ]]
}
# Ensure clean worktree or fail
git_require_clean() {
if git_is_dirty; then
log_error "Working tree is dirty. Commit or stash changes first."
return "${EXIT_DIRTY_WORKTREE:-71}"
fi
}
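# Usage sketch, using only the helpers defined above; runs only when this file
# is executed directly inside a git checkout (no-op when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    if is_git_repo; then
        echo "root:    $(git_root)"
        echo "branch:  $(git_branch) @ $(git_sha_short)"
        echo "version: $(git_ci_version)"
        if git_is_clean; then echo "worktree: clean"; else echo "worktree: dirty"; fi
    else
        echo "not a git repository"
    fi
fi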

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Hash Computation
# ============================================================================
# Compute SHA-256 hash of a file
compute_sha256() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v sha256sum >/dev/null 2>&1; then
sha256sum "$file" | awk '{print $1}'
elif command -v shasum >/dev/null 2>&1; then
shasum -a 256 "$file" | awk '{print $1}'
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -sha256 "$file" | awk '{print $NF}'
else
log_error "No SHA-256 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute SHA-512 hash of a file
compute_sha512() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v sha512sum >/dev/null 2>&1; then
sha512sum "$file" | awk '{print $1}'
elif command -v shasum >/dev/null 2>&1; then
shasum -a 512 "$file" | awk '{print $1}'
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -sha512 "$file" | awk '{print $NF}'
else
log_error "No SHA-512 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute MD5 hash of a file (for compatibility, not security)
compute_md5() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v md5sum >/dev/null 2>&1; then
md5sum "$file" | awk '{print $1}'
elif command -v md5 >/dev/null 2>&1; then
md5 -q "$file"
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -md5 "$file" | awk '{print $NF}'
else
log_error "No MD5 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute hash of string
compute_string_hash() {
local string="$1"
local algorithm="${2:-sha256}"
# Pick the hash tool explicitly: in a "cmd | awk || fallback" chain the
# fallback never fires when the tool is missing, because awk still exits 0.
case "$algorithm" in
sha256)
if command -v sha256sum >/dev/null 2>&1; then
echo -n "$string" | sha256sum | awk '{print $1}'
else
echo -n "$string" | shasum -a 256 | awk '{print $1}'
fi
;;
sha512)
if command -v sha512sum >/dev/null 2>&1; then
echo -n "$string" | sha512sum | awk '{print $1}'
else
echo -n "$string" | shasum -a 512 | awk '{print $1}'
fi
;;
md5)
if command -v md5sum >/dev/null 2>&1; then
echo -n "$string" | md5sum | awk '{print $1}'
else
echo -n "$string" | md5 2>/dev/null
fi
;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
}
# ============================================================================
# Checksum Files
# ============================================================================
# Write checksum file for a single file
write_checksum() {
local file="$1"
local checksum_file="${2:-${file}.sha256}"
local algorithm="${3:-sha256}"
local hash
case "$algorithm" in
sha256) hash=$(compute_sha256 "$file") ;;
sha512) hash=$(compute_sha512 "$file") ;;
md5) hash=$(compute_md5 "$file") ;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
if [[ -z "$hash" ]]; then
return "${EXIT_ERROR:-1}"
fi
local basename
basename=$(basename "$file")
echo "$hash $basename" > "$checksum_file"
log_debug "Wrote checksum to $checksum_file"
}
# Write checksums for multiple files
write_checksums() {
local output_file="$1"
shift
local files=("$@")
: > "$output_file"
for file in "${files[@]}"; do
if [[ -f "$file" ]]; then
local hash basename
hash=$(compute_sha256 "$file")
basename=$(basename "$file")
echo "$hash $basename" >> "$output_file"
fi
done
log_debug "Wrote checksums to $output_file"
}
# ============================================================================
# Checksum Verification
# ============================================================================
# Verify checksum of a file
verify_checksum() {
local file="$1"
local expected_hash="$2"
local algorithm="${3:-sha256}"
local actual_hash
case "$algorithm" in
sha256) actual_hash=$(compute_sha256 "$file") ;;
sha512) actual_hash=$(compute_sha512 "$file") ;;
md5) actual_hash=$(compute_md5 "$file") ;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
if [[ "$actual_hash" == "$expected_hash" ]]; then
log_debug "Checksum verified: $file"
return 0
else
log_error "Checksum mismatch for $file"
log_error " Expected: $expected_hash"
log_error " Actual: $actual_hash"
return "${EXIT_VERIFY_FAILED:-64}"
fi
}
# Verify checksums from file (sha256sum -c style)
verify_checksums_file() {
local checksum_file="$1"
local base_dir="${2:-.}"
if [[ ! -f "$checksum_file" ]]; then
log_error "Checksum file not found: $checksum_file"
return "${EXIT_NOT_FOUND:-4}"
fi
local failures=0
while IFS= read -r line; do
# Skip empty lines and comments
[[ -z "$line" ]] && continue
[[ "$line" == \#* ]] && continue
local hash filename
hash=$(echo "$line" | awk '{print $1}')
filename=$(echo "$line" | awk '{print $2}')
local filepath="${base_dir}/${filename}"
if [[ ! -f "$filepath" ]]; then
log_error "File not found: $filepath"
((failures++))
continue
fi
if ! verify_checksum "$filepath" "$hash"; then
((failures++))
fi
done < "$checksum_file"
if [[ $failures -gt 0 ]]; then
log_error "$failures checksum verification(s) failed"
return "${EXIT_VERIFY_FAILED:-64}"
fi
log_info "All checksums verified"
return 0
}
# ============================================================================
# Helpers
# ============================================================================
# Check if two files have the same content
files_identical() {
local file1="$1"
local file2="$2"
[[ -f "$file1" ]] && [[ -f "$file2" ]] || return 1
local hash1 hash2
hash1=$(compute_sha256 "$file1")
hash2=$(compute_sha256 "$file2")
[[ "$hash1" == "$hash2" ]]
}
# Get short hash for display
short_hash() {
local hash="$1"
local length="${2:-8}"
echo "${hash:0:$length}"
}
# Generate deterministic ID from inputs
generate_id() {
local inputs="$*"
compute_string_hash "$inputs" sha256 | head -c 16
}
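# Usage sketch: checksum round-trip with the helpers above, in a throwaway
# directory. Runs only when this file is executed directly (no-op when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    demo_dir=$(mktemp -d "${TMPDIR:-/tmp}/stellaops-hash-demo.XXXXXX")
    echo "hello" > "${demo_dir}/a.txt"
    write_checksum "${demo_dir}/a.txt"                        # writes a.txt.sha256
    verify_checksums_file "${demo_dir}/a.txt.sha256" "$demo_dir"
    echo "id: $(generate_id demo a.txt)"                      # deterministic 16-char id
    rm -rf "$demo_dir"
fi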

View File

@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_LOGGING_LOADED=1
# Colors (disable with NO_COLOR=1)
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
export LOG_COLOR_RED='\033[0;31m'
export LOG_COLOR_GREEN='\033[0;32m'
export LOG_COLOR_YELLOW='\033[1;33m'
export LOG_COLOR_BLUE='\033[0;34m'
export LOG_COLOR_MAGENTA='\033[0;35m'
export LOG_COLOR_CYAN='\033[0;36m'
export LOG_COLOR_GRAY='\033[0;90m'
export LOG_COLOR_RESET='\033[0m'
else
export LOG_COLOR_RED=''
export LOG_COLOR_GREEN=''
export LOG_COLOR_YELLOW=''
export LOG_COLOR_BLUE=''
export LOG_COLOR_MAGENTA=''
export LOG_COLOR_CYAN=''
export LOG_COLOR_GRAY=''
export LOG_COLOR_RESET=''
fi
# Log level configuration
export LOG_LEVEL="${LOG_LEVEL:-INFO}"
# Convert log level to numeric for comparison
_log_level_to_num() {
case "$1" in
DEBUG) echo 0 ;;
INFO) echo 1 ;;
WARN) echo 2 ;;
ERROR) echo 3 ;;
*) echo 1 ;;
esac
}
# Check if message should be logged based on level
_should_log() {
local msg_level="$1"
local current_level="${LOG_LEVEL:-INFO}"
local msg_num current_num
msg_num=$(_log_level_to_num "$msg_level")
current_num=$(_log_level_to_num "$current_level")
[[ $msg_num -ge $current_num ]]
}
# Format timestamp
_log_timestamp() {
if [[ "${LOG_TIMESTAMPS:-true}" == "true" ]]; then
date -u +"%Y-%m-%dT%H:%M:%SZ"
fi
}
# Core logging function
_log() {
local level="$1"
local color="$2"
shift 2
if ! _should_log "$level"; then
return 0
fi
local timestamp
timestamp=$(_log_timestamp)
local prefix=""
if [[ -n "$timestamp" ]]; then
prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
fi
echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}
# Public logging functions
log_debug() {
_log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}
log_info() {
_log "INFO" "${LOG_COLOR_GREEN}" "$@"
}
log_warn() {
_log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}
log_error() {
_log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}
# Step logging (for workflow stages)
log_step() {
_log "STEP" "${LOG_COLOR_BLUE}" "$@"
}
# Success message
log_success() {
_log "OK" "${LOG_COLOR_GREEN}" "$@"
}
# GitHub Actions annotations
log_gh_notice() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::notice::$*"
else
log_info "$@"
fi
}
log_gh_warning() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::warning::$*"
else
log_warn "$@"
fi
}
log_gh_error() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::error::$*"
else
log_error "$@"
fi
}
# Group logging (for GitHub Actions)
log_group_start() {
local title="$1"
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::group::$title"
else
log_step "=== $title ==="
fi
}
log_group_end() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::endgroup::"
fi
}
# Masked logging (for secrets)
log_masked() {
local value="$1"
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::add-mask::$value"
fi
}
# Die with error message
die() {
log_error "$@"
exit 1
}
# Conditional die
die_if() {
local condition="$1"
shift
if eval "$condition"; then
die "$@"
fi
}
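# Usage sketch: exercises each level and the grouping helpers above. Runs only
# when this file is executed directly (no-op when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    LOG_LEVEL=DEBUG
    log_debug "visible because LOG_LEVEL=DEBUG"
    log_info "informational message"
    log_warn "warning message"
    log_error "error message (sent to stderr)"
    log_group_start "Grouped work"
    log_success "step finished"
    log_group_end
fi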

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Path Normalization
# ============================================================================
# Normalize path (resolve .., ., symlinks)
normalize_path() {
local path="$1"
# Handle empty path
if [[ -z "$path" ]]; then
echo "."
return 0
fi
# Try realpath first (most reliable)
if command -v realpath >/dev/null 2>&1; then
realpath -m "$path" 2>/dev/null && return 0
fi
# Fallback to Python
if command -v python3 >/dev/null 2>&1; then
python3 -c "import os; print(os.path.normpath('$path'))" 2>/dev/null && return 0
fi
# Manual normalization (basic)
echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}
# Get absolute path
absolute_path() {
local path="$1"
if [[ "$path" == /* ]]; then
normalize_path "$path"
else
normalize_path "$(pwd)/$path"
fi
}
# Get relative path from one path to another
relative_path() {
local from="$1"
local to="$2"
if command -v realpath >/dev/null 2>&1; then
realpath --relative-to="$from" "$to" 2>/dev/null && return 0
fi
if command -v python3 >/dev/null 2>&1; then
python3 -c "import os.path; print(os.path.relpath('$to', '$from'))" 2>/dev/null && return 0
fi
# Fallback: just return absolute path
absolute_path "$to"
}
# ============================================================================
# Path Components
# ============================================================================
# Get directory name
dir_name() {
dirname "$1"
}
# Get base name
base_name() {
basename "$1"
}
# Get file extension
file_extension() {
local path="$1"
local base
base=$(basename "$path")
if [[ "$base" == *.* ]]; then
echo "${base##*.}"
else
echo ""
fi
}
# Get file name without extension
file_stem() {
local path="$1"
local base
base=$(basename "$path")
if [[ "$base" == *.* ]]; then
echo "${base%.*}"
else
echo "$base"
fi
}
# ============================================================================
# Directory Operations
# ============================================================================
# Ensure directory exists
ensure_directory() {
local dir="$1"
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
fi
}
# Create temporary directory
create_temp_dir() {
local prefix="${1:-stellaops}"
mktemp -d "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}
# Create temporary file
create_temp_file() {
local prefix="${1:-stellaops}"
local suffix="${2:-}"
mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX${suffix}"
}
# Clean temporary directory
clean_temp() {
local path="$1"
if [[ -d "$path" ]] && [[ "$path" == *stellaops* ]]; then
rm -rf "$path"
fi
}
# ============================================================================
# File Existence Checks
# ============================================================================
# Check if file exists
file_exists() {
[[ -f "$1" ]]
}
# Check if directory exists
dir_exists() {
[[ -d "$1" ]]
}
# Check if path exists (file or directory)
path_exists() {
[[ -e "$1" ]]
}
# Check if file is readable
file_readable() {
[[ -r "$1" ]]
}
# Check if file is writable
file_writable() {
[[ -w "$1" ]]
}
# Check if file is executable
file_executable() {
[[ -x "$1" ]]
}
# ============================================================================
# File Discovery
# ============================================================================
# Find files by pattern
find_files() {
local dir="${1:-.}"
local pattern="${2:-*}"
find "$dir" -type f -name "$pattern" 2>/dev/null
}
# Find files by extension
find_by_extension() {
local dir="${1:-.}"
local ext="${2:-}"
find "$dir" -type f -name "*.${ext}" 2>/dev/null
}
# Find project files (csproj, package.json, etc.)
find_project_files() {
local dir="${1:-.}"
find "$dir" -type f \( \
-name "*.csproj" -o \
-name "*.fsproj" -o \
-name "package.json" -o \
-name "Cargo.toml" -o \
-name "go.mod" -o \
-name "pom.xml" -o \
-name "build.gradle" \
\) 2>/dev/null | grep -v '/node_modules/' | grep -v '/bin/' | grep -v '/obj/'
}
# Find test projects
find_test_projects() {
local dir="${1:-.}"
find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -v bin | grep -v obj
}
# ============================================================================
# Path Validation
# ============================================================================
# Check if path is under directory
path_under() {
local path="$1"
local dir="$2"
local abs_path abs_dir
abs_path=$(absolute_path "$path")
abs_dir=$(absolute_path "$dir")
[[ "$abs_path" == "$abs_dir"* ]]
}
# Validate path is safe (no directory traversal)
path_is_safe() {
local path="$1"
local base="${2:-.}"
# Check for obvious traversal attempts
if [[ "$path" == *".."* ]] || [[ "$path" == "/*" ]]; then
return 1
fi
# Verify resolved path is under base
path_under "$path" "$base"
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get artifact output directory
get_artifact_dir() {
local name="${1:-artifacts}"
local base="${GITHUB_WORKSPACE:-$(pwd)}"
echo "${base}/out/${name}"
}
# Get test results directory
get_test_results_dir() {
local base="${GITHUB_WORKSPACE:-$(pwd)}"
echo "${base}/TestResults"
}
# Ensure artifact directory exists and return path
ensure_artifact_dir() {
local name="${1:-artifacts}"
local dir
dir=$(get_artifact_dir "$name")
ensure_directory "$dir"
echo "$dir"
}
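# Usage sketch for the helpers above; runs only when executed directly.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    echo "normalized: $(normalize_path './a/../b//c')"     # -> $PWD/b/c with realpath -m
    echo "extension:  $(file_extension 'archive.tar.gz')"  # -> gz
    echo "stem:       $(file_stem 'archive.tar.gz')"       # -> archive.tar
    path_is_safe 'sub/file.txt' . && echo "safe:   sub/file.txt"
    path_is_safe '../escape' .    || echo "unsafe: ../escape"
fi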

deploy/scripts/local-ci.sh (Normal file, 1050 lines)

File diff suppressed because it is too large

View File

@@ -0,0 +1,330 @@
#!/usr/bin/env bash
#
# Migrate legacy configuration structure to consolidated etc/
#
# This script migrates:
# - certificates/ -> etc/certificates/
# - config/ -> etc/crypto/ and etc/env/
# - policies/ -> etc/policy/
# - etc/rootpack/ -> etc/crypto/profiles/
#
# Usage:
# ./devops/scripts/migrate-config.sh [--dry-run]
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
DRY_RUN=false
[[ "${1:-}" == "--dry-run" ]] && DRY_RUN=true
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok() { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_dry() { echo -e "${YELLOW}[DRY-RUN]${NC} $*"; }
# Execute or log command
run_cmd() {
if [[ "${DRY_RUN}" == true ]]; then
log_dry "$*"
else
"$@"
fi
}
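# Example: "run_cmd mv src dst" executes normally but only logs under --dry-run.
# Caveat: this wraps a single simple command; pipes and redirections are not
# interpreted as part of the wrapped command, so wrap those in a function first.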
# Create backup
create_backup() {
local backup_file="${ROOT_DIR}/config-backup-$(date +%Y%m%d-%H%M%S).tar.gz"
log_info "Creating backup: ${backup_file}"
if [[ "${DRY_RUN}" == true ]]; then
log_dry "Would create backup of: certificates/ config/ policies/ etc/"
return
fi
local dirs_to_backup=()
[[ -d "${ROOT_DIR}/certificates" ]] && dirs_to_backup+=("certificates")
[[ -d "${ROOT_DIR}/config" ]] && dirs_to_backup+=("config")
[[ -d "${ROOT_DIR}/policies" ]] && dirs_to_backup+=("policies")
[[ -d "${ROOT_DIR}/etc" ]] && dirs_to_backup+=("etc")
if [[ ${#dirs_to_backup[@]} -gt 0 ]]; then
cd "${ROOT_DIR}"
tar -czvf "${backup_file}" "${dirs_to_backup[@]}"
log_ok "Backup created: ${backup_file}"
else
log_warn "No directories to backup"
fi
}
# Create new directory structure
create_directories() {
log_info "Creating new directory structure..."
local dirs=(
"etc/certificates/trust-roots"
"etc/certificates/signing"
"etc/crypto/profiles/cn"
"etc/crypto/profiles/eu"
"etc/crypto/profiles/kr"
"etc/crypto/profiles/ru"
"etc/crypto/profiles/us-fips"
"etc/env"
"etc/policy/packs"
"etc/policy/schemas"
)
for dir in "${dirs[@]}"; do
run_cmd mkdir -p "${ROOT_DIR}/${dir}"
done
log_ok "Directory structure created"
}
# Migrate certificates/
migrate_certificates() {
local src_dir="${ROOT_DIR}/certificates"
if [[ ! -d "${src_dir}" ]]; then
log_info "No certificates/ directory found, skipping"
return
fi
log_info "Migrating certificates/..."
# Trust roots (CA bundles)
for f in "${src_dir}"/*-bundle*.pem "${src_dir}"/*-root*.pem "${src_dir}"/*_bundle*.pem "${src_dir}"/*_root*.pem 2>/dev/null; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
done
# Signing keys
for f in "${src_dir}"/*-signing-*.pem "${src_dir}"/*_signing_*.pem 2>/dev/null; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/signing/"
log_ok "Moved: $(basename "$f") -> etc/certificates/signing/"
done
# Move remaining .pem and .cer files to trust-roots
for f in "${src_dir}"/*.pem "${src_dir}"/*.cer 2>/dev/null; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
done
# Remove empty directory
if [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}")" ]]; then
run_cmd rmdir "${src_dir}"
log_ok "Removed empty: certificates/"
fi
}
# Migrate config/
migrate_config_dir() {
local src_dir="${ROOT_DIR}/config"
if [[ ! -d "${src_dir}" ]]; then
log_info "No config/ directory found, skipping"
return
fi
log_info "Migrating config/..."
# Map env files to crypto profiles
declare -A env_mapping=(
[".env.fips.example"]="us-fips/env.sample"
[".env.eidas.example"]="eu/env.sample"
[".env.ru-free.example"]="ru/env.sample"
[".env.ru-paid.example"]="ru/env-paid.sample"
[".env.sm.example"]="cn/env.sample"
[".env.kcmvp.example"]="kr/env.sample"
)
for src_name in "${!env_mapping[@]}"; do
local src_file="${src_dir}/env/${src_name}"
local dst_file="${ROOT_DIR}/etc/crypto/profiles/${env_mapping[$src_name]}"
if [[ -f "${src_file}" ]]; then
run_cmd mkdir -p "$(dirname "${dst_file}")"
run_cmd mv "${src_file}" "${dst_file}"
log_ok "Moved: ${src_name} -> etc/crypto/profiles/${env_mapping[$src_name]}"
fi
done
# Remove crypto-profiles.sample.json (superseded)
if [[ -f "${src_dir}/crypto-profiles.sample.json" ]]; then
run_cmd rm "${src_dir}/crypto-profiles.sample.json"
log_ok "Removed: config/crypto-profiles.sample.json (superseded by etc/crypto/)"
fi
# Remove empty directories
[[ -d "${src_dir}/env" ]] && [[ -z "$(ls -A "${src_dir}/env" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/env"
[[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}
# Migrate policies/
migrate_policies() {
local src_dir="${ROOT_DIR}/policies"
if [[ ! -d "${src_dir}" ]]; then
log_info "No policies/ directory found, skipping"
return
fi
log_info "Migrating policies/..."
# Move policy packs
for f in "${src_dir}"/*.yaml 2>/dev/null; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${ROOT_DIR}/etc/policy/packs/"
log_ok "Moved: $(basename "$f") -> etc/policy/packs/"
done
# Move schemas
if [[ -d "${src_dir}/schemas" ]]; then
for f in "${src_dir}/schemas"/*.json 2>/dev/null; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${ROOT_DIR}/etc/policy/schemas/"
log_ok "Moved: schemas/$(basename "$f") -> etc/policy/schemas/"
done
[[ -z "$(ls -A "${src_dir}/schemas" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/schemas"
fi
# Move AGENTS.md if present
[[ -f "${src_dir}/AGENTS.md" ]] && run_cmd mv "${src_dir}/AGENTS.md" "${ROOT_DIR}/etc/policy/"
# Remove empty directory
[[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}
# Migrate etc/rootpack/ to etc/crypto/profiles/
migrate_rootpack() {
local src_dir="${ROOT_DIR}/etc/rootpack"
if [[ ! -d "${src_dir}" ]]; then
log_info "No etc/rootpack/ directory found, skipping"
return
fi
log_info "Migrating etc/rootpack/ to etc/crypto/profiles/..."
for region_dir in "${src_dir}"/*; do
[[ -d "${region_dir}" ]] || continue
local region_name=$(basename "${region_dir}")
local target_dir="${ROOT_DIR}/etc/crypto/profiles/${region_name}"
run_cmd mkdir -p "${target_dir}"
for f in "${region_dir}"/*; do
[[ -f "$f" ]] || continue
run_cmd mv "$f" "${target_dir}/"
log_ok "Moved: rootpack/${region_name}/$(basename "$f") -> etc/crypto/profiles/${region_name}/"
done
[[ -z "$(ls -A "${region_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${region_dir}"
done
[[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
}
# Validate migration
validate_migration() {
log_info "Validating migration..."
local errors=0
# Check new structure exists
local required=(
"etc/certificates"
"etc/crypto/profiles"
"etc/policy"
)
for dir in "${required[@]}"; do
if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
log_error "Missing: ${dir}"
errors=$((errors + 1)) # not ((errors++)): that returns 1 when errors is 0 and trips set -e
fi
done
# Check legacy directories are gone
local legacy=(
"certificates"
"config"
"policies"
"etc/rootpack"
)
for dir in "${legacy[@]}"; do
if [[ -d "${ROOT_DIR}/${dir}" ]] && [[ -n "$(ls -A "${ROOT_DIR}/${dir}" 2>/dev/null)" ]]; then
log_warn "Legacy directory still has content: ${dir}"
fi
done
if [[ ${errors} -gt 0 ]]; then
log_error "Validation failed"
return 1
fi
log_ok "Migration validated"
}
# Print summary
print_summary() {
echo ""
echo "========================================"
if [[ "${DRY_RUN}" == true ]]; then
echo " Migration Dry Run Complete"
else
echo " Migration Complete"
fi
echo "========================================"
echo ""
echo "New structure:"
echo " etc/certificates/ - Trust anchors and signing keys"
echo " etc/crypto/profiles/ - Regional crypto profiles"
echo " etc/policy/ - Policy engine configuration"
echo ""
if [[ "${DRY_RUN}" == true ]]; then
echo "Run without --dry-run to apply changes"
else
echo "Next steps:"
echo " 1. Update Docker Compose volume mounts"
echo " 2. Update any hardcoded paths in scripts"
echo " 3. Restart services and validate"
echo ""
echo "Rollback:"
echo " tar -xzvf config-backup-*.tar.gz"
fi
echo ""
}
# Main
main() {
if [[ "${DRY_RUN}" == true ]]; then
log_info "DRY RUN - no changes will be made"
fi
create_backup
create_directories
migrate_certificates
migrate_config_dir
migrate_policies
migrate_rootpack
validate_migration
print_summary
}
main "$@"

View File

@@ -0,0 +1,197 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# rotate-rekor-key.sh
# Sprint: SPRINT_20260125_003_Attestor_trust_workflows_conformance
# Task: WORKFLOW-002 - Create key rotation workflow script
# Description: Rotate Rekor public key with grace period
# -----------------------------------------------------------------------------
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
usage() {
echo "Usage: $0 <phase> [options]"
echo ""
echo "Rotate Rekor public key through a dual-key grace period."
echo ""
echo "Phases:"
echo " add-key Add new key to TUF (starts grace period)"
echo " verify Verify both keys are active"
echo " remove-old Remove old key (after grace period)"
echo ""
echo "Options:"
echo " --repo DIR TUF repository directory"
echo " --new-key FILE Path to new Rekor public key"
echo " --new-key-name NAME Target name for new key (default: rekor-key-v{N+1})"
echo " --old-key-name NAME Target name for old key to remove"
echo " --grace-days N Grace period in days (default: 7)"
echo " -h, --help Show this help message"
echo ""
echo "Example (3-phase rotation):"
echo " # Phase 1: Add new key"
echo " $0 add-key --repo /path/to/tuf --new-key rekor-key-v2.pub"
echo ""
echo " # Wait for grace period (clients sync)"
echo " sleep 7d"
echo ""
echo " # Phase 2: Verify"
echo " $0 verify"
echo ""
echo " # Phase 3: Remove old key"
echo " $0 remove-old --repo /path/to/tuf --old-key-name rekor-key-v1"
exit 1
}
PHASE=""
REPO_DIR=""
NEW_KEY=""
NEW_KEY_NAME=""
OLD_KEY_NAME=""
GRACE_DAYS=7
while [[ $# -gt 0 ]]; do
case $1 in
add-key|verify|remove-old)
PHASE="$1"
shift
;;
--repo) REPO_DIR="$2"; shift 2 ;;
--new-key) NEW_KEY="$2"; shift 2 ;;
--new-key-name) NEW_KEY_NAME="$2"; shift 2 ;;
--old-key-name) OLD_KEY_NAME="$2"; shift 2 ;;
--grace-days) GRACE_DAYS="$2"; shift 2 ;;
-h|--help) usage ;;
*) log_error "Unknown argument: $1"; usage ;;
esac
done
if [[ -z "$PHASE" ]]; then
log_error "Phase is required"
usage
fi
echo ""
echo "================================================"
echo " Rekor Key Rotation - Phase: $PHASE"
echo "================================================"
echo ""
case "$PHASE" in
add-key)
if [[ -z "$REPO_DIR" ]] || [[ -z "$NEW_KEY" ]]; then
log_error "add-key requires --repo and --new-key"
usage
fi
if [[ ! -f "$NEW_KEY" ]]; then
log_error "New key file not found: $NEW_KEY"
exit 1
fi
if [[ ! -d "$REPO_DIR" ]]; then
log_error "TUF repository not found: $REPO_DIR"
exit 1
fi
# Determine new key name if not specified
if [[ -z "$NEW_KEY_NAME" ]]; then
# Find highest version and increment
HIGHEST=$(ls "$REPO_DIR/targets/" 2>/dev/null | grep -E '^rekor-key-v[0-9]+' | \
sed 's/rekor-key-v//' | sed 's/\.pub$//' | sort -n | tail -1 || echo "0")
NEW_VERSION=$((HIGHEST + 1))
NEW_KEY_NAME="rekor-key-v${NEW_VERSION}"
fi
log_step "Adding new Rekor key: $NEW_KEY_NAME"
log_info "Source: $NEW_KEY"
# Copy key to targets
cp "$NEW_KEY" "$REPO_DIR/targets/${NEW_KEY_NAME}.pub"
# Add to targets.json
if [[ -x "$REPO_DIR/scripts/add-target.sh" ]]; then
"$REPO_DIR/scripts/add-target.sh" "$REPO_DIR/targets/${NEW_KEY_NAME}.pub" "${NEW_KEY_NAME}.pub" --repo "$REPO_DIR"
else
log_warn "add-target.sh not found, updating targets.json manually required"
fi
log_info ""
log_info "Key added: $NEW_KEY_NAME"
log_info ""
log_warn "IMPORTANT: Dual-key period has started."
log_warn "Wait at least $GRACE_DAYS days before running 'remove-old' phase."
log_warn "During this time, clients will sync and receive both keys."
log_info ""
log_info "Next steps:"
echo " 1. Sign and publish updated TUF metadata"
echo " 2. Monitor client sync status"
echo " 3. After $GRACE_DAYS days, run: $0 remove-old --repo $REPO_DIR --old-key-name <old-key>"
;;
verify)
log_step "Verifying key rotation status..."
# Check local trust state
stella trust status --show-keys
log_info ""
log_info "Verify that:"
echo " 1. Both old and new Rekor keys are listed"
echo " 2. Service endpoints are resolving correctly"
echo " 3. Attestations signed with old key still verify"
;;
remove-old)
if [[ -z "$REPO_DIR" ]] || [[ -z "$OLD_KEY_NAME" ]]; then
log_error "remove-old requires --repo and --old-key-name"
usage
fi
if [[ ! -d "$REPO_DIR" ]]; then
log_error "TUF repository not found: $REPO_DIR"
exit 1
fi
OLD_KEY_FILE="$REPO_DIR/targets/${OLD_KEY_NAME}.pub"
if [[ ! -f "$OLD_KEY_FILE" ]]; then
OLD_KEY_FILE="$REPO_DIR/targets/${OLD_KEY_NAME}"
fi
if [[ ! -f "$OLD_KEY_FILE" ]]; then
log_error "Old key not found: $OLD_KEY_NAME"
exit 1
fi
log_step "Removing old Rekor key: $OLD_KEY_NAME"
log_warn "This is IRREVERSIBLE. Ensure all clients have synced the new key."
read -p "Type 'CONFIRM' to proceed: " CONFIRM
if [[ "$CONFIRM" != "CONFIRM" ]]; then
log_error "Aborted"
exit 1
fi
# Remove key file
rm -f "$OLD_KEY_FILE"
# Remove from targets.json (simplified - production should use proper JSON manipulation)
log_warn "Remember to update targets.json to remove the old key entry"
log_warn "Then sign and publish the updated metadata"
log_info ""
log_info "Old key removed: $OLD_KEY_NAME"
log_info "Key rotation complete!"
;;
esac
echo ""

View File

@@ -0,0 +1,265 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# rotate-signing-key.sh
# Sprint: SPRINT_20260125_003_Attestor_trust_workflows_conformance
# Task: WORKFLOW-002 - Create key rotation workflow script
# Description: Rotate organization signing key with dual-key grace period
# -----------------------------------------------------------------------------
set -euo pipefail
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
log_step() { echo -e "${BLUE}[STEP]${NC} $1"; }
usage() {
echo "Usage: $0 <phase> [options]"
echo ""
echo "Rotate organization signing key through a dual-key grace period."
echo ""
echo "Phases:"
echo " generate Generate new signing key"
echo " activate Activate new key (dual-key period starts)"
echo " verify Verify both keys are functional"
echo " retire Retire old key (after grace period)"
echo ""
echo "Options:"
echo " --key-dir DIR Directory for signing keys (default: /etc/stellaops/keys)"
echo " --key-type TYPE Key type: ecdsa-p256, ecdsa-p384, rsa-4096 (default: ecdsa-p256)"
echo " --new-key NAME Name for new key (default: signing-key-v{N+1})"
echo " --old-key NAME Name of old key to retire"
echo " --grace-days N Grace period in days (default: 14)"
echo " --ci-config FILE CI config file to update"
echo " -h, --help Show this help message"
echo ""
echo "Example (4-phase rotation):"
echo " # Phase 1: Generate new key"
echo " $0 generate --key-dir /etc/stellaops/keys"
echo ""
echo " # Phase 2: Activate (update CI to use both keys)"
echo " $0 activate --ci-config .gitea/workflows/ci.yaml"
echo ""
echo " # Wait for grace period"
echo " sleep 14d"
echo ""
echo " # Phase 3: Verify"
echo " $0 verify"
echo ""
echo " # Phase 4: Retire old key"
echo " $0 retire --old-key signing-key-v1"
exit 1
}
PHASE=""
KEY_DIR="/etc/stellaops/keys"
KEY_TYPE="ecdsa-p256"
NEW_KEY_NAME=""
OLD_KEY_NAME=""
GRACE_DAYS=14
CI_CONFIG=""
while [[ $# -gt 0 ]]; do
case $1 in
generate|activate|verify|retire)
PHASE="$1"
shift
;;
--key-dir) KEY_DIR="$2"; shift 2 ;;
--key-type) KEY_TYPE="$2"; shift 2 ;;
--new-key) NEW_KEY_NAME="$2"; shift 2 ;;
--old-key) OLD_KEY_NAME="$2"; shift 2 ;;
--grace-days) GRACE_DAYS="$2"; shift 2 ;;
--ci-config) CI_CONFIG="$2"; shift 2 ;;
-h|--help) usage ;;
*) log_error "Unknown argument: $1"; usage ;;
esac
done
if [[ -z "$PHASE" ]]; then
log_error "Phase is required"
usage
fi
echo ""
echo "================================================"
echo " Signing Key Rotation - Phase: $PHASE"
echo "================================================"
echo ""
case "$PHASE" in
generate)
log_step "Generating new signing key..."
mkdir -p "$KEY_DIR"
chmod 700 "$KEY_DIR"
# Determine new key name if not specified
if [[ -z "$NEW_KEY_NAME" ]]; then
HIGHEST=$(ls "$KEY_DIR" 2>/dev/null | grep -E '^signing-key-v[0-9]+' | \
sed 's/signing-key-v//' | sed 's/\.pem$//' | sort -n | tail -1 || echo "0")
NEW_VERSION=$((HIGHEST + 1))
NEW_KEY_NAME="signing-key-v${NEW_VERSION}"
fi
NEW_KEY_PATH="$KEY_DIR/${NEW_KEY_NAME}.pem"
NEW_PUB_PATH="$KEY_DIR/${NEW_KEY_NAME}.pub"
if [[ -f "$NEW_KEY_PATH" ]]; then
log_error "Key already exists: $NEW_KEY_PATH"
exit 1
fi
case "$KEY_TYPE" in
ecdsa-p256)
openssl ecparam -name prime256v1 -genkey -noout -out "$NEW_KEY_PATH"
openssl ec -in "$NEW_KEY_PATH" -pubout -out "$NEW_PUB_PATH" 2>/dev/null
;;
ecdsa-p384)
openssl ecparam -name secp384r1 -genkey -noout -out "$NEW_KEY_PATH"
openssl ec -in "$NEW_KEY_PATH" -pubout -out "$NEW_PUB_PATH" 2>/dev/null
;;
rsa-4096)
openssl genrsa -out "$NEW_KEY_PATH" 4096
openssl rsa -in "$NEW_KEY_PATH" -pubout -out "$NEW_PUB_PATH" 2>/dev/null
;;
*)
log_error "Unknown key type: $KEY_TYPE"
exit 1
;;
esac
chmod 600 "$NEW_KEY_PATH"
chmod 644 "$NEW_PUB_PATH"
log_info ""
log_info "New signing key generated:"
log_info " Private key: $NEW_KEY_PATH"
log_info " Public key: $NEW_PUB_PATH"
log_info ""
log_info "Key fingerprint:"
openssl dgst -sha256 -r "$NEW_PUB_PATH" | cut -d' ' -f1
log_info ""
log_warn "Store the public key securely for distribution."
log_warn "Next: Run '$0 activate' to enable dual-key signing."
;;
activate)
log_step "Activating dual-key signing..."
# List available keys
log_info "Available signing keys in $KEY_DIR:"
ls -la "$KEY_DIR"/*.pem 2>/dev/null || log_warn "No .pem files found"
if [[ -n "$CI_CONFIG" ]] && [[ -f "$CI_CONFIG" ]]; then
log_info ""
log_info "CI config file: $CI_CONFIG"
log_warn "Manual update required:"
echo " 1. Add the new key path to signing configuration"
echo " 2. Ensure both old and new keys can sign"
echo " 3. Update verification to accept both key signatures"
fi
log_info ""
log_info "Dual-key activation checklist:"
echo " [ ] New key added to CI/CD pipeline"
echo " [ ] New public key distributed to verifiers"
echo " [ ] Both keys tested for signing"
echo " [ ] Grace period documented: $GRACE_DAYS days"
log_info ""
log_warn "Grace period starts now. Do not retire old key for $GRACE_DAYS days."
log_info "Next: Run '$0 verify' to confirm both keys work."
;;
verify)
log_step "Verifying signing key status..."
# Test each key
log_info "Testing signing keys in $KEY_DIR:"
TEST_FILE=$(mktemp)
echo "StellaOps key rotation verification $(date -u +%Y-%m-%dT%H:%M:%SZ)" > "$TEST_FILE"
for keyfile in "$KEY_DIR"/*.pem; do
if [[ -f "$keyfile" ]]; then
keyname=$(basename "$keyfile" .pem)
TEST_SIG=$(mktemp)
if openssl dgst -sha256 -sign "$keyfile" -out "$TEST_SIG" "$TEST_FILE" 2>/dev/null; then
log_info " $keyname: OK (signing works)"
else
log_warn " $keyname: FAILED (cannot sign)"
fi
rm -f "$TEST_SIG"
fi
done
rm -f "$TEST_FILE"
log_info ""
log_info "Verification checklist:"
echo " [ ] All active keys can sign successfully"
echo " [ ] Old attestations still verify"
echo " [ ] New attestations verify with new key"
echo " [ ] Verifiers have both public keys"
;;
retire)
if [[ -z "$OLD_KEY_NAME" ]]; then
log_error "retire requires --old-key"
usage
fi
OLD_KEY_PATH="$KEY_DIR/${OLD_KEY_NAME}.pem"
OLD_PUB_PATH="$KEY_DIR/${OLD_KEY_NAME}.pub"
if [[ ! -f "$OLD_KEY_PATH" ]] && [[ ! -f "$KEY_DIR/${OLD_KEY_NAME}" ]]; then
log_error "Old key not found: $OLD_KEY_NAME"
exit 1
fi
log_step "Retiring old signing key: $OLD_KEY_NAME"
log_warn "This is IRREVERSIBLE. Ensure:"
echo " 1. Grace period ($GRACE_DAYS days) has passed"
echo " 2. All systems have been updated to use new key"
echo " 3. Old attestations have been resigned or archived"
read -p "Type 'RETIRE' to proceed: " CONFIRM
if [[ "$CONFIRM" != "RETIRE" ]]; then
log_error "Aborted"
exit 1
fi
# Archive old key (don't delete immediately)
ARCHIVE_DIR="$KEY_DIR/archived"
mkdir -p "$ARCHIVE_DIR"
chmod 700 "$ARCHIVE_DIR"
TIMESTAMP=$(date -u +%Y%m%d%H%M%S)
if [[ -f "$OLD_KEY_PATH" ]]; then
mv "$OLD_KEY_PATH" "$ARCHIVE_DIR/${OLD_KEY_NAME}-retired-${TIMESTAMP}.pem"
fi
if [[ -f "$OLD_PUB_PATH" ]]; then
mv "$OLD_PUB_PATH" "$ARCHIVE_DIR/${OLD_KEY_NAME}-retired-${TIMESTAMP}.pub"
fi
log_info ""
log_info "Old key archived to: $ARCHIVE_DIR/"
log_info "Key rotation complete!"
log_warn ""
log_warn "Post-retirement checklist:"
echo " [ ] Remove old key from CI/CD configuration"
echo " [ ] Update documentation"
echo " [ ] Notify stakeholders of completion"
echo " [ ] Delete archived key after retention period"
;;
esac
echo ""

View File

@@ -0,0 +1,183 @@
#!/bin/bash
# test-local.sh - Run full CI test suite locally using Docker
# Sprint: SPRINT_20251226_006_CICD
#
# Usage:
# ./devops/scripts/test-local.sh # Run all PR-gating tests
# ./devops/scripts/test-local.sh --category Unit # Run specific category
# ./devops/scripts/test-local.sh --build-only # Only build, skip tests
# ./devops/scripts/test-local.sh --no-docker # Run directly without Docker
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Configuration
CI_IMAGE="stellaops-ci:local"
DOCKERFILE="$REPO_ROOT/devops/docker/Dockerfile.ci"
RESULTS_DIR="$REPO_ROOT/TestResults"
# Default options
USE_DOCKER=true
BUILD_ONLY=false
SPECIFIC_CATEGORY=""
REBUILD_IMAGE=false
# PR-gating test categories
PR_GATING_CATEGORIES=(Unit Architecture Contract Integration Security Golden)
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--category)
SPECIFIC_CATEGORY="$2"
shift 2
;;
--build-only)
BUILD_ONLY=true
shift
;;
--no-docker)
USE_DOCKER=false
shift
;;
--rebuild)
REBUILD_IMAGE=true
shift
;;
--help)
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Options:"
echo " --category CATEGORY Run only specific test category"
echo " --build-only Only build, skip tests"
echo " --no-docker Run directly without Docker container"
echo " --rebuild Force rebuild of CI Docker image"
echo " --help Show this help message"
echo ""
echo "Available categories: ${PR_GATING_CATEGORIES[*]}"
exit 0
;;
*)
echo "Unknown option: $1"
exit 1
;;
esac
done
echo "=== StellaOps Local CI Test Runner ==="
echo "Repository: $REPO_ROOT"
echo "Use Docker: $USE_DOCKER"
echo "Build Only: $BUILD_ONLY"
echo "Category: ${SPECIFIC_CATEGORY:-All PR-gating}"
# Create results directory
mkdir -p "$RESULTS_DIR"
run_tests() {
local category=$1
echo ""
echo "=== Running $category tests ==="
dotnet test "$REPO_ROOT/src/StellaOps.sln" \
--filter "Category=$category" \
--configuration Release \
--no-build \
--logger "trx;LogFileName=${category}-tests.trx" \
--results-directory "$RESULTS_DIR/$category" \
--verbosity minimal || true
}
run_build() {
echo ""
echo "=== Restoring dependencies ==="
dotnet restore "$REPO_ROOT/src/StellaOps.sln"
echo ""
echo "=== Building solution ==="
dotnet build "$REPO_ROOT/src/StellaOps.sln" \
--configuration Release \
--no-restore
}
run_all_tests() {
run_build
if [[ "$BUILD_ONLY" == "true" ]]; then
echo ""
echo "=== Build completed (tests skipped) ==="
return
fi
if [[ -n "$SPECIFIC_CATEGORY" ]]; then
run_tests "$SPECIFIC_CATEGORY"
else
for category in "${PR_GATING_CATEGORIES[@]}"; do
run_tests "$category"
done
fi
echo ""
echo "=== Test Summary ==="
find "$RESULTS_DIR" -name "*.trx" -exec echo " Found: {}" \;
# Convert TRX to JUnit if trx2junit is available
if command -v trx2junit &>/dev/null; then
echo ""
echo "=== Converting TRX to JUnit ==="
find "$RESULTS_DIR" -name "*.trx" -exec trx2junit {} \; 2>/dev/null || true
fi
}
if [[ "$USE_DOCKER" == "true" ]]; then
# Check if Docker is available
if ! command -v docker &>/dev/null; then
echo "Error: Docker is not installed or not in PATH"
echo "Use --no-docker to run tests directly"
exit 1
fi
# Build CI image if needed
if [[ "$REBUILD_IMAGE" == "true" ]] || ! docker image inspect "$CI_IMAGE" &>/dev/null; then
echo ""
echo "=== Building CI Docker image ==="
docker build -t "$CI_IMAGE" -f "$DOCKERFILE" "$REPO_ROOT"
fi
# Run in Docker container
echo ""
echo "=== Running in Docker container ==="
DOCKER_ARGS=(
--rm
-v "$REPO_ROOT:/src"
-v "$RESULTS_DIR:/src/TestResults"
-e DOTNET_NOLOGO=1
-e DOTNET_CLI_TELEMETRY_OPTOUT=1
-w /src
)
# Mount Docker socket if available (for Testcontainers)
if [[ -S /var/run/docker.sock ]]; then
DOCKER_ARGS+=(-v /var/run/docker.sock:/var/run/docker.sock)
fi
# Build test command
TEST_CMD="./devops/scripts/test-local.sh --no-docker"
if [[ -n "$SPECIFIC_CATEGORY" ]]; then
TEST_CMD="$TEST_CMD --category $SPECIFIC_CATEGORY"
fi
if [[ "$BUILD_ONLY" == "true" ]]; then
TEST_CMD="$TEST_CMD --build-only"
fi
docker run "${DOCKER_ARGS[@]}" "$CI_IMAGE" bash -c "$TEST_CMD"
else
# Run directly
run_all_tests
fi
echo ""
echo "=== Done ==="
echo "Results saved to: $RESULTS_DIR"

View File

@@ -0,0 +1,181 @@
#!/bin/bash
# test-package-publish.sh - Test NuGet package publishing to local Gitea
# Sprint: SPRINT_20251226_004_CICD
#
# Prerequisites:
# - Docker running
# - Gitea test instance running (docker compose -f devops/compose/docker-compose.gitea-test.yaml up -d)
# - GITEA_TEST_TOKEN environment variable set
# - GITEA_TEST_OWNER environment variable set (default: stellaops)
#
# Usage:
# export GITEA_TEST_TOKEN="your-access-token"
# ./test-package-publish.sh # Test with sample package
# ./test-package-publish.sh --module Authority # Test specific module
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Configuration
GITEA_URL="${GITEA_TEST_URL:-http://localhost:3000}"
GITEA_OWNER="${GITEA_TEST_OWNER:-stellaops}"
GITEA_TOKEN="${GITEA_TEST_TOKEN:-}"
TEST_MODULE=""
DRY_RUN=false
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m'
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--module)
TEST_MODULE="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--help)
echo "Usage: $0 [OPTIONS]"
echo ""
echo "Options:"
echo " --module MODULE Test specific module (e.g., Authority)"
echo " --dry-run Validate without pushing"
echo " --help Show this help message"
echo ""
echo "Environment Variables:"
echo " GITEA_TEST_URL Gitea URL (default: http://localhost:3000)"
echo " GITEA_TEST_OWNER Package owner (default: stellaops)"
echo " GITEA_TEST_TOKEN Access token with package:write scope"
exit 0
;;
*)
echo "Unknown option: $1"
exit 1
;;
esac
done
echo "=== Package Publishing Test ==="
echo "Gitea URL: $GITEA_URL"
echo "Owner: $GITEA_OWNER"
echo "Dry Run: $DRY_RUN"
# Check prerequisites
if [[ -z "$GITEA_TOKEN" && "$DRY_RUN" == "false" ]]; then
echo -e "${RED}ERROR: GITEA_TEST_TOKEN environment variable is required${NC}"
echo "Generate a token at: $GITEA_URL/user/settings/applications"
exit 1
fi
# Check if Gitea is running
if ! curl -s "$GITEA_URL/api/healthz" >/dev/null 2>&1; then
echo -e "${YELLOW}WARNING: Gitea not reachable at $GITEA_URL${NC}"
echo "Start it with: docker compose -f devops/compose/docker-compose.gitea-test.yaml up -d"
if [[ "$DRY_RUN" == "false" ]]; then
exit 1
fi
fi
# NuGet source URL
NUGET_SOURCE="$GITEA_URL/api/packages/$GITEA_OWNER/nuget/index.json"
echo "NuGet Source: $NUGET_SOURCE"
echo ""
# Create a test package
TEST_DIR="$REPO_ROOT/out/package-test"
mkdir -p "$TEST_DIR"
# If no module specified, use a simple test
if [[ -z "$TEST_MODULE" ]]; then
echo "=== Creating Test Package ==="
# Create a minimal test package
TEST_PROJ_DIR="$TEST_DIR/StellaOps.PackageTest"
mkdir -p "$TEST_PROJ_DIR"
cat > "$TEST_PROJ_DIR/StellaOps.PackageTest.csproj" <<'EOF'
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<PackageId>StellaOps.PackageTest</PackageId>
<Version>0.0.1-test</Version>
<Authors>StellaOps</Authors>
<Description>Test package for registry validation</Description>
<PackageLicenseExpression>BUSL-1.1</PackageLicenseExpression>
</PropertyGroup>
</Project>
EOF
cat > "$TEST_PROJ_DIR/Class1.cs" <<'EOF'
namespace StellaOps.PackageTest;
public class TestClass { }
EOF
echo "Building test package..."
dotnet pack "$TEST_PROJ_DIR/StellaOps.PackageTest.csproj" -c Release -o "$TEST_DIR/packages"
PACKAGE_FILE=$(find "$TEST_DIR/packages" -name "*.nupkg" | head -1)
else
echo "=== Packing Module: $TEST_MODULE ==="
# Find the module's main project
MODULE_PROJ=$(find "$REPO_ROOT/src" -path "*/$TEST_MODULE/*" -name "StellaOps.$TEST_MODULE.csproj" | head -1)
if [[ -z "$MODULE_PROJ" ]]; then
echo -e "${RED}ERROR: Module project not found for $TEST_MODULE${NC}"
exit 1
fi
echo "Project: $MODULE_PROJ"
dotnet pack "$MODULE_PROJ" -c Release -p:Version=0.0.1-test -o "$TEST_DIR/packages"
PACKAGE_FILE=$(find "$TEST_DIR/packages" -name "*.nupkg" | head -1)
fi
if [[ -z "$PACKAGE_FILE" ]]; then
echo -e "${RED}ERROR: No package file created${NC}"
exit 1
fi
echo ""
echo "Package created: $PACKAGE_FILE"
echo ""
if [[ "$DRY_RUN" == "true" ]]; then
echo -e "${YELLOW}=== DRY RUN: Skipping push ===${NC}"
echo "Package validated successfully!"
echo ""
echo "To push manually:"
echo " dotnet nuget push \"$PACKAGE_FILE\" \\"
echo " --source $NUGET_SOURCE \\"
echo " --api-key YOUR_TOKEN"
else
echo "=== Pushing Package ==="
if dotnet nuget push "$PACKAGE_FILE" \
--source "$NUGET_SOURCE" \
--api-key "$GITEA_TOKEN" \
--skip-duplicate; then
echo ""
echo -e "${GREEN}SUCCESS: Package pushed to Gitea registry${NC}"
echo "View at: $GITEA_URL/$GITEA_OWNER/-/packages"
else
echo ""
echo -e "${RED}FAILED: Package push failed${NC}"
exit 1
fi
fi
echo ""
echo "=== Cleanup ==="
rm -rf "$TEST_DIR"
echo "Test directory cleaned up"
echo ""
echo -e "${GREEN}Done!${NC}"

View File

@@ -0,0 +1,318 @@
#!/usr/bin/env bash
# =============================================================================
# PRE-COMMIT VALIDATION SCRIPT
# =============================================================================
# Run this script before committing to ensure all CI checks will pass.
#
# Usage:
# ./devops/scripts/validate-before-commit.sh [level]
#
# Levels:
# quick - Smoke test only (~2 min)
# pr - Full PR-gating suite (~15 min) [default]
# full - All tests including extended (~45 min)
#
# Examples:
# ./devops/scripts/validate-before-commit.sh # PR-gating
# ./devops/scripts/validate-before-commit.sh quick # Smoke only
# ./devops/scripts/validate-before-commit.sh full # Everything
#
# =============================================================================
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
# Validation level
LEVEL="${1:-pr}"
# =============================================================================
# UTILITIES
# =============================================================================
print_header() {
echo ""
echo -e "${CYAN}=============================================${NC}"
echo -e "${CYAN} $1${NC}"
echo -e "${CYAN}=============================================${NC}"
echo ""
}
print_step() {
echo -e "${BLUE}>>> $1${NC}"
}
print_success() {
echo -e "${GREEN}[PASS] $1${NC}"
}
print_fail() {
echo -e "${RED}[FAIL] $1${NC}"
}
print_warn() {
echo -e "${YELLOW}[WARN] $1${NC}"
}
print_info() {
echo -e "${CYAN}[INFO] $1${NC}"
}
# =============================================================================
# CHECKS
# =============================================================================
check_git_status() {
print_step "Checking git status..."
# Check for uncommitted changes
if ! git diff --quiet 2>/dev/null; then
print_warn "You have unstaged changes"
fi
# Check for untracked files
local untracked
untracked=$(git ls-files --others --exclude-standard 2>/dev/null | wc -l)
if [[ "$untracked" -gt 0 ]]; then
print_warn "You have $untracked untracked file(s)"
fi
# Show current branch
local branch
branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
print_info "Current branch: $branch"
}
check_dependencies() {
print_step "Checking dependencies..."
local missing=0
# Check .NET
if ! command -v dotnet &>/dev/null; then
print_fail ".NET SDK not found"
missing=1
else
local version
version=$(dotnet --version)
print_success ".NET SDK: $version"
fi
# Check Docker
if ! command -v docker &>/dev/null; then
print_warn "Docker not found (some tests may fail)"
else
if docker info &>/dev/null; then
print_success "Docker: running"
else
print_warn "Docker: not running"
fi
fi
# Check Git
if ! command -v git &>/dev/null; then
print_fail "Git not found"
missing=1
else
print_success "Git: installed"
fi
return $missing
}
run_smoke_tests() {
print_step "Running smoke tests..."
if "$SCRIPT_DIR/local-ci.sh" smoke; then
print_success "Smoke tests passed"
return 0
else
print_fail "Smoke tests failed"
return 1
fi
}
run_pr_tests() {
print_step "Running PR-gating suite..."
if "$SCRIPT_DIR/local-ci.sh" pr; then
print_success "PR-gating suite passed"
return 0
else
print_fail "PR-gating suite failed"
return 1
fi
}
run_full_tests() {
print_step "Running full test suite..."
if "$SCRIPT_DIR/local-ci.sh" full; then
print_success "Full test suite passed"
return 0
else
print_fail "Full test suite failed"
return 1
fi
}
run_module_tests() {
print_step "Running module tests..."
if "$SCRIPT_DIR/local-ci.sh" module; then
print_success "Module tests passed"
return 0
else
print_fail "Module tests failed"
return 1
fi
}
validate_helm() {
if command -v helm &>/dev/null; then
print_step "Validating Helm chart..."
local chart="$REPO_ROOT/devops/helm/stellaops"
if [[ -d "$chart" ]]; then
if helm lint "$chart" &>/dev/null; then
print_success "Helm chart valid"
else
print_warn "Helm chart has warnings"
fi
fi
fi
}
validate_compose() {
print_step "Validating Docker Compose..."
local compose="$REPO_ROOT/devops/compose/docker-compose.ci.yaml"
if [[ -f "$compose" ]]; then
if docker compose -f "$compose" config &>/dev/null; then
print_success "Docker Compose valid"
else
print_warn "Docker Compose has issues"
fi
fi
}
# =============================================================================
# MAIN
# =============================================================================
main() {
print_header "Pre-Commit Validation"
print_info "Level: $LEVEL"
print_info "Repository: $REPO_ROOT"
local start_time
start_time=$(date +%s)
local failed=0
# Always run these checks
check_git_status
check_dependencies || failed=1
if [[ $failed -eq 1 ]]; then
print_fail "Dependency check failed"
exit 1
fi
# Run appropriate test level
case "$LEVEL" in
quick|smoke)
run_smoke_tests || failed=1
;;
pr|default)
run_smoke_tests || failed=1
if [[ $failed -eq 0 ]]; then
run_module_tests || failed=1
fi
if [[ $failed -eq 0 ]]; then
run_pr_tests || failed=1
fi
validate_helm
validate_compose
;;
full|all)
run_smoke_tests || failed=1
if [[ $failed -eq 0 ]]; then
run_full_tests || failed=1
fi
validate_helm
validate_compose
;;
*)
print_fail "Unknown level: $LEVEL"
echo "Valid levels: quick, pr, full"
exit 1
;;
esac
# Calculate duration
local end_time
end_time=$(date +%s)
local duration=$((end_time - start_time))
local minutes=$((duration / 60))
local seconds=$((duration % 60))
# Final summary
print_header "Summary"
print_info "Duration: ${minutes}m ${seconds}s"
if [[ $failed -eq 0 ]]; then
echo ""
echo -e "${GREEN}=============================================${NC}"
echo -e "${GREEN} ALL CHECKS PASSED - Ready to commit!${NC}"
echo -e "${GREEN}=============================================${NC}"
echo ""
echo "Next steps:"
echo " git add -A"
echo " git commit -m \"Your commit message\""
echo ""
exit 0
else
echo ""
echo -e "${RED}=============================================${NC}"
echo -e "${RED} VALIDATION FAILED - Do not commit!${NC}"
echo -e "${RED}=============================================${NC}"
echo ""
echo "Check the logs in: out/local-ci/logs/"
echo ""
exit 1
fi
}
# Show usage if --help
if [[ "${1:-}" == "--help" ]] || [[ "${1:-}" == "-h" ]]; then
cat <<EOF
Pre-Commit Validation Script
Usage: $(basename "$0") [level]
Levels:
quick Smoke test only (~2 min)
pr Full PR-gating suite (~15 min) [default]
full All tests including extended (~45 min)
Examples:
$(basename "$0") # Run PR-gating validation
$(basename "$0") quick # Quick smoke test only
$(basename "$0") full # Run everything
What each level validates:
quick: Build + Unit tests
pr: Build + Unit + Architecture + Contract + Integration + Security + Golden
full: All PR-gating + Performance + Benchmark + AirGap + Chaos + Determinism
EOF
exit 0
fi
main "$@"

View File

@@ -0,0 +1,145 @@
#!/bin/bash
# validate-compose.sh - Validate all Docker Compose profiles
# Sprint: SPRINT_20251226_006_CICD
#
# Usage:
# ./devops/scripts/validate-compose.sh # Validate all profiles
# ./devops/scripts/validate-compose.sh dev stage # Validate specific profiles
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
COMPOSE_DIR="$REPO_ROOT/devops/compose"
# Default profiles to validate
DEFAULT_PROFILES=(
dev
stage
prod
airgap
mirror
crypto-fips
crypto-gost
monitoring
)
echo "=== Docker Compose Profile Validation ==="
echo "Compose directory: $COMPOSE_DIR"
# Check if docker compose is available
if ! command -v docker &>/dev/null; then
echo "Error: Docker is not installed"
exit 1
fi
# Check compose directory exists
if [[ ! -d "$COMPOSE_DIR" ]]; then
echo "Error: Compose directory not found: $COMPOSE_DIR"
exit 1
fi
# Determine profiles to validate
if [[ $# -gt 0 ]]; then
PROFILES=("$@")
else
PROFILES=("${DEFAULT_PROFILES[@]}")
fi
FAILED=0
PASSED=0
SKIPPED=0
# Validate base compose file first
BASE_COMPOSE="$COMPOSE_DIR/docker-compose.yml"
if [[ -f "$BASE_COMPOSE" ]]; then
echo ""
echo "=== Validating base: docker-compose.yml ==="
if docker compose -f "$BASE_COMPOSE" config --quiet 2>/dev/null; then
echo " [PASS] docker-compose.yml"
((PASSED++))
else
echo " [FAIL] docker-compose.yml"
docker compose -f "$BASE_COMPOSE" config 2>&1 | head -20
((FAILED++))
fi
else
echo ""
echo "Warning: Base compose file not found: $BASE_COMPOSE"
fi
# Validate each profile
for profile in "${PROFILES[@]}"; do
# Check for both .yml and .yaml extensions
PROFILE_FILE="$COMPOSE_DIR/docker-compose.${profile}.yaml"
if [[ ! -f "$PROFILE_FILE" ]]; then
PROFILE_FILE="$COMPOSE_DIR/docker-compose.${profile}.yml"
fi
echo ""
echo "=== Validating profile: $profile ==="
if [[ ! -f "$PROFILE_FILE" ]]; then
echo " [SKIP] Profile file not found: docker-compose.${profile}.yml"
((SKIPPED++))
continue
fi
# Validate profile alone
if docker compose -f "$PROFILE_FILE" config --quiet 2>/dev/null; then
echo " [PASS] docker-compose.${profile}.yml (standalone)"
else
echo " [FAIL] docker-compose.${profile}.yml (standalone)"
docker compose -f "$PROFILE_FILE" config 2>&1 | head -10
((FAILED++))
continue
fi
# Validate profile with base
if [[ -f "$BASE_COMPOSE" ]]; then
if docker compose -f "$BASE_COMPOSE" -f "$PROFILE_FILE" config --quiet 2>/dev/null; then
echo " [PASS] docker-compose.yml + docker-compose.${profile}.yml (merged)"
((PASSED++))
else
echo " [FAIL] Merged validation failed"
docker compose -f "$BASE_COMPOSE" -f "$PROFILE_FILE" config 2>&1 | head -10
((FAILED++))
fi
fi
done
# Validate Helm chart if present
HELM_DIR="$REPO_ROOT/devops/helm/stellaops"
if [[ -d "$HELM_DIR" ]]; then
echo ""
echo "=== Validating Helm chart ==="
if command -v helm &>/dev/null; then
if helm lint "$HELM_DIR" --quiet 2>/dev/null; then
echo " [PASS] Helm chart: stellaops"
PASSED=$((PASSED + 1))
else
echo " [FAIL] Helm chart: stellaops"
helm lint "$HELM_DIR" 2>&1 | head -20
FAILED=$((FAILED + 1))
fi
else
echo " [SKIP] Helm not installed"
SKIPPED=$((SKIPPED + 1))
fi
fi
# Summary
echo ""
echo "=== Validation Summary ==="
echo " Passed: $PASSED"
echo " Failed: $FAILED"
echo " Skipped: $SKIPPED"
if [[ $FAILED -gt 0 ]]; then
echo ""
echo "ERROR: $FAILED validation(s) failed"
exit 1
fi
echo ""
echo "All validations passed!"