Build fixes and code-structure improvements; new essential UI functionality; CI improvements; documentation improvements; AI module improvements.
This commit is contained in:
406
devops/scripts/lib/ci-common.sh
Normal file
406
devops/scripts/lib/ci-common.sh
Normal file
@@ -0,0 +1,406 @@
|
||||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# CI COMMON FUNCTIONS
|
||||
# =============================================================================
|
||||
# Shared utility functions for local CI testing scripts.
|
||||
#
|
||||
# Usage:
|
||||
# source "$SCRIPT_DIR/lib/ci-common.sh"
|
||||
#
|
||||
# =============================================================================
|
||||
|
||||
# Prevent this library from being sourced more than once.
[[ -n "${_CI_COMMON_LOADED:-}" ]] && return
_CI_COMMON_LOADED=1

# =============================================================================
# COLOR DEFINITIONS
# =============================================================================
# Every color code defaults to the empty string so output stays clean when
# piped, captured, or sent to a dumb terminal; real ANSI sequences are only
# assigned when stdout is an interactive, capable terminal.

RED='' GREEN='' YELLOW='' BLUE='' MAGENTA='' CYAN='' WHITE=''
BOLD='' DIM='' RESET=''

if [[ -t 1 && -n "${TERM:-}" && "${TERM}" != "dumb" ]]; then
  RED='\033[0;31m'
  GREEN='\033[0;32m'
  YELLOW='\033[0;33m'
  BLUE='\033[0;34m'
  MAGENTA='\033[0;35m'
  CYAN='\033[0;36m'
  WHITE='\033[0;37m'
  BOLD='\033[1m'
  DIM='\033[2m'
  RESET='\033[0m'
fi
|
||||
|
||||
# =============================================================================
# LOGGING FUNCTIONS
# =============================================================================
# All helpers honor the color variables defined above (empty when colors are
# disabled). Warnings and errors go to stderr; everything else to stdout.

# Log an informational message.
log_info() {
  printf '%b\n' "${BLUE}[INFO]${RESET} $*"
}

# Log a success message.
log_success() {
  printf '%b\n' "${GREEN}[OK]${RESET} $*"
}

# Log a warning message (stderr).
log_warn() {
  printf '%b\n' "${YELLOW}[WARN]${RESET} $*" >&2
}

# Log an error message (stderr).
log_error() {
  printf '%b\n' "${RED}[ERROR]${RESET} $*" >&2
}

# Log a debug message; emitted only when VERBOSE=true.
log_debug() {
  [[ "${VERBOSE:-false}" == "true" ]] || return 0
  printf '%b\n' "${DIM}[DEBUG]${RESET} $*"
}

# Log one step of a multi-step process, e.g. "[2/5] Building".
log_step() {
  local current="$1"
  local total="$2"
  local text="$3"
  printf '%b\n' "${CYAN}[${current}/${total}]${RESET} ${BOLD}${text}${RESET}"
}

# Log a section header surrounded by blank lines.
log_section() {
  printf '\n%b\n\n' "${BOLD}${MAGENTA}=== $* ===${RESET}"
}

# Log a subsection header.
log_subsection() {
  printf '%b\n' "${CYAN}--- $* ---${RESET}"
}
|
||||
|
||||
# =============================================================================
# ERROR HANDLING
# =============================================================================

# Print an error message and terminate the whole script with status 1.
die() {
  log_error "$@"
  exit 1
}

# Verify that a command is available on PATH.
# Arguments: $1 - command name; $2 - optional install hint shown on failure.
# Returns:   0 when found, 1 otherwise.
require_command() {
  local wanted="$1"
  local hint="${2:-}"

  command -v "$wanted" &>/dev/null && return 0

  log_error "Required command not found: $wanted"
  [[ -n "$hint" ]] && log_info "Install with: $hint"
  return 1
}

# Verify that a regular file exists.
# Returns: 0 when present, 1 otherwise.
require_file() {
  local wanted="$1"
  [[ -f "$wanted" ]] && return 0
  log_error "Required file not found: $wanted"
  return 1
}

# Verify that a directory exists.
# Returns: 0 when present, 1 otherwise.
require_dir() {
  local wanted="$1"
  [[ -d "$wanted" ]] && return 0
  log_error "Required directory not found: $wanted"
  return 1
}
|
||||
|
||||
# =============================================================================
# TIMING FUNCTIONS
# =============================================================================

# Print the current Unix time in whole seconds.
get_timestamp() {
  date +%s
}

# Render a number of seconds as "Xm Ys", or just "Ys" under a minute.
format_duration() {
  local total="$1"
  local mins=$((total / 60))
  local secs=$((total % 60))

  if ((mins > 0)); then
    echo "${mins}m ${secs}s"
  else
    echo "${secs}s"
  fi
}

# Begin timing: prints a start timestamp for later use with stop_timer.
start_timer() {
  get_timestamp
}

# Finish timing: logs "<label> completed in <duration>".
# Arguments: $1 - start timestamp; $2 - optional label (default "Operation").
stop_timer() {
  local began="$1"
  local label="${2:-Operation}"
  local ended
  ended=$(get_timestamp)

  log_info "$label completed in $(format_duration "$((ended - began))")"
}
|
||||
|
||||
# =============================================================================
# STRING FUNCTIONS
# =============================================================================

# Convert a string to lowercase.
to_lower() {
  echo "$1" | tr '[:upper:]' '[:lower:]'
}

# Convert a string to uppercase.
to_upper() {
  echo "$1" | tr '[:lower:]' '[:upper:]'
}

# Trim leading and trailing whitespace from the argument(s).
trim() {
  local var="$*"
  var="${var#"${var%%[![:space:]]*}"}"   # strip leading whitespace
  var="${var%"${var##*[![:space:]]}"}"   # strip trailing whitespace
  echo -n "$var"
}

# Join the remaining arguments with the first argument as delimiter.
# e.g. join_by , a b c  ->  "a,b,c"
# Fix: with no elements at all, print nothing and return 0. The previous
# version read an unset "$1" (fatal under `set -u`) and hit a bad `shift`.
join_by() {
  local delimiter="$1"
  shift
  [[ $# -eq 0 ]] && return 0
  local first="$1"
  shift
  printf '%s' "$first" "${@/#/$delimiter}"
}
|
||||
|
||||
# =============================================================================
# ARRAY FUNCTIONS
# =============================================================================

# Succeed (0) when the first argument equals any of the remaining arguments.
# Usage: array_contains "$needle" "${haystack[@]}"
array_contains() {
  local target="$1"
  shift
  local candidate
  for candidate; do
    [[ "$candidate" == "$target" ]] && return 0
  done
  return 1
}
|
||||
|
||||
# =============================================================================
# FILE FUNCTIONS
# =============================================================================

# Create a directory (with parents) when it does not already exist.
ensure_dir() {
  local target="$1"
  [[ -d "$target" ]] && return 0
  mkdir -p "$target"
  log_debug "Created directory: $target"
}

# Resolve a path to an absolute path.
# Existing directories and files are resolved via cd/pwd; anything else is
# echoed back unchanged.
get_absolute_path() {
  local target="$1"
  if [[ -d "$target" ]]; then
    (cd "$target" && pwd)
  elif [[ -f "$target" ]]; then
    local parent
    parent=$(dirname "$target")
    echo "$(cd "$parent" && pwd)/$(basename "$target")"
  else
    echo "$target"
  fi
}
|
||||
|
||||
# =============================================================================
# GIT FUNCTIONS
# =============================================================================
# Thin wrappers around git plumbing. Each suppresses git's stderr and prints
# nothing (returning non-zero) when run outside a git work tree.

# Get the repository root directory (absolute path).
get_repo_root() {
  git rev-parse --show-toplevel 2>/dev/null
}

# Get current branch name. NOTE(review): --abbrev-ref prints the literal
# string "HEAD" in detached-HEAD state — callers should tolerate that.
get_current_branch() {
  git rev-parse --abbrev-ref HEAD 2>/dev/null
}

# Get current commit SHA (full form).
get_current_sha() {
  git rev-parse HEAD 2>/dev/null
}

# Get short (abbreviated) commit SHA.
get_short_sha() {
  git rev-parse --short HEAD 2>/dev/null
}

# Succeed when the working directory is clean: --porcelain emits one line
# per staged/unstaged/untracked change, so empty output means clean.
is_git_clean() {
  [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}

# Get list of changed files compared to a base branch (default: main).
# The three-dot form diffs against the merge base, i.e. only this branch's
# own changes.
get_changed_files() {
  local base_branch="${1:-main}"
  git diff --name-only "$base_branch"...HEAD 2>/dev/null
}
|
||||
|
||||
# =============================================================================
# MODULE DETECTION
# =============================================================================

# Map of module names to source paths.
# Keys are the logical module names reported by detect_changed_modules;
# values are space-separated lists of repository-relative directories, so a
# single module may own more than one source tree (e.g. Scanner also owns
# src/BinaryIndex).
declare -A MODULE_PATHS=(
  ["Scanner"]="src/Scanner src/BinaryIndex"
  ["Concelier"]="src/Concelier src/Excititor"
  ["Authority"]="src/Authority"
  ["Policy"]="src/Policy src/RiskEngine"
  ["Attestor"]="src/Attestor src/Provenance"
  ["EvidenceLocker"]="src/EvidenceLocker"
  ["ExportCenter"]="src/ExportCenter"
  ["Findings"]="src/Findings"
  ["SbomService"]="src/SbomService"
  ["Notify"]="src/Notify src/Notifier"
  ["Router"]="src/Router src/Gateway"
  ["Cryptography"]="src/Cryptography"
  ["AirGap"]="src/AirGap"
  ["Cli"]="src/Cli"
  ["AdvisoryAI"]="src/AdvisoryAI"
  ["ReachGraph"]="src/ReachGraph"
  ["Orchestrator"]="src/Orchestrator"
  ["PacksRegistry"]="src/PacksRegistry"
  ["Replay"]="src/Replay"
  ["Aoc"]="src/Aoc"
  ["IssuerDirectory"]="src/IssuerDirectory"
  ["Telemetry"]="src/Telemetry"
  ["Signals"]="src/Signals"
  ["Web"]="src/Web"
  ["DevPortal"]="src/DevPortal"
)

# Modules that use Node.js/npm instead of .NET toolchains.
declare -a NODE_MODULES=("Web" "DevPortal")
|
||||
|
||||
# Detect which modules have changed relative to a base branch.
# Arguments: $1 - base branch (default "main").
# Outputs:   "ALL" when infrastructure/shared libraries changed, "NONE" when
#            no known module changed, otherwise a space-separated module list.
# Fixes: the "ALL" short-circuits now run BEFORE the per-module scan (they
# override its result anyway), and the empty-array expansion is guarded —
# "${arr[@]}" on an empty array is an error under `set -u` in bash < 4.4.
detect_changed_modules() {
  local base_branch="${1:-main}"
  local changed_files
  changed_files=$(get_changed_files "$base_branch")

  # Check for infrastructure changes that affect all modules.
  if echo "$changed_files" | grep -qE "^(Directory\.Build\.props|Directory\.Packages\.props|nuget\.config)"; then
    echo "ALL"
    return
  fi

  # Check for shared library changes.
  if echo "$changed_files" | grep -q "^src/__Libraries/"; then
    echo "ALL"
    return
  fi

  local changed_modules=()
  local module
  local paths
  local path

  for module in "${!MODULE_PATHS[@]}"; do
    paths="${MODULE_PATHS[$module]}"
    # Intentionally unquoted: paths is a space-separated list.
    for path in $paths; do
      if echo "$changed_files" | grep -q "^${path}/"; then
        # Length check first: guards the empty-array expansion and also
        # skips the needless lookup for the first hit.
        if [[ ${#changed_modules[@]} -eq 0 ]] || ! array_contains "$module" "${changed_modules[@]}"; then
          changed_modules+=("$module")
        fi
        break
      fi
    done
  done

  if [[ ${#changed_modules[@]} -eq 0 ]]; then
    echo "NONE"
  else
    echo "${changed_modules[*]}"
  fi
}
|
||||
|
||||
# =============================================================================
# RESULT REPORTING
# =============================================================================

# Print one aligned summary-table row: first column padded to 30 chars,
# second to 15, third free-form and optional.
print_table_row() {
  local first="$1"
  local second="$2"
  local third="${3:-}"

  printf "  %-30s %-15s %s\n" "$first" "$second" "$third"
}

# Print a colored PASSED/FAILED row for a named check.
# Arguments: $1 - check name; $2 - "true" for pass; $3 - optional duration.
print_status() {
  local name="$1"
  local passed="$2"
  local elapsed="${3:-}"

  local verdict="${RED}FAILED${RESET}"
  [[ "$passed" == "true" ]] && verdict="${GREEN}PASSED${RESET}"
  print_table_row "$name" "$verdict" "$elapsed"
}
|
||||
|
||||
# =============================================================================
# ENVIRONMENT LOADING
# =============================================================================

# Source a dotenv-style file with auto-export enabled (`set -a`), so every
# variable it assigns is exported to later child processes.
# Returns: 0 when the file was loaded, 1 when it does not exist.
load_env_file() {
  local env_file="$1"

  [[ -f "$env_file" ]] || return 1

  log_debug "Loading environment from: $env_file"
  set -a
  # shellcheck source=/dev/null
  source "$env_file"
  set +a
  return 0
}
|
||||
342
devops/scripts/lib/ci-docker.sh
Normal file
342
devops/scripts/lib/ci-docker.sh
Normal file
@@ -0,0 +1,342 @@
|
||||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# CI DOCKER UTILITIES
|
||||
# =============================================================================
|
||||
# Docker-related utility functions for local CI testing.
|
||||
#
|
||||
# Usage:
|
||||
# source "$SCRIPT_DIR/lib/ci-docker.sh"
|
||||
#
|
||||
# =============================================================================
|
||||
|
||||
# Prevent multiple sourcing
[[ -n "${_CI_DOCKER_LOADED:-}" ]] && return
_CI_DOCKER_LOADED=1

# =============================================================================
# CONFIGURATION
# =============================================================================
# Every value is overridable from the environment; the ${VAR:-default}
# pattern keeps any caller-supplied value.

# Compose file describing the local CI service stack (repo-relative).
CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.ci.yaml}"
# Tag used for the locally built CI image.
CI_IMAGE="${CI_IMAGE:-stellaops-ci:local}"
# Dockerfile used to build the CI image (repo-relative).
CI_DOCKERFILE="${CI_DOCKERFILE:-devops/docker/Dockerfile.ci}"
# Compose project name; also the prefix of generated container names.
CI_PROJECT_NAME="${CI_PROJECT_NAME:-stellaops-ci}"

# Service names from docker-compose.ci.yaml
CI_SERVICES=(postgres-ci valkey-ci nats-ci mock-registry minio-ci)
|
||||
|
||||
# =============================================================================
# DOCKER CHECK
# =============================================================================

# Check that the docker CLI is installed AND the daemon is reachable.
# Outputs: diagnostics via the log_* helpers from ci-common.sh.
# Returns: 0 when docker is usable, 1 otherwise.
check_docker() {
  if ! command -v docker &>/dev/null; then
    log_error "Docker is not installed or not in PATH"
    log_info "Install Docker: https://docs.docker.com/get-docker/"
    return 1
  fi

  # `docker info` talks to the daemon, so it fails when only the CLI exists.
  if ! docker info &>/dev/null; then
    log_error "Docker daemon is not running"
    log_info "Start Docker Desktop or run: sudo systemctl start docker"
    return 1
  fi

  log_debug "Docker is available and running"
  return 0
}

# Detect a usable Docker Compose implementation.
# Globals: sets DOCKER_COMPOSE to "docker compose" (v2 plugin, preferred)
#          or "docker-compose" (standalone fallback). Callers expand it
#          unquoted so the two-word form splits into command + subcommand.
# Returns: 0 when found, 1 otherwise.
check_docker_compose() {
  if docker compose version &>/dev/null; then
    DOCKER_COMPOSE="docker compose"
    log_debug "Using Docker Compose plugin"
    return 0
  elif command -v docker-compose &>/dev/null; then
    DOCKER_COMPOSE="docker-compose"
    log_debug "Using standalone docker-compose"
    return 0
  else
    log_error "Docker Compose is not installed"
    log_info "Install with: docker compose plugin or standalone docker-compose"
    return 1
  fi
}
|
||||
|
||||
# =============================================================================
# CI SERVICES MANAGEMENT
# =============================================================================

# Start CI services via docker compose, then wait for them to become ready.
# Arguments: service names to start; with no arguments, starts everything.
# Globals:   REPO_ROOT, CI_COMPOSE_FILE, CI_PROJECT_NAME, DOCKER_COMPOSE.
# Returns:   compose's status on failure, otherwise wait_for_services'.
# Fix: the compose status is now captured with `|| result=$?` at the call
# site — the old `local result=$?` after `fi` was never reached when the
# calling script runs under `set -e` and compose failed.
start_ci_services() {
  local services=("$@")
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  if [[ ! -f "$compose_file" ]]; then
    log_error "Compose file not found: $compose_file"
    return 1
  fi

  check_docker || return 1
  check_docker_compose || return 1

  log_section "Starting CI Services"

  # DOCKER_COMPOSE is intentionally unquoted so "docker compose" splits
  # into command + subcommand.
  local result=0
  if [[ ${#services[@]} -eq 0 ]]; then
    # Start all services
    log_info "Starting all CI services..."
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d || result=$?
  else
    # Start specific services
    log_info "Starting services: ${services[*]}"
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d "${services[@]}" || result=$?
  fi

  if [[ $result -ne 0 ]]; then
    log_error "Failed to start CI services"
    return $result
  fi

  # Wait for services to be healthy
  wait_for_services "${services[@]}"
}
|
||||
|
||||
# Stop CI services (containers removed, named volumes kept).
# Globals: REPO_ROOT, CI_COMPOSE_FILE, CI_PROJECT_NAME, DOCKER_COMPOSE.
# Returns: 0 when there is nothing to stop, otherwise compose's status.
stop_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  # A missing compose file means services were never started — not an error.
  if [[ ! -f "$compose_file" ]]; then
    log_debug "Compose file not found, nothing to stop"
    return 0
  fi

  check_docker_compose || return 1

  log_section "Stopping CI Services"

  # DOCKER_COMPOSE intentionally unquoted ("docker compose" is two words).
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down
}
|
||||
|
||||
# Stop CI services AND remove volumes plus orphaned containers — a full
# teardown, unlike stop_ci_services which preserves data volumes.
cleanup_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  # Nothing to clean up when the compose file does not exist.
  if [[ ! -f "$compose_file" ]]; then
    return 0
  fi

  check_docker_compose || return 1

  log_section "Cleaning Up CI Services"

  # DOCKER_COMPOSE intentionally unquoted ("docker compose" is two words).
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down -v --remove-orphans
}
|
||||
|
||||
# Check status of CI services
|
||||
check_ci_services_status() {
|
||||
local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
|
||||
|
||||
check_docker_compose || return 1
|
||||
|
||||
log_subsection "CI Services Status"
|
||||
$DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" ps
|
||||
}
|
||||
|
||||
# =============================================================================
# HEALTH CHECKS
# =============================================================================

# Poll a single compose service until its container reports healthy.
# Arguments:
#   $1 - service name (as declared in the compose file)
#   $2 - timeout in seconds (default 60)
#   $3 - poll interval in seconds (default 2)
# Returns: 0 when healthy, or running without a health check; 1 on timeout.
# NOTE(review): the container name "${CI_PROJECT_NAME}-${service}-1" assumes
# Docker Compose v2 naming (project-service-index, hyphen-separated) —
# confirm v1 (underscore-separated) never needs to be supported here.
wait_for_service() {
  local service="$1"
  local timeout="${2:-60}"
  local interval="${3:-2}"

  log_info "Waiting for $service to be healthy..."

  local elapsed=0
  while [[ $elapsed -lt $timeout ]]; do
    local status
    # "not found" covers both a missing container and (depending on docker
    # version) a container with no Health block in its state.
    status=$(docker inspect --format='{{.State.Health.Status}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "not found")

    if [[ "$status" == "healthy" ]]; then
      log_success "$service is healthy"
      return 0
    elif [[ "$status" == "not found" ]]; then
      # Container might not have health check, check if running
      local running
      running=$(docker inspect --format='{{.State.Running}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "false")
      if [[ "$running" == "true" ]]; then
        log_success "$service is running (no health check)"
        return 0
      fi
    fi

    sleep "$interval"
    elapsed=$((elapsed + interval))
  done

  log_error "$service did not become healthy within ${timeout}s"
  return 1
}
|
||||
|
||||
# Wait for multiple services to be healthy; with no arguments, waits for the
# full CI_SERVICES list.
# Returns: 0 when every service became ready, 1 if any timed out. All
# services are still polled — one failure does not short-circuit the rest.
wait_for_services() {
  local services=("$@")
  local failed=0

  if [[ ${#services[@]} -eq 0 ]]; then
    services=("${CI_SERVICES[@]}")
  fi

  log_info "Waiting for services to be ready..."

  for service in "${services[@]}"; do
    # 60s timeout, 2s poll interval per service.
    if ! wait_for_service "$service" 60 2; then
      failed=1
    fi
  done

  return $failed
}
|
||||
|
||||
# Check if PostgreSQL is accepting connections.
# Arguments: $1 host (default localhost), $2 port (default 5433),
#            $3 user (default stellaops_ci), $4 database (default stellaops_test).
# Returns: pg_isready's status, or nc's port-probe status as a fallback.
check_postgres_ready() {
  local host="${1:-localhost}"
  local port="${2:-5433}"
  local user="${3:-stellaops_ci}"
  local db="${4:-stellaops_test}"

  if command -v pg_isready &>/dev/null; then
    pg_isready -h "$host" -p "$port" -U "$user" -d "$db" &>/dev/null
  else
    # Fallback to nc if pg_isready not available — this only proves the
    # port is open, not that the server is ready to accept queries.
    nc -z "$host" "$port" &>/dev/null
  fi
}

# Check if Valkey/Redis is accepting connections via PING.
# Prefers valkey-cli, then redis-cli (protocol-compatible), then a raw
# TCP port probe with nc as a last resort.
check_valkey_ready() {
  local host="${1:-localhost}"
  local port="${2:-6380}"

  if command -v valkey-cli &>/dev/null; then
    valkey-cli -h "$host" -p "$port" ping &>/dev/null
  elif command -v redis-cli &>/dev/null; then
    redis-cli -h "$host" -p "$port" ping &>/dev/null
  else
    nc -z "$host" "$port" &>/dev/null
  fi
}
|
||||
|
||||
# =============================================================================
# CI DOCKER IMAGE MANAGEMENT
# =============================================================================

# Succeed when the CI image ($CI_IMAGE) exists locally.
ci_image_exists() {
  docker image inspect "$CI_IMAGE" &>/dev/null
}

# Build the CI Docker image.
# Arguments: $1 - "true" to force a rebuild even when the image exists.
# Globals:   REPO_ROOT, CI_DOCKERFILE, CI_IMAGE.
# Returns:   0 on success or when the image already exists, 1 on failure.
# Fix: the build failure is detected with `if ! docker build ...` — the old
# `if [[ $? -ne 0 ]]` check was unreachable when the calling script runs
# under `set -e` (the failed build exited the script first).
build_ci_image() {
  local force_rebuild="${1:-false}"
  local dockerfile="$REPO_ROOT/$CI_DOCKERFILE"

  if [[ ! -f "$dockerfile" ]]; then
    log_error "Dockerfile not found: $dockerfile"
    return 1
  fi

  check_docker || return 1

  # Skip the (slow) build when an image is already present, unless forced.
  if ci_image_exists && [[ "$force_rebuild" != "true" ]]; then
    log_info "CI image already exists: $CI_IMAGE"
    log_info "Use --rebuild to force rebuild"
    return 0
  fi

  log_section "Building CI Docker Image"
  log_info "Dockerfile: $dockerfile"
  log_info "Image: $CI_IMAGE"

  if ! docker build -t "$CI_IMAGE" -f "$dockerfile" "$REPO_ROOT"; then
    log_error "Failed to build CI image"
    return 1
  fi

  log_success "CI image built successfully: $CI_IMAGE"
}
|
||||
|
||||
# =============================================================================
# CONTAINER EXECUTION
# =============================================================================

# Run a shell command inside the CI container image.
# All arguments are flattened into one string and executed via `bash -c`,
# so pass a single pre-quoted command string for anything non-trivial.
# Builds the CI image on demand. The repo is mounted read-write at /src.
# Globals: REPO_ROOT, CI_IMAGE.
# Returns: docker run's exit status (i.e. the command's status).
run_in_ci_container() {
  local command="$*"

  check_docker || return 1

  if ! ci_image_exists; then
    log_info "CI image not found, building..."
    build_ci_image || return 1
  fi

  local docker_args=(
    --rm
    -v "$REPO_ROOT:/src"
    -v "$REPO_ROOT/TestResults:/src/TestResults"
    -e DOTNET_NOLOGO=1
    -e DOTNET_CLI_TELEMETRY_OPTOUT=1
    -e DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
    -e TZ=UTC
    -w /src
  )

  # Mount Docker socket for Testcontainers (sibling-container pattern).
  if [[ -S /var/run/docker.sock ]]; then
    docker_args+=(-v /var/run/docker.sock:/var/run/docker.sock)
  fi

  # Load environment file if exists
  local env_file="$REPO_ROOT/devops/ci-local/.env.local"
  if [[ -f "$env_file" ]]; then
    docker_args+=(--env-file "$env_file")
  fi

  # Connect to CI network if services are running
  if docker network inspect stellaops-ci-net &>/dev/null; then
    docker_args+=(--network stellaops-ci-net)
  fi

  log_debug "Running in CI container: $command"
  docker run "${docker_args[@]}" "$CI_IMAGE" bash -c "$command"
}
|
||||
|
||||
# =============================================================================
# DOCKER NETWORK UTILITIES
# =============================================================================

# Print the IP address(es) of a running container.
# The template ranges over every attached network, so a container on
# multiple networks yields the concatenation of all its addresses.
get_container_ip() {
  local container="$1"
  docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$container" 2>/dev/null
}

# Succeed when the named container exists and is currently running.
is_container_running() {
  local container="$1"
  [[ "$(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null)" == "true" ]]
}

# Print the last N lines (default 100) of a container's log, with stdout
# and stderr merged.
get_container_logs() {
  local container="$1"
  local lines="${2:-100}"
  docker logs --tail "$lines" "$container" 2>&1
}
|
||||
475
devops/scripts/lib/ci-web.sh
Normal file
475
devops/scripts/lib/ci-web.sh
Normal file
@@ -0,0 +1,475 @@
|
||||
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# CI-WEB.SH - Angular Web Testing Utilities
|
||||
# =============================================================================
|
||||
# Functions for running Angular/Web frontend tests locally.
|
||||
#
|
||||
# Test Types:
|
||||
# - Unit Tests (Karma/Jasmine)
|
||||
# - E2E Tests (Playwright)
|
||||
# - Accessibility Tests (Axe-core)
|
||||
# - Lighthouse Audits
|
||||
# - Storybook Build
|
||||
#
|
||||
# =============================================================================
|
||||
|
||||
# Prevent direct execution — this file only defines functions and relies on
# helpers (log_*, timers) sourced from ci-common.sh by the caller.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  echo "This script should be sourced, not executed directly."
  exit 1
fi

# =============================================================================
# CONSTANTS
# =============================================================================

# Root of the Angular application; falls back to the git toplevel when
# REPO_ROOT is not exported by the sourcing script.
WEB_DIR="${REPO_ROOT:-$(git rev-parse --show-toplevel)}/src/Web/StellaOps.Web"
# Minimum recommended Node.js major version (warn-only, see check_node_version).
WEB_NODE_VERSION="20"

# Test categories for Web
WEB_TEST_CATEGORIES=(
  "web:unit"       # Karma unit tests
  "web:e2e"        # Playwright E2E
  "web:a11y"       # Accessibility
  "web:lighthouse" # Performance/a11y audit
  "web:build"      # Production build
  "web:storybook"  # Storybook build
)
|
||||
|
||||
# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================

# Verify Node.js is installed; warn (but do not fail) when its major version
# is older than WEB_NODE_VERSION.
# Returns: 0 when node exists, 1 when it is missing entirely.
check_node_version() {
  if ! command -v node &>/dev/null; then
    log_error "Node.js not found"
    log_info "Install Node.js $WEB_NODE_VERSION+: https://nodejs.org"
    return 1
  fi

  # Extract the major version with parameter expansion: "v20.11.1" -> "20".
  local raw major
  raw=$(node --version)
  raw=${raw#v}
  major=${raw%%.*}
  if [[ "$major" -lt "$WEB_NODE_VERSION" ]]; then
    log_warn "Node.js version $major is below recommended $WEB_NODE_VERSION"
  else
    log_debug "Node.js version: $(node --version)"
  fi
  return 0
}

# Verify npm is on PATH.
# Returns: 0 when found, 1 otherwise.
check_npm() {
  command -v npm &>/dev/null || {
    log_error "npm not found"
    return 1
  }
  log_debug "npm version: $(npm --version)"
  return 0
}
|
||||
|
||||
# Verify the tooling needed for web tests (Node.js and npm).
# A missing node_modules directory is only warned about — the install step
# creates it on demand later.
# Returns: 0 when the required tools exist, 1 otherwise.
check_web_dependencies() {
  log_subsection "Checking Web Dependencies"

  check_node_version || return 1
  check_npm || return 1

  # Check if node_modules exists
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    log_warn "node_modules not found - will install dependencies"
  fi

  return 0
}
|
||||
|
||||
# =============================================================================
# SETUP
# =============================================================================

# Install npm dependencies for the web app.
# Uses `npm ci` (reproducible, lockfile-driven) when package-lock.json is
# present, otherwise falls back to `npm install`.
# Returns: 0 on success, 1 on failure; the working directory is restored
# via popd on every exit path.
install_web_dependencies() {
  log_subsection "Installing Web Dependencies"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  pushd "$WEB_DIR" > /dev/null || return 1

  # Check if package-lock.json exists
  if [[ -f "package-lock.json" ]]; then
    log_info "Running npm ci (clean install)..."
    npm ci --prefer-offline --no-audit --no-fund || {
      log_error "npm ci failed"
      popd > /dev/null
      return 1
    }
  else
    log_info "Running npm install..."
    npm install --no-audit --no-fund || {
      log_error "npm install failed"
      popd > /dev/null
      return 1
    }
  fi

  popd > /dev/null
  log_success "Web dependencies installed"
  return 0
}

# Install dependencies only when node_modules is missing (cheap fast path
# for repeated invocations of the test runners below).
ensure_web_dependencies() {
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    install_web_dependencies || return 1
  fi
  return 0
}
|
||||
|
||||
# =============================================================================
# TEST RUNNERS
# =============================================================================

# Run the Angular unit test suite (Karma/Jasmine) in CI mode.
# Globals: WEB_DIR (read); DRY_RUN (read, defaults to "false").
# Returns: the test run's exit status (0 in dry-run mode).
# Fixes: DRY_RUN is defaulted (`${DRY_RUN:-false}`) so the check is safe
# under `set -u`, matching the ${VERBOSE:-false} convention used elsewhere;
# the npm status is captured with `|| result=$?` so a caller's `set -e`
# cannot skip the timing/popd cleanup.
run_web_unit_tests() {
  log_subsection "Running Web Unit Tests (Karma/Jasmine)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "${DRY_RUN:-false}" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:ci"
    popd > /dev/null
    return 0
  fi

  # Run tests
  local result=0
  npm run test:ci || result=$?

  stop_timer "$start_time" "Web unit tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web unit tests passed"
  else
    log_error "Web unit tests failed"
  fi

  return $result
}
|
||||
|
||||
# Run Playwright end-to-end tests, installing the chromium browser on first
# use when no Playwright cache is found.
# Globals: WEB_DIR (read); DRY_RUN (read, defaults to "false").
# Returns: the test run's exit status (0 in dry-run mode).
# Fixes: DRY_RUN defaulted for `set -u` safety; npm status captured with
# `|| result=$?` so caller-level `set -e` cannot skip cleanup.
run_web_e2e_tests() {
  log_subsection "Running Web E2E Tests (Playwright)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  # Install Playwright browsers if needed. Best-effort: a failure here is
  # only a warning since browsers may already be available system-wide.
  if [[ ! -d "$HOME/.cache/ms-playwright" ]] && [[ ! -d "node_modules/.cache/ms-playwright" ]]; then
    log_info "Installing Playwright browsers..."
    npx playwright install --with-deps chromium || {
      log_warn "Playwright browser installation failed - E2E tests may fail"
    }
  fi

  if [[ "${DRY_RUN:-false}" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:e2e"
    popd > /dev/null
    return 0
  fi

  # Run E2E tests
  local result=0
  npm run test:e2e || result=$?

  stop_timer "$start_time" "Web E2E tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web E2E tests passed"
  else
    log_error "Web E2E tests failed"
  fi

  return $result
}
|
||||
|
||||
# Run accessibility tests (Axe). Failures are logged as warnings but are
# deliberately NON-BLOCKING: outside of setup errors this always returns 0.
# Globals: WEB_DIR (read); DRY_RUN (read, defaults to "false").
# Fixes: DRY_RUN defaulted for `set -u` safety; npm status captured with
# `|| result=$?` so caller-level `set -e` cannot abort the non-blocking run.
run_web_a11y_tests() {
  log_subsection "Running Web Accessibility Tests (Axe)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "${DRY_RUN:-false}" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:a11y"
    popd > /dev/null
    return 0
  fi

  # Run accessibility tests
  local result=0
  npm run test:a11y || result=$?

  stop_timer "$start_time" "Web accessibility tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web accessibility tests passed"
  else
    log_warn "Web accessibility tests had issues (non-blocking)"
  fi

  # A11y tests are non-blocking by default
  return 0
}
|
||||
|
||||
# Build the production web bundle and report the resulting dist/ size.
# Globals: WEB_DIR (read); DRY_RUN (read, defaults to "false").
# Returns: the build's exit status (0 in dry-run mode).
# Fixes: DRY_RUN defaulted for `set -u` safety; npm status captured with
# `|| result=$?` so caller-level `set -e` cannot skip the cleanup below.
run_web_build() {
  log_subsection "Building Web Application"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "${DRY_RUN:-false}" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run build -- --configuration production"
    popd > /dev/null
    return 0
  fi

  # Build production bundle
  local result=0
  npm run build -- --configuration production --progress=false || result=$?

  stop_timer "$start_time" "Web build"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web build completed"

    # Check bundle size (informational only)
    if [[ -d "$WEB_DIR/dist" ]]; then
      local size
      size=$(du -sh "$WEB_DIR/dist" 2>/dev/null | cut -f1)
      log_info "Bundle size: $size"
    fi
  else
    log_error "Web build failed"
  fi

  return $result
}
|
||||
|
||||
run_web_storybook_build() {
  # Build the Storybook static site for the web app.
  # Globals: WEB_DIR (read), DRY_RUN (read)
  # Returns: npm exit status (0 on success).
  log_subsection "Building Storybook"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run storybook:build"
    popd > /dev/null
    return 0
  fi

  # Build Storybook.
  # '|| result=$?' so a failure is captured instead of killing the script
  # under 'set -e'.
  local result=0
  npm run storybook:build || result=$?

  stop_timer "$start_time" "Storybook build"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Storybook build completed"
  else
    log_error "Storybook build failed"
  fi

  return $result
}
|
||||
|
||||
run_web_lighthouse() {
  # Lighthouse performance audit. Best-effort: skipped when lhci is not
  # installed, and problems are reported as warnings; always returns 0
  # once the directory check and dependency setup succeed.
  log_subsection "Running Lighthouse Audit"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  # Check if lighthouse is available (global install or via npx).
  if ! command -v lhci &>/dev/null && ! npx lhci --version &>/dev/null 2>&1; then
    log_warn "Lighthouse CI not installed - skipping audit"
    log_info "Install with: npm install -g @lhci/cli"
    return 0
  fi

  ensure_web_dependencies || return 1

  # A production bundle is required; build it first if absent.
  if [[ ! -d "$WEB_DIR/dist" ]]; then
    run_web_build || return 1
  fi

  pushd "$WEB_DIR" > /dev/null || return 1

  local t0
  t0=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: lhci autorun"
    popd > /dev/null
    return 0
  fi

  # Run Lighthouse; failures are non-blocking by design.
  npx lhci autorun \
    --collect.staticDistDir=./dist/stellaops-web/browser \
    --collect.numberOfRuns=1 \
    --upload.target=filesystem \
    --upload.outputDir=./lighthouse-results 2>/dev/null || {
    log_warn "Lighthouse audit had issues"
  }

  stop_timer "$t0" "Lighthouse audit"
  popd > /dev/null

  log_success "Lighthouse audit completed"
  return 0
}
|
||||
|
||||
# =============================================================================
|
||||
# COMPOSITE RUNNERS
|
||||
# =============================================================================
|
||||
|
||||
run_web_smoke() {
  # Quick validation: production build, then unit tests (only if the
  # build succeeded). Returns non-zero if either stage failed.
  log_section "Web Smoke Tests"
  log_info "Running quick web validation"

  local rc=0

  run_web_build || rc=1

  if (( rc == 0 )); then
    run_web_unit_tests || rc=1
  fi

  return $rc
}
|
||||
|
||||
run_web_pr_gating() {
  # Full PR-gating suite: build -> unit -> e2e, each stage gated on the
  # previous; a11y runs last and is non-blocking. Prints a per-stage
  # summary and returns 1 if any gating stage failed.
  log_section "Web PR-Gating Tests"
  log_info "Running full web PR-gating suite"

  local failed=0
  local results=()
  local rc

  # Build.
  # Each stage uses '|| rc=$?' so a failure is recorded instead of killing
  # the script under 'set -e'. The status is kept in 'rc' rather than being
  # re-parsed out of "${results[-1]}" (negative array indices require
  # bash 4.3+ and the round-trip through the string was redundant).
  rc=0
  run_web_build || rc=$?
  results+=("Build:$rc")
  if (( rc != 0 )); then failed=1; fi

  # Unit tests
  if [[ $failed -eq 0 ]]; then
    rc=0
    run_web_unit_tests || rc=$?
    results+=("Unit:$rc")
    if (( rc != 0 )); then failed=1; fi
  fi

  # E2E tests
  if [[ $failed -eq 0 ]]; then
    rc=0
    run_web_e2e_tests || rc=$?
    results+=("E2E:$rc")
    if (( rc != 0 )); then failed=1; fi
  fi

  # A11y tests (non-blocking)
  rc=0
  run_web_a11y_tests || rc=$?
  results+=("A11y:$rc")

  # Print summary
  log_section "Web Test Results"
  local result name status
  for result in "${results[@]}"; do
    name="${result%%:*}"
    status="${result##*:}"
    if [[ "$status" == "0" ]]; then
      print_status "Web $name" "true"
    else
      print_status "Web $name" "false"
    fi
  done

  return $failed
}
|
||||
|
||||
run_web_full() {
  # Everything: the PR-gating suite plus the extended, non-gating checks
  # (Storybook build and Lighthouse). Only the gating suite affects the
  # return status.
  log_section "Full Web Test Suite"
  log_info "Running all web tests including extended categories"

  local rc=0

  # PR-gating tests drive the exit status.
  run_web_pr_gating || rc=1

  # Extended tests: report-only.
  run_web_storybook_build || log_warn "Storybook build failed (non-blocking)"
  run_web_lighthouse || log_warn "Lighthouse audit failed (non-blocking)"

  return $rc
}
|
||||
|
||||
# =============================================================================
|
||||
# EXPORTS
|
||||
# =============================================================================
|
||||
|
||||
# Export the public API so subshells spawned by CI drivers can call it.
_web_ci_exports=(
  check_web_dependencies
  install_web_dependencies
  ensure_web_dependencies
  run_web_unit_tests
  run_web_e2e_tests
  run_web_a11y_tests
  run_web_build
  run_web_storybook_build
  run_web_lighthouse
  run_web_smoke
  run_web_pr_gating
  run_web_full
)
export -f "${_web_ci_exports[@]}"
unset _web_ci_exports
|
||||
178
devops/scripts/lib/exit-codes.sh
Normal file
178
devops/scripts/lib/exit-codes.sh
Normal file
@@ -0,0 +1,178 @@
|
||||
#!/usr/bin/env bash
|
||||
# Shared Exit Codes Registry
|
||||
# Sprint: CI/CD Enhancement - Script Consolidation
|
||||
#
|
||||
# Purpose: Standard exit codes for all CI/CD scripts
|
||||
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
|
||||
#
|
||||
# Exit codes follow POSIX conventions (0-125)
|
||||
# 126-127 reserved for shell errors
|
||||
# 128+ reserved for signal handling
|
||||
|
||||
# Prevent multiple sourcing.
# 'return' (not 'exit') so the sourcing shell is never terminated; the
# sentinel is exported so child shells also skip re-loading.
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1
|
||||
|
||||
# ============================================================================
# Standard Exit Codes
# ============================================================================
# Codes are grouped in ranges of ten per failure domain; when adding a new
# code, pick a free number from the matching range so numbering stays
# meaningful. All constants are exported for use by child processes.

# Success
export EXIT_SUCCESS=0

# General errors (1-9)
export EXIT_ERROR=1              # Generic error
export EXIT_USAGE=2              # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3       # Configuration error
export EXIT_NOT_FOUND=4          # File/resource not found
export EXIT_PERMISSION=5         # Permission denied
export EXIT_IO_ERROR=6           # I/O error
export EXIT_NETWORK_ERROR=7      # Network error
export EXIT_TIMEOUT=8            # Operation timed out
export EXIT_INTERRUPTED=9        # User interrupted (Ctrl+C)

# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10      # Required tool not installed
export EXIT_TOOL_ERROR=11        # Tool execution failed
export EXIT_VERSION_MISMATCH=12  # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13  # Dependency resolution failed

# Build errors (20-29)
export EXIT_BUILD_FAILED=20      # Build compilation failed
export EXIT_RESTORE_FAILED=21    # Package restore failed
export EXIT_PUBLISH_FAILED=22    # Publish failed
export EXIT_PACKAGING_FAILED=23  # Packaging failed

# Test errors (30-39)
export EXIT_TEST_FAILED=30       # Tests failed
export EXIT_TEST_TIMEOUT=31      # Test timed out
export EXIT_FIXTURE_ERROR=32     # Test fixture error
export EXIT_DETERMINISM_FAIL=33  # Determinism check failed

# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40     # Deployment failed
export EXIT_ROLLBACK_FAILED=41   # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43    # Container registry error

# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51      # Schema validation failed
export EXIT_LINT_ERROR=52        # Lint check failed
export EXIT_FORMAT_ERROR=53      # Format check failed
export EXIT_LICENSE_ERROR=54     # License compliance failed

# Security errors (60-69)
export EXIT_SECURITY_ERROR=60    # Security check failed
export EXIT_SECRETS_FOUND=61     # Secrets detected in code
export EXIT_VULN_FOUND=62        # Vulnerabilities found
export EXIT_SIGN_FAILED=63       # Signing failed
export EXIT_VERIFY_FAILED=64     # Verification failed

# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70         # Git operation failed
export EXIT_DIRTY_WORKTREE=71    # Uncommitted changes
export EXIT_MERGE_CONFLICT=72    # Merge conflict
export EXIT_BRANCH_ERROR=73      # Branch operation failed

# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80      # .NET specific error
export EXIT_DOCKER_ERROR=81      # Docker specific error
export EXIT_HELM_ERROR=82        # Helm specific error
export EXIT_KUBECTL_ERROR=83     # kubectl specific error
export EXIT_NPM_ERROR=84         # npm specific error
export EXIT_PYTHON_ERROR=85      # Python specific error

# Legacy compatibility
# NOTE(review): 69 numerically falls inside the security range (60-69);
# kept only for backward compatibility — new code should use
# EXIT_MISSING_TOOL.
export EXIT_TOOLCHAIN=69         # Tool not found (legacy, use EXIT_MISSING_TOOL)
||||
# ============================================================================
|
||||
# Helper Functions
|
||||
# ============================================================================
|
||||
|
||||
# Get the symbolic name for an exit code number.
# Arguments: $1 - exit code (may be empty or non-numeric)
# Outputs:   the name on stdout; UNKNOWN_<code> when unmapped.
exit_code_name() {
  local code="${1:-}"

  case "$code" in
    0) echo "SUCCESS" ;;
    1) echo "ERROR" ;;
    2) echo "USAGE" ;;
    3) echo "CONFIG_ERROR" ;;
    4) echo "NOT_FOUND" ;;
    5) echo "PERMISSION" ;;
    6) echo "IO_ERROR" ;;
    7) echo "NETWORK_ERROR" ;;
    8) echo "TIMEOUT" ;;
    9) echo "INTERRUPTED" ;;
    10) echo "MISSING_TOOL" ;;
    11) echo "TOOL_ERROR" ;;
    12) echo "VERSION_MISMATCH" ;;
    13) echo "DEPENDENCY_ERROR" ;;
    20) echo "BUILD_FAILED" ;;
    21) echo "RESTORE_FAILED" ;;
    22) echo "PUBLISH_FAILED" ;;
    23) echo "PACKAGING_FAILED" ;;
    30) echo "TEST_FAILED" ;;
    31) echo "TEST_TIMEOUT" ;;
    32) echo "FIXTURE_ERROR" ;;
    33) echo "DETERMINISM_FAIL" ;;
    40) echo "DEPLOY_FAILED" ;;
    41) echo "ROLLBACK_FAILED" ;;
    42) echo "HEALTH_CHECK_FAIL" ;;
    43) echo "REGISTRY_ERROR" ;;
    50) echo "VALIDATION_FAILED" ;;
    51) echo "SCHEMA_ERROR" ;;
    52) echo "LINT_ERROR" ;;
    53) echo "FORMAT_ERROR" ;;
    54) echo "LICENSE_ERROR" ;;
    60) echo "SECURITY_ERROR" ;;
    61) echo "SECRETS_FOUND" ;;
    62) echo "VULN_FOUND" ;;
    63) echo "SIGN_FAILED" ;;
    64) echo "VERIFY_FAILED" ;;
    69) echo "TOOLCHAIN (legacy)" ;;
    70) echo "GIT_ERROR" ;;
    71) echo "DIRTY_WORKTREE" ;;
    72) echo "MERGE_CONFLICT" ;;
    73) echo "BRANCH_ERROR" ;;
    80) echo "DOTNET_ERROR" ;;
    81) echo "DOCKER_ERROR" ;;
    82) echo "HELM_ERROR" ;;
    83) echo "KUBECTL_ERROR" ;;
    84) echo "NPM_ERROR" ;;
    85) echo "PYTHON_ERROR" ;;
    126) echo "COMMAND_NOT_EXECUTABLE" ;;
    127) echo "COMMAND_NOT_FOUND" ;;
    *)
      # Only treat strictly-numeric codes arithmetically. The previous
      # '[[ $code -ge 128 ]]' evaluated arbitrary input in an arithmetic
      # context, which errors on strings like "1x" and can even expand
      # attacker-controlled array subscripts.
      if [[ "$code" =~ ^[0-9]+$ ]] && (( code >= 128 && code <= 255 )); then
        # 128+N conventionally means "killed by signal N".
        echo "SIGNAL_$((code - 128))"
      else
        echo "UNKNOWN_${code}"
      fi
      ;;
  esac
}
|
||||
|
||||
# True when the given exit code means success (missing argument counts
# as failure).
is_success() {
  (( ${1:-1} == 0 ))
}
|
||||
|
||||
# True when the given exit code means failure (missing argument counts
# as success).
is_error() {
  (( ${1:-0} != 0 ))
}
|
||||
|
||||
# Terminate the script with a code and an optional message on stderr.
# Arguments: $1 - exit code (default 1); remaining args - message words.
exit_with() {
  local code="${1:-1}"
  shift
  (( $# > 0 )) && echo "$@" >&2
  exit "$code"
}
|
||||
262
devops/scripts/lib/git-utils.sh
Normal file
262
devops/scripts/lib/git-utils.sh
Normal file
@@ -0,0 +1,262 @@
|
||||
#!/usr/bin/env bash
|
||||
# Shared Git Utilities
|
||||
# Sprint: CI/CD Enhancement - Script Consolidation
|
||||
#
|
||||
# Purpose: Common git operations for CI/CD scripts
|
||||
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
|
||||
|
||||
# Prevent multiple sourcing; the exported sentinel also lets child shells
# skip re-loading.
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1

# Source dependencies.
# Failures are deliberately tolerated so this file can still be sourced
# standalone; functions that use log_* or EXIT_* below fall back to
# parameter-expansion defaults where provided.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
|
||||
|
||||
# ============================================================================
|
||||
# Repository Information
|
||||
# ============================================================================
|
||||
|
||||
# --- Repository information -------------------------------------------------

# Repository root directory; "." when not inside a repo.
git_root() { git rev-parse --show-toplevel 2>/dev/null || echo "."; }

# True when the current directory is inside a git repository.
is_git_repo() { git rev-parse --git-dir >/dev/null 2>&1; }

# Full and abbreviated SHA of HEAD.
git_sha() { git rev-parse HEAD 2>/dev/null; }
git_sha_short() { git rev-parse --short HEAD 2>/dev/null; }

# Current branch name.
git_branch() { git rev-parse --abbrev-ref HEAD 2>/dev/null; }

# Tag pointing exactly at HEAD; empty when HEAD is not tagged.
git_tag() { git describe --tags --exact-match HEAD 2>/dev/null || echo ""; }

# Most recent reachable tag; empty when there is none.
git_latest_tag() { git describe --tags --abbrev=0 2>/dev/null || echo ""; }

# URL of a remote (default: origin).
git_remote_url() { git remote get-url "${1:-origin}" 2>/dev/null; }

# Repository name derived from the remote URL (".git" suffix stripped).
git_repo_name() { basename "$(git_remote_url "${1:-origin}")" .git; }
|
||||
|
||||
# ============================================================================
|
||||
# Commit Information
|
||||
# ============================================================================
|
||||
|
||||
# --- Commit metadata --------------------------------------------------------
# Each helper takes an optional ref (default HEAD) and prints one field of
# the commit via 'git log -1 --format'.

git_commit_message() { git log -1 --format="%s" "${1:-HEAD}" 2>/dev/null; }

git_commit_author() { git log -1 --format="%an" "${1:-HEAD}" 2>/dev/null; }

git_commit_author_email() { git log -1 --format="%ae" "${1:-HEAD}" 2>/dev/null; }

# Author timestamp, ISO 8601.
git_commit_timestamp() { git log -1 --format="%aI" "${1:-HEAD}" 2>/dev/null; }

# Author timestamp, Unix epoch seconds.
git_commit_epoch() { git log -1 --format="%at" "${1:-HEAD}" 2>/dev/null; }
|
||||
|
||||
# ============================================================================
|
||||
# Working Tree State
|
||||
# ============================================================================
|
||||
|
||||
# --- Working tree state -----------------------------------------------------

# True when 'git status --porcelain' reports nothing.
git_is_clean() { [[ -z "$(git status --porcelain 2>/dev/null)" ]]; }

git_is_dirty() { ! git_is_clean; }

# Paths with pending changes (second porcelain column).
# NOTE(review): paths containing spaces, or rename entries ("R old -> new"),
# are not handled by the awk field split — confirm callers only feed this
# simple paths.
git_changed_files() { git status --porcelain 2>/dev/null | awk '{print $2}'; }

# Paths staged in the index.
git_staged_files() { git diff --cached --name-only 2>/dev/null; }

# Untracked paths, honoring ignore rules.
git_untracked_files() { git ls-files --others --exclude-standard 2>/dev/null; }
|
||||
|
||||
# ============================================================================
|
||||
# Diff and History
|
||||
# ============================================================================
|
||||
|
||||
# Get files changed between two refs (defaults: HEAD~1..HEAD).
git_diff_files() {
  local from="${1:-HEAD~1}"
  local to="${2:-HEAD}"
  git diff --name-only "$from" "$to" 2>/dev/null
}

# Get files changed in the last N commits (default 1).
git_recent_files() {
  local count="${1:-1}"
  git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}

# Check if a file was changed between two refs.
# Returns 0 when the pathspec matched at least one changed file.
git_file_changed() {
  local file="$1"
  local from="${2:-HEAD~1}"
  local to="${3:-HEAD}"
  # -F / --: the previous version used the path as a grep *regex*, so names
  # containing '.', '[' etc. could false-match, and names starting with '-'
  # were parsed as grep options. The pathspec already restricts the diff;
  # grep only tests for non-empty, matching output.
  git diff --name-only "$from" "$to" -- "$file" 2>/dev/null | grep -Fq -- "$file"
}

# Get one-line commit summaries between two refs (defaults: HEAD~10..HEAD).
git_commits_between() {
  local from="${1:-HEAD~10}"
  local to="${2:-HEAD}"
  git log --oneline "$from".."$to" 2>/dev/null
}
|
||||
|
||||
# ============================================================================
|
||||
# Tag Operations
|
||||
# ============================================================================
|
||||
|
||||
# --- Tag operations ---------------------------------------------------------

# Create a tag: annotated when a message is given, lightweight otherwise.
git_create_tag() {
  local tag="$1" message="${2:-}"
  if [[ -n "$message" ]]; then
    git tag -a "$tag" -m "$message"
  else
    git tag "$tag"
  fi
}

# Delete a local tag (silently succeeds-or-not).
git_delete_tag() { git tag -d "$1" 2>/dev/null; }

# Push a tag to a remote (default: origin).
git_push_tag() { git push "${2:-origin}" "$1"; }

# List tags matching a glob pattern (default: all).
git_list_tags() { git tag -l "${1:-*}" 2>/dev/null; }
|
||||
|
||||
# ============================================================================
|
||||
# Branch Operations
|
||||
# ============================================================================
|
||||
|
||||
# --- Branch operations ------------------------------------------------------

# True when a local branch exists.
git_branch_exists() {
  git show-ref --verify --quiet "refs/heads/$1" 2>/dev/null
}

# True when a branch exists on a remote (default: origin).
git_remote_branch_exists() {
  git show-ref --verify --quiet "refs/remotes/${2:-origin}/$1" 2>/dev/null
}

# Default branch of a remote, as reported by 'git remote show'.
git_default_branch() {
  git remote show "${1:-origin}" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}
|
||||
|
||||
# ============================================================================
|
||||
# CI/CD Helpers
|
||||
# ============================================================================
|
||||
|
||||
# --- CI/CD helpers ----------------------------------------------------------

# Version string for CI builds: the exact tag when HEAD is tagged,
# otherwise "<branch-with-slashes-dashed>-<short-sha>".
git_ci_version() {
  local version
  version=$(git_tag)
  if [[ -z "$version" ]]; then
    version="$(git_branch | tr '/' '-')-$(git_sha_short)"
  fi
  echo "$version"
}

# True when the checked-out branch is the remote's default branch.
git_is_default_branch() {
  [[ "$(git_branch)" == "$(git_default_branch)" ]]
}

# True in common CI environments (generic CI, GitHub Actions, GitLab CI).
git_is_ci() {
  [[ -n "${CI:-}" || -n "${GITHUB_ACTIONS:-}" || -n "${GITLAB_CI:-}" ]]
}

# Fail (EXIT_DIRTY_WORKTREE) unless the working tree is clean.
git_require_clean() {
  if git_is_dirty; then
    log_error "Working tree is dirty. Commit or stash changes first."
    return "${EXIT_DIRTY_WORKTREE:-71}"
  fi
}
|
||||
266
devops/scripts/lib/hash-utils.sh
Normal file
266
devops/scripts/lib/hash-utils.sh
Normal file
@@ -0,0 +1,266 @@
|
||||
#!/usr/bin/env bash
|
||||
# Shared Hash/Checksum Utilities
|
||||
# Sprint: CI/CD Enhancement - Script Consolidation
|
||||
#
|
||||
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
|
||||
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
|
||||
|
||||
# Prevent multiple sourcing; the exported sentinel also lets child shells
# skip re-loading.
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1

# Source dependencies.
# Failures are tolerated so this file loads standalone; log_* / EXIT_*
# users below rely on parameter-expansion defaults when these are absent.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
|
||||
|
||||
# ============================================================================
|
||||
# Hash Computation
|
||||
# ============================================================================
|
||||
|
||||
# SHA-256 digest of a file, printed as hex on stdout.
# Tool preference: sha256sum, then shasum, then openssl.
compute_sha256() {
  local file="$1" out

  if [[ ! -f "$file" ]]; then
    log_error "File not found: $file"
    return "${EXIT_NOT_FOUND:-4}"
  fi

  if command -v sha256sum >/dev/null 2>&1; then
    out=$(sha256sum "$file") && printf '%s\n' "${out%% *}"
  elif command -v shasum >/dev/null 2>&1; then
    out=$(shasum -a 256 "$file") && printf '%s\n' "${out%% *}"
  elif command -v openssl >/dev/null 2>&1; then
    # openssl prints "SHA256(file)= <hash>"; keep the last field.
    out=$(openssl dgst -sha256 "$file") && printf '%s\n' "${out##* }"
  else
    log_error "No SHA-256 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
  fi
}
|
||||
|
||||
# SHA-512 digest of a file, printed as hex on stdout.
# Tool preference: sha512sum, then shasum, then openssl.
compute_sha512() {
  local file="$1" out

  if [[ ! -f "$file" ]]; then
    log_error "File not found: $file"
    return "${EXIT_NOT_FOUND:-4}"
  fi

  if command -v sha512sum >/dev/null 2>&1; then
    out=$(sha512sum "$file") && printf '%s\n' "${out%% *}"
  elif command -v shasum >/dev/null 2>&1; then
    out=$(shasum -a 512 "$file") && printf '%s\n' "${out%% *}"
  elif command -v openssl >/dev/null 2>&1; then
    # openssl prints "SHA512(file)= <hash>"; keep the last field.
    out=$(openssl dgst -sha512 "$file") && printf '%s\n' "${out##* }"
  else
    log_error "No SHA-512 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
  fi
}
|
||||
|
||||
# MD5 digest of a file (compatibility/fingerprinting only — MD5 is not a
# secure hash). Tool preference: md5sum, then BSD md5, then openssl.
compute_md5() {
  local file="$1" out

  if [[ ! -f "$file" ]]; then
    log_error "File not found: $file"
    return "${EXIT_NOT_FOUND:-4}"
  fi

  if command -v md5sum >/dev/null 2>&1; then
    out=$(md5sum "$file") && printf '%s\n' "${out%% *}"
  elif command -v md5 >/dev/null 2>&1; then
    # BSD md5 -q prints just the digest.
    md5 -q "$file"
  elif command -v openssl >/dev/null 2>&1; then
    out=$(openssl dgst -md5 "$file") && printf '%s\n' "${out##* }"
  else
    log_error "No MD5 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
  fi
}
|
||||
|
||||
# Hash a string (no trailing newline is included in the hashed bytes).
# Arguments: $1 - string; $2 - algorithm (sha256 | sha512 | md5, default sha256)
#
# Fixes over the previous version:
#  - the '|| fallback' never fired when the primary tool was missing, because
#    the pipeline's status came from awk (which succeeds on empty input);
#    tools are now probed with 'command -v' like the file-hash helpers.
#  - 'echo -n' mangled strings starting with '-' or containing backslashes;
#    printf '%s' is used instead.
#  - openssl is added as a last-resort fallback, consistent with the rest
#    of this file.
compute_string_hash() {
  local string="$1"
  local algorithm="${2:-sha256}"

  case "$algorithm" in
    sha256)
      if command -v sha256sum >/dev/null 2>&1; then
        printf '%s' "$string" | sha256sum | awk '{print $1}'
      elif command -v shasum >/dev/null 2>&1; then
        printf '%s' "$string" | shasum -a 256 | awk '{print $1}'
      elif command -v openssl >/dev/null 2>&1; then
        printf '%s' "$string" | openssl dgst -sha256 | awk '{print $NF}'
      else
        log_error "No SHA-256 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    sha512)
      if command -v sha512sum >/dev/null 2>&1; then
        printf '%s' "$string" | sha512sum | awk '{print $1}'
      elif command -v shasum >/dev/null 2>&1; then
        printf '%s' "$string" | shasum -a 512 | awk '{print $1}'
      elif command -v openssl >/dev/null 2>&1; then
        printf '%s' "$string" | openssl dgst -sha512 | awk '{print $NF}'
      else
        log_error "No SHA-512 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    md5)
      if command -v md5sum >/dev/null 2>&1; then
        printf '%s' "$string" | md5sum | awk '{print $1}'
      elif command -v md5 >/dev/null 2>&1; then
        printf '%s' "$string" | md5
      elif command -v openssl >/dev/null 2>&1; then
        printf '%s' "$string" | openssl dgst -md5 | awk '{print $NF}'
      else
        log_error "No MD5 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac
}
|
||||
|
||||
# ============================================================================
|
||||
# Checksum Files
|
||||
# ============================================================================
|
||||
|
||||
# Write a one-line checksum file for a single file.
# Arguments: $1 - file; $2 - checksum file (default "<file>.sha256");
#            $3 - algorithm (sha256 | sha512 | md5, default sha256)
write_checksum() {
  local file="$1"
  local checksum_file="${2:-${file}.sha256}"
  local algorithm="${3:-sha256}"
  local hash

  case "$algorithm" in
    sha256) hash=$(compute_sha256 "$file") ;;
    sha512) hash=$(compute_sha512 "$file") ;;
    md5)    hash=$(compute_md5 "$file") ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac

  # Empty hash means the compute_* helper failed.
  [[ -n "$hash" ]] || return "${EXIT_ERROR:-1}"

  echo "$hash $(basename "$file")" > "$checksum_file"
  log_debug "Wrote checksum to $checksum_file"
}
|
||||
|
||||
# Write SHA-256 checksums for multiple files into one manifest.
# Arguments: $1 - output file; remaining args - files (missing ones skipped).
write_checksums() {
  local output_file="$1"
  shift

  : > "$output_file"   # truncate/create

  local file
  for file in "$@"; do
    [[ -f "$file" ]] || continue
    echo "$(compute_sha256 "$file") $(basename "$file")" >> "$output_file"
  done

  log_debug "Wrote checksums to $output_file"
}
|
||||
|
||||
# ============================================================================
|
||||
# Checksum Verification
|
||||
# ============================================================================
|
||||
|
||||
# Verify one file against an expected digest.
# Arguments: $1 - file; $2 - expected hash; $3 - algorithm (default sha256)
# Returns:   0 on match, EXIT_VERIFY_FAILED on mismatch.
verify_checksum() {
  local file="$1" expected_hash="$2" algorithm="${3:-sha256}"
  local actual_hash

  case "$algorithm" in
    sha256) actual_hash=$(compute_sha256 "$file") ;;
    sha512) actual_hash=$(compute_sha512 "$file") ;;
    md5)    actual_hash=$(compute_md5 "$file") ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac

  if [[ "$actual_hash" != "$expected_hash" ]]; then
    log_error "Checksum mismatch for $file"
    log_error "  Expected: $expected_hash"
    log_error "  Actual: $actual_hash"
    return "${EXIT_VERIFY_FAILED:-64}"
  fi

  log_debug "Checksum verified: $file"
  return 0
}
|
||||
|
||||
# Verify all entries of a checksum manifest (sha256sum -c style:
# "<hash> <filename>" per line; blank lines and '#' comments are skipped).
# Arguments: $1 - checksum file; $2 - base directory for the listed files
# Returns:   0 when every entry verifies, EXIT_VERIFY_FAILED otherwise.
verify_checksums_file() {
  local checksum_file="$1"
  local base_dir="${2:-.}"

  if [[ ! -f "$checksum_file" ]]; then
    log_error "Checksum file not found: $checksum_file"
    return "${EXIT_NOT_FOUND:-4}"
  fi

  local failures=0
  local line hash filename filepath

  # '|| [[ -n "$line" ]]' also processes a final line lacking a newline.
  while IFS= read -r line || [[ -n "$line" ]]; do
    # Skip empty lines and comments
    [[ -z "$line" ]] && continue
    [[ "$line" == \#* ]] && continue

    hash=$(awk '{print $1}' <<<"$line")
    filename=$(awk '{print $2}' <<<"$line")

    # BUG FIX: this previously read "${base_dir}/$(unknown)", executing a
    # nonexistent 'unknown' command instead of appending the filename.
    filepath="${base_dir}/${filename}"

    if [[ ! -f "$filepath" ]]; then
      log_error "File not found: $filepath"
      # 'failures=$((...))' instead of '((failures++))': the post-increment
      # form returns 1 when failures is 0, which aborts under 'set -e'.
      failures=$((failures + 1))
      continue
    fi

    if ! verify_checksum "$filepath" "$hash"; then
      failures=$((failures + 1))
    fi
  done < "$checksum_file"

  if [[ $failures -gt 0 ]]; then
    log_error "$failures checksum verification(s) failed"
    return "${EXIT_VERIFY_FAILED:-64}"
  fi

  log_info "All checksums verified"
  return 0
}
|
||||
|
||||
# ============================================================================
|
||||
# Helpers
|
||||
# ============================================================================
|
||||
|
||||
# True when both paths exist as regular files and have identical SHA-256
# digests.
files_identical() {
  local a="$1" b="$2"

  [[ -f "$a" && -f "$b" ]] || return 1

  [[ "$(compute_sha256 "$a")" == "$(compute_sha256 "$b")" ]]
}
|
||||
|
||||
# Truncate a hash for display.
# Arguments: $1 - hash; $2 - prefix length (default 8)
short_hash() {
  local hash="$1" length="${2:-8}"
  printf '%s\n' "${hash:0:length}"
}
|
||||
|
||||
# Deterministic 16-hex-char ID derived from all arguments
# (space-joined, SHA-256, first 16 characters).
generate_id() {
  compute_string_hash "$*" sha256 | head -c 16
}
|
||||
181
devops/scripts/lib/logging.sh
Normal file
181
devops/scripts/lib/logging.sh
Normal file
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env bash
|
||||
# Shared Logging Library
|
||||
# Sprint: CI/CD Enhancement - Script Consolidation
|
||||
#
|
||||
# Purpose: Standard logging functions for all CI/CD scripts
|
||||
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
|
||||
#
|
||||
# Log Levels: DEBUG, INFO, WARN, ERROR
|
||||
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)
|
||||
|
||||
# Prevent multiple sourcing; the exported sentinel also lets child shells
# skip re-loading.
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_LOGGING_LOADED=1

# Colors (disable with NO_COLOR=1).
# Colorize only when NO_COLOR is unset AND stdout is a terminal; otherwise
# every color variable is an empty string, so callers can interpolate them
# unconditionally without leaking escape codes into logs/pipes.
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
  export LOG_COLOR_RED='\033[0;31m'
  export LOG_COLOR_GREEN='\033[0;32m'
  export LOG_COLOR_YELLOW='\033[1;33m'
  export LOG_COLOR_BLUE='\033[0;34m'
  export LOG_COLOR_MAGENTA='\033[0;35m'
  export LOG_COLOR_CYAN='\033[0;36m'
  export LOG_COLOR_GRAY='\033[0;90m'
  export LOG_COLOR_RESET='\033[0m'
else
  export LOG_COLOR_RED=''
  export LOG_COLOR_GREEN=''
  export LOG_COLOR_YELLOW=''
  export LOG_COLOR_BLUE=''
  export LOG_COLOR_MAGENTA=''
  export LOG_COLOR_CYAN=''
  export LOG_COLOR_GRAY=''
  export LOG_COLOR_RESET=''
fi
|
||||
|
||||
# Active log level; messages below this level are suppressed.
export LOG_LEVEL="${LOG_LEVEL:-INFO}"

# Map a level name to its ordinal; unknown names rank as INFO.
_log_level_to_num() {
  case "$1" in
    DEBUG) echo 0 ;;
    INFO)  echo 1 ;;
    WARN)  echo 2 ;;
    ERROR) echo 3 ;;
    *)     echo 1 ;;
  esac
}

# True when a message at level $1 should be emitted under LOG_LEVEL.
_should_log() {
  local wanted have
  wanted=$(_log_level_to_num "$1")
  have=$(_log_level_to_num "${LOG_LEVEL:-INFO}")
  (( wanted >= have ))
}
|
||||
|
||||
# UTC ISO-8601 timestamp for log prefixes; prints nothing when
# LOG_TIMESTAMPS is anything other than "true" (default: enabled).
_log_timestamp() {
  [[ "${LOG_TIMESTAMPS:-true}" == "true" ]] || return 0
  date -u +"%Y-%m-%dT%H:%M:%SZ"
}
|
||||
|
||||
# Core logging primitive: filter by level, then emit
# "[timestamp] [LEVEL] message" with the given color applied to the tag.
# Arguments: $1 - level name; $2 - color escape; rest - message words.
_log() {
  local level="$1" color="$2"
  shift 2

  _should_log "$level" || return 0

  local ts prefix=""
  ts=$(_log_timestamp)
  [[ -n "$ts" ]] && prefix="${LOG_COLOR_GRAY}${ts}${LOG_COLOR_RESET} "

  echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}
|
||||
|
||||
# Public logging functions — thin, level-specific wrappers around _log.
# log_error additionally redirects to stderr.
log_debug()   { _log "DEBUG" "${LOG_COLOR_GRAY}"   "$@"; }
log_info()    { _log "INFO"  "${LOG_COLOR_GREEN}"  "$@"; }
log_warn()    { _log "WARN"  "${LOG_COLOR_YELLOW}" "$@"; }
log_error()   { _log "ERROR" "${LOG_COLOR_RED}"    "$@" >&2; }

# Workflow stage marker ("STEP" is not a filter level; it ranks as INFO).
log_step()    { _log "STEP"  "${LOG_COLOR_BLUE}"   "$@"; }

# Success message ("OK" also ranks as INFO for filtering purposes).
log_success() { _log "OK"    "${LOG_COLOR_GREEN}"  "$@"; }
|
||||
|
||||
# GitHub Actions annotations: emit workflow-command syntax when running
# under Actions, otherwise fall back to the plain log_* equivalents.
log_gh_notice() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::notice::$*"
    return
  fi
  log_info "$@"
}

log_gh_warning() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::warning::$*"
    return
  fi
  log_warn "$@"
}

log_gh_error() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::error::$*"
    return
  fi
  log_error "$@"
}
|
||||
|
||||
# Collapsible log groups: GitHub Actions ::group::/::endgroup:: commands
# when running under Actions, otherwise a plain section header (and no
# explicit group end).
log_group_start() {
  local title="$1"
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::group::$title"
    return
  fi
  log_step "=== $title ==="
}

log_group_end() {
  [[ -n "${GITHUB_ACTIONS:-}" ]] && echo "::endgroup::"
}
|
||||
|
||||
# Register a secret value for masking in GitHub Actions logs; a no-op
# everywhere else (the value is never printed here outside Actions).
log_masked() {
  [[ -n "${GITHUB_ACTIONS:-}" ]] && echo "::add-mask::$1"
}
|
||||
|
||||
# Log an error message and terminate the script with status 1.
die() {
  log_error "$@"
  exit 1
}
|
||||
|
||||
# Die with the remaining arguments as the message when the condition
# string evaluates true.
# WARNING: $1 is eval'd — callers must never pass untrusted input here.
die_if() {
  local condition="$1"
  shift
  if eval "$condition"; then
    die "$@"
  fi
}
|
||||
274
devops/scripts/lib/path-utils.sh
Normal file
274
devops/scripts/lib/path-utils.sh
Normal file
@@ -0,0 +1,274 @@
|
||||
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"

# Prevent multiple sourcing.
# NOTE: 'return' is only legal when this file is sourced; executing the
# library directly would error on this line.
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1

# Source sibling libraries on a best-effort basis; '|| true' keeps a
# missing file from aborting callers running under 'set -e'.
# NOTE(review): SCRIPT_DIR is a very generic name and is not 'local' — it
# may clobber a SCRIPT_DIR in the sourcing script; confirm callers do not
# depend on their own value after sourcing this file.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
|
||||
|
||||
# ============================================================================
# Path Normalization
# ============================================================================

# Normalize a path: resolve '..', '.', and duplicate slashes (and symlinks
# when realpath is available).  The path does not have to exist.
#
# Arguments: $1 - path to normalize (empty input yields ".")
# Outputs:   normalized path on stdout
# Returns:   0 on success
normalize_path() {
  local path="$1"

  # An empty path means "current directory".
  if [[ -z "$path" ]]; then
    echo "."
    return 0
  fi

  # Prefer realpath (-m permits non-existent components).
  if command -v realpath >/dev/null 2>&1; then
    realpath -m "$path" 2>/dev/null && return 0
  fi

  # Fallback to Python.  Pass the path as argv instead of interpolating it
  # into the program text: a path containing a quote or backslash would
  # otherwise break (or inject code into) the one-liner.
  if command -v python3 >/dev/null 2>&1; then
    python3 -c 'import os, sys; print(os.path.normpath(sys.argv[1]))' "$path" 2>/dev/null && return 0
  fi

  # Last-ditch manual normalization (handles only the simple cases).
  echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}
|
||||
|
||||
# Get the absolute, normalized form of a path.  Relative paths are
# interpreted against the current working directory.
absolute_path() {
  local p="$1"
  case "$p" in
    /*) normalize_path "$p" ;;
    *)  normalize_path "$(pwd)/$p" ;;
  esac
}
|
||||
|
||||
# Get the relative path from directory $1 to path $2.
#
# Tries realpath first, then python3; if neither is available, degrades to
# printing the absolute path of $2.
#
# Arguments: $1 - "from" directory, $2 - target path
# Outputs:   relative (or fallback absolute) path on stdout
relative_path() {
  local from="$1"
  local to="$2"

  if command -v realpath >/dev/null 2>&1; then
    realpath --relative-to="$from" "$to" 2>/dev/null && return 0
  fi

  # Pass both paths via argv rather than splicing them into the Python
  # source: paths containing quotes or backslashes would otherwise break
  # (or inject code into) the one-liner.
  if command -v python3 >/dev/null 2>&1; then
    python3 -c 'import os.path, sys; print(os.path.relpath(sys.argv[1], sys.argv[2]))' "$to" "$from" 2>/dev/null && return 0
  fi

  # Fallback: just return absolute path
  absolute_path "$to"
}
|
||||
|
||||
# ============================================================================
# Path Components
# ============================================================================

# Get directory name.  '--' stops option parsing so paths beginning with
# '-' work (the bare 'dirname "$1"' treated them as options and failed).
dir_name() {
  dirname -- "$1"
}

# Get base name ('--' as above).
base_name() {
  basename -- "$1"
}
|
||||
|
||||
# Get a file's extension (text after the final dot), or "" if none.
# A leading dot on a hidden file is not an extension separator:
#   "archive.tar.gz" -> "gz",  ".bashrc" -> "",  ".config.yaml" -> "yaml"
# (The original reported "bashrc" as the extension of ".bashrc".)
file_extension() {
  local path="$1"
  local base
  base=$(basename -- "$path")

  # Ignore a single leading dot so dotfiles don't report a bogus extension.
  if [[ "${base#.}" == *.* ]]; then
    echo "${base##*.}"
  else
    echo ""
  fi
}
|
||||
|
||||
# Get a file's name without its extension.
#   "a/b/c.txt" -> "c",  ".bashrc" -> ".bashrc",  "noext" -> "noext"
# (The original stripped dotfiles such as ".bashrc" down to "".)
file_stem() {
  local path="$1"
  local base
  base=$(basename -- "$path")

  # Only strip a suffix when a dot exists beyond a leading (hidden-file) dot.
  if [[ "${base#.}" == *.* ]]; then
    echo "${base%.*}"
  else
    echo "$base"
  fi
}
|
||||
|
||||
# ============================================================================
# Directory Operations
# ============================================================================

# Ensure a directory exists, creating parents as needed.  Succeeds silently
# when the directory is already present.
ensure_directory() {
  local dir="$1"
  [[ -d "$dir" ]] || mkdir -p "$dir"
}
|
||||
|
||||
# Create a temporary directory under $TMPDIR (default /tmp) and print its
# path.  An optional $1 overrides the "stellaops" name prefix.
create_temp_dir() {
  mktemp -d "${TMPDIR:-/tmp}/${1:-stellaops}.XXXXXX"
}
|
||||
|
||||
# Create a temporary file under $TMPDIR (default /tmp) and print its path.
# $1 overrides the "stellaops" name prefix; $2 is an optional suffix
# (e.g. ".log") appended after the random portion.
create_temp_file() {
  mktemp "${TMPDIR:-/tmp}/${1:-stellaops}.XXXXXX${2:-}"
}
|
||||
|
||||
# Clean a temporary directory created by create_temp_dir.
#
# Safety guard: only removes directories whose path contains "stellaops",
# so a bad or empty argument can never wipe an arbitrary tree.
# NOTE(review): create_temp_dir accepts a custom prefix; directories made
# with a non-default prefix are silently skipped here — confirm callers
# always use the default prefix or handle their own cleanup.
clean_temp() {
  local path="$1"
  if [[ -d "$path" && "$path" == *stellaops* ]]; then
    # '--' stops option parsing in case the path ever starts with '-'.
    rm -rf -- "$path"
  fi
}
|
||||
|
||||
# ============================================================================
# File Existence Checks
# ============================================================================
# Small predicates over a single path argument; each returns the exit
# status of the underlying test so they compose in conditionals.

# True if $1 is a regular file.
file_exists() {
  test -f "$1"
}

# True if $1 is a directory.
dir_exists() {
  test -d "$1"
}

# True if $1 exists (any type).
path_exists() {
  test -e "$1"
}

# True if $1 is readable by the current user.
file_readable() {
  test -r "$1"
}

# True if $1 is writable by the current user.
file_writable() {
  test -w "$1"
}

# True if $1 is executable by the current user.
file_executable() {
  test -x "$1"
}
|
||||
|
||||
# ============================================================================
# File Discovery
# ============================================================================

# List regular files under a directory matching a name pattern.
#   $1 - root directory (default ".")
#   $2 - find -name glob (default "*")
# Permission errors from find are deliberately suppressed.
find_files() {
  find "${1:-.}" -type f -name "${2:-*}" 2>/dev/null
}
|
||||
|
||||
# List regular files under $1 (default ".") with extension $2
# (given without the leading dot, e.g. "csproj").
find_by_extension() {
  local root="${1:-.}"
  local suffix="${2:-}"
  find "$root" -type f -name "*.${suffix}" 2>/dev/null
}
|
||||
|
||||
# Find project manifest files (.NET, Node, Rust, Go, Java) under a directory.
#
# Arguments: $1 - root directory (default ".")
# Outputs:   one matching path per line on stdout
#
# Build/dependency directories are pruned by exact directory name.  The
# previous 'grep -v bin' post-filter excluded any path merely *containing*
# the substring "bin" (e.g. "bindings/"), silently dropping valid results.
find_project_files() {
  local dir="${1:-.}"
  find "$dir" \
    -type d \( -name node_modules -o -name bin -o -name obj \) -prune -o \
    -type f \( \
      -name "*.csproj" -o \
      -name "*.fsproj" -o \
      -name "package.json" -o \
      -name "Cargo.toml" -o \
      -name "go.mod" -o \
      -name "pom.xml" -o \
      -name "build.gradle" \
    \) -print 2>/dev/null
}
|
||||
|
||||
# Find .NET test projects (*.Tests.csproj) under a directory.
#
# Build output directories are pruned by exact name; the previous
# 'grep -v bin' filter also dropped any path containing "bin" as a
# substring (e.g. "bindings/").
find_test_projects() {
  local dir="${1:-.}"
  find "$dir" \
    -type d \( -name bin -o -name obj \) -prune -o \
    -type f -name "*.Tests.csproj" -print 2>/dev/null
}
|
||||
|
||||
# ============================================================================
# Path Validation
# ============================================================================

# Check whether $1 resolves to a location inside directory $2 (or equals it).
# Returns 0 (true) when under/equal, non-zero otherwise.
path_under() {
  local path="$1"
  local dir="$2"

  local abs_path abs_dir
  abs_path=$(absolute_path "$path")
  abs_dir=$(absolute_path "$dir")

  # Match on whole path components: the original bare prefix test
  # ("$abs_dir"*) wrongly accepted /foo/barbaz as being under /foo/bar.
  [[ "$abs_path" == "$abs_dir" || "$abs_path" == "${abs_dir%/}"/* ]]
}
|
||||
|
||||
# Validate that a path is safe to use relative to $2 (default "."):
# no directory-traversal components, not absolute, and resolving inside
# the base directory.
path_is_safe() {
  local path="$1"
  local base="${2:-.}"

  # Reject obvious traversal attempts and absolute paths.
  # NOTE: the original compared against the *quoted* literal "/*", which
  # never matches any real path; the pattern must be unquoted to act as a
  # glob so absolute paths are actually rejected.
  if [[ "$path" == *".."* ]] || [[ "$path" == /* ]]; then
    return 1
  fi

  # Verify resolved path is under base
  path_under "$path" "$base"
}
|
||||
|
||||
# ============================================================================
# CI/CD Helpers
# ============================================================================

# Print the artifact output directory for a given artifact name
# (default "artifacts"), rooted at GITHUB_WORKSPACE or the current
# directory when running outside GitHub Actions.
get_artifact_dir() {
  echo "${GITHUB_WORKSPACE:-$(pwd)}/out/${1:-artifacts}"
}
|
||||
|
||||
# Print the TestResults directory, rooted at GITHUB_WORKSPACE or the
# current directory when running outside GitHub Actions.
get_test_results_dir() {
  echo "${GITHUB_WORKSPACE:-$(pwd)}/TestResults"
}
|
||||
|
||||
# Resolve the artifact directory for $1 (default "artifacts"), create it
# if missing, and print its path for the caller to capture.
ensure_artifact_dir() {
  local dir
  dir=$(get_artifact_dir "${1:-artifacts}")
  ensure_directory "$dir"
  echo "$dir"
}
|
||||
Reference in New Issue
Block a user