save progress
178
devops/scripts/lib/exit-codes.sh
Normal file
@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1

# ============================================================================
# Standard Exit Codes
# ============================================================================

# Success
export EXIT_SUCCESS=0

# General errors (1-9)
export EXIT_ERROR=1               # Generic error
export EXIT_USAGE=2               # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3        # Configuration error
export EXIT_NOT_FOUND=4           # File/resource not found
export EXIT_PERMISSION=5          # Permission denied
export EXIT_IO_ERROR=6            # I/O error
export EXIT_NETWORK_ERROR=7       # Network error
export EXIT_TIMEOUT=8             # Operation timed out
export EXIT_INTERRUPTED=9         # User interrupted (Ctrl+C)

# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10       # Required tool not installed
export EXIT_TOOL_ERROR=11         # Tool execution failed
export EXIT_VERSION_MISMATCH=12   # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13   # Dependency resolution failed

# Build errors (20-29)
export EXIT_BUILD_FAILED=20       # Build compilation failed
export EXIT_RESTORE_FAILED=21     # Package restore failed
export EXIT_PUBLISH_FAILED=22     # Publish failed
export EXIT_PACKAGING_FAILED=23   # Packaging failed

# Test errors (30-39)
export EXIT_TEST_FAILED=30        # Tests failed
export EXIT_TEST_TIMEOUT=31       # Test timed out
export EXIT_FIXTURE_ERROR=32      # Test fixture error
export EXIT_DETERMINISM_FAIL=33   # Determinism check failed

# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40      # Deployment failed
export EXIT_ROLLBACK_FAILED=41    # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42  # Health check failed
export EXIT_REGISTRY_ERROR=43     # Container registry error

# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50  # General validation failed
export EXIT_SCHEMA_ERROR=51       # Schema validation failed
export EXIT_LINT_ERROR=52         # Lint check failed
export EXIT_FORMAT_ERROR=53       # Format check failed
export EXIT_LICENSE_ERROR=54      # License compliance failed

# Security errors (60-69)
export EXIT_SECURITY_ERROR=60     # Security check failed
export EXIT_SECRETS_FOUND=61      # Secrets detected in code
export EXIT_VULN_FOUND=62         # Vulnerabilities found
export EXIT_SIGN_FAILED=63        # Signing failed
export EXIT_VERIFY_FAILED=64      # Verification failed

# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70          # Git operation failed
export EXIT_DIRTY_WORKTREE=71     # Uncommitted changes
export EXIT_MERGE_CONFLICT=72     # Merge conflict
export EXIT_BRANCH_ERROR=73       # Branch operation failed

# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80       # .NET specific error
export EXIT_DOCKER_ERROR=81       # Docker specific error
export EXIT_HELM_ERROR=82         # Helm specific error
export EXIT_KUBECTL_ERROR=83      # kubectl specific error
export EXIT_NPM_ERROR=84          # npm specific error
export EXIT_PYTHON_ERROR=85       # Python specific error

# Legacy compatibility
export EXIT_TOOLCHAIN=69          # Tool not found (legacy, use EXIT_MISSING_TOOL)

# ============================================================================
# Helper Functions
# ============================================================================

# Get exit code name from number
exit_code_name() {
    local code="${1:-}"

    case "$code" in
        0) echo "SUCCESS" ;;
        1) echo "ERROR" ;;
        2) echo "USAGE" ;;
        3) echo "CONFIG_ERROR" ;;
        4) echo "NOT_FOUND" ;;
        5) echo "PERMISSION" ;;
        6) echo "IO_ERROR" ;;
        7) echo "NETWORK_ERROR" ;;
        8) echo "TIMEOUT" ;;
        9) echo "INTERRUPTED" ;;
        10) echo "MISSING_TOOL" ;;
        11) echo "TOOL_ERROR" ;;
        12) echo "VERSION_MISMATCH" ;;
        13) echo "DEPENDENCY_ERROR" ;;
        20) echo "BUILD_FAILED" ;;
        21) echo "RESTORE_FAILED" ;;
        22) echo "PUBLISH_FAILED" ;;
        23) echo "PACKAGING_FAILED" ;;
        30) echo "TEST_FAILED" ;;
        31) echo "TEST_TIMEOUT" ;;
        32) echo "FIXTURE_ERROR" ;;
        33) echo "DETERMINISM_FAIL" ;;
        40) echo "DEPLOY_FAILED" ;;
        41) echo "ROLLBACK_FAILED" ;;
        42) echo "HEALTH_CHECK_FAIL" ;;
        43) echo "REGISTRY_ERROR" ;;
        50) echo "VALIDATION_FAILED" ;;
        51) echo "SCHEMA_ERROR" ;;
        52) echo "LINT_ERROR" ;;
        53) echo "FORMAT_ERROR" ;;
        54) echo "LICENSE_ERROR" ;;
        60) echo "SECURITY_ERROR" ;;
        61) echo "SECRETS_FOUND" ;;
        62) echo "VULN_FOUND" ;;
        63) echo "SIGN_FAILED" ;;
        64) echo "VERIFY_FAILED" ;;
        69) echo "TOOLCHAIN (legacy)" ;;
        70) echo "GIT_ERROR" ;;
        71) echo "DIRTY_WORKTREE" ;;
        72) echo "MERGE_CONFLICT" ;;
        73) echo "BRANCH_ERROR" ;;
        80) echo "DOTNET_ERROR" ;;
        81) echo "DOCKER_ERROR" ;;
        82) echo "HELM_ERROR" ;;
        83) echo "KUBECTL_ERROR" ;;
        84) echo "NPM_ERROR" ;;
        85) echo "PYTHON_ERROR" ;;
        126) echo "COMMAND_NOT_EXECUTABLE" ;;
        127) echo "COMMAND_NOT_FOUND" ;;
        *)
            if [[ $code -ge 128 ]] && [[ $code -le 255 ]]; then
                local signal=$((code - 128))
                echo "SIGNAL_${signal}"
            else
                echo "UNKNOWN_${code}"
            fi
            ;;
    esac
}

# Check if exit code indicates success
is_success() {
    [[ "${1:-1}" -eq 0 ]]
}

# Check if exit code indicates error
is_error() {
    [[ "${1:-0}" -ne 0 ]]
}

# Exit with message and code
exit_with() {
    local code="${1:-1}"
    shift
    if [[ $# -gt 0 ]]; then
        echo "$@" >&2
    fi
    exit "$code"
}
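A minimal consumer sketch for this registry (a hypothetical script sitting one directory above lib/; the docker check is purely illustrative, not part of the commit):

#!/usr/bin/env bash
set -euo pipefail
source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"

# Report the symbolic name of whatever code the script ends up exiting with
trap 'echo "exit: $(exit_code_name $?)" >&2' EXIT

# Bail out with the registry code when a prerequisite is missing
command -v docker >/dev/null 2>&1 || exit_with "$EXIT_MISSING_TOOL" "docker is required"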
262
devops/scripts/lib/git-utils.sh
Normal file
@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Repository Information
# ============================================================================

# Get repository root directory
git_root() {
    git rev-parse --show-toplevel 2>/dev/null || echo "."
}

# Check if current directory is a git repository
is_git_repo() {
    git rev-parse --git-dir >/dev/null 2>&1
}

# Get current commit SHA (full)
git_sha() {
    git rev-parse HEAD 2>/dev/null
}

# Get current commit SHA (short)
git_sha_short() {
    git rev-parse --short HEAD 2>/dev/null
}

# Get current branch name
git_branch() {
    git rev-parse --abbrev-ref HEAD 2>/dev/null
}

# Get current tag (if HEAD is tagged)
git_tag() {
    git describe --tags --exact-match HEAD 2>/dev/null || echo ""
}

# Get latest tag
git_latest_tag() {
    git describe --tags --abbrev=0 2>/dev/null || echo ""
}

# Get remote URL
git_remote_url() {
    local remote="${1:-origin}"
    git remote get-url "$remote" 2>/dev/null
}

# Get repository name from remote URL
git_repo_name() {
    local url
    url=$(git_remote_url "${1:-origin}")
    basename "$url" .git
}

# ============================================================================
# Commit Information
# ============================================================================

# Get commit message
git_commit_message() {
    local sha="${1:-HEAD}"
    git log -1 --format="%s" "$sha" 2>/dev/null
}

# Get commit author
git_commit_author() {
    local sha="${1:-HEAD}"
    git log -1 --format="%an" "$sha" 2>/dev/null
}

# Get commit author email
git_commit_author_email() {
    local sha="${1:-HEAD}"
    git log -1 --format="%ae" "$sha" 2>/dev/null
}

# Get commit timestamp (ISO 8601)
git_commit_timestamp() {
    local sha="${1:-HEAD}"
    git log -1 --format="%aI" "$sha" 2>/dev/null
}

# Get commit timestamp (Unix epoch)
git_commit_epoch() {
    local sha="${1:-HEAD}"
    git log -1 --format="%at" "$sha" 2>/dev/null
}

# ============================================================================
# Working Tree State
# ============================================================================

# Check if working tree is clean
git_is_clean() {
    [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}

# Check if working tree is dirty
git_is_dirty() {
    ! git_is_clean
}

# Get list of changed files
git_changed_files() {
    git status --porcelain 2>/dev/null | awk '{print $2}'
}

# Get list of staged files
git_staged_files() {
    git diff --cached --name-only 2>/dev/null
}

# Get list of untracked files
git_untracked_files() {
    git ls-files --others --exclude-standard 2>/dev/null
}

# ============================================================================
# Diff and History
# ============================================================================

# Get files changed between two refs
git_diff_files() {
    local from="${1:-HEAD~1}"
    local to="${2:-HEAD}"
    git diff --name-only "$from" "$to" 2>/dev/null
}

# Get files changed in last N commits
git_recent_files() {
    local count="${1:-1}"
    git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}

# Check if file was changed between two refs
git_file_changed() {
    local file="$1"
    local from="${2:-HEAD~1}"
    local to="${3:-HEAD}"
    git diff --name-only "$from" "$to" -- "$file" 2>/dev/null | grep -q "$file"
}

# Get commits between two refs
git_commits_between() {
    local from="${1:-HEAD~10}"
    local to="${2:-HEAD}"
    git log --oneline "$from".."$to" 2>/dev/null
}

# ============================================================================
# Tag Operations
# ============================================================================

# Create a tag
git_create_tag() {
    local tag="$1"
    local message="${2:-}"

    if [[ -n "$message" ]]; then
        git tag -a "$tag" -m "$message"
    else
        git tag "$tag"
    fi
}

# Delete a tag
git_delete_tag() {
    local tag="$1"
    git tag -d "$tag" 2>/dev/null
}

# Push tag to remote
git_push_tag() {
    local tag="$1"
    local remote="${2:-origin}"
    git push "$remote" "$tag"
}

# List tags matching pattern
git_list_tags() {
    local pattern="${1:-*}"
    git tag -l "$pattern" 2>/dev/null
}

# ============================================================================
# Branch Operations
# ============================================================================

# Check if branch exists
git_branch_exists() {
    local branch="$1"
    git show-ref --verify --quiet "refs/heads/$branch" 2>/dev/null
}

# Check if remote branch exists
git_remote_branch_exists() {
    local branch="$1"
    local remote="${2:-origin}"
    git show-ref --verify --quiet "refs/remotes/$remote/$branch" 2>/dev/null
}

# Get default branch
git_default_branch() {
    local remote="${1:-origin}"
    git remote show "$remote" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}

# ============================================================================
# CI/CD Helpers
# ============================================================================

# Get version string for CI builds
git_ci_version() {
    local tag
    tag=$(git_tag)

    if [[ -n "$tag" ]]; then
        echo "$tag"
    else
        local branch sha
        branch=$(git_branch | tr '/' '-')
        sha=$(git_sha_short)
        echo "${branch}-${sha}"
    fi
}

# Check if current commit is on default branch
git_is_default_branch() {
    local current default
    current=$(git_branch)
    default=$(git_default_branch)
    [[ "$current" == "$default" ]]
}

# Check if running in CI environment
git_is_ci() {
    [[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]] || [[ -n "${GITLAB_CI:-}" ]]
}

# Ensure clean worktree or fail
git_require_clean() {
    if git_is_dirty; then
        log_error "Working tree is dirty. Commit or stash changes first."
        return "${EXIT_DIRTY_WORKTREE:-71}"
    fi
}
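A minimal sketch of how a build script might lean on these helpers (the docs/ path and the overall flow are assumptions, not part of the commit):

#!/usr/bin/env bash
set -euo pipefail
source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"

# Refuse to continue with uncommitted changes
git_require_clean || exit $?

# Tag name if HEAD is tagged, otherwise "<branch>-<short sha>"
version=$(git_ci_version)
log_info "Building version ${version} from $(git_sha_short) on $(git_branch)"

# Only regenerate docs when something under docs/ changed in the last commit
if git_file_changed "docs/" HEAD~1 HEAD; then
    log_step "docs changed, regenerating"
fi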
266
devops/scripts/lib/hash-utils.sh
Normal file
@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Hash Computation
# ============================================================================

# Compute SHA-256 hash of a file
compute_sha256() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v sha256sum >/dev/null 2>&1; then
        sha256sum "$file" | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        shasum -a 256 "$file" | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -sha256 "$file" | awk '{print $NF}'
    else
        log_error "No SHA-256 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute SHA-512 hash of a file
compute_sha512() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v sha512sum >/dev/null 2>&1; then
        sha512sum "$file" | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        shasum -a 512 "$file" | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -sha512 "$file" | awk '{print $NF}'
    else
        log_error "No SHA-512 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute MD5 hash of a file (for compatibility, not security)
compute_md5() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v md5sum >/dev/null 2>&1; then
        md5sum "$file" | awk '{print $1}'
    elif command -v md5 >/dev/null 2>&1; then
        md5 -q "$file"
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -md5 "$file" | awk '{print $NF}'
    else
        log_error "No MD5 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute hash of string
# Prefers the GNU coreutils tools and falls back to the BSD/macOS equivalents
# (a bare `cmd || fallback` after the pipeline would never trigger, because the
# trailing awk succeeds even when the hash tool is missing).
compute_string_hash() {
    local string="$1"
    local algorithm="${2:-sha256}"

    case "$algorithm" in
        sha256)
            if command -v sha256sum >/dev/null 2>&1; then
                echo -n "$string" | sha256sum | awk '{print $1}'
            else
                echo -n "$string" | shasum -a 256 | awk '{print $1}'
            fi
            ;;
        sha512)
            if command -v sha512sum >/dev/null 2>&1; then
                echo -n "$string" | sha512sum | awk '{print $1}'
            else
                echo -n "$string" | shasum -a 512 | awk '{print $1}'
            fi
            ;;
        md5)
            if command -v md5sum >/dev/null 2>&1; then
                echo -n "$string" | md5sum | awk '{print $1}'
            else
                echo -n "$string" | md5
            fi
            ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac
}

# ============================================================================
# Checksum Files
# ============================================================================

# Write checksum file for a single file
write_checksum() {
    local file="$1"
    local checksum_file="${2:-${file}.sha256}"
    local algorithm="${3:-sha256}"

    local hash
    case "$algorithm" in
        sha256) hash=$(compute_sha256 "$file") ;;
        sha512) hash=$(compute_sha512 "$file") ;;
        md5) hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac

    if [[ -z "$hash" ]]; then
        return "${EXIT_ERROR:-1}"
    fi

    local basename
    basename=$(basename "$file")
    echo "$hash $basename" > "$checksum_file"
    log_debug "Wrote checksum to $checksum_file"
}

# Write checksums for multiple files
write_checksums() {
    local output_file="$1"
    shift
    local files=("$@")

    : > "$output_file"

    for file in "${files[@]}"; do
        if [[ -f "$file" ]]; then
            local hash basename
            hash=$(compute_sha256 "$file")
            basename=$(basename "$file")
            echo "$hash $basename" >> "$output_file"
        fi
    done

    log_debug "Wrote checksums to $output_file"
}

# ============================================================================
# Checksum Verification
# ============================================================================

# Verify checksum of a file
verify_checksum() {
    local file="$1"
    local expected_hash="$2"
    local algorithm="${3:-sha256}"

    local actual_hash
    case "$algorithm" in
        sha256) actual_hash=$(compute_sha256 "$file") ;;
        sha512) actual_hash=$(compute_sha512 "$file") ;;
        md5) actual_hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac

    if [[ "$actual_hash" == "$expected_hash" ]]; then
        log_debug "Checksum verified: $file"
        return 0
    else
        log_error "Checksum mismatch for $file"
        log_error "  Expected: $expected_hash"
        log_error "  Actual:   $actual_hash"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi
}

# Verify checksums from file (sha256sum -c style)
verify_checksums_file() {
    local checksum_file="$1"
    local base_dir="${2:-.}"

    if [[ ! -f "$checksum_file" ]]; then
        log_error "Checksum file not found: $checksum_file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    local failures=0

    while IFS= read -r line; do
        # Skip empty lines and comments
        [[ -z "$line" ]] && continue
        [[ "$line" == \#* ]] && continue

        local hash filename
        hash=$(echo "$line" | awk '{print $1}')
        filename=$(echo "$line" | awk '{print $2}')

        local filepath="${base_dir}/${filename}"

        if [[ ! -f "$filepath" ]]; then
            log_error "File not found: $filepath"
            ((failures++))
            continue
        fi

        if ! verify_checksum "$filepath" "$hash"; then
            ((failures++))
        fi
    done < "$checksum_file"

    if [[ $failures -gt 0 ]]; then
        log_error "$failures checksum verification(s) failed"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi

    log_info "All checksums verified"
    return 0
}

# ============================================================================
# Helpers
# ============================================================================

# Check if two files have the same content
files_identical() {
    local file1="$1"
    local file2="$2"

    [[ -f "$file1" ]] && [[ -f "$file2" ]] || return 1

    local hash1 hash2
    hash1=$(compute_sha256 "$file1")
    hash2=$(compute_sha256 "$file2")

    [[ "$hash1" == "$hash2" ]]
}

# Get short hash for display
short_hash() {
    local hash="$1"
    local length="${2:-8}"
    echo "${hash:0:$length}"
}

# Generate deterministic ID from inputs
generate_id() {
    local inputs="$*"
    compute_string_hash "$inputs" sha256 | head -c 16
}
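A minimal sketch of the intended checksum round-trip (the out/packages layout and package-lock.json are assumptions, not taken from the commit):

#!/usr/bin/env bash
set -euo pipefail
source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"

# Write a manifest for every packaged artifact, then verify it back
write_checksums "out/packages/SHA256SUMS" out/packages/*.tar.gz
verify_checksums_file "out/packages/SHA256SUMS" "out/packages"

# Derive a short, deterministic cache key from a lockfile (assumed to exist)
cache_key=$(short_hash "$(compute_sha256 package-lock.json)" 12)
log_info "cache key: ${cache_key}"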
181
devops/scripts/lib/logging.sh
Normal file
@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_LOGGING_LOADED=1

# Colors (disable with NO_COLOR=1)
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
    export LOG_COLOR_RED='\033[0;31m'
    export LOG_COLOR_GREEN='\033[0;32m'
    export LOG_COLOR_YELLOW='\033[1;33m'
    export LOG_COLOR_BLUE='\033[0;34m'
    export LOG_COLOR_MAGENTA='\033[0;35m'
    export LOG_COLOR_CYAN='\033[0;36m'
    export LOG_COLOR_GRAY='\033[0;90m'
    export LOG_COLOR_RESET='\033[0m'
else
    export LOG_COLOR_RED=''
    export LOG_COLOR_GREEN=''
    export LOG_COLOR_YELLOW=''
    export LOG_COLOR_BLUE=''
    export LOG_COLOR_MAGENTA=''
    export LOG_COLOR_CYAN=''
    export LOG_COLOR_GRAY=''
    export LOG_COLOR_RESET=''
fi

# Log level configuration
export LOG_LEVEL="${LOG_LEVEL:-INFO}"

# Convert log level to numeric for comparison
_log_level_to_num() {
    case "$1" in
        DEBUG) echo 0 ;;
        INFO) echo 1 ;;
        WARN) echo 2 ;;
        ERROR) echo 3 ;;
        *) echo 1 ;;
    esac
}

# Check if message should be logged based on level
_should_log() {
    local msg_level="$1"
    local current_level="${LOG_LEVEL:-INFO}"

    local msg_num current_num
    msg_num=$(_log_level_to_num "$msg_level")
    current_num=$(_log_level_to_num "$current_level")

    [[ $msg_num -ge $current_num ]]
}

# Format timestamp
_log_timestamp() {
    if [[ "${LOG_TIMESTAMPS:-true}" == "true" ]]; then
        date -u +"%Y-%m-%dT%H:%M:%SZ"
    fi
}

# Core logging function
_log() {
    local level="$1"
    local color="$2"
    shift 2

    if ! _should_log "$level"; then
        return 0
    fi

    local timestamp
    timestamp=$(_log_timestamp)

    local prefix=""
    if [[ -n "$timestamp" ]]; then
        prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
    fi

    echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}

# Public logging functions
log_debug() {
    _log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}

log_info() {
    _log "INFO" "${LOG_COLOR_GREEN}" "$@"
}

log_warn() {
    _log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}

log_error() {
    _log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}

# Step logging (for workflow stages)
log_step() {
    _log "STEP" "${LOG_COLOR_BLUE}" "$@"
}

# Success message
log_success() {
    _log "OK" "${LOG_COLOR_GREEN}" "$@"
}

# GitHub Actions annotations
log_gh_notice() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::notice::$*"
    else
        log_info "$@"
    fi
}

log_gh_warning() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::warning::$*"
    else
        log_warn "$@"
    fi
}

log_gh_error() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::error::$*"
    else
        log_error "$@"
    fi
}

# Group logging (for GitHub Actions)
log_group_start() {
    local title="$1"
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::group::$title"
    else
        log_step "=== $title ==="
    fi
}

log_group_end() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::endgroup::"
    fi
}

# Masked logging (for secrets)
log_masked() {
    local value="$1"
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::add-mask::$value"
    fi
}

# Die with error message
die() {
    log_error "$@"
    exit 1
}

# Conditional die
die_if() {
    local condition="$1"
    shift
    if eval "$condition"; then
        die "$@"
    fi
}
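A minimal sketch of the logging surface (the restore stage and the global.json check are illustrative assumptions):

#!/usr/bin/env bash
source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"

LOG_LEVEL=DEBUG   # show everything; the default INFO hides log_debug output

log_group_start "Restore"
log_debug "using cache dir ${NUGET_PACKAGES:-~/.nuget/packages}"
log_step "dotnet restore"
log_success "restore complete"
log_group_end

# die_if evaluates its first argument as a condition and aborts when it holds
die_if '[[ ! -f global.json ]]' "global.json is missing"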
274
devops/scripts/lib/path-utils.sh
Normal file
@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Path Normalization
# ============================================================================

# Normalize path (resolve .., ., symlinks)
normalize_path() {
    local path="$1"

    # Handle empty path
    if [[ -z "$path" ]]; then
        echo "."
        return 0
    fi

    # Try realpath first (most reliable)
    if command -v realpath >/dev/null 2>&1; then
        realpath -m "$path" 2>/dev/null && return 0
    fi

    # Fallback to Python
    if command -v python3 >/dev/null 2>&1; then
        python3 -c "import os; print(os.path.normpath('$path'))" 2>/dev/null && return 0
    fi

    # Manual normalization (basic)
    echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}

# Get absolute path
absolute_path() {
    local path="$1"

    if [[ "$path" == /* ]]; then
        normalize_path "$path"
    else
        normalize_path "$(pwd)/$path"
    fi
}

# Get relative path from one path to another
relative_path() {
    local from="$1"
    local to="$2"

    if command -v realpath >/dev/null 2>&1; then
        realpath --relative-to="$from" "$to" 2>/dev/null && return 0
    fi

    if command -v python3 >/dev/null 2>&1; then
        python3 -c "import os.path; print(os.path.relpath('$to', '$from'))" 2>/dev/null && return 0
    fi

    # Fallback: just return absolute path
    absolute_path "$to"
}

# ============================================================================
# Path Components
# ============================================================================

# Get directory name
dir_name() {
    dirname "$1"
}

# Get base name
base_name() {
    basename "$1"
}

# Get file extension
file_extension() {
    local path="$1"
    local base
    base=$(basename "$path")

    if [[ "$base" == *.* ]]; then
        echo "${base##*.}"
    else
        echo ""
    fi
}

# Get file name without extension
file_stem() {
    local path="$1"
    local base
    base=$(basename "$path")

    if [[ "$base" == *.* ]]; then
        echo "${base%.*}"
    else
        echo "$base"
    fi
}

# ============================================================================
# Directory Operations
# ============================================================================

# Ensure directory exists
ensure_directory() {
    local dir="$1"
    if [[ ! -d "$dir" ]]; then
        mkdir -p "$dir"
    fi
}

# Create temporary directory
create_temp_dir() {
    local prefix="${1:-stellaops}"
    mktemp -d "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}

# Create temporary file
create_temp_file() {
    local prefix="${1:-stellaops}"
    local suffix="${2:-}"
    mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX${suffix}"
}

# Clean temporary directory
clean_temp() {
    local path="$1"
    if [[ -d "$path" ]] && [[ "$path" == *stellaops* ]]; then
        rm -rf "$path"
    fi
}

# ============================================================================
# File Existence Checks
# ============================================================================

# Check if file exists
file_exists() {
    [[ -f "$1" ]]
}

# Check if directory exists
dir_exists() {
    [[ -d "$1" ]]
}

# Check if path exists (file or directory)
path_exists() {
    [[ -e "$1" ]]
}

# Check if file is readable
file_readable() {
    [[ -r "$1" ]]
}

# Check if file is writable
file_writable() {
    [[ -w "$1" ]]
}

# Check if file is executable
file_executable() {
    [[ -x "$1" ]]
}

# ============================================================================
# File Discovery
# ============================================================================

# Find files by pattern
find_files() {
    local dir="${1:-.}"
    local pattern="${2:-*}"
    find "$dir" -type f -name "$pattern" 2>/dev/null
}

# Find files by extension
find_by_extension() {
    local dir="${1:-.}"
    local ext="${2:-}"
    find "$dir" -type f -name "*.${ext}" 2>/dev/null
}

# Find project files (csproj, package.json, etc.)
find_project_files() {
    local dir="${1:-.}"
    find "$dir" -type f \( \
        -name "*.csproj" -o \
        -name "*.fsproj" -o \
        -name "package.json" -o \
        -name "Cargo.toml" -o \
        -name "go.mod" -o \
        -name "pom.xml" -o \
        -name "build.gradle" \
    \) 2>/dev/null | grep -v node_modules | grep -v bin | grep -v obj
}

# Find test projects
find_test_projects() {
    local dir="${1:-.}"
    find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -v bin | grep -v obj
}

# ============================================================================
# Path Validation
# ============================================================================

# Check if path is under directory
path_under() {
    local path="$1"
    local dir="$2"

    local abs_path abs_dir
    abs_path=$(absolute_path "$path")
    abs_dir=$(absolute_path "$dir")

    [[ "$abs_path" == "$abs_dir"* ]]
}

# Validate path is safe (no directory traversal)
path_is_safe() {
    local path="$1"
    local base="${2:-.}"

    # Reject obvious traversal attempts: .. segments and absolute paths
    # (the glob /* must be unquoted; a quoted "/*" only matches that literal string)
    if [[ "$path" == *".."* ]] || [[ "$path" == /* ]]; then
        return 1
    fi

    # Verify resolved path is under base
    path_under "$path" "$base"
}

# ============================================================================
# CI/CD Helpers
# ============================================================================

# Get artifact output directory
get_artifact_dir() {
    local name="${1:-artifacts}"
    local base="${GITHUB_WORKSPACE:-$(pwd)}"
    echo "${base}/out/${name}"
}

# Get test results directory
get_test_results_dir() {
    local base="${GITHUB_WORKSPACE:-$(pwd)}"
    echo "${base}/TestResults"
}

# Ensure artifact directory exists and return path
ensure_artifact_dir() {
    local name="${1:-artifacts}"
    local dir
    dir=$(get_artifact_dir "$name")
    ensure_directory "$dir"
    echo "$dir"
}
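A minimal sketch combining the artifact, temp-dir, and discovery helpers (the "sbom" artifact name and src/ layout are assumptions):

#!/usr/bin/env bash
set -euo pipefail
source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"

# Stage outputs under out/sbom and do scratch work in a disposable temp dir
artifact_dir=$(ensure_artifact_dir "sbom")
work_dir=$(create_temp_dir)   # default prefix "stellaops", so clean_temp will accept it
trap 'clean_temp "$work_dir"' EXIT
log_info "artifacts -> ${artifact_dir}"

# Enumerate test projects under src/ (assumed layout)
while IFS= read -r proj; do
    log_debug "test project: $(file_stem "$proj")"
done < <(find_test_projects src)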