Refactor code structure and optimize performance across multiple modules

StellaOps Bot
2025-12-26 20:03:22 +02:00
parent c786faae84
commit f10d83c444
1385 changed files with 69732 additions and 10280 deletions


@@ -86,10 +86,11 @@ services:
       STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
       STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
       STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
-      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
+      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority/plugins"
     volumes:
-      - ../../etc/authority.yaml:/etc/authority.yaml:ro
-      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
+      # Configuration (consolidated under etc/)
+      - ../../etc/authority:/app/etc/authority:ro
+      - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro
     ports:
       - "${AUTHORITY_PORT:-8440}:8440"
     networks:
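With this change the authority service reads all of its configuration from a single read-only mount. A sketch of the expected host-side layout; the individual file names are an assumption, since the diff only shows the directory mounts:

etc/authority/
  authority.yaml   # assumed new home of the former top-level etc/authority.yaml
  plugins/         # matches STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY above
etc/certificates/
  trust-roots/     # mounted at /etc/ssl/certs/stellaops in each service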
@@ -134,14 +135,14 @@ services:
       - postgres
       - authority
     environment:
-      ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
+      ISSUERDIRECTORY__CONFIG: "/app/etc/issuer-directory/issuer-directory.yaml"
       ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
       ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
       ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
       ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
       ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
     volumes:
-      - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
+      - ../../etc/issuer-directory:/app/etc/issuer-directory:ro
     ports:
       - "${ISSUER_DIRECTORY_PORT:-8447}:8080"
     networks:
@@ -195,7 +196,11 @@ services:
       SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
       SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
     volumes:
-      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
+      # Configuration (consolidated under etc/)
+      - ../../etc/scanner:/app/etc/scanner:ro
+      - ../../etc/certificates/trust-roots:/etc/ssl/certs/stellaops:ro
+      # Offline kit paths (for air-gap mode)
+      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-../../etc/certificates/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
       - ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
     ports:
       - "${SCANNER_WEB_PORT:-8444}:8444"
@@ -256,7 +261,7 @@ services:
       NOTIFY__QUEUE__DRIVER: "nats"
       NOTIFY__QUEUE__NATS__URL: "nats://nats:4222"
     volumes:
-      - ../../etc/notify.dev.yaml:/app/etc/notify.yaml:ro
+      - ../../etc/notify:/app/etc/notify:ro
     ports:
       - "${NOTIFY_WEB_PORT:-8446}:8446"
     networks:
@@ -293,6 +298,9 @@ services:
     ports:
       - "${ADVISORY_AI_WEB_PORT:-8448}:8448"
     volumes:
+      # Configuration (consolidated under etc/)
+      - ../../etc/llm-providers:/app/etc/llm-providers:ro
+      # Runtime data
       - advisory-ai-queue:/var/lib/advisory-ai/queue
       - advisory-ai-plans:/var/lib/advisory-ai/plans
       - advisory-ai-outputs:/var/lib/advisory-ai/outputs
@@ -314,6 +322,9 @@ services:
       ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
       ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
     volumes:
+      # Configuration (consolidated under etc/)
+      - ../../etc/llm-providers:/app/etc/llm-providers:ro
+      # Runtime data
       - advisory-ai-queue:/var/lib/advisory-ai/queue
       - advisory-ai-plans:/var/lib/advisory-ai/plans
       - advisory-ai-outputs:/var/lib/advisory-ai/outputs
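Once the stack is up, a quick way to confirm the consolidated mounts resolve inside the containers is to list the new paths. A minimal smoke check; the service names are assumed from the environment-variable prefixes in this compose file:

docker compose exec authority ls /app/etc/authority
docker compose exec issuer-directory ls /app/etc/issuer-directory
docker compose exec scanner ls /app/etc/scanner /etc/ssl/certs/stellaops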


@@ -22,7 +22,6 @@ ENV TZ=UTC
 # Disable .NET telemetry
 ENV DOTNET_NOLOGO=1
 ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
-ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
 # .NET paths
 ENV DOTNET_ROOT=/usr/share/dotnet
@@ -43,18 +42,30 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     jq \
     # Build tools
     build-essential \
-    # Docker CLI (for DinD scenarios)
-    docker.io \
-    docker-compose-plugin \
     # Cross-compilation
     binutils-aarch64-linux-gnu \
     # Python (for scripts)
     python3 \
     python3-pip \
     # .NET dependencies
     libicu70 \
     # Locales
     locales \
     && rm -rf /var/lib/apt/lists/*
+# ===========================================================================
+# DOCKER CLI & COMPOSE (from official Docker repo)
+# ===========================================================================
+RUN install -m 0755 -d /etc/apt/keyrings \
+    && curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc \
+    && chmod a+r /etc/apt/keyrings/docker.asc \
+    && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu jammy stable" > /etc/apt/sources.list.d/docker.list \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends docker-ce-cli docker-compose-plugin \
+    && rm -rf /var/lib/apt/lists/* \
+    && docker --version
 # Set locale
 RUN locale-gen en_US.UTF-8
 ENV LANG=en_US.UTF-8
@@ -132,19 +143,20 @@ RUN useradd -m -s /bin/bash ciuser \
     && chown -R ciuser:ciuser /home/ciuser
 # Health check script
-COPY --chmod=755 <<'EOF' /usr/local/bin/ci-health-check
-#!/bin/bash
-set -e
-echo "=== CI Environment Health Check ==="
-echo "OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d= -f2)"
-echo ".NET: $(dotnet --version)"
-echo "Node: $(node --version)"
-echo "npm: $(npm --version)"
-echo "Helm: $(helm version --short)"
-echo "Cosign: $(cosign version 2>&1 | head -1)"
-echo "Docker: $(docker --version 2>/dev/null || echo 'Not available')"
-echo "PostgreSQL client: $(psql --version)"
-echo "=== All checks passed ==="
-EOF
+RUN printf '%s\n' \
+    '#!/bin/bash' \
+    'set -e' \
+    'echo "=== CI Environment Health Check ==="' \
+    'echo "OS: $(cat /etc/os-release | grep PRETTY_NAME | cut -d= -f2)"' \
+    'echo ".NET: $(dotnet --version)"' \
+    'echo "Node: $(node --version)"' \
+    'echo "npm: $(npm --version)"' \
+    'echo "Helm: $(helm version --short)"' \
+    'echo "Cosign: $(cosign version 2>&1 | head -1)"' \
+    'echo "Docker: $(docker --version 2>/dev/null || echo Not available)"' \
+    'echo "PostgreSQL client: $(psql --version)"' \
+    'echo "=== All checks passed ==="' \
+    > /usr/local/bin/ci-health-check \
+    && chmod +x /usr/local/bin/ci-health-check
 ENTRYPOINT ["/bin/bash"]
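The printf form trades the COPY heredoc for plain line continuations, so the image builds on BuildKit versions without heredoc support; single-quoting each line keeps the $(...) substitutions unexpanded until the health check actually runs. A usage sketch, where the image tag is purely illustrative:

# ENTRYPOINT is /bin/bash, so pass the script name via -c (image tag is hypothetical)
docker run --rm stellaops/ci-base:latest -c ci-health-check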


@@ -0,0 +1,221 @@
#!/usr/bin/env bash
#
# Initialize StellaOps configuration from sample files
#
# Usage:
#   ./devops/scripts/init-config.sh [profile]
#
# Profiles:
#   dev    - Development environment (default)
#   stage  - Staging environment
#   prod   - Production environment
#   airgap - Air-gapped deployment
#
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
ETC_DIR="${ROOT_DIR}/etc"
PROFILE="${1:-dev}"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

log_info()  { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok()    { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn()  { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }

# Validate profile
case "${PROFILE}" in
  dev|stage|prod|airgap)
    log_info "Initializing configuration for profile: ${PROFILE}"
    ;;
  *)
    log_error "Unknown profile: ${PROFILE}"
    echo "Valid profiles: dev, stage, prod, airgap"
    exit 1
    ;;
esac

# Create directory structure
create_directories() {
  log_info "Creating directory structure..."
  local dirs=(
    "etc/authority/plugins"
    "etc/certificates/trust-roots"
    "etc/certificates/signing"
    "etc/concelier/sources"
    "etc/crypto/profiles/cn"
    "etc/crypto/profiles/eu"
    "etc/crypto/profiles/kr"
    "etc/crypto/profiles/ru"
    "etc/crypto/profiles/us-fips"
    "etc/env"
    "etc/llm-providers"
    "etc/notify/templates"
    "etc/plugins/notify"
    "etc/plugins/scanner/lang"
    "etc/plugins/scanner/os"
    "etc/policy/packs"
    "etc/policy/schemas"
    "etc/router"
    "etc/scanner"
    "etc/scheduler"
    "etc/scm-connectors"
    "etc/secrets"
    "etc/signals"
    "etc/vex"
  )
  for dir in "${dirs[@]}"; do
    mkdir -p "${ROOT_DIR}/${dir}"
  done
  log_ok "Directory structure created"
}

# Copy sample files to active configs
copy_sample_files() {
  log_info "Copying sample files..."
  local count=0
  # Find all .sample files
  while IFS= read -r -d '' sample_file; do
    # Determine target file (remove .sample extension)
    local target_file="${sample_file%.sample}"
    # Skip if target already exists
    if [[ -f "${target_file}" ]]; then
      log_warn "Skipping (exists): ${target_file#${ROOT_DIR}/}"
      continue
    fi
    cp "${sample_file}" "${target_file}"
    log_ok "Created: ${target_file#${ROOT_DIR}/}"
    # Not ((count++)): the post-increment returns 1 when count is 0, which would abort under set -e
    count=$((count + 1))
  done < <(find "${ETC_DIR}" -name "*.sample" -type f -print0 2>/dev/null)
  log_info "Copied ${count} sample files"
}

# Copy environment-specific profile
copy_env_profile() {
  log_info "Setting up environment profile: ${PROFILE}"
  local env_sample="${ETC_DIR}/env/${PROFILE}.env.sample"
  local env_target="${ROOT_DIR}/.env"
  if [[ -f "${env_sample}" ]]; then
    if [[ -f "${env_target}" ]]; then
      log_warn ".env already exists, not overwriting"
    else
      cp "${env_sample}" "${env_target}"
      log_ok "Created .env from ${PROFILE} profile"
    fi
  else
    log_warn "No environment sample found for profile: ${PROFILE}"
  fi
}

# Create .gitignore entries for active configs
update_gitignore() {
  log_info "Updating .gitignore..."
  local gitignore="${ROOT_DIR}/.gitignore"
  local entries=(
    "# Active configuration files (not samples)"
    "etc/**/*.yaml"
    "!etc/**/*.yaml.sample"
    "etc/**/*.json"
    "!etc/**/*.json.sample"
    "etc/**/env"
    "!etc/**/env.sample"
    "etc/secrets/*"
    "!etc/secrets/*.sample"
    "!etc/secrets/README.md"
  )
  # Check if entries already exist
  if grep -q "# Active configuration files" "${gitignore}" 2>/dev/null; then
    log_warn ".gitignore already contains config entries"
    return
  fi
  echo "" >> "${gitignore}"
  for entry in "${entries[@]}"; do
    echo "${entry}" >> "${gitignore}"
  done
  log_ok "Updated .gitignore"
}

# Validate the configuration
validate_config() {
  log_info "Validating configuration..."
  local errors=0
  # Check for required directories
  local required_dirs=(
    "etc/scanner"
    "etc/authority"
    "etc/policy"
  )
  for dir in "${required_dirs[@]}"; do
    if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
      log_error "Missing required directory: ${dir}"
      errors=$((errors + 1))  # arithmetic expansion, not ((errors++)), to stay set -e safe
    fi
  done
  if [[ ${errors} -gt 0 ]]; then
    log_error "Validation failed with ${errors} errors"
    exit 1
  fi
  log_ok "Configuration validated"
}

# Print summary
print_summary() {
  echo ""
  echo "========================================"
  echo "  Configuration Initialized"
  echo "========================================"
  echo ""
  echo "Profile: ${PROFILE}"
  echo ""
  echo "Next steps:"
  echo "  1. Review and customize configurations in etc/"
  echo "  2. Set sensitive values via environment variables"
  echo "  3. For crypto compliance, set STELLAOPS_CRYPTO_PROFILE"
  echo ""
  echo "Quick start:"
  echo "  docker compose up -d"
  echo ""
  echo "Documentation:"
  echo "  docs/operations/configuration-guide.md"
  echo ""
}

# Main
main() {
  create_directories
  copy_sample_files
  copy_env_profile
  update_gitignore
  validate_config
  print_summary
}

main "$@"


@@ -0,0 +1,330 @@
#!/usr/bin/env bash
#
# Migrate legacy configuration structure to consolidated etc/
#
# This script migrates:
#   certificates/ -> etc/certificates/
#   config/       -> etc/crypto/ and etc/env/
#   policies/     -> etc/policy/
#   etc/rootpack/ -> etc/crypto/profiles/
#
# Usage:
#   ./devops/scripts/migrate-config.sh [--dry-run]
#
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd "${SCRIPT_DIR}/../.." && pwd)"
DRY_RUN=false
[[ "${1:-}" == "--dry-run" ]] && DRY_RUN=true

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

log_info()  { echo -e "${BLUE}[INFO]${NC} $*"; }
log_ok()    { echo -e "${GREEN}[OK]${NC} $*"; }
log_warn()  { echo -e "${YELLOW}[WARN]${NC} $*"; }
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
log_dry()   { echo -e "${YELLOW}[DRY-RUN]${NC} $*"; }

# Execute or log command
run_cmd() {
  if [[ "${DRY_RUN}" == true ]]; then
    log_dry "$*"
  else
    "$@"
  fi
}

# Create backup
create_backup() {
  local backup_file="${ROOT_DIR}/config-backup-$(date +%Y%m%d-%H%M%S).tar.gz"
  log_info "Creating backup: ${backup_file}"
  if [[ "${DRY_RUN}" == true ]]; then
    log_dry "Would create backup of: certificates/ config/ policies/ etc/"
    return
  fi
  local dirs_to_backup=()
  [[ -d "${ROOT_DIR}/certificates" ]] && dirs_to_backup+=("certificates")
  [[ -d "${ROOT_DIR}/config" ]] && dirs_to_backup+=("config")
  [[ -d "${ROOT_DIR}/policies" ]] && dirs_to_backup+=("policies")
  [[ -d "${ROOT_DIR}/etc" ]] && dirs_to_backup+=("etc")
  if [[ ${#dirs_to_backup[@]} -gt 0 ]]; then
    cd "${ROOT_DIR}"
    tar -czvf "${backup_file}" "${dirs_to_backup[@]}"
    log_ok "Backup created: ${backup_file}"
  else
    log_warn "No directories to backup"
  fi
}

# Create new directory structure
create_directories() {
  log_info "Creating new directory structure..."
  local dirs=(
    "etc/certificates/trust-roots"
    "etc/certificates/signing"
    "etc/crypto/profiles/cn"
    "etc/crypto/profiles/eu"
    "etc/crypto/profiles/kr"
    "etc/crypto/profiles/ru"
    "etc/crypto/profiles/us-fips"
    "etc/env"
    "etc/policy/packs"
    "etc/policy/schemas"
  )
  for dir in "${dirs[@]}"; do
    run_cmd mkdir -p "${ROOT_DIR}/${dir}"
  done
  log_ok "Directory structure created"
}

# Migrate certificates/
migrate_certificates() {
  local src_dir="${ROOT_DIR}/certificates"
  if [[ ! -d "${src_dir}" ]]; then
    log_info "No certificates/ directory found, skipping"
    return
  fi
  log_info "Migrating certificates/..."
  # Trust roots (CA bundles). A redirect such as 2>/dev/null is not valid inside
  # a for word list; unmatched globs stay literal and the -f guard skips them.
  for f in "${src_dir}"/*-bundle*.pem "${src_dir}"/*-root*.pem "${src_dir}"/*_bundle*.pem "${src_dir}"/*_root*.pem; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
  done
  # Signing keys
  for f in "${src_dir}"/*-signing-*.pem "${src_dir}"/*_signing_*.pem; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/signing/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/signing/"
  done
  # Move remaining .pem and .cer files to trust-roots
  for f in "${src_dir}"/*.pem "${src_dir}"/*.cer; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/certificates/trust-roots/"
    log_ok "Moved: $(basename "$f") -> etc/certificates/trust-roots/"
  done
  # Remove empty directory
  if [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}")" ]]; then
    run_cmd rmdir "${src_dir}"
    log_ok "Removed empty: certificates/"
  fi
}

# Migrate config/
migrate_config_dir() {
  local src_dir="${ROOT_DIR}/config"
  if [[ ! -d "${src_dir}" ]]; then
    log_info "No config/ directory found, skipping"
    return
  fi
  log_info "Migrating config/..."
  # Map env files to crypto profiles
  declare -A env_mapping=(
    [".env.fips.example"]="us-fips/env.sample"
    [".env.eidas.example"]="eu/env.sample"
    [".env.ru-free.example"]="ru/env.sample"
    [".env.ru-paid.example"]="ru/env-paid.sample"
    [".env.sm.example"]="cn/env.sample"
    [".env.kcmvp.example"]="kr/env.sample"
  )
  for src_name in "${!env_mapping[@]}"; do
    local src_file="${src_dir}/env/${src_name}"
    local dst_file="${ROOT_DIR}/etc/crypto/profiles/${env_mapping[$src_name]}"
    if [[ -f "${src_file}" ]]; then
      run_cmd mkdir -p "$(dirname "${dst_file}")"
      run_cmd mv "${src_file}" "${dst_file}"
      log_ok "Moved: ${src_name} -> etc/crypto/profiles/${env_mapping[$src_name]}"
    fi
  done
  # Remove crypto-profiles.sample.json (superseded)
  if [[ -f "${src_dir}/crypto-profiles.sample.json" ]]; then
    run_cmd rm "${src_dir}/crypto-profiles.sample.json"
    log_ok "Removed: config/crypto-profiles.sample.json (superseded by etc/crypto/)"
  fi
  # Remove empty directories
  [[ -d "${src_dir}/env" ]] && [[ -z "$(ls -A "${src_dir}/env" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/env"
  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
  # A non-empty src_dir is not an error; don't let the failed test above become the function's exit status under set -e
  return 0
}

# Migrate policies/
migrate_policies() {
  local src_dir="${ROOT_DIR}/policies"
  if [[ ! -d "${src_dir}" ]]; then
    log_info "No policies/ directory found, skipping"
    return
  fi
  log_info "Migrating policies/..."
  # Move policy packs
  for f in "${src_dir}"/*.yaml; do
    [[ -f "$f" ]] || continue
    run_cmd mv "$f" "${ROOT_DIR}/etc/policy/packs/"
    log_ok "Moved: $(basename "$f") -> etc/policy/packs/"
  done
  # Move schemas
  if [[ -d "${src_dir}/schemas" ]]; then
    for f in "${src_dir}/schemas"/*.json; do
      [[ -f "$f" ]] || continue
      run_cmd mv "$f" "${ROOT_DIR}/etc/policy/schemas/"
      log_ok "Moved: schemas/$(basename "$f") -> etc/policy/schemas/"
    done
    [[ -z "$(ls -A "${src_dir}/schemas" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}/schemas"
  fi
  # Move AGENTS.md if present
  [[ -f "${src_dir}/AGENTS.md" ]] && run_cmd mv "${src_dir}/AGENTS.md" "${ROOT_DIR}/etc/policy/"
  # Remove empty directory
  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
  return 0
}

# Migrate etc/rootpack/ to etc/crypto/profiles/
migrate_rootpack() {
  local src_dir="${ROOT_DIR}/etc/rootpack"
  if [[ ! -d "${src_dir}" ]]; then
    log_info "No etc/rootpack/ directory found, skipping"
    return
  fi
  log_info "Migrating etc/rootpack/ to etc/crypto/profiles/..."
  for region_dir in "${src_dir}"/*; do
    [[ -d "${region_dir}" ]] || continue
    local region_name
    region_name=$(basename "${region_dir}")
    local target_dir="${ROOT_DIR}/etc/crypto/profiles/${region_name}"
    run_cmd mkdir -p "${target_dir}"
    for f in "${region_dir}"/*; do
      [[ -f "$f" ]] || continue
      run_cmd mv "$f" "${target_dir}/"
      log_ok "Moved: rootpack/${region_name}/$(basename "$f") -> etc/crypto/profiles/${region_name}/"
    done
    [[ -z "$(ls -A "${region_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${region_dir}"
  done
  [[ -d "${src_dir}" ]] && [[ -z "$(ls -A "${src_dir}" 2>/dev/null)" ]] && run_cmd rmdir "${src_dir}"
  return 0
}

# Validate migration
validate_migration() {
  log_info "Validating migration..."
  local errors=0
  # Check new structure exists
  local required=(
    "etc/certificates"
    "etc/crypto/profiles"
    "etc/policy"
  )
  for dir in "${required[@]}"; do
    if [[ ! -d "${ROOT_DIR}/${dir}" ]]; then
      log_error "Missing: ${dir}"
      errors=$((errors + 1))  # arithmetic expansion, not ((errors++)), to stay set -e safe
    fi
  done
  # Check legacy directories are gone
  local legacy=(
    "certificates"
    "config"
    "policies"
    "etc/rootpack"
  )
  for dir in "${legacy[@]}"; do
    if [[ -d "${ROOT_DIR}/${dir}" ]] && [[ -n "$(ls -A "${ROOT_DIR}/${dir}" 2>/dev/null)" ]]; then
      log_warn "Legacy directory still has content: ${dir}"
    fi
  done
  if [[ ${errors} -gt 0 ]]; then
    log_error "Validation failed"
    return 1
  fi
  log_ok "Migration validated"
}

# Print summary
print_summary() {
  echo ""
  echo "========================================"
  if [[ "${DRY_RUN}" == true ]]; then
    echo "  Migration Dry Run Complete"
  else
    echo "  Migration Complete"
  fi
  echo "========================================"
  echo ""
  echo "New structure:"
  echo "  etc/certificates/    - Trust anchors and signing keys"
  echo "  etc/crypto/profiles/ - Regional crypto profiles"
  echo "  etc/policy/          - Policy engine configuration"
  echo ""
  if [[ "${DRY_RUN}" == true ]]; then
    echo "Run without --dry-run to apply changes"
  else
    echo "Next steps:"
    echo "  1. Update Docker Compose volume mounts"
    echo "  2. Update any hardcoded paths in scripts"
    echo "  3. Restart services and validate"
    echo ""
    echo "Rollback:"
    echo "  tar -xzvf config-backup-*.tar.gz"
  fi
  echo ""
}

# Main
main() {
  if [[ "${DRY_RUN}" == true ]]; then
    log_info "DRY RUN - no changes will be made"
  fi
  create_backup
  create_directories
  migrate_certificates
  migrate_config_dir
  migrate_policies
  migrate_rootpack
  validate_migration
  print_summary
}

main "$@"


@@ -0,0 +1,343 @@
#!/usr/bin/env python3
"""
Validate and report on test Category traits across the codebase.

Sprint: SPRINT_20251226_007_CICD

This script scans all test files in the codebase and reports:
  1. Test files with Category traits
  2. Test files missing Category traits
  3. Coverage percentage by module

Usage:
    python devops/scripts/validate-test-traits.py [--fix] [--module <name>]

Options:
    --fix       Attempt to add default Unit trait to tests without categories
    --module    Only process tests in the specified module
    --category  Default category to apply with --fix (default: Unit)
    --verbose   Show detailed output
    --json      Output as JSON for CI consumption
"""
import os
import re
import sys
import json
import argparse
from pathlib import Path
from dataclasses import dataclass, field
from typing import List, Dict, Set, Optional

VALID_CATEGORIES = {
    "Unit",
    "Integration",
    "Architecture",
    "Contract",
    "Security",
    "Golden",
    "Performance",
    "Benchmark",
    "AirGap",
    "Chaos",
    "Determinism",
    "Resilience",
    "Observability",
    "Property",
    "Snapshot",
    "Live",
}

# Patterns to identify test methods and classes
FACT_PATTERN = re.compile(r'\[Fact[^\]]*\]')
THEORY_PATTERN = re.compile(r'\[Theory[^\]]*\]')
# Match both string literals and TestCategories.Xxx constants.
# Also matches the inline form [Fact, Trait("Category", ...)].
TRAIT_CATEGORY_PATTERN = re.compile(
    r'Trait\s*\(\s*["\']Category["\']\s*,\s*(?:["\'](\w+)["\']|TestCategories\.(\w+))\s*\)'
)
TEST_CLASS_PATTERN = re.compile(r'public\s+(?:sealed\s+)?class\s+\w+.*Tests?\b')


@dataclass
class TestFileAnalysis:
    path: str
    has_facts: bool = False
    has_theories: bool = False
    has_category_traits: bool = False
    categories_found: Set[str] = field(default_factory=set)
    test_method_count: int = 0
    categorized_test_count: int = 0


def analyze_test_file(file_path: Path) -> TestFileAnalysis:
    """Analyze a single test file for Category traits."""
    analysis = TestFileAnalysis(path=str(file_path))
    try:
        content = file_path.read_text(encoding='utf-8', errors='ignore')
    except Exception as e:
        print(f"Warning: Could not read {file_path}: {e}", file=sys.stderr)
        return analysis
    # Check for test methods
    facts = FACT_PATTERN.findall(content)
    theories = THEORY_PATTERN.findall(content)
    analysis.has_facts = len(facts) > 0
    analysis.has_theories = len(theories) > 0
    analysis.test_method_count = len(facts) + len(theories)
    # Check for Category traits
    category_matches = TRAIT_CATEGORY_PATTERN.findall(content)
    if category_matches:
        analysis.has_category_traits = True
        # The pattern has two capture groups: one for a string literal,
        # one for a TestCategories constant. Take whichever is non-empty.
        categories = set()
        for match in category_matches:
            cat = match[0] or match[1]
            if cat:
                categories.add(cat)
        analysis.categories_found = categories
        analysis.categorized_test_count = len(category_matches)
    return analysis


def get_module_from_path(file_path: Path) -> str:
    """Extract module name from file path."""
    parts = file_path.parts
    # Look for src/<Module> pattern
    for i, part in enumerate(parts):
        if part == 'src' and i + 1 < len(parts):
            next_part = parts[i + 1]
            if next_part.startswith('__'):
                return next_part  # e.g., __Tests, __Libraries
            return next_part
    return "Unknown"


def find_test_files(root_path: Path, module_filter: Optional[str] = None) -> List[Path]:
    """Find all test files in the codebase."""
    test_files = []
    for pattern in ['**/*.Tests.cs', '**/*Test.cs', '**/*Tests/*.cs']:
        for file_path in root_path.glob(pattern):
            # Skip generated files (path-component check, so it also works on Windows)
            if 'obj' in file_path.parts or 'bin' in file_path.parts:
                continue
            if 'node_modules' in file_path.parts:
                continue
            # Apply module filter if specified
            if module_filter:
                module = get_module_from_path(file_path)
                if module.lower() != module_filter.lower():
                    continue
            test_files.append(file_path)
    # The three glob patterns overlap, so de-duplicate before returning
    return sorted(set(test_files))


def generate_report(analyses: List[TestFileAnalysis], verbose: bool = False) -> Dict:
    """Generate a summary report from analyses."""
    total_files = len(analyses)
    files_with_tests = [a for a in analyses if a.has_facts or a.has_theories]
    files_with_traits = [a for a in analyses if a.has_category_traits]
    files_missing_traits = [a for a in files_with_tests if not a.has_category_traits]
    # Group by module
    by_module: Dict[str, Dict] = {}
    for analysis in analyses:
        module = get_module_from_path(Path(analysis.path))
        if module not in by_module:
            by_module[module] = {
                'total': 0,
                'with_tests': 0,
                'with_traits': 0,
                'missing_traits': 0,
                'files_missing': []
            }
        by_module[module]['total'] += 1
        if analysis.has_facts or analysis.has_theories:
            by_module[module]['with_tests'] += 1
        if analysis.has_category_traits:
            by_module[module]['with_traits'] += 1
        else:
            if analysis.has_facts or analysis.has_theories:
                by_module[module]['missing_traits'] += 1
                if verbose:
                    by_module[module]['files_missing'].append(analysis.path)
    # Calculate coverage
    coverage = (len(files_with_traits) / len(files_with_tests) * 100) if files_with_tests else 0
    # Collect all categories found
    all_categories: Set[str] = set()
    for analysis in analyses:
        all_categories.update(analysis.categories_found)
    return {
        'summary': {
            'total_test_files': total_files,
            'files_with_tests': len(files_with_tests),
            'files_with_category_traits': len(files_with_traits),
            'files_missing_traits': len(files_missing_traits),
            'coverage_percent': round(coverage, 1),
            'categories_used': sorted(all_categories),
            'valid_categories': sorted(VALID_CATEGORIES),
        },
        'by_module': by_module,
        'files_missing_traits': [a.path for a in files_missing_traits] if verbose else []
    }


def add_default_trait(file_path: Path, default_category: str = "Unit") -> bool:
    """Add default Category trait to test methods missing traits."""
    try:
        content = file_path.read_text(encoding='utf-8')
        original = content
        # Simplified approach: insert a Trait attribute directly above each
        # [Fact] / [Theory], but only in files with no Category traits at all.
        if TRAIT_CATEGORY_PATTERN.search(content):
            return False  # Already has some traits, skip
        # Add using statement if not present
        if 'using StellaOps.TestKit;' not in content:
            # Find last using statement and add after it
            using_pattern = re.compile(r'(using [^;]+;\s*\n)(?!using)')
            matches = list(using_pattern.finditer(content))
            if matches:
                last_using = matches[-1]
                insert_pos = last_using.end()
                content = content[:insert_pos] + 'using StellaOps.TestKit;\n' + content[insert_pos:]
        # Add Trait to [Fact] attributes
        content = re.sub(
            r'(\[Fact\])',
            f'[Trait("Category", TestCategories.{default_category})]\n    \\1',
            content
        )
        # Add Trait to [Theory] attributes
        content = re.sub(
            r'(\[Theory\])',
            f'[Trait("Category", TestCategories.{default_category})]\n    \\1',
            content
        )
        if content != original:
            file_path.write_text(content, encoding='utf-8')
            return True
        return False
    except Exception as e:
        print(f"Error processing {file_path}: {e}", file=sys.stderr)
        return False


def main():
    parser = argparse.ArgumentParser(description='Validate test Category traits')
    parser.add_argument('--fix', action='store_true', help='Add default Unit trait to tests without categories')
    parser.add_argument('--module', type=str, help='Only process tests in the specified module')
    parser.add_argument('--verbose', '-v', action='store_true', help='Show detailed output')
    parser.add_argument('--json', action='store_true', help='Output as JSON')
    parser.add_argument('--category', type=str, default='Unit', help='Default category for --fix (default: Unit)')
    args = parser.parse_args()
    # Find repository root
    script_path = Path(__file__).resolve()
    repo_root = script_path.parent.parent.parent
    src_path = repo_root / 'src'
    if not src_path.exists():
        print(f"Error: src directory not found at {src_path}", file=sys.stderr)
        sys.exit(1)
    # Find all test files
    test_files = find_test_files(src_path, args.module)
    if not args.json:
        print(f"Found {len(test_files)} test files to analyze...")
    # Analyze each file
    analyses = [analyze_test_file(f) for f in test_files]
    # Generate report
    report = generate_report(analyses, args.verbose)
    if args.json:
        print(json.dumps(report, indent=2))
    else:
        # Print summary
        summary = report['summary']
        print("\n" + "=" * 60)
        print("TEST CATEGORY TRAIT COVERAGE REPORT")
        print("=" * 60)
        print(f"Total test files: {summary['total_test_files']}")
        print(f"Files with test methods: {summary['files_with_tests']}")
        print(f"Files with Category trait: {summary['files_with_category_traits']}")
        print(f"Files missing traits: {summary['files_missing_traits']}")
        print(f"Coverage: {summary['coverage_percent']}%")
        print(f"\nCategories in use: {', '.join(summary['categories_used']) or 'None'}")
        print(f"Valid categories: {', '.join(summary['valid_categories'])}")
        # Print by module
        print("\n" + "-" * 60)
        print("BY MODULE")
        print("-" * 60)
        print(f"{'Module':<25} {'With Tests':<12} {'With Traits':<12} {'Missing':<10}")
        print("-" * 60)
        for module, data in sorted(report['by_module'].items()):
            if data['with_tests'] > 0:
                print(f"{module:<25} {data['with_tests']:<12} {data['with_traits']:<12} {data['missing_traits']:<10}")
        # Show files missing traits if verbose
        if args.verbose and report['files_missing_traits']:
            print("\n" + "-" * 60)
            print("FILES MISSING CATEGORY TRAITS")
            print("-" * 60)
            for f in sorted(report['files_missing_traits'])[:50]:  # Limit to first 50
                print(f"  {f}")
            if len(report['files_missing_traits']) > 50:
                print(f"  ... and {len(report['files_missing_traits']) - 50} more")
    # Fix mode
    if args.fix:
        files_to_fix = [Path(a.path) for a in analyses
                        if (a.has_facts or a.has_theories) and not a.has_category_traits]
        if not args.json:
            print(f"\n{'=' * 60}")
            print(f"FIXING {len(files_to_fix)} FILES WITH DEFAULT CATEGORY: {args.category}")
            print("=" * 60)
        fixed_count = 0
        for file_path in files_to_fix:
            if add_default_trait(file_path, args.category):
                fixed_count += 1
                if not args.json:
                    print(f"  Fixed: {file_path}")
        if not args.json:
            print(f"\nFixed {fixed_count} files")
    # Exit with error code if coverage is below threshold
    if report['summary']['coverage_percent'] < 80:
        sys.exit(1)
    sys.exit(0)


if __name__ == '__main__':
    main()
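Typical invocations, using only the flags the script defines:

python devops/scripts/validate-test-traits.py --verbose               # human-readable report plus offending files
python devops/scripts/validate-test-traits.py --json > traits.json    # machine-readable output for CI
python devops/scripts/validate-test-traits.py --module Scanner --fix --category Unit

Note the exit status: the script exits 1 whenever trait coverage falls below 80%, so a CI job can gate on it directly.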