Build fixes and code-structure improvements; new but essential UI functionality; CI, documentation, and AI module improvements.
147
devops/ci-local/.env.local.sample
Normal file
@@ -0,0 +1,147 @@
# =============================================================================
# LOCAL CI TESTING ENVIRONMENT VARIABLES
# =============================================================================
# Copy this file to .env.local and customize for your local environment.
# The .env.local file is gitignored and should NOT be committed.
#
# Usage:
#   cp devops/ci-local/.env.local.sample devops/ci-local/.env.local
#   # Edit .env.local with your values
#
# =============================================================================

# =============================================================================
# DATABASE CONFIGURATION
# =============================================================================
# These values match docker-compose.ci.yaml defaults
# Port 5433 is used to avoid conflicts with development PostgreSQL

STELLAOPS_TEST_POSTGRES_CONNECTION="Host=localhost;Port=5433;Database=stellaops_test;Username=stellaops_ci;Password=ci_test_password"

# Alternative: individual connection parameters
POSTGRES_HOST=localhost
POSTGRES_PORT=5433
POSTGRES_USER=stellaops_ci
POSTGRES_PASSWORD=ci_test_password
POSTGRES_DB=stellaops_test

# =============================================================================
# CACHE & MESSAGING
# =============================================================================
# Valkey (Redis-compatible) - Port 6380 to avoid conflicts
VALKEY_CONNECTION_STRING="localhost:6380"
VALKEY_HOST=localhost
VALKEY_PORT=6380

# NATS JetStream - Port 4223 to avoid conflicts
#NATS_URL="nats://localhost:4223"
#NATS_HOST=localhost
#NATS_PORT=4223

# =============================================================================
# MOCK CONTAINER REGISTRY
# =============================================================================
# Local registry for release dry-run testing
REGISTRY_HOST=localhost:5001
REGISTRY_USERNAME=local
REGISTRY_PASSWORD=local

# =============================================================================
# MOCK S3 STORAGE (RustFS)
# =============================================================================
S3_ENDPOINT=http://localhost:9100
S3_ACCESS_KEY=rustfsadmin
S3_SECRET_KEY=rustfsadmin
S3_BUCKET=stellaops-ci

# =============================================================================
# SIGNING CONFIGURATION
# =============================================================================
# Mock signing keys for local testing - DO NOT USE IN PRODUCTION!
# Generate real keys with: cosign generate-key-pair

# Base64-encoded private key (leave empty to skip signing tests)
COSIGN_PRIVATE_KEY_B64=

# Password for the signing key
COSIGN_PASSWORD=local-test-password

# For keyless signing (requires internet)
# COSIGN_EXPERIMENTAL=1

# =============================================================================
# OPTIONAL: REAL SECRETS FOR FULL TESTING
# =============================================================================
# Uncomment and fill in for full integration testing
# These are NOT required for basic local CI runs

# Gitea API token for registry operations
# GITEA_TOKEN=

# GitHub Container Registry token
# GHCR_TOKEN=

# AI API key for AdvisoryAI tests
# AI_API_KEY=

# Slack webhook for notification tests
# SLACK_WEBHOOK=

# =============================================================================
# LOCAL CI CONFIGURATION
# =============================================================================

# Execution mode: docker, native, or act
LOCAL_CI_MODE=docker

# Number of parallel test runners (default: auto-detect CPU count)
LOCAL_CI_PARALLEL=4

# Enable verbose output
LOCAL_CI_VERBOSE=false

# Results output directory (relative to repo root)
LOCAL_CI_RESULTS_DIR=out/local-ci

# =============================================================================
# DEPLOYMENT FLAGS
# =============================================================================
# Always dry-run for local testing
DEPLOYMENT_DRY_RUN=true

# Mock deployment targets
DEPLOYMENT_HOST=localhost
DEPLOYMENT_USERNAME=testuser
DEPLOYMENT_PATH=/tmp/stellaops-deploy

# =============================================================================
# FEATURE FLAGS
# =============================================================================

# Skip tests requiring external network access
STELLAOPS_SKIP_NETWORK_TESTS=false

# Enable offline mode (uses cached/mock data)
STELLAOPS_OFFLINE_MODE=false

# Skip slow benchmark tests
SKIP_BENCHMARK_TESTS=true

# Skip chaos/resilience tests
SKIP_CHAOS_TESTS=true

# =============================================================================
# .NET BUILD CONFIGURATION
# =============================================================================
# These match the CI environment exactly

DOTNET_NOLOGO=1
DOTNET_CLI_TELEMETRY_OPTOUT=1
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
TZ=UTC

# Build configuration
BUILD_CONFIGURATION=Release

# Warnings as errors (match CI)
DOTNET_WARNASERROR=true
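To load this file outside the compose tooling, a minimal sketch (assumes a POSIX shell; `set -a` auto-exports every variable the file defines):

```bash
cp devops/ci-local/.env.local.sample devops/ci-local/.env.local
set -a
. devops/ci-local/.env.local
set +a
```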
48
devops/ci-local/events/pull-request.json
Normal file
@@ -0,0 +1,48 @@
{
  "action": "opened",
  "number": 999,
  "pull_request": {
    "number": 999,
    "title": "[Local CI] Test Pull Request",
    "body": "This is a simulated pull request for local CI testing.",
    "state": "open",
    "draft": false,
    "head": {
      "ref": "feature/local-ci-test",
      "sha": "0000000000000000000000000000000000000000",
      "repo": {
        "name": "git.stella-ops.org",
        "full_name": "stellaops/git.stella-ops.org"
      }
    },
    "base": {
      "ref": "main",
      "sha": "0000000000000000000000000000000000000001",
      "repo": {
        "name": "git.stella-ops.org",
        "full_name": "stellaops/git.stella-ops.org"
      }
    },
    "labels": [],
    "user": {
      "login": "local-ci-user",
      "type": "User"
    },
    "created_at": "2025-01-01T00:00:00Z",
    "updated_at": "2025-01-01T00:00:00Z"
  },
  "repository": {
    "name": "git.stella-ops.org",
    "full_name": "stellaops/git.stella-ops.org",
    "default_branch": "main",
    "private": true,
    "owner": {
      "login": "stellaops",
      "type": "Organization"
    }
  },
  "sender": {
    "login": "local-ci-user",
    "type": "User"
  }
}
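Payloads like this are replayed with a local runner when `LOCAL_CI_MODE=act`; a sketch, assuming nektos/act is installed and the Gitea workflows are act-compatible:

```bash
# Simulate the PR event against the local workflow set.
act pull_request \
  -e devops/ci-local/events/pull-request.json \
  -W .gitea/workflows
```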
54
devops/ci-local/events/push-main.json
Normal file
@@ -0,0 +1,54 @@
{
  "ref": "refs/heads/main",
  "before": "0000000000000000000000000000000000000001",
  "after": "0000000000000000000000000000000000000002",
  "created": false,
  "deleted": false,
  "forced": false,
  "compare": "https://git.stella-ops.org/compare/000001...000002",
  "commits": [
    {
      "id": "0000000000000000000000000000000000000002",
      "message": "[Local CI] Test commit on main branch",
      "timestamp": "2025-01-01T00:00:00Z",
      "author": {
        "name": "Local CI User",
        "email": "local-ci@stella-ops.org"
      },
      "committer": {
        "name": "Local CI User",
        "email": "local-ci@stella-ops.org"
      },
      "added": [],
      "removed": [],
      "modified": ["src/Scanner/StellaOps.Scanner.Core/Scanner.cs"]
    }
  ],
  "head_commit": {
    "id": "0000000000000000000000000000000000000002",
    "message": "[Local CI] Test commit on main branch",
    "timestamp": "2025-01-01T00:00:00Z",
    "author": {
      "name": "Local CI User",
      "email": "local-ci@stella-ops.org"
    }
  },
  "repository": {
    "name": "git.stella-ops.org",
    "full_name": "stellaops/git.stella-ops.org",
    "default_branch": "main",
    "private": true,
    "owner": {
      "login": "stellaops",
      "type": "Organization"
    }
  },
  "pusher": {
    "name": "local-ci-user",
    "email": "local-ci@stella-ops.org"
  },
  "sender": {
    "login": "local-ci-user",
    "type": "User"
  }
}
21
devops/ci-local/events/release-tag.json
Normal file
@@ -0,0 +1,21 @@
{
  "ref": "refs/tags/suite-2026.04",
  "ref_type": "tag",
  "master_branch": "main",
  "description": "StellaOps Suite Release 2026.04",
  "pusher_type": "user",
  "repository": {
    "name": "git.stella-ops.org",
    "full_name": "stellaops/git.stella-ops.org",
    "default_branch": "main",
    "private": true,
    "owner": {
      "login": "stellaops",
      "type": "Organization"
    }
  },
  "sender": {
    "login": "release-manager",
    "type": "User"
  }
}
22
devops/ci-local/events/schedule.json
Normal file
@@ -0,0 +1,22 @@
{
  "schedule": [
    {
      "cron": "0 5 * * *"
    }
  ],
  "repository": {
    "name": "git.stella-ops.org",
    "full_name": "stellaops/git.stella-ops.org",
    "default_branch": "main",
    "private": true,
    "owner": {
      "login": "stellaops",
      "type": "Organization"
    }
  },
  "sender": {
    "login": "github-actions[bot]",
    "type": "Bot"
  },
  "workflow": ".gitea/workflows/nightly-regression.yml"
}
31
devops/ci-local/events/workflow-dispatch.json
Normal file
@@ -0,0 +1,31 @@
{
  "action": "workflow_dispatch",
  "inputs": {
    "dry_run": "true",
    "include_performance": "false",
    "include_benchmark": "false",
    "include_airgap": "false",
    "include_chaos": "false",
    "include_determinism": "false",
    "include_resilience": "false",
    "include_observability": "false",
    "force_deploy": "false",
    "environment": "local"
  },
  "ref": "refs/heads/main",
  "repository": {
    "name": "git.stella-ops.org",
    "full_name": "stellaops/git.stella-ops.org",
    "default_branch": "main",
    "private": true,
    "owner": {
      "login": "stellaops",
      "type": "Organization"
    }
  },
  "sender": {
    "login": "local-ci-user",
    "type": "User"
  },
  "workflow": ".gitea/workflows/test-matrix.yml"
}
130
devops/compose/docker-compose.ci.yaml
Normal file
@@ -0,0 +1,130 @@
# =============================================================================
# LOCAL CI TESTING SERVICES
# =============================================================================
# Docker Compose profile for running CI tests locally.
# Uses different ports to avoid conflicts with development services.
#
# Usage:
#   docker compose -f devops/compose/docker-compose.ci.yaml up -d
#   docker compose -f devops/compose/docker-compose.ci.yaml down -v
#
# Services:
#   - postgres-ci: PostgreSQL 16 for integration tests (port 5433)
#   - valkey-ci: Valkey/Redis for caching tests (port 6380)
#   - nats-ci: NATS JetStream for messaging tests (port 4223)
#   - mock-registry: Local container registry for release testing (port 5001)
#
# =============================================================================

networks:
  ci-net:
    driver: bridge
    name: stellaops-ci-net

volumes:
  ci-postgres-data:
    name: stellaops-ci-postgres
  ci-valkey-data:
    name: stellaops-ci-valkey

services:
  # ---------------------------------------------------------------------------
  # PostgreSQL 16 - Primary database for integration tests
  # ---------------------------------------------------------------------------
  postgres-ci:
    image: postgres:16-alpine
    container_name: stellaops-postgres-ci
    environment:
      POSTGRES_USER: stellaops_ci
      POSTGRES_PASSWORD: ci_test_password
      POSTGRES_DB: stellaops_test
      # Performance tuning for tests
      POSTGRES_INITDB_ARGS: "--data-checksums"
    ports:
      - "5433:5432"  # Different port to avoid conflicts with dev
    volumes:
      - ci-postgres-data:/var/lib/postgresql/data
    networks:
      - ci-net
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U stellaops_ci -d stellaops_test"]
      interval: 5s
      timeout: 5s
      retries: 10
      start_period: 10s
    restart: unless-stopped

  # ---------------------------------------------------------------------------
  # Valkey 8.0 - Redis-compatible cache for caching tests
  # ---------------------------------------------------------------------------
  valkey-ci:
    image: valkey/valkey:8.0-alpine
    container_name: stellaops-valkey-ci
    command: ["valkey-server", "--appendonly", "yes", "--maxmemory", "256mb", "--maxmemory-policy", "allkeys-lru"]
    ports:
      - "6380:6379"  # Different port to avoid conflicts
    volumes:
      - ci-valkey-data:/data
    networks:
      - ci-net
    healthcheck:
      test: ["CMD", "valkey-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  # ---------------------------------------------------------------------------
  # NATS JetStream - Message queue for messaging tests
  # ---------------------------------------------------------------------------
  nats-ci:
    image: nats:2.10-alpine
    container_name: stellaops-nats-ci
    command: ["-js", "-sd", "/data", "-m", "8222"]
    ports:
      - "4223:4222"  # Client port (different from dev)
      - "8223:8222"  # Monitoring port
    networks:
      - ci-net
    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:8222/healthz"]
      interval: 5s
      timeout: 5s
      retries: 5
    restart: unless-stopped

  # ---------------------------------------------------------------------------
  # Mock Container Registry - For release dry-run testing
  # ---------------------------------------------------------------------------
  mock-registry:
    image: registry:2
    container_name: stellaops-registry-ci
    ports:
      - "5001:5000"
    environment:
      REGISTRY_STORAGE_DELETE_ENABLED: "true"
    networks:
      - ci-net
    restart: unless-stopped

  # ---------------------------------------------------------------------------
  # Mock S3 (MinIO) - For artifact storage tests
  # ---------------------------------------------------------------------------
  minio-ci:
    image: minio/minio:latest
    container_name: stellaops-minio-ci
    command: server /data --console-address ":9001"
    ports:
      - "9100:9000"  # S3 API port
      - "9101:9001"  # Console port
    environment:
      MINIO_ROOT_USER: minioadmin
      MINIO_ROOT_PASSWORD: minioadmin
    networks:
      - ci-net
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 10s
      timeout: 5s
      retries: 5
    restart: unless-stopped
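To bring the stack up and block until it is actually usable, a minimal sketch (the container names are the ones declared above; mock-registry is excluded because it defines no healthcheck):

```bash
docker compose -f devops/compose/docker-compose.ci.yaml up -d
for svc in stellaops-postgres-ci stellaops-valkey-ci stellaops-nats-ci stellaops-minio-ci; do
  # Poll Docker's recorded health state until the service reports healthy.
  until [ "$(docker inspect -f '{{.State.Health.Status}}' "$svc")" = "healthy" ]; do
    sleep 2
  done
done
```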
@@ -28,6 +28,7 @@ services:
      PGDATA: /var/lib/postgresql/data/pgdata
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ./postgres-init:/docker-entrypoint-initdb.d:ro
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    networks:
@@ -1,5 +1,7 @@
-- PostgreSQL initialization for StellaOps air-gap deployment
-- ============================================================================
-- PostgreSQL initialization for StellaOps
-- This script runs automatically on first container start
-- ============================================================================

-- Enable pg_stat_statements extension for query performance analysis
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
@@ -9,25 +11,59 @@ CREATE EXTENSION IF NOT EXISTS pg_trgm; -- Fuzzy text search
CREATE EXTENSION IF NOT EXISTS btree_gin;       -- GIN indexes for scalar types
CREATE EXTENSION IF NOT EXISTS pgcrypto;        -- Cryptographic functions

-- ============================================================================
-- Create schemas for all modules
-- Migrations will create tables within these schemas
CREATE SCHEMA IF NOT EXISTS authority;
CREATE SCHEMA IF NOT EXISTS vuln;
CREATE SCHEMA IF NOT EXISTS vex;
CREATE SCHEMA IF NOT EXISTS scheduler;
CREATE SCHEMA IF NOT EXISTS notify;
CREATE SCHEMA IF NOT EXISTS policy;
CREATE SCHEMA IF NOT EXISTS concelier;
CREATE SCHEMA IF NOT EXISTS audit;
CREATE SCHEMA IF NOT EXISTS unknowns;
-- ============================================================================

-- Grant usage to application user (assumes POSTGRES_USER is the app user)
GRANT USAGE ON SCHEMA authority TO PUBLIC;
GRANT USAGE ON SCHEMA vuln TO PUBLIC;
GRANT USAGE ON SCHEMA vex TO PUBLIC;
GRANT USAGE ON SCHEMA scheduler TO PUBLIC;
GRANT USAGE ON SCHEMA notify TO PUBLIC;
GRANT USAGE ON SCHEMA policy TO PUBLIC;
GRANT USAGE ON SCHEMA concelier TO PUBLIC;
GRANT USAGE ON SCHEMA audit TO PUBLIC;
GRANT USAGE ON SCHEMA unknowns TO PUBLIC;
-- Core Platform
CREATE SCHEMA IF NOT EXISTS authority;      -- Authentication, authorization, OAuth/OIDC

-- Data Ingestion
CREATE SCHEMA IF NOT EXISTS vuln;           -- Concelier vulnerability data
CREATE SCHEMA IF NOT EXISTS vex;            -- Excititor VEX documents

-- Scanning & Analysis
CREATE SCHEMA IF NOT EXISTS scanner;        -- Container scanning, SBOM generation

-- Scheduling & Orchestration
CREATE SCHEMA IF NOT EXISTS scheduler;      -- Job scheduling
CREATE SCHEMA IF NOT EXISTS taskrunner;     -- Task execution

-- Policy & Risk
CREATE SCHEMA IF NOT EXISTS policy;         -- Policy engine
CREATE SCHEMA IF NOT EXISTS unknowns;       -- Unknown component tracking

-- Artifacts & Evidence
CREATE SCHEMA IF NOT EXISTS proofchain;     -- Attestor proof chains
CREATE SCHEMA IF NOT EXISTS attestor;       -- Attestor submission queue
CREATE SCHEMA IF NOT EXISTS signer;         -- Key management

-- Notifications
CREATE SCHEMA IF NOT EXISTS notify;         -- Notification delivery

-- Signals & Observability
CREATE SCHEMA IF NOT EXISTS signals;        -- Runtime signals

-- Registry
CREATE SCHEMA IF NOT EXISTS packs;          -- Task packs registry

-- Audit
CREATE SCHEMA IF NOT EXISTS audit;          -- System-wide audit log

-- ============================================================================
-- Grant usage to application user (for single-user mode)
-- Per-module users are created in 02-create-users.sql
-- ============================================================================
DO $$
DECLARE
    schema_name TEXT;
BEGIN
    FOR schema_name IN SELECT unnest(ARRAY[
        'authority', 'vuln', 'vex', 'scanner', 'scheduler', 'taskrunner',
        'policy', 'unknowns', 'proofchain', 'attestor', 'signer',
        'notify', 'signals', 'packs', 'audit'
    ]) LOOP
        EXECUTE format('GRANT USAGE ON SCHEMA %I TO PUBLIC', schema_name);
    END LOOP;
END $$;
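A quick way to confirm the init scripts ran is to list the schemas they create; a sketch against whichever Postgres instance mounts `postgres-init` (host, port, and credentials below are the CI defaults from this commit and are assumptions for your setup):

```bash
PGPASSWORD=ci_test_password psql -h localhost -p 5433 -U stellaops_ci -d stellaops_test \
  -c "SELECT nspname FROM pg_namespace WHERE nspname NOT LIKE 'pg_%' AND nspname <> 'information_schema' ORDER BY 1;"
```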
53
devops/compose/postgres-init/02-create-users.sql
Normal file
@@ -0,0 +1,53 @@
-- ============================================================================
-- Per-Module Database Users
-- ============================================================================
-- Creates isolated database users for each StellaOps module.
-- This enables least-privilege access control and an audit trail per module.
--
-- Password format: {module}_dev (for development only)
-- In production, use secrets management and rotate credentials.
-- ============================================================================

-- Core Platform
CREATE USER authority_user WITH PASSWORD 'authority_dev';

-- Data Ingestion
CREATE USER concelier_user WITH PASSWORD 'concelier_dev';
CREATE USER excititor_user WITH PASSWORD 'excititor_dev';

-- Scanning & Analysis
CREATE USER scanner_user WITH PASSWORD 'scanner_dev';

-- Scheduling & Orchestration
CREATE USER scheduler_user WITH PASSWORD 'scheduler_dev';
CREATE USER taskrunner_user WITH PASSWORD 'taskrunner_dev';

-- Policy & Risk
CREATE USER policy_user WITH PASSWORD 'policy_dev';
CREATE USER unknowns_user WITH PASSWORD 'unknowns_dev';

-- Artifacts & Evidence
CREATE USER attestor_user WITH PASSWORD 'attestor_dev';
CREATE USER signer_user WITH PASSWORD 'signer_dev';

-- Notifications
CREATE USER notify_user WITH PASSWORD 'notify_dev';

-- Signals & Observability
CREATE USER signals_user WITH PASSWORD 'signals_dev';

-- Registry
CREATE USER packs_user WITH PASSWORD 'packs_dev';

-- ============================================================================
-- Log created users
-- ============================================================================
DO $$
BEGIN
    RAISE NOTICE 'Created per-module database users:';
    RAISE NOTICE '  - authority_user, concelier_user, excititor_user';
    RAISE NOTICE '  - scanner_user, scheduler_user, taskrunner_user';
    RAISE NOTICE '  - policy_user, unknowns_user';
    RAISE NOTICE '  - attestor_user, signer_user';
    RAISE NOTICE '  - notify_user, signals_user, packs_user';
END $$;
153
devops/compose/postgres-init/03-grant-permissions.sql
Normal file
@@ -0,0 +1,153 @@
-- ============================================================================
-- Per-Module Schema Permissions
-- ============================================================================
-- Grants each module user access to their respective schema(s).
-- Users can only access tables in their designated schemas.
-- ============================================================================

-- ============================================================================
-- Authority Module
-- ============================================================================
GRANT USAGE ON SCHEMA authority TO authority_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA authority TO authority_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA authority TO authority_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA authority GRANT ALL ON TABLES TO authority_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA authority GRANT ALL ON SEQUENCES TO authority_user;

-- ============================================================================
-- Concelier Module (uses 'vuln' schema)
-- ============================================================================
GRANT USAGE ON SCHEMA vuln TO concelier_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA vuln TO concelier_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA vuln TO concelier_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vuln GRANT ALL ON TABLES TO concelier_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vuln GRANT ALL ON SEQUENCES TO concelier_user;

-- ============================================================================
-- Excititor Module (uses 'vex' schema)
-- ============================================================================
GRANT USAGE ON SCHEMA vex TO excititor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA vex TO excititor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA vex TO excititor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vex GRANT ALL ON TABLES TO excititor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vex GRANT ALL ON SEQUENCES TO excititor_user;

-- ============================================================================
-- Scanner Module
-- ============================================================================
GRANT USAGE ON SCHEMA scanner TO scanner_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA scanner TO scanner_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA scanner TO scanner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scanner GRANT ALL ON TABLES TO scanner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scanner GRANT ALL ON SEQUENCES TO scanner_user;

-- ============================================================================
-- Scheduler Module
-- ============================================================================
GRANT USAGE ON SCHEMA scheduler TO scheduler_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA scheduler TO scheduler_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA scheduler TO scheduler_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scheduler GRANT ALL ON TABLES TO scheduler_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scheduler GRANT ALL ON SEQUENCES TO scheduler_user;

-- ============================================================================
-- TaskRunner Module
-- ============================================================================
GRANT USAGE ON SCHEMA taskrunner TO taskrunner_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA taskrunner TO taskrunner_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA taskrunner TO taskrunner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA taskrunner GRANT ALL ON TABLES TO taskrunner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA taskrunner GRANT ALL ON SEQUENCES TO taskrunner_user;

-- ============================================================================
-- Policy Module
-- ============================================================================
GRANT USAGE ON SCHEMA policy TO policy_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA policy TO policy_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA policy TO policy_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA policy GRANT ALL ON TABLES TO policy_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA policy GRANT ALL ON SEQUENCES TO policy_user;

-- ============================================================================
-- Unknowns Module
-- ============================================================================
GRANT USAGE ON SCHEMA unknowns TO unknowns_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA unknowns TO unknowns_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA unknowns TO unknowns_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA unknowns GRANT ALL ON TABLES TO unknowns_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA unknowns GRANT ALL ON SEQUENCES TO unknowns_user;

-- ============================================================================
-- Attestor Module (uses 'proofchain' and 'attestor' schemas)
-- ============================================================================
GRANT USAGE ON SCHEMA proofchain TO attestor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA proofchain TO attestor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA proofchain TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA proofchain GRANT ALL ON TABLES TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA proofchain GRANT ALL ON SEQUENCES TO attestor_user;

GRANT USAGE ON SCHEMA attestor TO attestor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA attestor TO attestor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA attestor TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA attestor GRANT ALL ON TABLES TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA attestor GRANT ALL ON SEQUENCES TO attestor_user;

-- ============================================================================
-- Signer Module
-- ============================================================================
GRANT USAGE ON SCHEMA signer TO signer_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA signer TO signer_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA signer TO signer_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signer GRANT ALL ON TABLES TO signer_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signer GRANT ALL ON SEQUENCES TO signer_user;

-- ============================================================================
-- Notify Module
-- ============================================================================
GRANT USAGE ON SCHEMA notify TO notify_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA notify TO notify_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA notify TO notify_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA notify GRANT ALL ON TABLES TO notify_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA notify GRANT ALL ON SEQUENCES TO notify_user;

-- ============================================================================
-- Signals Module
-- ============================================================================
GRANT USAGE ON SCHEMA signals TO signals_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA signals TO signals_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA signals TO signals_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signals GRANT ALL ON TABLES TO signals_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signals GRANT ALL ON SEQUENCES TO signals_user;

-- ============================================================================
-- Packs Registry Module
-- ============================================================================
GRANT USAGE ON SCHEMA packs TO packs_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA packs TO packs_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA packs TO packs_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA packs GRANT ALL ON TABLES TO packs_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA packs GRANT ALL ON SEQUENCES TO packs_user;

-- ============================================================================
-- Verification
-- ============================================================================
DO $$
BEGIN
    RAISE NOTICE 'Per-module permissions granted:';
    RAISE NOTICE '  authority_user  -> authority';
    RAISE NOTICE '  concelier_user  -> vuln';
    RAISE NOTICE '  excititor_user  -> vex';
    RAISE NOTICE '  scanner_user    -> scanner';
    RAISE NOTICE '  scheduler_user  -> scheduler';
    RAISE NOTICE '  taskrunner_user -> taskrunner';
    RAISE NOTICE '  policy_user     -> policy';
    RAISE NOTICE '  unknowns_user   -> unknowns';
    RAISE NOTICE '  attestor_user   -> proofchain, attestor';
    RAISE NOTICE '  signer_user     -> signer';
    RAISE NOTICE '  notify_user     -> notify';
    RAISE NOTICE '  signals_user    -> signals';
    RAISE NOTICE '  packs_user      -> packs';
END $$;
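To spot-check the per-module credentials, connect as one of the users created in 02-create-users.sql; a sketch using the scanner module (the `_dev` passwords are development-only; host and port are assumptions for your setup):

```bash
PGPASSWORD=scanner_dev psql -h localhost -p 5433 -U scanner_user -d stellaops_test \
  -c "SET search_path TO scanner; SELECT current_user, current_schema;"
```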
318
devops/docker/repro-builders/BUILD_ENVIRONMENT.md
Normal file
@@ -0,0 +1,318 @@
# Reproducible Build Environment Requirements

**Sprint:** SPRINT_1227_0002_0001_LB_reproducible_builders
**Task:** T12 — Document build environment requirements

---

## Overview

This document describes the environment requirements for running reproducible distro package builds. The build system supports the Alpine, Debian, and RHEL package ecosystems.

---

## Hardware Requirements

### Minimum Requirements

| Resource | Minimum | Recommended |
|----------|---------|-------------|
| CPU | 4 cores | 8+ cores |
| RAM | 8 GB | 16+ GB |
| Disk | 50 GB SSD | 200+ GB NVMe |
| Network | 10 Mbps | 100+ Mbps |

### Storage Breakdown

| Directory | Purpose | Estimated Size |
|-----------|---------|----------------|
| `/var/lib/docker` | Docker images and containers | 30 GB |
| `/var/cache/stellaops/builds` | Build cache | 50 GB |
| `/var/cache/stellaops/sources` | Source package cache | 20 GB |
| `/var/cache/stellaops/artifacts` | Output artifacts | 50 GB |

---

## Software Requirements

### Host System

| Component | Version | Purpose |
|-----------|---------|---------|
| Docker | 24.0+ | Container runtime |
| Docker Compose | 2.20+ | Multi-container orchestration |
| .NET SDK | 10.0 | Worker service runtime |
| objdump | binutils 2.40+ | Binary analysis |
| readelf | binutils 2.40+ | ELF parsing |

### Container Images

The build system uses the following base images:

| Builder | Base Image | Tags |
|---------|------------|------|
| Alpine | `alpine` | `3.19`, `3.18` |
| Debian | `debian` | `bookworm`, `bullseye` |
| RHEL | `almalinux` | `9`, `8` |

---

## Environment Variables

### Required Variables

```bash
# Build configuration
export STELLAOPS_BUILD_CACHE=/var/cache/stellaops/builds
export STELLAOPS_SOURCE_CACHE=/var/cache/stellaops/sources
export STELLAOPS_ARTIFACT_DIR=/var/cache/stellaops/artifacts

# Reproducibility settings
export TZ=UTC
export LC_ALL=C.UTF-8
# Pin to a fixed point in time (e.g. the last commit) rather than "now",
# or two runs of the same build will not be bit-identical:
export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)

# Docker settings
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
```

### Optional Variables

```bash
# Parallel build settings
export STELLAOPS_MAX_CONCURRENT_BUILDS=2
export STELLAOPS_BUILD_TIMEOUT=1800  # 30 minutes

# Proxy settings (if behind a corporate firewall)
export HTTP_PROXY=http://proxy:8080
export HTTPS_PROXY=http://proxy:8080
export NO_PROXY=localhost,127.0.0.1
```

---

## Builder-Specific Requirements

### Alpine Builder

```dockerfile
# Required packages in builder image
apk add --no-cache \
    alpine-sdk \
    abuild \
    sudo \
    binutils \
    elfutils \
    build-base
```

**Normalization requirements:**
- `SOURCE_DATE_EPOCH` must be set
- Use `abuild -r` with reproducible flags
- Archive ordering: `--sort=name`

### Debian Builder

```dockerfile
# Required packages in builder image
apt-get install -y \
    build-essential \
    devscripts \
    dpkg-dev \
    fakeroot \
    binutils \
    elfutils \
    debhelper
```

**Normalization requirements:**
- Use `dpkg-buildpackage -b` with reproducible flags
- Set `DEB_BUILD_OPTIONS=reproducible`
- Apply `dh_strip_nondeterminism` post-build (see the sketch below)
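
A sketch of the post-build normalization pass for a package built outside debhelper (file and path names are illustrative):

```bash
# Unpack, strip nondeterministic bits, and repack a built .deb.
dpkg-deb -R pkg.deb pkg-root
strip-nondeterminism --timestamp "${SOURCE_DATE_EPOCH}" pkg-root/usr/lib/*.so* || true
dpkg-deb -b pkg-root pkg-normalized.deb
```
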

### RHEL Builder

```dockerfile
# Required packages in builder image (AlmaLinux 9)
dnf install -y \
    mock \
    rpm-build \
    rpmdevtools \
    binutils \
    elfutils
```

**Normalization requirements:**
- Use mock with `--enable-network=false`
- Configure mock for deterministic builds
- Set `%_buildhost stellaops.build`

---

## Compiler Flags for Reproducibility

### C/C++ Flags

```bash
CFLAGS="-fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build -gno-record-gcc-switches"
CXXFLAGS="${CFLAGS}"
LDFLAGS="-Wl,--build-id=sha1"
```

### Additional Flags

```bash
# Disable date/time macros
-Wdate-time -Werror=date-time

# Normalize paths
-fmacro-prefix-map=$(pwd)=/build
-ffile-prefix-map=$(pwd)=/build
```

---

## Archive Determinism

### ar (Static Libraries)

```bash
# Use deterministic mode (the D modifier zeroes timestamps, UIDs, and GIDs)
ar rcsD libfoo.a *.o

# Or set it for make's implicit archive rule
export ARFLAGS=rcsD
```

### tar (Package Archives)

```bash
# Deterministic tar creation
tar --sort=name \
    --mtime="@${SOURCE_DATE_EPOCH}" \
    --owner=0 \
    --group=0 \
    --numeric-owner \
    -cf archive.tar directory/
```

### zip/gzip

```bash
# Use gzip -n to avoid embedding the timestamp
gzip -n file

# Use a fixed mtime for consistent timestamps
touch -d "@${SOURCE_DATE_EPOCH}" file
```

---

## Network Requirements

### Outbound Access Required

| Destination | Port | Purpose |
|-------------|------|---------|
| `dl-cdn.alpinelinux.org` | 443 | Alpine packages |
| `deb.debian.org` | 443 | Debian packages |
| `vault.centos.org` | 443 | CentOS/RHEL sources |
| `mirror.almalinux.org` | 443 | AlmaLinux packages |
| `git.*.org` | 443 | Upstream source repos |

### Air-Gapped Operation

For air-gapped environments:

1. Pre-download source packages
2. Configure local mirrors
3. Set `STELLAOPS_OFFLINE_MODE=true`
4. Use cached build artifacts (see the sketch below)
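
A pre-seeding sketch for steps 1 and 2 (hostnames and image tags are illustrative):

```bash
# On a connected host: cache base images and sources, then ship them across.
docker pull alpine:3.19 && docker save alpine:3.19 -o /tmp/alpine-3.19.tar
rsync -a /var/cache/stellaops/sources/ offline-host:/var/cache/stellaops/sources/
# On the air-gapped host:
docker load -i /tmp/alpine-3.19.tar
export STELLAOPS_OFFLINE_MODE=true
```
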

---

## Security Considerations

### Container Isolation

- Builders run in unprivileged containers
- No host network access
- Read-only source mounts
- Ephemeral containers (destroyed after build)

### Signing Keys

- Build outputs are unsigned by default
- DSSE signing requires configured key material
- Keys stored in `/etc/stellaops/keys/` or an HSM

### Build Verification

```bash
# Verify reproducibility
sha256sum build1/output/* > checksums1.txt
sha256sum build2/output/* > checksums2.txt
diff checksums1.txt checksums2.txt
```

---

## Troubleshooting

### Common Issues

| Issue | Cause | Resolution |
|-------|-------|------------|
| Build timestamp differs | `SOURCE_DATE_EPOCH` not set | Export the variable before building |
| Path in debug info | Missing `-fdebug-prefix-map` | Add it to CFLAGS |
| ar archive differs | Deterministic mode disabled | Use the `D` modifier (`ar rcsD`) |
| tar ordering differs | Random file order | Use `--sort=name` |

### Debugging Reproducibility

```bash
# Compare two builds byte-by-byte
diffoscope build1/output/libfoo.so build2/output/libfoo.so

# Check for timestamp differences
objdump -t binary | grep -i time

# Verify no random UUIDs
strings binary | grep -E '[0-9a-f]{8}-[0-9a-f]{4}'
```

---

## Monitoring and Metrics

### Key Metrics

| Metric | Description | Target |
|--------|-------------|--------|
| `build_reproducibility_rate` | % of reproducible builds | > 95% |
| `build_duration_seconds` | Time to complete a build | < 1800 |
| `fingerprint_extraction_rate` | Functions per second | > 1000 |
| `build_cache_hit_rate` | Cache effectiveness | > 80% |

### Health Checks

```bash
# Verify builder containers are ready
docker ps --filter "name=repro-builder"

# Check cache disk usage
df -h /var/cache/stellaops/

# Verify build queue
curl -s http://localhost:9090/metrics | grep stellaops_build
```

---

## References

- [Reproducible Builds](https://reproducible-builds.org/)
- [Debian Reproducible Builds](https://wiki.debian.org/ReproducibleBuilds)
- [Alpine Reproducibility](https://wiki.alpinelinux.org/wiki/Reproducible_Builds)
- [RPM Reproducibility](https://rpm-software-management.github.io/rpm/manual/reproducibility.html)
62
devops/docker/repro-builders/alpine/Dockerfile
Normal file
@@ -0,0 +1,62 @@
# Alpine Reproducible Builder
# Creates deterministic builds of Alpine packages for fingerprint diffing
#
# Usage:
#   docker build -t repro-builder-alpine:3.20 --build-arg RELEASE=3.20 .
#   docker run -v ./output:/output repro-builder-alpine:3.20 build openssl 3.0.7-r0

ARG RELEASE=3.20
FROM alpine:${RELEASE}

ARG RELEASE
ENV ALPINE_RELEASE=${RELEASE}

# Install build tools and dependencies
RUN apk add --no-cache \
        alpine-sdk \
        abuild \
        sudo \
        git \
        curl \
        binutils \
        elfutils \
        coreutils \
        tar \
        gzip \
        xz \
        patch \
        diffutils \
        file \
    && rm -rf /var/cache/apk/*

# Create build user (abuild requires non-root)
RUN adduser -D -G abuild builder \
    && echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers \
    && mkdir -p /var/cache/distfiles \
    && chown -R builder:abuild /var/cache/distfiles

# Setup abuild
USER builder
WORKDIR /home/builder

# Generate abuild keys
RUN abuild-keygen -a -i -n

# Copy normalization and build scripts
COPY --chown=builder:abuild scripts/normalize.sh /usr/local/bin/normalize.sh
COPY --chown=builder:abuild scripts/build.sh /usr/local/bin/build.sh
COPY --chown=builder:abuild scripts/extract-functions.sh /usr/local/bin/extract-functions.sh

RUN chmod +x /usr/local/bin/*.sh

# Environment for reproducibility
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8

# Build output directory
VOLUME /output
WORKDIR /build

ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]
226
devops/docker/repro-builders/alpine/scripts/build.sh
Normal file
@@ -0,0 +1,226 @@
#!/bin/sh
# Alpine Reproducible Build Script
# Builds packages with deterministic settings for fingerprint generation
#
# Usage: build.sh [build|diff] <package> <version> [patch_url...]
#
# Examples:
#   build.sh build openssl 3.0.7-r0
#   build.sh diff openssl 3.0.7-r0 3.0.8-r0
#   build.sh build openssl 3.0.7-r0 https://patch.url/CVE-2023-1234.patch

set -eu

COMMAND="${1:-help}"
PACKAGE="${2:-}"
VERSION="${3:-}"
OUTPUT_DIR="${OUTPUT_DIR:-/output}"

log() {
    echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*" >&2
}

show_help() {
    cat <<EOF
Alpine Reproducible Builder

Usage:
  build.sh build <package> <version> [patch_urls...]
      Build a package with reproducible settings

  build.sh diff <package> <vuln_version> <patched_version>
      Build two versions and compute fingerprint diff

  build.sh --help
      Show this help message

Environment:
  SOURCE_DATE_EPOCH   Override timestamp (extracted from APKBUILD if not set)
  OUTPUT_DIR          Output directory (default: /output)
  CFLAGS              Additional compiler flags
  LDFLAGS             Additional linker flags

Examples:
  build.sh build openssl 3.0.7-r0
  build.sh build curl 8.1.0-r0 https://patch/CVE-2023-1234.patch
  build.sh diff openssl 3.0.7-r0 3.0.8-r0
EOF
}

setup_reproducible_env() {
    local pkg="$1"
    local ver="$2"

    # Extract SOURCE_DATE_EPOCH from the APKBUILD mtime if not set
    if [ -z "${SOURCE_DATE_EPOCH:-}" ]; then
        if [ -f "aports/main/$pkg/APKBUILD" ]; then
            SOURCE_DATE_EPOCH=$(stat -c %Y "aports/main/$pkg/APKBUILD" 2>/dev/null || date +%s)
        else
            SOURCE_DATE_EPOCH=$(date +%s)
        fi
        export SOURCE_DATE_EPOCH
    fi

    log "SOURCE_DATE_EPOCH=$SOURCE_DATE_EPOCH"

    # Reproducible compiler flags
    export CFLAGS="${CFLAGS:-} -fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build"
    export CXXFLAGS="${CXXFLAGS:-} ${CFLAGS}"
    export LDFLAGS="${LDFLAGS:-}"

    # Locale for deterministic sorting
    export LC_ALL=C.UTF-8
    export TZ=UTC
}

fetch_source() {
    local pkg="$1"
    local ver="$2"

    log "Fetching source for $pkg-$ver"

    # Clone aports if needed
    if [ ! -d "aports" ]; then
        git clone --depth 1 https://gitlab.alpinelinux.org/alpine/aports.git
    fi

    # Find the package in the known repositories
    local pkg_dir=""
    for repo in main community testing; do
        if [ -d "aports/$repo/$pkg" ]; then
            pkg_dir="aports/$repo/$pkg"
            break
        fi
    done

    if [ -z "$pkg_dir" ]; then
        log "ERROR: Package $pkg not found in aports"
        return 1
    fi

    # Checkout specific version if needed
    cd "$pkg_dir"
    abuild fetch
    abuild unpack
}

apply_patches() {
    local src_dir="$1"
    shift

    for patch_url in "$@"; do
        log "Applying patch: $patch_url"
        curl -sSL "$patch_url" | patch -d "$src_dir" -p1
    done
}

build_package() {
    local pkg="$1"
    local ver="$2"
    shift 2
    local patches="$*"

    log "Building $pkg-$ver"

    setup_reproducible_env "$pkg" "$ver"

    cd /build
    fetch_source "$pkg" "$ver"

    if [ -n "$patches" ]; then
        # Assumes a single unpacked source directory; the glob must stay unquoted
        apply_patches src/"$pkg"-* $patches
    fi

    # Build with reproducible settings
    abuild -r

    # Copy output
    local out_dir="$OUTPUT_DIR/$pkg-$ver"
    mkdir -p "$out_dir"
    cp -r ~/packages/*/*.apk "$out_dir/" 2>/dev/null || true

    # Extract binaries and fingerprints
    for apk in "$out_dir"/*.apk; do
        [ -f "$apk" ] || continue
        local apk_name=$(basename "$apk" .apk)
        mkdir -p "$out_dir/extracted/$apk_name"
        tar -xzf "$apk" -C "$out_dir/extracted/$apk_name"

        # Extract function fingerprints
        /usr/local/bin/extract-functions.sh "$out_dir/extracted/$apk_name" > "$out_dir/$apk_name.functions.json"
    done

    log "Build complete: $out_dir"
}

diff_versions() {
    local pkg="$1"
    local vuln_ver="$2"
    local patched_ver="$3"

    log "Building and diffing $pkg: $vuln_ver vs $patched_ver"

    # Build vulnerable version
    build_package "$pkg" "$vuln_ver"

    # Build patched version
    build_package "$pkg" "$patched_ver"

    # Compute diff
    local diff_out="$OUTPUT_DIR/$pkg-diff-$vuln_ver-vs-$patched_ver.json"

    # Simple diff of function fingerprints
    # (assumes exactly one .functions.json per version directory)
    jq -s '
        .[0] as $vuln |
        .[1] as $patched |
        {
          package: "'"$pkg"'",
          vulnerable_version: "'"$vuln_ver"'",
          patched_version: "'"$patched_ver"'",
          vulnerable_functions: ($vuln | length),
          patched_functions: ($patched | length),
          added: [($patched[] | select(.name as $n | ($vuln | map(.name) | index($n)) == null))],
          removed: [($vuln[] | select(.name as $n | ($patched | map(.name) | index($n)) == null))],
          modified: [
            $vuln[] | .name as $n | .hash as $h |
            ($patched[] | select(.name == $n and .hash != $h)) |
            {name: $n, vuln_hash: $h, patched_hash: .hash}
          ]
        }
    ' \
        "$OUTPUT_DIR/$pkg-$vuln_ver"/*.functions.json \
        "$OUTPUT_DIR/$pkg-$patched_ver"/*.functions.json \
        > "$diff_out"

    log "Diff complete: $diff_out"
}

case "$COMMAND" in
    build)
        if [ -z "$PACKAGE" ] || [ -z "$VERSION" ]; then
            log "ERROR: Package and version required"
            show_help
            exit 1
        fi
        shift 3  # drop command, package, and version; the rest are patch URLs
        build_package "$PACKAGE" "$VERSION" "$@"
        ;;
    diff)
        PATCHED_VERSION="${4:-}"
        if [ -z "$PACKAGE" ] || [ -z "$VERSION" ] || [ -z "$PATCHED_VERSION" ]; then
            log "ERROR: Package, vulnerable version, and patched version required"
            show_help
            exit 1
        fi
        diff_versions "$PACKAGE" "$VERSION" "$PATCHED_VERSION"
        ;;
    --help|help)
        show_help
        ;;
    *)
        log "ERROR: Unknown command: $COMMAND"
        show_help
        exit 1
        ;;
esac
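For reference, a typical end-to-end invocation of the diff mode (image tag and versions as in the header comments; the output filename follows the naming used in diff_versions):

```bash
docker run --rm -v "$PWD/output:/output" repro-builder-alpine:3.20 \
  diff openssl 3.0.7-r0 3.0.8-r0
# Count functions whose opcode hash changed between the two builds.
jq '.modified | length' output/openssl-diff-3.0.7-r0-vs-3.0.8-r0.json
```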
71
devops/docker/repro-builders/alpine/scripts/extract-functions.sh
Normal file
@@ -0,0 +1,71 @@
#!/bin/sh
# Extract function fingerprints from ELF binaries
# Outputs a JSON array with function name, offset, size, and opcode hash
#
# Usage: extract-functions.sh <directory>
#
# Dependencies: objdump, sha256sum, file

set -eu

DIR="${1:-.}"

extract_functions_from_binary() {
    local binary="$1"

    # Skip non-ELF files
    file "$binary" | grep -q "ELF" || return 0

    # Get function symbols from the symbol table.
    # Typical objdump -t line: <addr> <flags> F .text <size> <name>,
    # so match " F .text" and take addr, size, and name positionally.
    objdump -t "$binary" 2>/dev/null | \
        awk '/[[:space:]]F[[:space:]]+\.text[[:space:]]/ {
            size = $(NF - 1); name = $NF
            if (name != "" && size !~ /^0+$/) {
                printf "%s %s %s\n", $1, size, name
            }
        }' | while read -r offset size name; do
        # Skip compiler-generated symbols
        case "$name" in
            __*|_GLOBAL_*|.plt*|.text*|frame_dummy|register_tm_clones|deregister_tm_clones)
                continue
                ;;
        esac

        # Convert hex offset/size to decimal (portable across shells)
        dec_offset=$(printf '%d' "0x$offset")
        dec_size=$(printf '%d' "0x$size")

        # Skip tiny functions (likely padding)
        [ "$dec_size" -lt 16 ] && continue

        # Disassemble just this function and hash its opcode bytes
        hash=$(objdump -d --start-address="$dec_offset" --stop-address="$((dec_offset + dec_size))" "$binary" 2>/dev/null | \
            grep "^[[:space:]]*[0-9a-f]*:" | \
            awk '{for(i=2;i<=NF;i++){if($i~/^[0-9a-f]{2}$/){printf "%s", $i}}}' | \
            sha256sum | cut -d' ' -f1)

        # Output one JSON object per function
        printf '{"name":"%s","offset":"0x%s","size":%d,"hash":"%s"}\n' \
            "$name" "$offset" "$dec_size" "${hash:-unknown}"
    done
}

# Emit all objects from all ELF binaries as a single JSON array.
# Joining with awk avoids the "flag set inside a pipeline subshell is lost"
# bug that breaks comma placement in a plain while loop.
find "$DIR" -type f -executable 2>/dev/null | while read -r binary; do
    extract_functions_from_binary "$binary"
done | awk 'BEGIN { print "[" } NF { if (n++) print ","; print } END { print "]" }'
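The emitted JSON array is convenient to post-process with jq; a sketch that summarizes one extraction (the directory path is illustrative):

```bash
./extract-functions.sh extracted/openssl-3.0.7-r0 \
  | jq 'length as $n | {functions: $n, largest: (sort_by(-.size) | .[:5] | map({name, size}))}'
```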
65
devops/docker/repro-builders/alpine/scripts/normalize.sh
Normal file
@@ -0,0 +1,65 @@
#!/bin/sh
# Normalization scripts for reproducible builds
# Strips non-deterministic content from build artifacts
#
# Usage: normalize.sh <directory>

set -eu

DIR="${1:-.}"

log() {
    echo "[normalize] $*" >&2
}

# Strip timestamps from __DATE__ and __TIME__ macros
strip_date_time() {
    log "Stripping date/time macros..."
    # Already handled by SOURCE_DATE_EPOCH in modern GCC
}

# Normalize build paths
normalize_paths() {
    log "Normalizing build paths..."
    # Handled by -fdebug-prefix-map
}

# Normalize ar archives for deterministic member headers
normalize_archives() {
    log "Normalizing ar archives..."
    find "$DIR" -name "*.a" -type f | while read -r archive; do
        if ar --version 2>&1 | grep -q "GNU ar"; then
            # Rewrite member headers in place with zeroed timestamps/uids/gids
            objcopy --enable-deterministic-archives "$archive" 2>/dev/null || true
        fi
    done
}

# Strip debug sections that contain non-deterministic info
strip_debug_timestamps() {
    log "Stripping debug timestamps..."
    find "$DIR" -type f \( -name "*.o" -o -name "*.so" -o -name "*.so.*" -o -executable \) | while read -r obj; do
        # Check if ELF
        file "$obj" 2>/dev/null | grep -q "ELF" || continue

        # Strip build-id if not needed (we regenerate it)
        # objcopy --remove-section=.note.gnu.build-id "$obj" 2>/dev/null || true

        # Timestamps in DWARF debug info are handled by SOURCE_DATE_EPOCH
        :
    done
}

# Normalize tar archives
normalize_tars() {
    log "Normalizing tar archives..."
    # When creating tars, use:
    #   tar --sort=name --mtime="@${SOURCE_DATE_EPOCH}" --owner=0 --group=0 --numeric-owner
}

# Run all normalizations
normalize_paths
normalize_archives
strip_debug_timestamps

log "Normalization complete"
59
devops/docker/repro-builders/debian/Dockerfile
Normal file
@@ -0,0 +1,59 @@
# Debian Reproducible Builder
# Creates deterministic builds of Debian packages for fingerprint diffing
#
# Usage:
#   docker build -t repro-builder-debian:bookworm --build-arg RELEASE=bookworm .
#   docker run -v ./output:/output repro-builder-debian:bookworm build openssl 3.0.7-1

ARG RELEASE=bookworm
FROM debian:${RELEASE}

ARG RELEASE
ENV DEBIAN_RELEASE=${RELEASE}
ENV DEBIAN_FRONTEND=noninteractive

# Install build tools (sudo is required by build.sh)
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        devscripts \
        dpkg-dev \
        equivs \
        fakeroot \
        sudo \
        git \
        curl \
        ca-certificates \
        binutils \
        elfutils \
        coreutils \
        patch \
        diffutils \
        file \
        jq \
    && rm -rf /var/lib/apt/lists/*

# Create build user
RUN useradd -m -s /bin/bash builder \
    && echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers

USER builder
WORKDIR /home/builder

# Copy scripts
COPY --chown=builder:builder scripts/build.sh /usr/local/bin/build.sh
COPY --chown=builder:builder scripts/extract-functions.sh /usr/local/bin/extract-functions.sh
COPY --chown=builder:builder scripts/normalize.sh /usr/local/bin/normalize.sh

USER root
RUN chmod +x /usr/local/bin/*.sh
USER builder

# Environment for reproducibility
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8

VOLUME /output
WORKDIR /build

ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]
233
devops/docker/repro-builders/debian/scripts/build.sh
Normal file
@@ -0,0 +1,233 @@
#!/bin/bash
# Debian Reproducible Build Script
# Builds packages with deterministic settings for fingerprint generation
#
# Usage: build.sh [build|diff] <package> <version> [patch_url...]

set -euo pipefail

COMMAND="${1:-help}"
PACKAGE="${2:-}"
VERSION="${3:-}"
OUTPUT_DIR="${OUTPUT_DIR:-/output}"

log() {
    echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*" >&2
}

show_help() {
    cat <<EOF
Debian Reproducible Builder

Usage:
  build.sh build <package> <version> [patch_urls...]
      Build a package with reproducible settings

  build.sh diff <package> <vuln_version> <patched_version>
      Build two versions and compute a fingerprint diff

  build.sh --help
      Show this help message

Environment:
  SOURCE_DATE_EPOCH   Override timestamp (extracted from changelog if not set)
  OUTPUT_DIR          Output directory (default: /output)
  DEB_BUILD_OPTIONS   Additional build options

Examples:
  build.sh build openssl 3.0.7-1
  build.sh diff curl 8.1.0-1 8.1.0-2
EOF
}

setup_reproducible_env() {
    local pkg="$1"

    # Reproducible build flags. SOURCE_DATE_EPOCH is deliberately left
    # unset here so fetch_source can derive it from the package changelog;
    # it falls back to the current time there.
    export DEB_BUILD_OPTIONS="${DEB_BUILD_OPTIONS:-} reproducible=+all"

    # Compiler flags for reproducibility
    export CFLAGS="${CFLAGS:-} -fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build"
    export CXXFLAGS="${CXXFLAGS:-} ${CFLAGS}"

    export LC_ALL=C.UTF-8
    export TZ=UTC
}

# Fetches the package source into /build/src and sets the global SRC_DIR to
# the extracted directory. A global is used instead of echoing the path:
# command substitution would capture apt-get's stdout along with the path
# and would discard the SOURCE_DATE_EPOCH export in its subshell.
fetch_source() {
    local pkg="$1"
    local ver="$2"

    log "Fetching source for $pkg=$ver"

    mkdir -p /build/src
    cd /build/src

    # Enable source repositories
    sudo sed -i 's/^# deb-src/deb-src/' /etc/apt/sources.list.d/*.sources 2>/dev/null || \
        sudo sed -i 's/^# deb-src/deb-src/' /etc/apt/sources.list 2>/dev/null || true
    sudo apt-get update >&2

    # Fetch source (keep apt output on stderr)
    if [ -n "$ver" ]; then
        apt-get source "${pkg}=${ver}" >&2 || apt-get source "$pkg" >&2
    else
        apt-get source "$pkg" >&2
    fi

    # Find extracted directory
    local src_dir
    src_dir=$(ls -d "${pkg}"*/ 2>/dev/null | head -1)
    if [ -z "$src_dir" ]; then
        log "ERROR: Could not find source directory for $pkg"
        return 1
    fi

    # Extract SOURCE_DATE_EPOCH from the changelog, falling back to now
    if [ -z "${SOURCE_DATE_EPOCH:-}" ]; then
        if [ -f "$src_dir/debian/changelog" ]; then
            SOURCE_DATE_EPOCH=$(dpkg-parsechangelog -l "$src_dir/debian/changelog" -S Timestamp 2>/dev/null || true)
        fi
        SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH:-$(date +%s)}"
        export SOURCE_DATE_EPOCH
    fi
    log "SOURCE_DATE_EPOCH=$SOURCE_DATE_EPOCH"

    SRC_DIR="$src_dir"
}

install_build_deps() {
    local src_dir="$1"

    log "Installing build dependencies"
    cd "$src_dir"
    sudo apt-get build-dep -y . || true
}

apply_patches() {
    local src_dir="$1"
    shift

    cd "$src_dir"
    for patch_url in "$@"; do
        log "Applying patch: $patch_url"
        curl -sSL "$patch_url" | patch -p1
    done
}

build_package() {
    local pkg="$1"
    local ver="$2"
    shift 2
    # Any remaining positional arguments are patch URLs

    log "Building $pkg version $ver"

    setup_reproducible_env "$pkg"

    cd /build
    fetch_source "$pkg" "$ver"
    local src_dir="$SRC_DIR"

    install_build_deps "$src_dir"

    if [ "$#" -gt 0 ]; then
        apply_patches "$src_dir" "$@"
    fi

    cd "$src_dir"

    # Build with reproducible settings
    dpkg-buildpackage -b -us -uc

    # Copy output (dpkg-buildpackage leaves the .debs in the parent dir)
    local out_dir="$OUTPUT_DIR/$pkg-$ver"
    mkdir -p "$out_dir"
    cp -r /build/src/*.deb "$out_dir/" 2>/dev/null || true

    # Extract and fingerprint
    for deb in "$out_dir"/*.deb; do
        [ -f "$deb" ] || continue
        local deb_name
        deb_name=$(basename "$deb" .deb)
        mkdir -p "$out_dir/extracted/$deb_name"
        dpkg-deb -x "$deb" "$out_dir/extracted/$deb_name"

        # Extract function fingerprints
        /usr/local/bin/extract-functions.sh "$out_dir/extracted/$deb_name" > "$out_dir/$deb_name.functions.json"
    done

    log "Build complete: $out_dir"
}

diff_versions() {
    local pkg="$1"
    local vuln_ver="$2"
    local patched_ver="$3"

    log "Building and diffing $pkg: $vuln_ver vs $patched_ver"

    # Build vulnerable version
    build_package "$pkg" "$vuln_ver"

    # Clean build environment
    rm -rf /build/src/*

    # Build patched version
    build_package "$pkg" "$patched_ver"

    # Compute diff. Note: jq -s slurps the two globs as inputs .[0]/.[1],
    # so this assumes one functions.json per version; with multiple .debs
    # only the first file of each version is compared.
    local diff_out="$OUTPUT_DIR/$pkg-diff-$vuln_ver-vs-$patched_ver.json"

    jq -s '
        .[0] as $vuln |
        .[1] as $patched |
        {
            package: "'"$pkg"'",
            vulnerable_version: "'"$vuln_ver"'",
            patched_version: "'"$patched_ver"'",
            vulnerable_functions: ($vuln | length),
            patched_functions: ($patched | length),
            added: [($patched[] | select(.name as $n | ($vuln | map(.name) | index($n)) == null))],
            removed: [($vuln[] | select(.name as $n | ($patched | map(.name) | index($n)) == null))],
            modified: [
                $vuln[] | .name as $n | .hash as $h |
                ($patched[] | select(.name == $n and .hash != $h)) |
                {name: $n, vuln_hash: $h, patched_hash: .hash}
            ]
        }
    ' \
        "$OUTPUT_DIR/$pkg-$vuln_ver"/*.functions.json \
        "$OUTPUT_DIR/$pkg-$patched_ver"/*.functions.json \
        > "$diff_out" 2>/dev/null || log "Warning: Could not compute diff"

    log "Diff complete: $diff_out"
}
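
# The resulting diff JSON has roughly this shape (values illustrative):
#   {
#     "package": "curl",
#     "vulnerable_version": "8.1.0-1",
#     "patched_version": "8.1.0-2",
#     "vulnerable_functions": 412,
#     "patched_functions": 413,
#     "added": [...],
#     "removed": [...],
#     "modified": [{"name": "...", "vuln_hash": "...", "patched_hash": "..."}]
#   }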

case "$COMMAND" in
    build)
        if [ -z "$PACKAGE" ]; then
            log "ERROR: Package required"
            show_help
            exit 1
        fi
        shift 2                         # remove command and package
        [ -n "${VERSION:-}" ] && shift  # remove version if present
        build_package "$PACKAGE" "${VERSION:-}" "$@"
        ;;
    diff)
        PATCHED_VERSION="${4:-}"
        if [ -z "$PACKAGE" ] || [ -z "$VERSION" ] || [ -z "$PATCHED_VERSION" ]; then
            log "ERROR: Package, vulnerable version, and patched version required"
            show_help
            exit 1
        fi
        diff_versions "$PACKAGE" "$VERSION" "$PATCHED_VERSION"
        ;;
    --help|help)
        show_help
        ;;
    *)
        log "ERROR: Unknown command: $COMMAND"
        show_help
        exit 1
        ;;
esac
67
devops/docker/repro-builders/debian/scripts/extract-functions.sh
Normal file
@@ -0,0 +1,67 @@
#!/bin/bash
# Extract function fingerprints from ELF binaries
# Outputs a JSON array with function name, offset, size, and hash

set -euo pipefail

DIR="${1:-.}"

extract_functions_from_binary() {
    local binary="$1"

    # Skip non-ELF files
    file "$binary" 2>/dev/null | grep -q "ELF" || return 0

    # Get function symbols with objdump. Hex-to-decimal conversion and the
    # minimum-size filter are done in the shell below: strtonum() is a gawk
    # extension and is unavailable in mawk, Debian's default awk.
    objdump -t "$binary" 2>/dev/null | \
        awk '/\.text.*[0-9a-f]+.*F/ {
            gsub(/\*.*\*/, "", $1)
            if ($5 != "" && length($4) > 0) {
                print $1, $4, $NF
            }
        }' | while read -r offset size name; do
        # Skip compiler-generated symbols
        case "$name" in
            __*|_GLOBAL_*|.plt*|.text*|frame_dummy|register_tm_clones|deregister_tm_clones|_start|_init|_fini)
                continue
                ;;
        esac

        # Convert hex size and skip tiny functions (< 16 bytes)
        dec_size=$((16#$size))
        [ "$dec_size" -ge 16 ] || continue

        # Compute hash of function bytes
        local hash=$(objdump -d --start-address="0x$offset" --stop-address="$((16#$offset + dec_size))" "$binary" 2>/dev/null | \
            grep -E "^[[:space:]]*[0-9a-f]+:" | \
            awk '{for(i=2;i<=NF;i++){if($i~/^[0-9a-f]{2}$/){printf "%s", $i}}}' | \
            sha256sum | cut -d' ' -f1)

        [ -n "$hash" ] || hash="unknown"

        printf '{"name":"%s","offset":"0x%s","size":%d,"hash":"%s"}\n' \
            "$name" "$offset" "$dec_size" "$hash"
    done
}

# Output JSON array. The comma join is done with awk: a `first` flag
# toggled inside the piped while loop would be lost in its subshell.
echo "["
find "$DIR" -type f \( -executable -o -name "*.so" -o -name "*.so.*" \) 2>/dev/null | while read -r binary; do
    file "$binary" 2>/dev/null | grep -q "ELF" || continue

    extract_functions_from_binary "$binary"
done | awk 'NF { if (n++) printf ","; print }'
echo "]"
29
devops/docker/repro-builders/debian/scripts/normalize.sh
Normal file
@@ -0,0 +1,29 @@
#!/bin/bash
# Normalization script for Debian reproducible builds

set -euo pipefail

DIR="${1:-.}"

log() {
    echo "[normalize] $*" >&2
}

normalize_archives() {
    log "Normalizing ar archives..."
    find "$DIR" -name "*.a" -type f | while read -r archive; do
        if ar --version 2>&1 | grep -q "GNU ar"; then
            # Rebuild the symbol index in deterministic mode (zeroed
            # timestamps/uids/gids); re-adding the archive to a .tmp copy,
            # as before, would nest it inside itself.
            ar -sD "$archive" 2>/dev/null || true
        fi
    done
}

strip_debug_timestamps() {
    log "Stripping debug timestamps..."
    # Handled by SOURCE_DATE_EPOCH and DEB_BUILD_OPTIONS
}

normalize_archives
strip_debug_timestamps

log "Normalization complete"
85
devops/docker/repro-builders/rhel/Dockerfile
Normal file
@@ -0,0 +1,85 @@
# RHEL-compatible Reproducible Build Container
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
# Task: T3 - RHEL builder with mock-based package building
#
# Uses AlmaLinux 9 as a RHEL-compatible base for open source builds.
# Production RHEL builds require a valid subscription.

ARG BASE_IMAGE=almalinux:9
FROM ${BASE_IMAGE} AS builder

LABEL org.opencontainers.image.title="StellaOps RHEL Reproducible Builder"
LABEL org.opencontainers.image.description="RHEL-compatible reproducible build environment for security patching"
LABEL org.opencontainers.image.vendor="StellaOps"
LABEL org.opencontainers.image.source="https://github.com/stellaops/stellaops"

# Install build dependencies
RUN dnf -y update && \
    dnf -y install \
        # Core build tools
        rpm-build \
        rpmdevtools \
        rpmlint \
        mock \
        # Compiler toolchain
        gcc \
        gcc-c++ \
        make \
        cmake \
        autoconf \
        automake \
        libtool \
        # Package management
        dnf-plugins-core \
        yum-utils \
        createrepo_c \
        # Binary analysis
        binutils \
        elfutils \
        gdb \
        # Reproducibility
        diffoscope \
        # Source control
        git \
        patch \
        # Utilities
        wget \
        curl \
        jq \
        python3 \
        python3-pip && \
    dnf clean all

# Create mock user (mock requires non-root)
RUN useradd -m mockbuild && \
    usermod -a -G mock mockbuild

# Set up rpmbuild directories
RUN mkdir -p /build/{BUILD,RPMS,SOURCES,SPECS,SRPMS} && \
    chown -R mockbuild:mockbuild /build

# Copy build scripts
COPY scripts/build.sh /usr/local/bin/build.sh
COPY scripts/extract-functions.sh /usr/local/bin/extract-functions.sh
COPY scripts/normalize.sh /usr/local/bin/normalize.sh
COPY scripts/mock-build.sh /usr/local/bin/mock-build.sh

RUN chmod +x /usr/local/bin/*.sh

# Set reproducibility environment
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8

# Deterministic compiler flags
ENV CFLAGS="-fno-record-gcc-switches -fdebug-prefix-map=/build=/buildroot -O2 -g"
ENV CXXFLAGS="${CFLAGS}"

# Mock configuration for reproducible builds
COPY mock/stellaops-repro.cfg /etc/mock/stellaops-repro.cfg

WORKDIR /build
USER mockbuild

ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]
71
devops/docker/repro-builders/rhel/mock/stellaops-repro.cfg
Normal file
@@ -0,0 +1,71 @@
# StellaOps Reproducible Build Mock Configuration
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Mock configuration optimized for reproducible RHEL/AlmaLinux builds

config_opts['root'] = 'stellaops-repro'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el9'
config_opts['releasever'] = '9'

# Reproducibility settings
config_opts['use_host_resolv'] = False
config_opts['rpmbuild_networking'] = False
config_opts['cleanup_on_success'] = True
config_opts['cleanup_on_failure'] = True

# Deterministic build settings
config_opts['macros']['SOURCE_DATE_EPOCH'] = '%{getenv:SOURCE_DATE_EPOCH}'
config_opts['macros']['_buildhost'] = 'stellaops.build'
config_opts['macros']['debug_package'] = '%{nil}'
config_opts['macros']['_default_patch_fuzz'] = '0'

# Compiler flags for reproducibility
config_opts['macros']['optflags'] = '-O2 -g -fno-record-gcc-switches -fdebug-prefix-map=%{_builddir}=/buildroot'

# Environment normalization
config_opts['environment']['TZ'] = 'UTC'
config_opts['environment']['LC_ALL'] = 'C.UTF-8'
config_opts['environment']['LANG'] = 'C.UTF-8'

# Use AlmaLinux as RHEL-compatible base
config_opts['dnf.conf'] = """
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
metadata_expire=0
mdpolicy=group:primary
best=1
install_weak_deps=0
protected_packages=
module_platform_id=platform:el9
user_agent={{ user_agent }}

[baseos]
name=AlmaLinux $releasever - BaseOS
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/baseos
enabled=1
gpgcheck=0

[appstream]
name=AlmaLinux $releasever - AppStream
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/appstream
enabled=1
gpgcheck=0

[crb]
name=AlmaLinux $releasever - CRB
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/crb
enabled=1
gpgcheck=0
"""
213
devops/docker/repro-builders/rhel/scripts/build.sh
Normal file
@@ -0,0 +1,213 @@
#!/bin/bash
# RHEL Reproducible Build Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Usage: build.sh --srpm <url_or_path> [--patch <patch_file>] [--output <dir>]

set -euo pipefail

# Default values
OUTPUT_DIR="/build/output"
WORK_DIR="/build/work"
SRPM=""
PATCH_FILE=""
SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH:-}"

usage() {
    cat <<EOF
RHEL Reproducible Build Script

Usage: $0 [OPTIONS]

Options:
  --srpm <path>        Path or URL to SRPM file (required)
  --patch <path>       Path to security patch file (optional)
  --output <dir>       Output directory (default: /build/output)
  --epoch <timestamp>  SOURCE_DATE_EPOCH value (default: from changelog)
  --help               Show this help message

Examples:
  $0 --srpm openssl-3.0.7-1.el9.src.rpm --patch CVE-2023-0286.patch
  $0 --srpm https://mirror/srpms/curl-8.0.1-1.el9.src.rpm

EOF
    exit 0
}

log() {
    echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*"
}

error() {
    log "ERROR: $*" >&2
    exit 1
}

# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        --srpm)
            SRPM="$2"
            shift 2
            ;;
        --patch)
            PATCH_FILE="$2"
            shift 2
            ;;
        --output)
            OUTPUT_DIR="$2"
            shift 2
            ;;
        --epoch)
            SOURCE_DATE_EPOCH="$2"
            shift 2
            ;;
        --help)
            usage
            ;;
        *)
            error "Unknown option: $1"
            ;;
    esac
done

[[ -z "${SRPM}" ]] && error "SRPM path required. Use --srpm <path>"

# Create directories
mkdir -p "${OUTPUT_DIR}" "${WORK_DIR}"
cd "${WORK_DIR}"

log "Starting RHEL reproducible build"
log "SRPM: ${SRPM}"

# Download or copy SRPM
if [[ "${SRPM}" =~ ^https?:// ]]; then
    log "Downloading SRPM..."
    curl -fsSL -o source.src.rpm "${SRPM}"
    SRPM="source.src.rpm"
elif [[ ! -f "${SRPM}" ]]; then
    error "SRPM file not found: ${SRPM}"
fi

# Unpack SRPM. The cpio payload is flat, so move everything except the
# spec (and the SRPM itself) into SOURCES/, where the later copy into
# ~/rpmbuild/SOURCES expects it.
log "Unpacking SRPM..."
mkdir -p SOURCES
rpm2cpio "${SRPM}" | cpio -idmv
find . -maxdepth 1 -type f ! -name '*.spec' ! -name "$(basename "${SRPM}")" -exec mv {} SOURCES/ \;

# Extract SOURCE_DATE_EPOCH from changelog if not provided
if [[ -z "${SOURCE_DATE_EPOCH}" ]]; then
    SPEC_FILE=$(find . -name "*.spec" | head -1)
    if [[ -n "${SPEC_FILE}" ]]; then
        # Extract the date from the first changelog entry; fields 1-4 are
        # e.g. "Mon Jan 01 2025" (the year must be included for date -d)
        CHANGELOG_DATE=$(grep -m1 '^\*' "${SPEC_FILE}" | sed 's/^\* //' | cut -d' ' -f1-4)
        if [[ -n "${CHANGELOG_DATE}" ]]; then
            SOURCE_DATE_EPOCH=$(date -d "${CHANGELOG_DATE}" +%s 2>/dev/null || echo "")
        fi
    fi

    if [[ -z "${SOURCE_DATE_EPOCH}" ]]; then
        SOURCE_DATE_EPOCH=$(date +%s)
        log "Warning: Using current time for SOURCE_DATE_EPOCH"
    fi
fi

export SOURCE_DATE_EPOCH
log "SOURCE_DATE_EPOCH: ${SOURCE_DATE_EPOCH}"

# Apply security patch if provided
if [[ -n "${PATCH_FILE}" ]]; then
    if [[ ! -f "${PATCH_FILE}" ]]; then
        error "Patch file not found: ${PATCH_FILE}"
    fi

    log "Applying security patch: ${PATCH_FILE}"

    # Copy patch to SOURCES
    PATCH_NAME=$(basename "${PATCH_FILE}")
    cp "${PATCH_FILE}" SOURCES/

    # Add patch to spec file
    SPEC_FILE=$(find . -name "*.spec" | head -1)
    if [[ -n "${SPEC_FILE}" ]]; then
        # Find the last Patch line, falling back to the last Source line
        LAST_PATCH=$(grep -n '^Patch[0-9]*:' "${SPEC_FILE}" | tail -1 | cut -d: -f1)
        if [[ -z "${LAST_PATCH}" ]]; then
            LAST_PATCH=$(grep -n '^Source[0-9]*:' "${SPEC_FILE}" | tail -1 | cut -d: -f1)
        fi

        # Calculate next patch number; || true (not || echo 0) because
        # grep -c already prints 0 on no match, and a second "0" would
        # corrupt the arithmetic below. 100+ is reserved for security patches.
        PATCH_NUM=$(grep -c '^Patch[0-9]*:' "${SPEC_FILE}" || true)
        PATCH_NUM=$((PATCH_NUM + 100))

        # Insert patch declaration (guard against specs with neither
        # Patch nor Source lines, where LAST_PATCH stays empty)
        if [[ -n "${LAST_PATCH}" ]]; then
            sed -i "${LAST_PATCH}a Patch${PATCH_NUM}: ${PATCH_NAME}" "${SPEC_FILE}"
        fi

        # Add %patch to %prep if not using autosetup
        if ! grep -q '%autosetup' "${SPEC_FILE}"; then
            PREP_LINE=$(grep -n '^%prep' "${SPEC_FILE}" | head -1 | cut -d: -f1)
            if [[ -n "${PREP_LINE}" ]]; then
                # Find last %patch line in %prep
                LAST_PATCH_LINE=$(sed -n "${PREP_LINE},\$p" "${SPEC_FILE}" | grep -n '^%patch' | tail -1 | cut -d: -f1)
                if [[ -n "${LAST_PATCH_LINE}" ]]; then
                    INSERT_LINE=$((PREP_LINE + LAST_PATCH_LINE))
                else
                    INSERT_LINE=$((PREP_LINE + 1))
                fi
                sed -i "${INSERT_LINE}a %patch${PATCH_NUM} -p1" "${SPEC_FILE}"
            fi
        fi
    fi
fi
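
# After the block above, the spec is expected to contain lines like these
# (illustrative):
#   Patch100: CVE-2023-0286.patch    <- declaration after the last Patch/Source line
#   %patch100 -p1                    <- applied inside %prep (non-%autosetup specs)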

# Set up rpmbuild tree
log "Setting up rpmbuild tree..."
rpmdev-setuptree || true

# Copy sources and spec
cp -r SOURCES/* ~/rpmbuild/SOURCES/ 2>/dev/null || true
cp *.spec ~/rpmbuild/SPECS/ 2>/dev/null || true

# Build using mock for isolation and reproducibility
log "Building with mock (stellaops-repro config)..."
SPEC_FILE=$(find ~/rpmbuild/SPECS -name "*.spec" | head -1)

if [[ -n "${SPEC_FILE}" ]]; then
    # Build SRPM first
    rpmbuild -bs "${SPEC_FILE}"

    BUILT_SRPM=$(find ~/rpmbuild/SRPMS -name "*.src.rpm" | head -1)

    if [[ -n "${BUILT_SRPM}" ]]; then
        # Build with mock
        mock -r stellaops-repro --rebuild "${BUILT_SRPM}" --resultdir="${OUTPUT_DIR}/rpms"
    else
        error "SRPM build failed"
    fi
else
    error "No spec file found"
fi

# Extract function fingerprints from built RPMs
log "Extracting function fingerprints..."
for rpm in "${OUTPUT_DIR}/rpms"/*.rpm; do
    if [[ -f "${rpm}" ]] && [[ ! "${rpm}" =~ \.src\.rpm$ ]]; then
        /usr/local/bin/extract-functions.sh "${rpm}" "${OUTPUT_DIR}/fingerprints"
    fi
done

# Generate build manifest. The file lists are wrapped in literal brackets
# so that an empty find result still yields valid JSON ([]).
log "Generating build manifest..."
cat > "${OUTPUT_DIR}/manifest.json" <<EOF
{
  "builder": "rhel",
  "base_image": "${BASE_IMAGE:-almalinux:9}",
  "source_date_epoch": ${SOURCE_DATE_EPOCH},
  "build_timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
  "srpm": "${SRPM}",
  "patch_applied": $(if [[ -n "${PATCH_FILE}" ]]; then echo "\"${PATCH_FILE}\""; else echo "null"; fi),
  "rpm_outputs": [$(find "${OUTPUT_DIR}/rpms" -name "*.rpm" ! -name "*.src.rpm" -printf '"%f",' 2>/dev/null | sed 's/,$//')],
  "fingerprint_files": [$(find "${OUTPUT_DIR}/fingerprints" -name "*.json" -printf '"%f",' 2>/dev/null | sed 's/,$//')]
}
EOF

log "Build complete. Output in: ${OUTPUT_DIR}"
log "Manifest: ${OUTPUT_DIR}/manifest.json"
73
devops/docker/repro-builders/rhel/scripts/extract-functions.sh
Normal file
@@ -0,0 +1,73 @@
#!/bin/bash
# RHEL Function Extraction Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Extracts function-level fingerprints from RPM packages

set -euo pipefail

RPM_PATH="${1:-}"
OUTPUT_DIR="${2:-/build/fingerprints}"

[[ -z "${RPM_PATH}" ]] && { echo "Usage: $0 <rpm_path> [output_dir]"; exit 1; }
[[ ! -f "${RPM_PATH}" ]] && { echo "RPM not found: ${RPM_PATH}"; exit 1; }

mkdir -p "${OUTPUT_DIR}"

RPM_NAME=$(rpm -qp --qf '%{NAME}' "${RPM_PATH}" 2>/dev/null)
RPM_VERSION=$(rpm -qp --qf '%{VERSION}-%{RELEASE}' "${RPM_PATH}" 2>/dev/null)

WORK_DIR=$(mktemp -d)
trap "rm -rf ${WORK_DIR}" EXIT

cd "${WORK_DIR}"

# Extract RPM contents
rpm2cpio "${RPM_PATH}" | cpio -idmv 2>/dev/null

# Find ELF binaries
find . -type f -exec file {} \; | grep -E 'ELF.*(executable|shared object)' | cut -d: -f1 | while read -r binary; do
    BINARY_NAME=$(basename "${binary}")
    BINARY_PATH="${binary#./}"

    # Get build-id if present
    BUILD_ID=$(readelf -n "${binary}" 2>/dev/null | grep 'Build ID:' | awk '{print $3}' || echo "")

    # Extract function symbols
    OUTPUT_FILE="${OUTPUT_DIR}/${RPM_NAME}_${BINARY_NAME}.json"

    {
        echo "{"
        echo "  \"package\": \"${RPM_NAME}\","
        echo "  \"version\": \"${RPM_VERSION}\","
        echo "  \"binary\": \"${BINARY_PATH}\","
        echo "  \"build_id\": \"${BUILD_ID}\","
        echo "  \"extracted_at\": \"$(date -u '+%Y-%m-%dT%H:%M:%SZ')\","
        echo "  \"functions\": ["

        # Extract function addresses and sizes using nm and objdump
        FIRST=true
        nm -S --defined-only "${binary}" 2>/dev/null | grep -E '^[0-9a-f]+ [0-9a-f]+ [Tt]' | while read -r addr size type name; do
            if [[ "${FIRST}" == "true" ]]; then
                FIRST=false
            else
                echo ","
            fi

            # Calculate function hash from disassembly
            FUNC_HASH=$(objdump -d --start-address=0x${addr} --stop-address=$((0x${addr} + 0x${size})) "${binary}" 2>/dev/null | \
                grep -E '^\s+[0-9a-f]+:' | awk '{$1=""; print}' | sha256sum | cut -d' ' -f1)

            printf '    {"name": "%s", "address": "0x%s", "size": %d, "hash": "%s"}' \
                "${name}" "${addr}" "$((0x${size}))" "${FUNC_HASH}"
        done || true

        echo ""
        echo "  ]"
        echo "}"
    } > "${OUTPUT_FILE}"

    echo "Extracted: ${OUTPUT_FILE}"
done

echo "Function extraction complete for: ${RPM_NAME}"
34
devops/docker/repro-builders/rhel/scripts/mock-build.sh
Normal file
@@ -0,0 +1,34 @@
#!/bin/bash
# RHEL Mock Build Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Builds SRPMs using mock for isolation and reproducibility

set -euo pipefail

SRPM="${1:-}"
RESULT_DIR="${2:-/build/output}"
CONFIG="${3:-stellaops-repro}"

[[ -z "${SRPM}" ]] && { echo "Usage: $0 <srpm> [result_dir] [mock_config]"; exit 1; }
[[ ! -f "${SRPM}" ]] && { echo "SRPM not found: ${SRPM}"; exit 1; }

mkdir -p "${RESULT_DIR}"

echo "Building SRPM with mock: ${SRPM}"
echo "Config: ${CONFIG}"
echo "Output: ${RESULT_DIR}"

# Initialize mock if needed
mock -r "${CONFIG}" --init

# Build with reproducibility settings
mock -r "${CONFIG}" \
    --rebuild "${SRPM}" \
    --resultdir="${RESULT_DIR}" \
    --define "SOURCE_DATE_EPOCH ${SOURCE_DATE_EPOCH:-$(date +%s)}" \
    --define "_buildhost stellaops.build" \
    --define "debug_package %{nil}"

echo "Build complete. Results in: ${RESULT_DIR}"
ls -la "${RESULT_DIR}"
83
devops/docker/repro-builders/rhel/scripts/normalize.sh
Normal file
@@ -0,0 +1,83 @@
#!/bin/bash
# RHEL Build Normalization Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Normalizes the RPM build environment for reproducibility

set -euo pipefail

# Normalize environment
export TZ=UTC
export LC_ALL=C.UTF-8
export LANG=C.UTF-8

# Deterministic compiler flags
export CFLAGS="${CFLAGS:--fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/buildroot -O2 -g}"
export CXXFLAGS="${CXXFLAGS:-${CFLAGS}}"

# RPM-specific reproducibility: single-threaded builds avoid
# nondeterministic ordering effects
export RPM_BUILD_NCPUS=1

# Normalize timestamps in ar archives
normalize_ar() {
    local archive="$1"
    # Rebuild the symbol index in deterministic mode (zeroed timestamps,
    # uids, gids); re-adding the archive to a new archive, as this script
    # previously did with llvm-ar, would nest it inside itself. binutils
    # ar is installed in this image.
    ar -sD "${archive}" 2>/dev/null || true
}

# Normalize timestamps in tar archives
normalize_tar() {
    local archive="$1"
    local mtime="${SOURCE_DATE_EPOCH:-0}"

    # Repack with deterministic settings
    local tmp_dir=$(mktemp -d)
    tar -xf "${archive}" -C "${tmp_dir}"
    tar --sort=name \
        --mtime="@${mtime}" \
        --owner=0 --group=0 \
        --numeric-owner \
        -cf "${archive}.new" -C "${tmp_dir}" .
    mv "${archive}.new" "${archive}"
    rm -rf "${tmp_dir}"
}

# Remove Python bytecode caches (their embedded timestamps vary)
normalize_python() {
    find . -name '__pycache__' -type d -exec rm -rf {} + 2>/dev/null || true
    find . -name '*.pyc' -delete 2>/dev/null || true
}

# Strip build paths from binaries
strip_build_paths() {
    local binary="$1"
    if command -v objcopy &>/dev/null; then
        # Remove .note.gnu.build-id if it contains build path
        objcopy --remove-section=.note.gnu.build-id "${binary}" 2>/dev/null || true
    fi
}

# Main normalization
normalize_build() {
    echo "Normalizing build environment..."

    # Normalize Python bytecode
    normalize_python

    # Find and normalize archives
    find . -name '*.a' -type f | while read -r ar; do
        normalize_ar "${ar}"
    done

    echo "Normalization complete"
}

# If sourced, export functions; if executed, run normalization
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    normalize_build
fi
143
devops/releases/service-versions.json
Normal file
@@ -0,0 +1,143 @@
{
  "$schema": "./service-versions.schema.json",
  "schemaVersion": "1.0.0",
  "lastUpdated": "2025-01-01T00:00:00Z",
  "registry": "git.stella-ops.org/stella-ops.org",
  "services": {
    "authority": {
      "name": "Authority",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "attestor": {
      "name": "Attestor",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "concelier": {
      "name": "Concelier",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "scanner": {
      "name": "Scanner",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "policy": {
      "name": "Policy",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "signer": {
      "name": "Signer",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "excititor": {
      "name": "Excititor",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "gateway": {
      "name": "Gateway",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "scheduler": {
      "name": "Scheduler",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "cli": {
      "name": "CLI",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "orchestrator": {
      "name": "Orchestrator",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "notify": {
      "name": "Notify",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "sbomservice": {
      "name": "SbomService",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "vexhub": {
      "name": "VexHub",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    },
    "evidencelocker": {
      "name": "EvidenceLocker",
      "version": "1.0.0",
      "dockerTag": null,
      "releasedAt": null,
      "gitSha": null,
      "sbomDigest": null,
      "signatureDigest": null
    }
  }
}
93
devops/scripts/efcore/Scaffold-AllModules.ps1
Normal file
@@ -0,0 +1,93 @@
<#
.SYNOPSIS
    Scaffolds EF Core DbContext, entities, and compiled models for all StellaOps modules.

.DESCRIPTION
    Iterates through all configured modules and runs Scaffold-Module.ps1 for each.
    Use this after schema changes or for initial setup.

.PARAMETER SkipMissing
    Skip modules whose projects don't exist yet (default: true)

.EXAMPLE
    .\Scaffold-AllModules.ps1

.EXAMPLE
    .\Scaffold-AllModules.ps1 -SkipMissing:$false
#>
param(
    [bool]$SkipMissing = $true
)

$ErrorActionPreference = "Stop"

# Module definitions: Module name -> Schema name
$modules = @(
    @{ Module = "Unknowns"; Schema = "unknowns" },
    @{ Module = "PacksRegistry"; Schema = "packs" },
    @{ Module = "Authority"; Schema = "authority" },
    @{ Module = "Scanner"; Schema = "scanner" },
    @{ Module = "Scheduler"; Schema = "scheduler" },
    @{ Module = "TaskRunner"; Schema = "taskrunner" },
    @{ Module = "Policy"; Schema = "policy" },
    @{ Module = "Notify"; Schema = "notify" },
    @{ Module = "Concelier"; Schema = "vuln" },
    @{ Module = "Excititor"; Schema = "vex" },
    @{ Module = "Signals"; Schema = "signals" },
    @{ Module = "Attestor"; Schema = "proofchain" },
    @{ Module = "Signer"; Schema = "signer" }
)

$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.Parent.FullName

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for All Modules" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host ""

$successCount = 0
$skipCount = 0
$failCount = 0

foreach ($m in $modules) {
    $projectPath = Join-Path $RepoRoot "src" $m.Module "__Libraries" "StellaOps.$($m.Module).Persistence.EfCore"

    if (-not (Test-Path "$projectPath\*.csproj")) {
        if ($SkipMissing) {
            Write-Host "SKIP: $($m.Module) - Project not found" -ForegroundColor DarkGray
            $skipCount++
            continue
        } else {
            Write-Host "FAIL: $($m.Module) - Project not found at: $projectPath" -ForegroundColor Red
            $failCount++
            continue
        }
    }

    Write-Host ""
    Write-Host ">>> Scaffolding $($m.Module)..." -ForegroundColor Magenta

    try {
        & "$ScriptDir\Scaffold-Module.ps1" -Module $m.Module -Schema $m.Schema
        $successCount++
    }
    catch {
        Write-Host "FAIL: $($m.Module) - $($_.Exception.Message)" -ForegroundColor Red
        $failCount++
    }
}

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Summary" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Success: $successCount"
Write-Host " Skipped: $skipCount"
Write-Host " Failed:  $failCount"
Write-Host ""

if ($failCount -gt 0) {
    exit 1
}
162
devops/scripts/efcore/Scaffold-Module.ps1
Normal file
@@ -0,0 +1,162 @@
<#
.SYNOPSIS
    Scaffolds EF Core DbContext, entities, and compiled models from a PostgreSQL schema.

.DESCRIPTION
    This script performs database-first scaffolding for a StellaOps module:
    1. Cleans existing generated files (Entities, CompiledModels, DbContext)
    2. Scaffolds DbContext and entities from the live PostgreSQL schema
    3. Generates compiled models for startup performance

.PARAMETER Module
    The module name (e.g., Unknowns, PacksRegistry, Authority)

.PARAMETER Schema
    The PostgreSQL schema name (defaults to lowercase module name)

.PARAMETER ConnectionString
    PostgreSQL connection string. If not provided, uses the default dev connection.

.PARAMETER ProjectPath
    Optional custom project path. Defaults to src/{Module}/__Libraries/StellaOps.{Module}.Persistence.EfCore

.EXAMPLE
    .\Scaffold-Module.ps1 -Module Unknowns

.EXAMPLE
    .\Scaffold-Module.ps1 -Module Unknowns -Schema unknowns -ConnectionString "Host=localhost;Database=stellaops_platform;Username=unknowns_user;Password=unknowns_dev"

.EXAMPLE
    .\Scaffold-Module.ps1 -Module PacksRegistry -Schema packs
#>
param(
    [Parameter(Mandatory=$true)]
    [string]$Module,

    [string]$Schema,

    [string]$ConnectionString,

    [string]$ProjectPath
)

$ErrorActionPreference = "Stop"

# Resolve repository root
$RepoRoot = (Get-Item $PSScriptRoot).Parent.Parent.Parent.FullName

# Default schema to lowercase module name
if (-not $Schema) {
    $Schema = $Module.ToLower()
}

# Default connection string
if (-not $ConnectionString) {
    $user = "${Schema}_user"
    $password = "${Schema}_dev"
    $ConnectionString = "Host=localhost;Port=5432;Database=stellaops_platform;Username=$user;Password=$password;SearchPath=$Schema"
}

# Default project path
if (-not $ProjectPath) {
    $ProjectPath = Join-Path $RepoRoot "src" $Module "__Libraries" "StellaOps.$Module.Persistence.EfCore"
}

$ContextDir = "Context"
$EntitiesDir = "Entities"
$CompiledModelsDir = "CompiledModels"

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for Module: $Module" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Schema:     $Schema"
Write-Host " Project:    $ProjectPath"
Write-Host " Connection: Host=localhost;Database=stellaops_platform;Username=${Schema}_user;..."
Write-Host ""

# Verify project exists
if (-not (Test-Path "$ProjectPath\*.csproj")) {
    Write-Error "Project not found at: $ProjectPath"
    Write-Host "Create the project first with: dotnet new classlib -n StellaOps.$Module.Persistence.EfCore"
    exit 1
}

# Step 1: Clean existing generated files
Write-Host "[1/4] Cleaning existing generated files..." -ForegroundColor Yellow
$paths = @(
    (Join-Path $ProjectPath $EntitiesDir),
    (Join-Path $ProjectPath $CompiledModelsDir),
    (Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs")
)
foreach ($path in $paths) {
    if (Test-Path $path) {
        Remove-Item -Recurse -Force $path
        Write-Host "  Removed: $path" -ForegroundColor DarkGray
    }
}

# Recreate directories
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $EntitiesDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $CompiledModelsDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $ContextDir) | Out-Null

# Step 2: Scaffold DbContext and entities
Write-Host "[2/4] Scaffolding DbContext and entities from schema '$Schema'..." -ForegroundColor Yellow
$scaffoldArgs = @(
    "ef", "dbcontext", "scaffold",
    "`"$ConnectionString`"",
    "Npgsql.EntityFrameworkCore.PostgreSQL",
    "--project", "`"$ProjectPath`"",
    "--schema", $Schema,
    "--context", "${Module}DbContext",
    "--context-dir", $ContextDir,
    "--output-dir", $EntitiesDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.Entities",
    "--context-namespace", "StellaOps.$Module.Persistence.EfCore.Context",
    "--data-annotations",
    "--no-onconfiguring",
    "--force"
)

$process = Start-Process -FilePath "dotnet" -ArgumentList $scaffoldArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
    Write-Error "Scaffold failed with exit code: $($process.ExitCode)"
    exit 1
}
Write-Host "  Scaffolded entities to: $EntitiesDir" -ForegroundColor DarkGray

# Step 3: Generate compiled models
Write-Host "[3/4] Generating compiled models..." -ForegroundColor Yellow
$optimizeArgs = @(
    "ef", "dbcontext", "optimize",
    "--project", "`"$ProjectPath`"",
    "--context", "StellaOps.$Module.Persistence.EfCore.Context.${Module}DbContext",
    "--output-dir", $CompiledModelsDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.CompiledModels"
)

$process = Start-Process -FilePath "dotnet" -ArgumentList $optimizeArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
    Write-Error "Compiled model generation failed with exit code: $($process.ExitCode)"
    exit 1
}
Write-Host "  Generated compiled models to: $CompiledModelsDir" -ForegroundColor DarkGray

# Step 4: Summary
Write-Host "[4/4] Scaffolding complete!" -ForegroundColor Green
Write-Host ""
Write-Host "Generated files:" -ForegroundColor Cyan
$contextFile = Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs"
$entityFiles = Get-ChildItem -Path (Join-Path $ProjectPath $EntitiesDir) -Filter "*.cs" -ErrorAction SilentlyContinue
$compiledFiles = Get-ChildItem -Path (Join-Path $ProjectPath $CompiledModelsDir) -Filter "*.cs" -ErrorAction SilentlyContinue

Write-Host "  Context: $(if (Test-Path $contextFile) { $contextFile } else { 'Not found' })"
Write-Host "  Entities: $($entityFiles.Count) files"
Write-Host "  Compiled Models: $($compiledFiles.Count) files"
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Yellow
Write-Host "  1. Review generated entities for any customization needs"
Write-Host "  2. Create repository implementations in Repositories/"
Write-Host "  3. Add DI registration in Extensions/"
Write-Host ""
88
devops/scripts/efcore/scaffold-all-modules.sh
Normal file
@@ -0,0 +1,88 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding for All StellaOps Modules
# ============================================================================
# Iterates through all configured modules and runs scaffold-module.sh for each.
# Use this after schema changes or for initial setup.
#
# Usage: ./scaffold-all-modules.sh [--no-skip-missing]
# ============================================================================

set -e

SKIP_MISSING=true
if [ "$1" = "--no-skip-missing" ]; then
    SKIP_MISSING=false
fi

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Module definitions: "Module:Schema"
MODULES=(
    "Unknowns:unknowns"
    "PacksRegistry:packs"
    "Authority:authority"
    "Scanner:scanner"
    "Scheduler:scheduler"
    "TaskRunner:taskrunner"
    "Policy:policy"
    "Notify:notify"
    "Concelier:vuln"
    "Excititor:vex"
    "Signals:signals"
    "Attestor:proofchain"
    "Signer:signer"
)

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for All Modules"
echo "============================================================================"
echo ""

SUCCESS_COUNT=0
SKIP_COUNT=0
FAIL_COUNT=0

for entry in "${MODULES[@]}"; do
    MODULE="${entry%%:*}"
    SCHEMA="${entry##*:}"

    PROJECT_PATH="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"

    # compgen handles the glob safely; [ -f path/*.csproj ] breaks when
    # the glob matches more than one file
    if ! compgen -G "$PROJECT_PATH/*.csproj" > /dev/null; then
        if [ "$SKIP_MISSING" = true ]; then
            echo "SKIP: $MODULE - Project not found"
            SKIP_COUNT=$((SKIP_COUNT + 1))
            continue
        else
            echo "FAIL: $MODULE - Project not found at: $PROJECT_PATH"
            FAIL_COUNT=$((FAIL_COUNT + 1))
            continue
        fi
    fi

    echo ""
    echo ">>> Scaffolding $MODULE..."

    # Counters use VAR=$((VAR + 1)): ((VAR++)) returns status 1 when VAR
    # is 0 and would abort the script under set -e
    if "$SCRIPT_DIR/scaffold-module.sh" "$MODULE" "$SCHEMA"; then
        SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
    else
        echo "FAIL: $MODULE - Scaffolding failed"
        FAIL_COUNT=$((FAIL_COUNT + 1))
    fi
done

echo ""
echo "============================================================================"
echo " Summary"
echo "============================================================================"
echo " Success: $SUCCESS_COUNT"
echo " Skipped: $SKIP_COUNT"
echo " Failed:  $FAIL_COUNT"
echo ""

if [ "$FAIL_COUNT" -gt 0 ]; then
    exit 1
fi
113
devops/scripts/efcore/scaffold-module.sh
Normal file
@@ -0,0 +1,113 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding Script for StellaOps Modules
# ============================================================================
# Usage: ./scaffold-module.sh <Module> [Schema] [ConnectionString]
#
# Examples:
#   ./scaffold-module.sh Unknowns
#   ./scaffold-module.sh Unknowns unknowns
#   ./scaffold-module.sh PacksRegistry packs "Host=localhost;..."
# ============================================================================

set -e

MODULE=$1
SCHEMA=${2:-$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')}
CONNECTION_STRING=$3

if [ -z "$MODULE" ]; then
    echo "Usage: $0 <Module> [Schema] [ConnectionString]"
    echo ""
    echo "Examples:"
    echo "  $0 Unknowns"
    echo "  $0 Unknowns unknowns"
    echo "  $0 PacksRegistry packs \"Host=localhost;Database=stellaops_platform;Username=packs_user;Password=packs_dev\""
    exit 1
fi

# Resolve repository root
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Default connection string
if [ -z "$CONNECTION_STRING" ]; then
    USER="${SCHEMA}_user"
    PASSWORD="${SCHEMA}_dev"
    CONNECTION_STRING="Host=localhost;Port=5432;Database=stellaops_platform;Username=$USER;Password=$PASSWORD;SearchPath=$SCHEMA"
fi

PROJECT_DIR="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
CONTEXT_DIR="Context"
ENTITIES_DIR="Entities"
COMPILED_DIR="CompiledModels"

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for Module: $MODULE"
echo "============================================================================"
echo " Schema:     $SCHEMA"
echo " Project:    $PROJECT_DIR"
echo " Connection: Host=localhost;Database=stellaops_platform;Username=${SCHEMA}_user;..."
echo ""

# Verify project exists (compgen handles the glob; [ -f dir/*.csproj ]
# breaks when the glob matches more than one file)
if ! compgen -G "$PROJECT_DIR/*.csproj" > /dev/null; then
    echo "ERROR: Project not found at: $PROJECT_DIR"
    echo "Create the project first with: dotnet new classlib -n StellaOps.$MODULE.Persistence.EfCore"
    exit 1
fi

# Step 1: Clean existing generated files
echo "[1/4] Cleaning existing generated files..."
rm -rf "$PROJECT_DIR/$ENTITIES_DIR"
rm -rf "$PROJECT_DIR/$COMPILED_DIR"
rm -f "$PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"

mkdir -p "$PROJECT_DIR/$ENTITIES_DIR"
mkdir -p "$PROJECT_DIR/$COMPILED_DIR"
mkdir -p "$PROJECT_DIR/$CONTEXT_DIR"

echo "  Cleaned: $ENTITIES_DIR, $COMPILED_DIR, ${MODULE}DbContext.cs"

# Step 2: Scaffold DbContext and entities
echo "[2/4] Scaffolding DbContext and entities from schema '$SCHEMA'..."
dotnet ef dbcontext scaffold \
    "$CONNECTION_STRING" \
    Npgsql.EntityFrameworkCore.PostgreSQL \
    --project "$PROJECT_DIR" \
    --schema "$SCHEMA" \
    --context "${MODULE}DbContext" \
    --context-dir "$CONTEXT_DIR" \
    --output-dir "$ENTITIES_DIR" \
    --namespace "StellaOps.$MODULE.Persistence.EfCore.Entities" \
    --context-namespace "StellaOps.$MODULE.Persistence.EfCore.Context" \
    --data-annotations \
    --no-onconfiguring \
    --force

echo "  Scaffolded entities to: $ENTITIES_DIR"

# Step 3: Generate compiled models
echo "[3/4] Generating compiled models..."
dotnet ef dbcontext optimize \
    --project "$PROJECT_DIR" \
    --context "StellaOps.$MODULE.Persistence.EfCore.Context.${MODULE}DbContext" \
    --output-dir "$COMPILED_DIR" \
    --namespace "StellaOps.$MODULE.Persistence.EfCore.CompiledModels"

echo "  Generated compiled models to: $COMPILED_DIR"

# Step 4: Summary
echo "[4/4] Scaffolding complete!"
echo ""
echo "Generated files:"
echo "  Context: $PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
echo "  Entities: $(ls -1 "$PROJECT_DIR/$ENTITIES_DIR"/*.cs 2>/dev/null | wc -l) files"
echo "  Compiled Models: $(ls -1 "$PROJECT_DIR/$COMPILED_DIR"/*.cs 2>/dev/null | wc -l) files"
echo ""
echo "Next steps:"
echo "  1. Review generated entities for any customization needs"
echo "  2. Create repository implementations in Repositories/"
echo "  3. Add DI registration in Extensions/"
echo ""
100
devops/scripts/fix-duplicate-packages.ps1
Normal file
@@ -0,0 +1,100 @@
#!/usr/bin/env pwsh
# fix-duplicate-packages.ps1 - Remove duplicate PackageReference items from test projects
# These are already provided by Directory.Build.props

param([switch]$DryRun)

$packagesToRemove = @(
    "coverlet.collector",
    "Microsoft.NET.Test.Sdk",
    "Microsoft.AspNetCore.Mvc.Testing",
    "xunit",
    "xunit.runner.visualstudio",
    "Microsoft.Extensions.TimeProvider.Testing"
)

$sharpCompressPackage = "SharpCompress"

# Find all test project files
$testProjects = Get-ChildItem -Path "src" -Filter "*.Tests.csproj" -Recurse
$corpusProjects = Get-ChildItem -Path "src" -Filter "*.Corpus.*.csproj" -Recurse

Write-Host "=== Fix Duplicate Package References ===" -ForegroundColor Cyan
Write-Host "Found $($testProjects.Count) test projects" -ForegroundColor Yellow
Write-Host "Found $($corpusProjects.Count) corpus projects (SharpCompress)" -ForegroundColor Yellow

$fixedCount = 0

foreach ($proj in $testProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false

    # Skip projects that opt out of common test infrastructure
    if ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") {
        Write-Host "  Skipped (UseConcelierTestInfra=false): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }

    foreach ($pkg in $packagesToRemove) {
        # Match PackageReference for this package (various formats)
        $patterns = @(
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
        )

        foreach ($pattern in $patterns) {
            if ($content -match $pattern) {
                $content = $content -replace $pattern, ""
                $modified = $true
            }
        }
    }

    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    # Clean up ItemGroups with only whitespace/comments
    $content = $content -replace "(?s)<ItemGroup>\s*<!--[^-]*-->\s*</ItemGroup>", ""

    if ($modified) {
        $fixedCount++
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}

# Fix SharpCompress in corpus projects
foreach ($proj in $corpusProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false

    $patterns = @(
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
    )

    foreach ($pattern in $patterns) {
        if ($content -match $pattern) {
            $content = $content -replace $pattern, ""
            $modified = $true
        }
    }

    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""

    if ($modified) {
        $fixedCount++
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}

Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
if ($DryRun) {
    Write-Host "(Dry run - no changes made)" -ForegroundColor Yellow
}
55
devops/scripts/fix-duplicate-projects.ps1
Normal file
@@ -0,0 +1,55 @@
#!/usr/bin/env pwsh
# fix-duplicate-projects.ps1 - Remove duplicate project entries from solution file

param(
    [string]$SlnPath = "src/StellaOps.sln"
)

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

$projectNames = @{}
$duplicateGuids = @()
$newLines = @()
$skipNextEndProject = $false

foreach ($line in $lines) {
    if ($skipNextEndProject -and $line -eq "EndProject") {
        $skipNextEndProject = $false
        continue
    }

    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]

        if ($projectNames.ContainsKey($name)) {
            $duplicateGuids += $guid
            Write-Host "Removing duplicate: $name ($guid)"
            $skipNextEndProject = $true
            continue
        } else {
            $projectNames[$name] = $true
        }
    }

    $newLines += $line
}

# Also remove duplicate GUIDs from GlobalSection
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

# Join before writing: piping the array straight to Out-File with -NoNewline would
# concatenate every line without separators and corrupt the solution file.
$finalLines -join "`r`n" | Out-File -FilePath $SlnPath -Encoding UTF8 -NoNewline
Write-Host "`nRemoved $($duplicateGuids.Count) duplicate projects"
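Usage is parameterless in the common case. A quick sketch, run from the repository root:

pwsh devops/scripts/fix-duplicate-projects.ps1
dotnet sln src/StellaOps.sln list > /dev/null   # verify the cleaned solution still parses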
55
devops/scripts/fix-duplicate-using-testkit.ps1
Normal file
@@ -0,0 +1,55 @@
# Fix duplicate "using StellaOps.TestKit;" statements in C# files
# The pattern shows files have this statement both at the top (correct) and in the middle (wrong)
# This script removes all occurrences AFTER the first one

$ErrorActionPreference = "Stop"

$srcPath = Join-Path $PSScriptRoot "..\..\src"
$pattern = "using StellaOps.TestKit;"

# Find all .cs files containing the pattern
$files = Get-ChildItem -Path $srcPath -Recurse -Filter "*.cs" |
    Where-Object { (Get-Content $_.FullName -Raw) -match [regex]::Escape($pattern) }

Write-Host "Found $($files.Count) files with 'using StellaOps.TestKit;'" -ForegroundColor Cyan

$fixedCount = 0
$errorCount = 0

foreach ($file in $files) {
    try {
        $lines = Get-Content $file.FullName
        $newLines = @()
        $foundFirst = $false
        $removedAny = $false

        foreach ($line in $lines) {
            if ($line.Trim() -eq $pattern) {
                if (-not $foundFirst) {
                    # Keep the first occurrence
                    $newLines += $line
                    $foundFirst = $true
                } else {
                    # Skip subsequent occurrences
                    $removedAny = $true
                }
            } else {
                $newLines += $line
            }
        }

        if ($removedAny) {
            $newLines | Set-Content -Path $file.FullName -Encoding UTF8
            Write-Host "Fixed: $($file.Name)" -ForegroundColor Green
            $fixedCount++
        }
    } catch {
        Write-Host "Error processing $($file.FullName): $_" -ForegroundColor Red
        $errorCount++
    }
}

Write-Host ""
Write-Host "Summary:" -ForegroundColor Cyan
Write-Host "  Files fixed: $fixedCount" -ForegroundColor Green
Write-Host "  Errors: $errorCount" -ForegroundColor $(if ($errorCount -gt 0) { "Red" } else { "Green" })
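The damage can be sized up first with a one-liner; a sketch, assuming GNU grep:

grep -rc --include='*.cs' 'using StellaOps\.TestKit;' src | awk -F: '$2 > 1 {print $1}'   # files with the using more than once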
51
devops/scripts/fix-missing-xunit.ps1
Normal file
@@ -0,0 +1,51 @@
# Fix projects with UseConcelierTestInfra=false that don't have xunit
# These projects relied on TestKit for xunit, but now need their own reference

$ErrorActionPreference = "Stop"
# Resolve the source tree relative to this script instead of a machine-specific absolute path
$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find test projects with UseConcelierTestInfra=false
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        (-not ($content -match "xunit\.v3")) -and # Skip xunit.v3 projects
        (-not ($content -match '<PackageReference\s+Include="xunit"')) # Skip projects that already have xunit
    }

Write-Host "Found $($projects.Count) projects needing xunit" -ForegroundColor Cyan

$xunitPackages = @'
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
'@

$fixedCount = 0

foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw

    # Check if it has an ItemGroup with PackageReference
    if ($content -match '(<ItemGroup>[\s\S]*?<PackageReference)') {
        # Add xunit packages after the first PackageReference ItemGroup opening only;
        # a plain -replace would patch every matching ItemGroup, so use a single-count Replace
        $newContent = [regex]::new('(<ItemGroup>\s*\r?\n)(\s*<PackageReference)').Replace($content, "`$1$xunitPackages`n`$2", 1)
    } else {
        # No PackageReference ItemGroup, add one before </Project>
        $itemGroup = @"

  <ItemGroup>
$xunitPackages
  </ItemGroup>
"@
        $newContent = $content -replace '</Project>', "$itemGroup`n</Project>"
    }

    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
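A sketch of the intended flow, run from the repository root:

pwsh devops/scripts/fix-missing-xunit.ps1
dotnet restore src/StellaOps.sln   # confirm the patched projects still restore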
44
devops/scripts/fix-project-references.ps1
Normal file
@@ -0,0 +1,44 @@
# Fix project references in src/__Tests/** that point to wrong relative paths
# Pattern: ../../<Module>/... should be ../../../<Module>/...

$ErrorActionPreference = "Stop"
# Resolve the tests tree relative to this script instead of a machine-specific absolute path
$testsPath = Join-Path $PSScriptRoot "..\..\src\__Tests"

# Known module prefixes that exist at src/<Module>/
$modules = @("Signals", "Scanner", "Concelier", "Scheduler", "Authority", "Attestor",
             "BinaryIndex", "EvidenceLocker", "Excititor", "ExportCenter", "Gateway",
             "Graph", "IssuerDirectory", "Notify", "Orchestrator", "Policy", "AirGap",
             "Provenance", "Replay", "RiskEngine", "SbomService", "Signer", "TaskRunner",
             "Telemetry", "TimelineIndexer", "Unknowns", "VexHub", "VexLens", "VulnExplorer",
             "Zastava", "Cli", "Aoc", "Web", "Bench", "Cryptography", "PacksRegistry",
             "Notifier", "Findings")

$fixedCount = 0

Get-ChildItem -Path $testsPath -Recurse -Filter "*.csproj" | ForEach-Object {
    $proj = $_
    $content = Get-Content $proj.FullName -Raw
    $originalContent = $content

    foreach ($module in $modules) {
        # Fix ../../<Module>/ to ../../../<Module>/
        # But not ../../../<Module> (already correct)
        $pattern = "Include=`"../../$module/"
        $replacement = "Include=`"../../../$module/"

        if ($content -match [regex]::Escape($pattern) -and $content -notmatch [regex]::Escape("Include=`"../../../$module/")) {
            $content = $content -replace [regex]::Escape($pattern), $replacement
        }
    }

    # Fix __Libraries references that are one level short
    $content = $content -replace 'Include="../../__Libraries/', 'Include="../../../__Libraries/'

    if ($content -ne $originalContent) {
        Set-Content -Path $proj.FullName -Value $content -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
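The rewrite it performs, illustrated on a hypothetical ProjectReference (the inner project name is invented for the example):

before: <ProjectReference Include="../../Scanner/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
after:  <ProjectReference Include="../../../Scanner/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />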
68
devops/scripts/fix-sln-duplicates.ps1
Normal file
@@ -0,0 +1,68 @@
#!/usr/bin/env pwsh
# fix-sln-duplicates.ps1 - Remove duplicate project entries from solution file

param(
    [string]$SlnPath = "src/StellaOps.sln"
)

$ErrorActionPreference = "Stop"

Write-Host "=== Solution Duplicate Cleanup ===" -ForegroundColor Cyan
Write-Host "Solution: $SlnPath"

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

# Track seen project names
$seenProjects = @{}
$duplicateGuids = @()
$newLines = @()
$skipNext = $false

for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]

    if ($skipNext) {
        $skipNext = $false
        continue
    }

    # Check for project declaration
    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]

        if ($seenProjects.ContainsKey($name)) {
            Write-Host "Removing duplicate: $name ($guid)" -ForegroundColor Yellow
            $duplicateGuids += $guid
            # Skip this line and the next EndProject line
            $skipNext = $true
            continue
        } else {
            $seenProjects[$name] = $true
        }
    }

    $newLines += $line
}

# Remove GlobalSection references to duplicate GUIDs
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

# Write back
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline

Write-Host ""
Write-Host "Removed $($duplicateGuids.Count) duplicate projects" -ForegroundColor Green
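To confirm the cleanup took, a sketch assuming GNU grep:

grep -oP '^Project\(.+\) = "\K[^"]+' src/StellaOps.sln | sort | uniq -d   # expect no output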
40
devops/scripts/fix-xunit-using.ps1
Normal file
@@ -0,0 +1,40 @@
# Add <Using Include="Xunit" /> to test projects with UseConcelierTestInfra=false
# that have xunit but don't have the global using

$ErrorActionPreference = "Stop"
# Resolve the source tree relative to this script instead of a machine-specific absolute path
$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find test projects with UseConcelierTestInfra=false that have xunit but no Using Include="Xunit"
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        ($content -match '<PackageReference\s+Include="xunit"') -and
        (-not ($content -match '<Using\s+Include="Xunit"'))
    }

Write-Host "Found $($projects.Count) projects needing Xunit using" -ForegroundColor Cyan

$fixedCount = 0

foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw

    # Add Using Include="Xunit" before the first ProjectReference ItemGroup or at the end
    if ($content -match '(<ItemGroup>\s*\r?\n\s*<ProjectReference)') {
        $usingBlock = "  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n`n"
        # Single-count Replace so the block lands before the first ProjectReference group only
        $newContent = [regex]::new('(\s*)(<ItemGroup>\s*\r?\n\s*<ProjectReference)').Replace($content, "$usingBlock`$1`$2", 1)
    } else {
        # Add before </Project>
        $usingBlock = "`n  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n"
        $newContent = $content -replace '</Project>', "$usingBlock</Project>"
    }

    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
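Stragglers can be spotted afterwards; a sketch assuming GNU grep and xargs:

grep -rL --include='*.csproj' '<Using Include="Xunit"' src | xargs -r grep -l '<PackageReference Include="xunit"'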
37
devops/scripts/fix-xunit-v3-conflict.ps1
Normal file
@@ -0,0 +1,37 @@
# Fix xunit.v3 projects that conflict with Directory.Build.props xunit 2.x
# Add UseConcelierTestInfra=false to exclude them from common test infrastructure

$ErrorActionPreference = "Stop"

$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find all csproj files that reference xunit.v3
$xunitV3Projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object { (Get-Content $_.FullName -Raw) -match "xunit\.v3" }

Write-Host "Found $($xunitV3Projects.Count) projects with xunit.v3" -ForegroundColor Cyan

$fixedCount = 0

foreach ($proj in $xunitV3Projects) {
    $content = Get-Content $proj.FullName -Raw

    # Check if UseConcelierTestInfra is already set
    if ($content -match "<UseConcelierTestInfra>") {
        Write-Host "  Skipped (already configured): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }

    # Add UseConcelierTestInfra=false after the first <PropertyGroup> only;
    # a plain -replace would patch every PropertyGroup in the file
    $newContent = [regex]::new("(<PropertyGroup>)").Replace($content, "`$1`n  <UseConcelierTestInfra>false</UseConcelierTestInfra>", 1)

    # Only write if changed
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
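A quick scope check before running, assuming GNU grep:

grep -rl --include='*.csproj' 'xunit\.v3' src | wc -l   # how many projects reference xunit.v3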
247
devops/scripts/generate-plugin-configs.ps1
Normal file
@@ -0,0 +1,247 @@
<#
.SYNOPSIS
    Generates plugin configuration files for StellaOps modules.

.DESCRIPTION
    This script generates plugin.json manifests and config.yaml files for all
    plugins based on the plugin catalog definition.

.PARAMETER RepoRoot
    Path to the repository root. Defaults to the parent of the devops folder.

.PARAMETER OutputDir
    Output directory for generated configs. Defaults to etc/plugins/.

.PARAMETER Force
    Overwrite existing configuration files.

.EXAMPLE
    .\generate-plugin-configs.ps1
    .\generate-plugin-configs.ps1 -Force
#>

param(
    [string]$RepoRoot = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
    [string]$OutputDir = "",
    [switch]$Force
)

if (-not $OutputDir) {
    $OutputDir = Join-Path $RepoRoot "etc/plugins"
}

# Plugin catalog - defines all plugins and their metadata
$PluginCatalog = @{
    # Router transports
    "router/transports" = @{
        category = "router.transports"
        plugins = @(
            @{ id = "tcp"; name = "TCP Transport"; assembly = "StellaOps.Router.Transport.Tcp.dll"; enabled = $true; priority = 50 }
            @{ id = "tls"; name = "TLS Transport"; assembly = "StellaOps.Router.Transport.Tls.dll"; enabled = $true; priority = 60 }
            @{ id = "udp"; name = "UDP Transport"; assembly = "StellaOps.Router.Transport.Udp.dll"; enabled = $false; priority = 40 }
            @{ id = "rabbitmq"; name = "RabbitMQ Transport"; assembly = "StellaOps.Router.Transport.RabbitMq.dll"; enabled = $false; priority = 30 }
            @{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Router.Transport.InMemory.dll"; enabled = $false; priority = 10 }
        )
    }

    # Excititor connectors
    "excititor" = @{
        category = "excititor.connectors"
        plugins = @(
            @{ id = "redhat-csaf"; name = "Red Hat CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.RedHat.CSAF.dll"; enabled = $true; priority = 100; vendor = "Red Hat" }
            @{ id = "cisco-csaf"; name = "Cisco CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Cisco.CSAF.dll"; enabled = $false; priority = 90; vendor = "Cisco" }
            @{ id = "msrc-csaf"; name = "Microsoft CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.MSRC.CSAF.dll"; enabled = $false; priority = 85; vendor = "Microsoft" }
            @{ id = "oracle-csaf"; name = "Oracle CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Oracle.CSAF.dll"; enabled = $false; priority = 80; vendor = "Oracle" }
            @{ id = "ubuntu-csaf"; name = "Ubuntu CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.dll"; enabled = $false; priority = 75; vendor = "Canonical" }
            @{ id = "suse-rancher"; name = "SUSE Rancher VEX Hub"; assembly = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.dll"; enabled = $false; priority = 70; vendor = "SUSE" }
            @{ id = "oci-openvex"; name = "OCI OpenVEX Connector"; assembly = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.dll"; enabled = $false; priority = 60 }
        )
    }

    # Scanner language analyzers
    "scanner/analyzers/lang" = @{
        category = "scanner.analyzers.lang"
        plugins = @(
            @{ id = "dotnet"; name = ".NET Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.DotNet.dll"; enabled = $true; priority = 100 }
            @{ id = "go"; name = "Go Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Go.dll"; enabled = $true; priority = 95 }
            @{ id = "node"; name = "Node.js Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Node.dll"; enabled = $true; priority = 90 }
            @{ id = "python"; name = "Python Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll"; enabled = $true; priority = 85 }
            @{ id = "java"; name = "Java Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Java.dll"; enabled = $true; priority = 80 }
            @{ id = "rust"; name = "Rust Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Rust.dll"; enabled = $false; priority = 75 }
            @{ id = "ruby"; name = "Ruby Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Ruby.dll"; enabled = $false; priority = 70 }
            @{ id = "php"; name = "PHP Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Php.dll"; enabled = $false; priority = 65 }
            @{ id = "swift"; name = "Swift Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Swift.dll"; enabled = $false; priority = 60 }
            @{ id = "cpp"; name = "C/C++ Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Cpp.dll"; enabled = $false; priority = 55 }
        )
    }

    # Scanner OS analyzers
    "scanner/analyzers/os" = @{
        category = "scanner.analyzers.os"
        plugins = @(
            @{ id = "apk"; name = "Alpine APK Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Apk.dll"; enabled = $true; priority = 100 }
            @{ id = "dpkg"; name = "Debian DPKG Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Dpkg.dll"; enabled = $true; priority = 95 }
            @{ id = "rpm"; name = "RPM Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Rpm.dll"; enabled = $true; priority = 90 }
            @{ id = "pacman"; name = "Arch Pacman Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Pacman.dll"; enabled = $false; priority = 80 }
            @{ id = "homebrew"; name = "Homebrew Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Homebrew.dll"; enabled = $false; priority = 70 }
            @{ id = "chocolatey"; name = "Chocolatey Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Chocolatey.dll"; enabled = $false; priority = 65 }
        )
    }

    # Notify channels
    "notify" = @{
        category = "notify.channels"
        plugins = @(
            @{ id = "email"; name = "Email Notifier"; assembly = "StellaOps.Notify.Connectors.Email.dll"; enabled = $true; priority = 100 }
            @{ id = "slack"; name = "Slack Notifier"; assembly = "StellaOps.Notify.Connectors.Slack.dll"; enabled = $true; priority = 90 }
            @{ id = "webhook"; name = "Webhook Notifier"; assembly = "StellaOps.Notify.Connectors.Webhook.dll"; enabled = $true; priority = 80 }
            @{ id = "teams"; name = "Microsoft Teams Notifier"; assembly = "StellaOps.Notify.Connectors.Teams.dll"; enabled = $false; priority = 85 }
            @{ id = "pagerduty"; name = "PagerDuty Notifier"; assembly = "StellaOps.Notify.Connectors.PagerDuty.dll"; enabled = $false; priority = 75 }
            @{ id = "opsgenie"; name = "OpsGenie Notifier"; assembly = "StellaOps.Notify.Connectors.OpsGenie.dll"; enabled = $false; priority = 70 }
            @{ id = "telegram"; name = "Telegram Notifier"; assembly = "StellaOps.Notify.Connectors.Telegram.dll"; enabled = $false; priority = 65 }
            @{ id = "discord"; name = "Discord Notifier"; assembly = "StellaOps.Notify.Connectors.Discord.dll"; enabled = $false; priority = 60 }
        )
    }

    # Messaging transports
    "messaging" = @{
        category = "messaging.transports"
        plugins = @(
            @{ id = "valkey"; name = "Valkey Transport"; assembly = "StellaOps.Messaging.Transport.Valkey.dll"; enabled = $true; priority = 100 }
            @{ id = "postgres"; name = "PostgreSQL Transport"; assembly = "StellaOps.Messaging.Transport.Postgres.dll"; enabled = $false; priority = 90 }
            @{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Messaging.Transport.InMemory.dll"; enabled = $false; priority = 10 }
        )
    }
}

function New-PluginManifest {
    param(
        [string]$ModulePath,
        [hashtable]$Plugin,
        [string]$Category
    )

    $fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"

    $manifest = @{
        '$schema' = "https://schema.stella-ops.org/plugin-manifest/v2.json"
        schemaVersion = "2.0"
        id = $fullId
        name = $Plugin.name
        version = "1.0.0"
        assembly = @{
            path = $Plugin.assembly
        }
        capabilities = @()
        platforms = @("linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64")
        compliance = @("NIST")
        jurisdiction = "world"
        priority = $Plugin.priority
        enabled = $Plugin.enabled
        metadata = @{
            author = "StellaOps"
            license = "AGPL-3.0-or-later"
        }
    }

    if ($Plugin.vendor) {
        $manifest.metadata["vendor"] = $Plugin.vendor
    }

    return $manifest | ConvertTo-Json -Depth 10
}

function New-PluginConfig {
    param(
        [string]$ModulePath,
        [hashtable]$Plugin,
        [string]$Category
    )

    $fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"

    $config = @"
id: $fullId
name: $($Plugin.name)
enabled: $($Plugin.enabled.ToString().ToLower())
priority: $($Plugin.priority)
config:
  # Plugin-specific configuration
  # Add settings here as needed
"@

    return $config
}

function New-RegistryFile {
    param(
        [string]$Category,
        [array]$Plugins
    )

    $entries = $Plugins | ForEach-Object {
        "  $($_.id):`n    enabled: $($_.enabled.ToString().ToLower())`n    priority: $($_.priority)`n    config: $($_.id)/config.yaml"
    }

    $registry = @"
version: "1.0"
category: $Category
defaults:
  enabled: false
  timeout: "00:05:00"
plugins:
$($entries -join "`n")
"@

    return $registry
}

# Main generation logic
Write-Host "Generating plugin configurations to: $OutputDir" -ForegroundColor Cyan

foreach ($modulePath in $PluginCatalog.Keys) {
    $moduleConfig = $PluginCatalog[$modulePath]
    $moduleDir = Join-Path $OutputDir $modulePath

    Write-Host "Processing module: $modulePath" -ForegroundColor Yellow

    # Create module directory
    if (-not (Test-Path $moduleDir)) {
        New-Item -ItemType Directory -Path $moduleDir -Force | Out-Null
    }

    # Generate registry.yaml
    $registryPath = Join-Path $moduleDir "registry.yaml"
    if ($Force -or -not (Test-Path $registryPath)) {
        $registryContent = New-RegistryFile -Category $moduleConfig.category -Plugins $moduleConfig.plugins
        Set-Content -Path $registryPath -Value $registryContent -Encoding utf8
        Write-Host "  Created: registry.yaml" -ForegroundColor Green
    }

    # Generate plugin configs
    foreach ($plugin in $moduleConfig.plugins) {
        $pluginDir = Join-Path $moduleDir $plugin.id

        if (-not (Test-Path $pluginDir)) {
            New-Item -ItemType Directory -Path $pluginDir -Force | Out-Null
        }

        # plugin.json
        $manifestPath = Join-Path $pluginDir "plugin.json"
        if ($Force -or -not (Test-Path $manifestPath)) {
            $manifestContent = New-PluginManifest -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $manifestPath -Value $manifestContent -Encoding utf8
            Write-Host "  Created: $($plugin.id)/plugin.json" -ForegroundColor Green
        }

        # config.yaml
        $configPath = Join-Path $pluginDir "config.yaml"
        if ($Force -or -not (Test-Path $configPath)) {
            $configContent = New-PluginConfig -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $configPath -Value $configContent -Encoding utf8
            Write-Host "  Created: $($plugin.id)/config.yaml" -ForegroundColor Green
        }
    }
}

Write-Host "`nPlugin configuration generation complete!" -ForegroundColor Cyan
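Run once per checkout, or with -Force to regenerate. Per module, the generated layout is:

pwsh devops/scripts/generate-plugin-configs.ps1 -Force
# etc/plugins/<module>/registry.yaml
# etc/plugins/<module>/<plugin-id>/plugin.json
# etc/plugins/<module>/<plugin-id>/config.yaml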
406
devops/scripts/lib/ci-common.sh
Normal file
@@ -0,0 +1,406 @@
#!/usr/bin/env bash
# =============================================================================
# CI COMMON FUNCTIONS
# =============================================================================
# Shared utility functions for local CI testing scripts.
#
# Usage:
#   source "$SCRIPT_DIR/lib/ci-common.sh"
#
# =============================================================================

# Prevent multiple sourcing
[[ -n "${_CI_COMMON_LOADED:-}" ]] && return
_CI_COMMON_LOADED=1

# =============================================================================
# COLOR DEFINITIONS
# =============================================================================

if [[ -t 1 ]] && [[ -n "${TERM:-}" ]] && [[ "${TERM}" != "dumb" ]]; then
  RED='\033[0;31m'
  GREEN='\033[0;32m'
  YELLOW='\033[0;33m'
  BLUE='\033[0;34m'
  MAGENTA='\033[0;35m'
  CYAN='\033[0;36m'
  WHITE='\033[0;37m'
  BOLD='\033[1m'
  DIM='\033[2m'
  RESET='\033[0m'
else
  RED=''
  GREEN=''
  YELLOW=''
  BLUE=''
  MAGENTA=''
  CYAN=''
  WHITE=''
  BOLD=''
  DIM=''
  RESET=''
fi

# =============================================================================
# LOGGING FUNCTIONS
# =============================================================================

# Log an info message
log_info() {
  echo -e "${BLUE}[INFO]${RESET} $*"
}

# Log a success message
log_success() {
  echo -e "${GREEN}[OK]${RESET} $*"
}

# Log a warning message
log_warn() {
  echo -e "${YELLOW}[WARN]${RESET} $*" >&2
}

# Log an error message
log_error() {
  echo -e "${RED}[ERROR]${RESET} $*" >&2
}

# Log a debug message (only if VERBOSE is true)
log_debug() {
  if [[ "${VERBOSE:-false}" == "true" ]]; then
    echo -e "${DIM}[DEBUG]${RESET} $*"
  fi
}

# Log a step in a process
log_step() {
  local step_num="$1"
  local total_steps="$2"
  local message="$3"
  echo -e "${CYAN}[${step_num}/${total_steps}]${RESET} ${BOLD}${message}${RESET}"
}

# Log a section header
log_section() {
  echo ""
  echo -e "${BOLD}${MAGENTA}=== $* ===${RESET}"
  echo ""
}

# Log a subsection header
log_subsection() {
  echo -e "${CYAN}--- $* ---${RESET}"
}

# =============================================================================
# ERROR HANDLING
# =============================================================================

# Exit with error message
die() {
  log_error "$@"
  exit 1
}

# Check if a command exists
require_command() {
  local cmd="$1"
  local install_hint="${2:-}"

  if ! command -v "$cmd" &>/dev/null; then
    log_error "Required command not found: $cmd"
    if [[ -n "$install_hint" ]]; then
      log_info "Install with: $install_hint"
    fi
    return 1
  fi
  return 0
}

# Check if a file exists
require_file() {
  local file="$1"
  if [[ ! -f "$file" ]]; then
    log_error "Required file not found: $file"
    return 1
  fi
  return 0
}

# Check if a directory exists
require_dir() {
  local dir="$1"
  if [[ ! -d "$dir" ]]; then
    log_error "Required directory not found: $dir"
    return 1
  fi
  return 0
}

# =============================================================================
# TIMING FUNCTIONS
# =============================================================================

# Get current timestamp in seconds
get_timestamp() {
  date +%s
}

# Format duration in human-readable format
format_duration() {
  local seconds="$1"
  local minutes=$((seconds / 60))
  local remaining_seconds=$((seconds % 60))

  if [[ $minutes -gt 0 ]]; then
    echo "${minutes}m ${remaining_seconds}s"
  else
    echo "${remaining_seconds}s"
  fi
}

# Start a timer and return the start time
start_timer() {
  get_timestamp
}

# Stop a timer and print the duration
stop_timer() {
  local start_time="$1"
  local label="${2:-Operation}"
  local end_time
  end_time=$(get_timestamp)
  local duration=$((end_time - start_time))

  log_info "$label completed in $(format_duration $duration)"
}

# =============================================================================
# STRING FUNCTIONS
# =============================================================================

# Convert string to lowercase
to_lower() {
  echo "$1" | tr '[:upper:]' '[:lower:]'
}

# Convert string to uppercase
to_upper() {
  echo "$1" | tr '[:lower:]' '[:upper:]'
}

# Trim whitespace from string
trim() {
  local var="$*"
  var="${var#"${var%%[![:space:]]*}"}"
  var="${var%"${var##*[![:space:]]}"}"
  echo -n "$var"
}

# Join array elements with delimiter
join_by() {
  local delimiter="$1"
  shift
  local first="$1"
  shift
  printf '%s' "$first" "${@/#/$delimiter}"
}

# =============================================================================
# ARRAY FUNCTIONS
# =============================================================================

# Check if array contains element
array_contains() {
  local needle="$1"
  shift
  local element
  for element in "$@"; do
    [[ "$element" == "$needle" ]] && return 0
  done
  return 1
}

# =============================================================================
# FILE FUNCTIONS
# =============================================================================

# Create directory if it doesn't exist
ensure_dir() {
  local dir="$1"
  if [[ ! -d "$dir" ]]; then
    mkdir -p "$dir"
    log_debug "Created directory: $dir"
  fi
}

# Get absolute path
get_absolute_path() {
  local path="$1"
  if [[ -d "$path" ]]; then
    (cd "$path" && pwd)
  elif [[ -f "$path" ]]; then
    local dir
    dir=$(dirname "$path")
    echo "$(cd "$dir" && pwd)/$(basename "$path")"
  else
    echo "$path"
  fi
}

# =============================================================================
# GIT FUNCTIONS
# =============================================================================

# Get the repository root directory
get_repo_root() {
  git rev-parse --show-toplevel 2>/dev/null
}

# Get current branch name
get_current_branch() {
  git rev-parse --abbrev-ref HEAD 2>/dev/null
}

# Get current commit SHA
get_current_sha() {
  git rev-parse HEAD 2>/dev/null
}

# Get short commit SHA
get_short_sha() {
  git rev-parse --short HEAD 2>/dev/null
}

# Check if working directory is clean
is_git_clean() {
  [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}

# Get list of changed files compared to main branch
get_changed_files() {
  local base_branch="${1:-main}"
  git diff --name-only "$base_branch"...HEAD 2>/dev/null
}

# =============================================================================
# MODULE DETECTION
# =============================================================================

# Map of module names to source paths
declare -A MODULE_PATHS=(
  ["Scanner"]="src/Scanner src/BinaryIndex"
  ["Concelier"]="src/Concelier src/Excititor"
  ["Authority"]="src/Authority"
  ["Policy"]="src/Policy src/RiskEngine"
  ["Attestor"]="src/Attestor src/Provenance"
  ["EvidenceLocker"]="src/EvidenceLocker"
  ["ExportCenter"]="src/ExportCenter"
  ["Findings"]="src/Findings"
  ["SbomService"]="src/SbomService"
  ["Notify"]="src/Notify src/Notifier"
  ["Router"]="src/Router src/Gateway"
  ["Cryptography"]="src/Cryptography"
  ["AirGap"]="src/AirGap"
  ["Cli"]="src/Cli"
  ["AdvisoryAI"]="src/AdvisoryAI"
  ["ReachGraph"]="src/ReachGraph"
  ["Orchestrator"]="src/Orchestrator"
  ["PacksRegistry"]="src/PacksRegistry"
  ["Replay"]="src/Replay"
  ["Aoc"]="src/Aoc"
  ["IssuerDirectory"]="src/IssuerDirectory"
  ["Telemetry"]="src/Telemetry"
  ["Signals"]="src/Signals"
  ["Web"]="src/Web"
  ["DevPortal"]="src/DevPortal"
)

# Modules that use Node.js/npm instead of .NET
declare -a NODE_MODULES=("Web" "DevPortal")

# Detect which modules have changed based on git diff
detect_changed_modules() {
  local base_branch="${1:-main}"
  local changed_files
  changed_files=$(get_changed_files "$base_branch")

  local changed_modules=()
  local module
  local paths

  for module in "${!MODULE_PATHS[@]}"; do
    paths="${MODULE_PATHS[$module]}"
    for path in $paths; do
      if echo "$changed_files" | grep -q "^${path}/"; then
        if ! array_contains "$module" "${changed_modules[@]}"; then
          changed_modules+=("$module")
        fi
        break
      fi
    done
  done

  # Check for infrastructure changes that affect all modules
  if echo "$changed_files" | grep -qE "^(Directory\.Build\.props|Directory\.Packages\.props|nuget\.config)"; then
    echo "ALL"
    return
  fi

  # Check for shared library changes
  if echo "$changed_files" | grep -q "^src/__Libraries/"; then
    echo "ALL"
    return
  fi

  if [[ ${#changed_modules[@]} -eq 0 ]]; then
    echo "NONE"
  else
    echo "${changed_modules[*]}"
  fi
}

# =============================================================================
# RESULT REPORTING
# =============================================================================

# Print a summary table row
print_table_row() {
  local col1="$1"
  local col2="$2"
  local col3="${3:-}"

  printf "  %-30s %-15s %s\n" "$col1" "$col2" "$col3"
}

# Print pass/fail status
print_status() {
  local name="$1"
  local passed="$2"
  local duration="${3:-}"

  if [[ "$passed" == "true" ]]; then
    print_table_row "$name" "${GREEN}PASSED${RESET}" "$duration"
  else
    print_table_row "$name" "${RED}FAILED${RESET}" "$duration"
  fi
}

# =============================================================================
# ENVIRONMENT LOADING
# =============================================================================

# Load environment file if it exists
load_env_file() {
  local env_file="$1"

  if [[ -f "$env_file" ]]; then
    log_debug "Loading environment from: $env_file"
    set -a
    # shellcheck source=/dev/null
    source "$env_file"
    set +a
    return 0
  fi
  return 1
}
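A minimal consumer sketch wiring a few of these helpers together (the caller's path layout is assumed):

#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/ci-common.sh"

timer=$(start_timer)
require_command git "apt-get install git" || die "git is required"
log_section "Change Detection"
log_info "Changed modules: $(detect_changed_modules main)"
stop_timer "$timer" "Change detection"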
342
devops/scripts/lib/ci-docker.sh
Normal file
@@ -0,0 +1,342 @@
#!/usr/bin/env bash
# =============================================================================
# CI DOCKER UTILITIES
# =============================================================================
# Docker-related utility functions for local CI testing.
#
# Usage:
#   source "$SCRIPT_DIR/lib/ci-docker.sh"
#
# =============================================================================

# Prevent multiple sourcing
[[ -n "${_CI_DOCKER_LOADED:-}" ]] && return
_CI_DOCKER_LOADED=1

# =============================================================================
# CONFIGURATION
# =============================================================================

CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.ci.yaml}"
CI_IMAGE="${CI_IMAGE:-stellaops-ci:local}"
CI_DOCKERFILE="${CI_DOCKERFILE:-devops/docker/Dockerfile.ci}"
CI_PROJECT_NAME="${CI_PROJECT_NAME:-stellaops-ci}"

# Service names from docker-compose.ci.yaml
CI_SERVICES=(postgres-ci valkey-ci nats-ci mock-registry minio-ci)

# =============================================================================
# DOCKER CHECK
# =============================================================================

# Check if Docker is available and running
check_docker() {
  if ! command -v docker &>/dev/null; then
    log_error "Docker is not installed or not in PATH"
    log_info "Install Docker: https://docs.docker.com/get-docker/"
    return 1
  fi

  if ! docker info &>/dev/null; then
    log_error "Docker daemon is not running"
    log_info "Start Docker Desktop or run: sudo systemctl start docker"
    return 1
  fi

  log_debug "Docker is available and running"
  return 0
}

# Check if Docker Compose is available
check_docker_compose() {
  if docker compose version &>/dev/null; then
    DOCKER_COMPOSE="docker compose"
    log_debug "Using Docker Compose plugin"
    return 0
  elif command -v docker-compose &>/dev/null; then
    DOCKER_COMPOSE="docker-compose"
    log_debug "Using standalone docker-compose"
    return 0
  else
    log_error "Docker Compose is not installed"
    log_info "Install with: docker compose plugin or standalone docker-compose"
    return 1
  fi
}

# =============================================================================
# CI SERVICES MANAGEMENT
# =============================================================================

# Start CI services
start_ci_services() {
  local services=("$@")
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  if [[ ! -f "$compose_file" ]]; then
    log_error "Compose file not found: $compose_file"
    return 1
  fi

  check_docker || return 1
  check_docker_compose || return 1

  log_section "Starting CI Services"

  if [[ ${#services[@]} -eq 0 ]]; then
    # Start all services
    log_info "Starting all CI services..."
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d
  else
    # Start specific services
    log_info "Starting services: ${services[*]}"
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d "${services[@]}"
  fi

  local result=$?
  if [[ $result -ne 0 ]]; then
    log_error "Failed to start CI services"
    return $result
  fi

  # Wait for services to be healthy
  wait_for_services "${services[@]}"
}

# Stop CI services
stop_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  if [[ ! -f "$compose_file" ]]; then
    log_debug "Compose file not found, nothing to stop"
    return 0
  fi

  check_docker_compose || return 1

  log_section "Stopping CI Services"

  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down
}

# Stop CI services and remove volumes
cleanup_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  if [[ ! -f "$compose_file" ]]; then
    return 0
  fi

  check_docker_compose || return 1

  log_section "Cleaning Up CI Services"

  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down -v --remove-orphans
}

# Check status of CI services
check_ci_services_status() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"

  check_docker_compose || return 1

  log_subsection "CI Services Status"
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" ps
}

# =============================================================================
# HEALTH CHECKS
# =============================================================================

# Wait for a specific service to be healthy
wait_for_service() {
  local service="$1"
  local timeout="${2:-60}"
  local interval="${3:-2}"

  log_info "Waiting for $service to be healthy..."

  local elapsed=0
  while [[ $elapsed -lt $timeout ]]; do
    local status
    status=$(docker inspect --format='{{.State.Health.Status}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "not found")

    if [[ "$status" == "healthy" ]]; then
      log_success "$service is healthy"
      return 0
    elif [[ "$status" == "not found" ]]; then
      # Container might not have health check, check if running
      local running
      running=$(docker inspect --format='{{.State.Running}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "false")
      if [[ "$running" == "true" ]]; then
        log_success "$service is running (no health check)"
        return 0
      fi
    fi

    sleep "$interval"
    elapsed=$((elapsed + interval))
  done

  log_error "$service did not become healthy within ${timeout}s"
  return 1
}

# Wait for multiple services to be healthy
wait_for_services() {
  local services=("$@")
  local failed=0

  if [[ ${#services[@]} -eq 0 ]]; then
    services=("${CI_SERVICES[@]}")
  fi

  log_info "Waiting for services to be ready..."

  for service in "${services[@]}"; do
    if ! wait_for_service "$service" 60 2; then
      failed=1
    fi
  done

  return $failed
}

# Check if PostgreSQL is accepting connections
check_postgres_ready() {
  local host="${1:-localhost}"
  local port="${2:-5433}"
  local user="${3:-stellaops_ci}"
  local db="${4:-stellaops_test}"

  if command -v pg_isready &>/dev/null; then
    pg_isready -h "$host" -p "$port" -U "$user" -d "$db" &>/dev/null
  else
    # Fallback to nc if pg_isready not available
    nc -z "$host" "$port" &>/dev/null
  fi
}

# Check if Valkey/Redis is accepting connections
check_valkey_ready() {
  local host="${1:-localhost}"
  local port="${2:-6380}"

  if command -v valkey-cli &>/dev/null; then
    valkey-cli -h "$host" -p "$port" ping &>/dev/null
  elif command -v redis-cli &>/dev/null; then
    redis-cli -h "$host" -p "$port" ping &>/dev/null
  else
    nc -z "$host" "$port" &>/dev/null
  fi
}

# =============================================================================
# CI DOCKER IMAGE MANAGEMENT
# =============================================================================

# Check if CI image exists
ci_image_exists() {
  docker image inspect "$CI_IMAGE" &>/dev/null
}

# Build CI Docker image
build_ci_image() {
  local force_rebuild="${1:-false}"
  local dockerfile="$REPO_ROOT/$CI_DOCKERFILE"

  if [[ ! -f "$dockerfile" ]]; then
    log_error "Dockerfile not found: $dockerfile"
    return 1
  fi

  check_docker || return 1

  if ci_image_exists && [[ "$force_rebuild" != "true" ]]; then
    log_info "CI image already exists: $CI_IMAGE"
    log_info "Use --rebuild to force rebuild"
    return 0
  fi

  log_section "Building CI Docker Image"
  log_info "Dockerfile: $dockerfile"
  log_info "Image: $CI_IMAGE"

  docker build -t "$CI_IMAGE" -f "$dockerfile" "$REPO_ROOT"

  if [[ $? -ne 0 ]]; then
    log_error "Failed to build CI image"
    return 1
  fi

  log_success "CI image built successfully: $CI_IMAGE"
}

# =============================================================================
# CONTAINER EXECUTION
# =============================================================================

# Run a command inside the CI container
run_in_ci_container() {
  local command="$*"

  check_docker || return 1

  if ! ci_image_exists; then
    log_info "CI image not found, building..."
    build_ci_image || return 1
  fi

  local docker_args=(
    --rm
    -v "$REPO_ROOT:/src"
    -v "$REPO_ROOT/TestResults:/src/TestResults"
    -e DOTNET_NOLOGO=1
    -e DOTNET_CLI_TELEMETRY_OPTOUT=1
    -e DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
    -e TZ=UTC
    -w /src
  )

  # Mount Docker socket for Testcontainers
  if [[ -S /var/run/docker.sock ]]; then
    docker_args+=(-v /var/run/docker.sock:/var/run/docker.sock)
  fi

  # Load environment file if exists
  local env_file="$REPO_ROOT/devops/ci-local/.env.local"
  if [[ -f "$env_file" ]]; then
    docker_args+=(--env-file "$env_file")
  fi

  # Connect to CI network if services are running
  if docker network inspect stellaops-ci-net &>/dev/null; then
    docker_args+=(--network stellaops-ci-net)
  fi

  log_debug "Running in CI container: $command"
  docker run "${docker_args[@]}" "$CI_IMAGE" bash -c "$command"
}

# =============================================================================
# DOCKER NETWORK UTILITIES
# =============================================================================

# Get the IP address of a running container
get_container_ip() {
  local container="$1"
  docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$container" 2>/dev/null
}

# Check if container is running
is_container_running() {
  local container="$1"
  [[ "$(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null)" == "true" ]]
}

# Get container logs
get_container_logs() {
  local container="$1"
  local lines="${2:-100}"
  docker logs --tail "$lines" "$container" 2>&1
}
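Typical composition with ci-common.sh; a sketch, noting that these functions read REPO_ROOT, so the caller must set it:

#!/usr/bin/env bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/lib/ci-common.sh"
source "$SCRIPT_DIR/lib/ci-docker.sh"
REPO_ROOT="$(get_repo_root)"

start_ci_services postgres-ci valkey-ci || die "CI services failed to start"
check_postgres_ready && log_success "PostgreSQL is accepting connections on :5433"
run_in_ci_container "dotnet --info"
cleanup_ci_services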
475
devops/scripts/lib/ci-web.sh
Normal file
@@ -0,0 +1,475 @@
#!/usr/bin/env bash
# =============================================================================
# CI-WEB.SH - Angular Web Testing Utilities
# =============================================================================
# Functions for running Angular/Web frontend tests locally.
#
# Test Types:
#   - Unit Tests (Karma/Jasmine)
#   - E2E Tests (Playwright)
#   - Accessibility Tests (Axe-core)
#   - Lighthouse Audits
#   - Storybook Build
#
# =============================================================================

# Prevent direct execution
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  echo "This script should be sourced, not executed directly."
  exit 1
fi

# =============================================================================
# CONSTANTS
# =============================================================================

WEB_DIR="${REPO_ROOT:-$(git rev-parse --show-toplevel)}/src/Web/StellaOps.Web"
WEB_NODE_VERSION="20"

# Test categories for Web
WEB_TEST_CATEGORIES=(
  "web:unit"        # Karma unit tests
  "web:e2e"         # Playwright E2E
  "web:a11y"        # Accessibility
  "web:lighthouse"  # Performance/a11y audit
  "web:build"       # Production build
  "web:storybook"   # Storybook build
)

# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================

check_node_version() {
  if ! command -v node &>/dev/null; then
    log_error "Node.js not found"
    log_info "Install Node.js $WEB_NODE_VERSION+: https://nodejs.org"
    return 1
  fi

  local version
  version=$(node --version | sed 's/v//' | cut -d. -f1)
  if [[ "$version" -lt "$WEB_NODE_VERSION" ]]; then
    log_warn "Node.js version $version is below recommended $WEB_NODE_VERSION"
  else
    log_debug "Node.js version: $(node --version)"
  fi
  return 0
}

check_npm() {
  if ! command -v npm &>/dev/null; then
    log_error "npm not found"
    return 1
  fi
  log_debug "npm version: $(npm --version)"
  return 0
}

check_web_dependencies() {
  log_subsection "Checking Web Dependencies"

  check_node_version || return 1
  check_npm || return 1

  # Check if node_modules exists
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    log_warn "node_modules not found - will install dependencies"
  fi

  return 0
}

# =============================================================================
# SETUP
# =============================================================================

install_web_dependencies() {
  log_subsection "Installing Web Dependencies"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  pushd "$WEB_DIR" > /dev/null || return 1

  # Check if package-lock.json exists
  if [[ -f "package-lock.json" ]]; then
    log_info "Running npm ci (clean install)..."
    npm ci --prefer-offline --no-audit --no-fund || {
      log_error "npm ci failed"
      popd > /dev/null
      return 1
    }
  else
    log_info "Running npm install..."
    npm install --no-audit --no-fund || {
      log_error "npm install failed"
      popd > /dev/null
      return 1
    }
  fi

  popd > /dev/null
  log_success "Web dependencies installed"
  return 0
}

ensure_web_dependencies() {
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    install_web_dependencies || return 1
  fi
  return 0
}

# =============================================================================
# TEST RUNNERS
# =============================================================================

run_web_unit_tests() {
  log_subsection "Running Web Unit Tests (Karma/Jasmine)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:ci"
    popd > /dev/null
    return 0
  fi

  # Run tests
  npm run test:ci
  local result=$?

  stop_timer "$start_time" "Web unit tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web unit tests passed"
  else
    log_error "Web unit tests failed"
  fi

  return $result
}

run_web_e2e_tests() {
  log_subsection "Running Web E2E Tests (Playwright)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  # Install Playwright browsers if needed
  if [[ ! -d "$HOME/.cache/ms-playwright" ]] && [[ ! -d "node_modules/.cache/ms-playwright" ]]; then
    log_info "Installing Playwright browsers..."
    npx playwright install --with-deps chromium || {
      log_warn "Playwright browser installation failed - E2E tests may fail"
    }
  fi

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:e2e"
    popd > /dev/null
    return 0
  fi

  # Run E2E tests
  npm run test:e2e
  local result=$?

  stop_timer "$start_time" "Web E2E tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web E2E tests passed"
  else
    log_error "Web E2E tests failed"
  fi

  return $result
}

run_web_a11y_tests() {
  log_subsection "Running Web Accessibility Tests (Axe)"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:a11y"
    popd > /dev/null
    return 0
  fi

  # Run accessibility tests
  npm run test:a11y
  local result=$?

  stop_timer "$start_time" "Web accessibility tests"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web accessibility tests passed"
  else
    log_warn "Web accessibility tests had issues (non-blocking)"
  fi

  # A11y tests are non-blocking by default
  return 0
}

run_web_build() {
  log_subsection "Building Web Application"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run build -- --configuration production"
    popd > /dev/null
    return 0
  fi

  # Build production bundle
  npm run build -- --configuration production --progress=false
  local result=$?

  stop_timer "$start_time" "Web build"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Web build completed"

    # Check bundle size
    if [[ -d "$WEB_DIR/dist" ]]; then
      local size
      size=$(du -sh "$WEB_DIR/dist" 2>/dev/null | cut -f1)
      log_info "Bundle size: $size"
    fi
  else
    log_error "Web build failed"
  fi

  return $result
}

run_web_storybook_build() {
  log_subsection "Building Storybook"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  ensure_web_dependencies || return 1

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run storybook:build"
    popd > /dev/null
    return 0
  fi

  # Build Storybook
  npm run storybook:build
  local result=$?

  stop_timer "$start_time" "Storybook build"
  popd > /dev/null

  if [[ $result -eq 0 ]]; then
    log_success "Storybook build completed"
  else
    log_error "Storybook build failed"
  fi

  return $result
}

run_web_lighthouse() {
  log_subsection "Running Lighthouse Audit"

  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi

  # Check if lighthouse is available
  if ! command -v lhci &>/dev/null && ! npx lhci --version &>/dev/null; then
    log_warn "Lighthouse CI not installed - skipping audit"
    log_info "Install with: npm install -g @lhci/cli"
    return 0
  fi

  ensure_web_dependencies || return 1

  # Build first if not already built
  if [[ ! -d "$WEB_DIR/dist" ]]; then
    run_web_build || return 1
  fi

  pushd "$WEB_DIR" > /dev/null || return 1

  local start_time
  start_time=$(start_timer)

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: lhci autorun"
    popd > /dev/null
    return 0
  fi

  # Run Lighthouse
  npx lhci autorun \
    --collect.staticDistDir=./dist/stellaops-web/browser \
    --collect.numberOfRuns=1 \
    --upload.target=filesystem \
    --upload.outputDir=./lighthouse-results 2>/dev/null || {
    log_warn "Lighthouse audit had issues"
  }

  stop_timer "$start_time" "Lighthouse audit"
  popd > /dev/null

  log_success "Lighthouse audit completed"
  return 0
}
# =============================================================================
|
||||
# COMPOSITE RUNNERS
|
||||
# =============================================================================
|
||||
|
||||
run_web_smoke() {
|
||||
log_section "Web Smoke Tests"
|
||||
log_info "Running quick web validation"
|
||||
|
||||
local failed=0
|
||||
|
||||
run_web_build || failed=1
|
||||
|
||||
if [[ $failed -eq 0 ]]; then
|
||||
run_web_unit_tests || failed=1
|
||||
fi
|
||||
|
||||
return $failed
|
||||
}
|
||||
|
||||
run_web_pr_gating() {
|
||||
log_section "Web PR-Gating Tests"
|
||||
log_info "Running full web PR-gating suite"
|
||||
|
||||
local failed=0
|
||||
local results=()
|
||||
|
||||
# Build
|
||||
run_web_build
|
||||
results+=("Build:$?")
|
||||
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
|
||||
|
||||
# Unit tests
|
||||
if [[ $failed -eq 0 ]]; then
|
||||
run_web_unit_tests
|
||||
results+=("Unit:$?")
|
||||
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
|
||||
fi
|
||||
|
||||
# E2E tests
|
||||
if [[ $failed -eq 0 ]]; then
|
||||
run_web_e2e_tests
|
||||
results+=("E2E:$?")
|
||||
[[ ${results[-1]##*:} -ne 0 ]] && failed=1
|
||||
fi
|
||||
|
||||
# A11y tests (non-blocking)
|
||||
run_web_a11y_tests
|
||||
results+=("A11y:$?")
|
||||
|
||||
# Print summary
|
||||
log_section "Web Test Results"
|
||||
for result in "${results[@]}"; do
|
||||
local name="${result%%:*}"
|
||||
local status="${result##*:}"
|
||||
if [[ "$status" == "0" ]]; then
|
||||
print_status "Web $name" "true"
|
||||
else
|
||||
print_status "Web $name" "false"
|
||||
fi
|
||||
done
|
||||
|
||||
return $failed
|
||||
}
|
||||
|
||||
run_web_full() {
|
||||
log_section "Full Web Test Suite"
|
||||
log_info "Running all web tests including extended categories"
|
||||
|
||||
local failed=0
|
||||
|
||||
# PR-gating tests
|
||||
run_web_pr_gating || failed=1
|
||||
|
||||
# Extended tests
|
||||
run_web_storybook_build || log_warn "Storybook build failed (non-blocking)"
|
||||
run_web_lighthouse || log_warn "Lighthouse audit failed (non-blocking)"
|
||||
|
||||
return $failed
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# EXPORTS
|
||||
# =============================================================================
|
||||
|
||||
export -f check_web_dependencies
|
||||
export -f install_web_dependencies
|
||||
export -f ensure_web_dependencies
|
||||
export -f run_web_unit_tests
|
||||
export -f run_web_e2e_tests
|
||||
export -f run_web_a11y_tests
|
||||
export -f run_web_build
|
||||
export -f run_web_storybook_build
|
||||
export -f run_web_lighthouse
|
||||
export -f run_web_smoke
|
||||
export -f run_web_pr_gating
|
||||
export -f run_web_full
|
||||
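
# Example usage (hypothetical caller sketch; assumes ci-common.sh supplies the
# log_*/print_status/start_timer helpers and WEB_DIR points at the web workspace):
#   source devops/scripts/lib/ci-web.sh
#   run_web_smoke                    # build + unit tests
#   DRY_RUN=true run_web_pr_gating   # print the PR-gating plan without executing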
178
devops/scripts/lib/exit-codes.sh
Normal file
@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling

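# Example (hypothetical caller sketch; exit_with is defined below):
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#   [[ -f "$config" ]] || exit_with "$EXIT_CONFIG_ERROR" "missing config: $config"
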
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1

# ============================================================================
# Standard Exit Codes
# ============================================================================

# Success
export EXIT_SUCCESS=0

# General errors (1-9)
export EXIT_ERROR=1              # Generic error
export EXIT_USAGE=2              # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3       # Configuration error
export EXIT_NOT_FOUND=4          # File/resource not found
export EXIT_PERMISSION=5         # Permission denied
export EXIT_IO_ERROR=6           # I/O error
export EXIT_NETWORK_ERROR=7      # Network error
export EXIT_TIMEOUT=8            # Operation timed out
export EXIT_INTERRUPTED=9        # User interrupted (Ctrl+C)

# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10      # Required tool not installed
export EXIT_TOOL_ERROR=11        # Tool execution failed
export EXIT_VERSION_MISMATCH=12  # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13  # Dependency resolution failed

# Build errors (20-29)
export EXIT_BUILD_FAILED=20      # Build compilation failed
export EXIT_RESTORE_FAILED=21    # Package restore failed
export EXIT_PUBLISH_FAILED=22    # Publish failed
export EXIT_PACKAGING_FAILED=23  # Packaging failed

# Test errors (30-39)
export EXIT_TEST_FAILED=30       # Tests failed
export EXIT_TEST_TIMEOUT=31      # Test timed out
export EXIT_FIXTURE_ERROR=32     # Test fixture error
export EXIT_DETERMINISM_FAIL=33  # Determinism check failed

# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40     # Deployment failed
export EXIT_ROLLBACK_FAILED=41   # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43    # Container registry error

# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51      # Schema validation failed
export EXIT_LINT_ERROR=52        # Lint check failed
export EXIT_FORMAT_ERROR=53      # Format check failed
export EXIT_LICENSE_ERROR=54     # License compliance failed

# Security errors (60-69)
export EXIT_SECURITY_ERROR=60    # Security check failed
export EXIT_SECRETS_FOUND=61     # Secrets detected in code
export EXIT_VULN_FOUND=62        # Vulnerabilities found
export EXIT_SIGN_FAILED=63       # Signing failed
export EXIT_VERIFY_FAILED=64     # Verification failed

# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70         # Git operation failed
export EXIT_DIRTY_WORKTREE=71    # Uncommitted changes
export EXIT_MERGE_CONFLICT=72    # Merge conflict
export EXIT_BRANCH_ERROR=73      # Branch operation failed

# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80      # .NET specific error
export EXIT_DOCKER_ERROR=81      # Docker specific error
export EXIT_HELM_ERROR=82        # Helm specific error
export EXIT_KUBECTL_ERROR=83     # kubectl specific error
export EXIT_NPM_ERROR=84         # npm specific error
export EXIT_PYTHON_ERROR=85      # Python specific error

# Legacy compatibility
export EXIT_TOOLCHAIN=69         # Tool not found (legacy, use EXIT_MISSING_TOOL)

# ============================================================================
# Helper Functions
# ============================================================================

# Get exit code name from number
exit_code_name() {
    local code="${1:-}"

    case "$code" in
        0) echo "SUCCESS" ;;
        1) echo "ERROR" ;;
        2) echo "USAGE" ;;
        3) echo "CONFIG_ERROR" ;;
        4) echo "NOT_FOUND" ;;
        5) echo "PERMISSION" ;;
        6) echo "IO_ERROR" ;;
        7) echo "NETWORK_ERROR" ;;
        8) echo "TIMEOUT" ;;
        9) echo "INTERRUPTED" ;;
        10) echo "MISSING_TOOL" ;;
        11) echo "TOOL_ERROR" ;;
        12) echo "VERSION_MISMATCH" ;;
        13) echo "DEPENDENCY_ERROR" ;;
        20) echo "BUILD_FAILED" ;;
        21) echo "RESTORE_FAILED" ;;
        22) echo "PUBLISH_FAILED" ;;
        23) echo "PACKAGING_FAILED" ;;
        30) echo "TEST_FAILED" ;;
        31) echo "TEST_TIMEOUT" ;;
        32) echo "FIXTURE_ERROR" ;;
        33) echo "DETERMINISM_FAIL" ;;
        40) echo "DEPLOY_FAILED" ;;
        41) echo "ROLLBACK_FAILED" ;;
        42) echo "HEALTH_CHECK_FAIL" ;;
        43) echo "REGISTRY_ERROR" ;;
        50) echo "VALIDATION_FAILED" ;;
        51) echo "SCHEMA_ERROR" ;;
        52) echo "LINT_ERROR" ;;
        53) echo "FORMAT_ERROR" ;;
        54) echo "LICENSE_ERROR" ;;
        60) echo "SECURITY_ERROR" ;;
        61) echo "SECRETS_FOUND" ;;
        62) echo "VULN_FOUND" ;;
        63) echo "SIGN_FAILED" ;;
        64) echo "VERIFY_FAILED" ;;
        69) echo "TOOLCHAIN (legacy)" ;;
        70) echo "GIT_ERROR" ;;
        71) echo "DIRTY_WORKTREE" ;;
        72) echo "MERGE_CONFLICT" ;;
        73) echo "BRANCH_ERROR" ;;
        80) echo "DOTNET_ERROR" ;;
        81) echo "DOCKER_ERROR" ;;
        82) echo "HELM_ERROR" ;;
        83) echo "KUBECTL_ERROR" ;;
        84) echo "NPM_ERROR" ;;
        85) echo "PYTHON_ERROR" ;;
        126) echo "COMMAND_NOT_EXECUTABLE" ;;
        127) echo "COMMAND_NOT_FOUND" ;;
        *)
            if [[ $code -ge 128 ]] && [[ $code -le 255 ]]; then
                local signal=$((code - 128))
                echo "SIGNAL_${signal}"
            else
                echo "UNKNOWN_${code}"
            fi
            ;;
    esac
}

# Check if exit code indicates success
is_success() {
    [[ "${1:-1}" -eq 0 ]]
}

# Check if exit code indicates error
is_error() {
    [[ "${1:-0}" -ne 0 ]]
}

# Exit with message and code
exit_with() {
    local code="${1:-1}"
    shift
    if [[ $# -gt 0 ]]; then
        echo "$@" >&2
    fi
    exit "$code"
}
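
# Example: translate a failure into a readable name (hypothetical caller sketch;
# some_build_step is illustrative):
#   some_build_step; rc=$?
#   is_error "$rc" && echo "step failed: $(exit_code_name "$rc")" >&2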
262
devops/scripts/lib/git-utils.sh
Normal file
@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Repository Information
# ============================================================================

# Get repository root directory
git_root() {
    git rev-parse --show-toplevel 2>/dev/null || echo "."
}

# Check if current directory is a git repository
is_git_repo() {
    git rev-parse --git-dir >/dev/null 2>&1
}

# Get current commit SHA (full)
git_sha() {
    git rev-parse HEAD 2>/dev/null
}

# Get current commit SHA (short)
git_sha_short() {
    git rev-parse --short HEAD 2>/dev/null
}

# Get current branch name
git_branch() {
    git rev-parse --abbrev-ref HEAD 2>/dev/null
}

# Get current tag (if HEAD is tagged)
git_tag() {
    git describe --tags --exact-match HEAD 2>/dev/null || echo ""
}

# Get latest tag
git_latest_tag() {
    git describe --tags --abbrev=0 2>/dev/null || echo ""
}

# Get remote URL
git_remote_url() {
    local remote="${1:-origin}"
    git remote get-url "$remote" 2>/dev/null
}

# Get repository name from remote URL
git_repo_name() {
    local url
    url=$(git_remote_url "${1:-origin}")
    basename "$url" .git
}

# ============================================================================
# Commit Information
# ============================================================================

# Get commit message
git_commit_message() {
    local sha="${1:-HEAD}"
    git log -1 --format="%s" "$sha" 2>/dev/null
}

# Get commit author
git_commit_author() {
    local sha="${1:-HEAD}"
    git log -1 --format="%an" "$sha" 2>/dev/null
}

# Get commit author email
git_commit_author_email() {
    local sha="${1:-HEAD}"
    git log -1 --format="%ae" "$sha" 2>/dev/null
}

# Get commit timestamp (ISO 8601)
git_commit_timestamp() {
    local sha="${1:-HEAD}"
    git log -1 --format="%aI" "$sha" 2>/dev/null
}

# Get commit timestamp (Unix epoch)
git_commit_epoch() {
    local sha="${1:-HEAD}"
    git log -1 --format="%at" "$sha" 2>/dev/null
}

# ============================================================================
# Working Tree State
# ============================================================================

# Check if working tree is clean
git_is_clean() {
    [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}

# Check if working tree is dirty
git_is_dirty() {
    ! git_is_clean
}

# Get list of changed files
git_changed_files() {
    git status --porcelain 2>/dev/null | awk '{print $2}'
}

# Get list of staged files
git_staged_files() {
    git diff --cached --name-only 2>/dev/null
}

# Get list of untracked files
git_untracked_files() {
    git ls-files --others --exclude-standard 2>/dev/null
}

# ============================================================================
# Diff and History
# ============================================================================

# Get files changed between two refs
git_diff_files() {
    local from="${1:-HEAD~1}"
    local to="${2:-HEAD}"
    git diff --name-only "$from" "$to" 2>/dev/null
}

# Get files changed in last N commits
git_recent_files() {
    local count="${1:-1}"
    git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}

# Check if file was changed between two refs
git_file_changed() {
    local file="$1"
    local from="${2:-HEAD~1}"
    local to="${3:-HEAD}"
    git diff --name-only "$from" "$to" -- "$file" 2>/dev/null | grep -q "$file"
}

# Get commits between two refs
git_commits_between() {
    local from="${1:-HEAD~10}"
    local to="${2:-HEAD}"
    git log --oneline "$from".."$to" 2>/dev/null
}

# ============================================================================
# Tag Operations
# ============================================================================

# Create a tag
git_create_tag() {
    local tag="$1"
    local message="${2:-}"

    if [[ -n "$message" ]]; then
        git tag -a "$tag" -m "$message"
    else
        git tag "$tag"
    fi
}

# Delete a tag
git_delete_tag() {
    local tag="$1"
    git tag -d "$tag" 2>/dev/null
}

# Push tag to remote
git_push_tag() {
    local tag="$1"
    local remote="${2:-origin}"
    git push "$remote" "$tag"
}

# List tags matching pattern
git_list_tags() {
    local pattern="${1:-*}"
    git tag -l "$pattern" 2>/dev/null
}

# ============================================================================
# Branch Operations
# ============================================================================

# Check if branch exists
git_branch_exists() {
    local branch="$1"
    git show-ref --verify --quiet "refs/heads/$branch" 2>/dev/null
}

# Check if remote branch exists
git_remote_branch_exists() {
    local branch="$1"
    local remote="${2:-origin}"
    git show-ref --verify --quiet "refs/remotes/$remote/$branch" 2>/dev/null
}

# Get default branch
git_default_branch() {
    local remote="${1:-origin}"
    git remote show "$remote" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}

# ============================================================================
# CI/CD Helpers
# ============================================================================

# Get version string for CI builds
git_ci_version() {
    local tag
    tag=$(git_tag)

    if [[ -n "$tag" ]]; then
        echo "$tag"
    else
        local branch sha
        branch=$(git_branch | tr '/' '-')
        sha=$(git_sha_short)
        echo "${branch}-${sha}"
    fi
}

# Check if current commit is on default branch
git_is_default_branch() {
    local current default
    current=$(git_branch)
    default=$(git_default_branch)
    [[ "$current" == "$default" ]]
}

# Check if running in CI environment
git_is_ci() {
    [[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]] || [[ -n "${GITLAB_CI:-}" ]]
}

# Ensure clean worktree or fail
git_require_clean() {
    if git_is_dirty; then
        log_error "Working tree is dirty. Commit or stash changes first."
        return "${EXIT_DIRTY_WORKTREE:-71}"
    fi
}
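
# Example: derive a CI-friendly version string (hypothetical caller sketch;
# the registry host is illustrative only):
#   source devops/scripts/lib/git-utils.sh
#   git_require_clean || exit $?
#   IMAGE_TAG="registry.example.com/app:$(git_ci_version)"   # e.g. v1.2.3 or main-abc1234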
266
devops/scripts/lib/hash-utils.sh
Normal file
@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Hash Computation
# ============================================================================

# Compute SHA-256 hash of a file
compute_sha256() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v sha256sum >/dev/null 2>&1; then
        sha256sum "$file" | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        shasum -a 256 "$file" | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -sha256 "$file" | awk '{print $NF}'
    else
        log_error "No SHA-256 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute SHA-512 hash of a file
compute_sha512() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v sha512sum >/dev/null 2>&1; then
        sha512sum "$file" | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        shasum -a 512 "$file" | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -sha512 "$file" | awk '{print $NF}'
    else
        log_error "No SHA-512 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute MD5 hash of a file (for compatibility, not security)
compute_md5() {
    local file="$1"

    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    if command -v md5sum >/dev/null 2>&1; then
        md5sum "$file" | awk '{print $1}'
    elif command -v md5 >/dev/null 2>&1; then
        md5 -q "$file"
    elif command -v openssl >/dev/null 2>&1; then
        openssl dgst -md5 "$file" | awk '{print $NF}'
    else
        log_error "No MD5 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
    fi
}

# Compute hash of string
compute_string_hash() {
    local string="$1"
    local algorithm="${2:-sha256}"

    case "$algorithm" in
        sha256)
            echo -n "$string" | sha256sum 2>/dev/null | awk '{print $1}' || \
                echo -n "$string" | shasum -a 256 2>/dev/null | awk '{print $1}'
            ;;
        sha512)
            echo -n "$string" | sha512sum 2>/dev/null | awk '{print $1}' || \
                echo -n "$string" | shasum -a 512 2>/dev/null | awk '{print $1}'
            ;;
        md5)
            echo -n "$string" | md5sum 2>/dev/null | awk '{print $1}' || \
                echo -n "$string" | md5 2>/dev/null
            ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac
}

# ============================================================================
# Checksum Files
# ============================================================================

# Write checksum file for a single file
write_checksum() {
    local file="$1"
    local checksum_file="${2:-${file}.sha256}"
    local algorithm="${3:-sha256}"

    local hash
    case "$algorithm" in
        sha256) hash=$(compute_sha256 "$file") ;;
        sha512) hash=$(compute_sha512 "$file") ;;
        md5) hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac

    if [[ -z "$hash" ]]; then
        return "${EXIT_ERROR:-1}"
    fi

    local basename
    basename=$(basename "$file")
    echo "$hash $basename" > "$checksum_file"
    log_debug "Wrote checksum to $checksum_file"
}

# Write checksums for multiple files
write_checksums() {
    local output_file="$1"
    shift
    local files=("$@")

    : > "$output_file"

    for file in "${files[@]}"; do
        if [[ -f "$file" ]]; then
            local hash basename
            hash=$(compute_sha256 "$file")
            basename=$(basename "$file")
            echo "$hash $basename" >> "$output_file"
        fi
    done

    log_debug "Wrote checksums to $output_file"
}

# ============================================================================
# Checksum Verification
# ============================================================================

# Verify checksum of a file
verify_checksum() {
    local file="$1"
    local expected_hash="$2"
    local algorithm="${3:-sha256}"

    local actual_hash
    case "$algorithm" in
        sha256) actual_hash=$(compute_sha256 "$file") ;;
        sha512) actual_hash=$(compute_sha512 "$file") ;;
        md5) actual_hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac

    if [[ "$actual_hash" == "$expected_hash" ]]; then
        log_debug "Checksum verified: $file"
        return 0
    else
        log_error "Checksum mismatch for $file"
        log_error "  Expected: $expected_hash"
        log_error "  Actual:   $actual_hash"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi
}

# Verify checksums from file (sha256sum -c style)
verify_checksums_file() {
    local checksum_file="$1"
    local base_dir="${2:-.}"

    if [[ ! -f "$checksum_file" ]]; then
        log_error "Checksum file not found: $checksum_file"
        return "${EXIT_NOT_FOUND:-4}"
    fi

    local failures=0

    while IFS= read -r line; do
        # Skip empty lines and comments
        [[ -z "$line" ]] && continue
        [[ "$line" == \#* ]] && continue

        local hash filename
        hash=$(echo "$line" | awk '{print $1}')
        filename=$(echo "$line" | awk '{print $2}')

        local filepath="${base_dir}/${filename}"

        if [[ ! -f "$filepath" ]]; then
            log_error "File not found: $filepath"
            failures=$((failures + 1))
            continue
        fi

        if ! verify_checksum "$filepath" "$hash"; then
            failures=$((failures + 1))
        fi
    done < "$checksum_file"

    if [[ $failures -gt 0 ]]; then
        log_error "$failures checksum verification(s) failed"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi

    log_info "All checksums verified"
    return 0
}

# ============================================================================
# Helpers
# ============================================================================

# Check if two files have the same content
files_identical() {
    local file1="$1"
    local file2="$2"

    [[ -f "$file1" ]] && [[ -f "$file2" ]] || return 1

    local hash1 hash2
    hash1=$(compute_sha256 "$file1")
    hash2=$(compute_sha256 "$file2")

    [[ "$hash1" == "$hash2" ]]
}

# Get short hash for display
short_hash() {
    local hash="$1"
    local length="${2:-8}"
    echo "${hash:0:$length}"
}

# Generate deterministic ID from inputs
generate_id() {
    local inputs="$*"
    compute_string_hash "$inputs" sha256 | head -c 16
}
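
# Example round-trip (hypothetical caller sketch; file names are illustrative):
#   write_checksums out/SHA256SUMS out/app.tar.gz out/app.sbom.json
#   verify_checksums_file out/SHA256SUMS out   # non-zero (EXIT_VERIFY_FAILED) on mismatch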
181
devops/scripts/lib/logging.sh
Normal file
@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_LOGGING_LOADED=1

# Colors (disable with NO_COLOR=1)
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
    export LOG_COLOR_RED='\033[0;31m'
    export LOG_COLOR_GREEN='\033[0;32m'
    export LOG_COLOR_YELLOW='\033[1;33m'
    export LOG_COLOR_BLUE='\033[0;34m'
    export LOG_COLOR_MAGENTA='\033[0;35m'
    export LOG_COLOR_CYAN='\033[0;36m'
    export LOG_COLOR_GRAY='\033[0;90m'
    export LOG_COLOR_RESET='\033[0m'
else
    export LOG_COLOR_RED=''
    export LOG_COLOR_GREEN=''
    export LOG_COLOR_YELLOW=''
    export LOG_COLOR_BLUE=''
    export LOG_COLOR_MAGENTA=''
    export LOG_COLOR_CYAN=''
    export LOG_COLOR_GRAY=''
    export LOG_COLOR_RESET=''
fi

# Log level configuration
export LOG_LEVEL="${LOG_LEVEL:-INFO}"

# Convert log level to numeric for comparison
_log_level_to_num() {
    case "$1" in
        DEBUG) echo 0 ;;
        INFO) echo 1 ;;
        WARN) echo 2 ;;
        ERROR) echo 3 ;;
        *) echo 1 ;;
    esac
}

# Check if message should be logged based on level
_should_log() {
    local msg_level="$1"
    local current_level="${LOG_LEVEL:-INFO}"

    local msg_num current_num
    msg_num=$(_log_level_to_num "$msg_level")
    current_num=$(_log_level_to_num "$current_level")

    [[ $msg_num -ge $current_num ]]
}

# Format timestamp
_log_timestamp() {
    if [[ "${LOG_TIMESTAMPS:-true}" == "true" ]]; then
        date -u +"%Y-%m-%dT%H:%M:%SZ"
    fi
}

# Core logging function
_log() {
    local level="$1"
    local color="$2"
    shift 2

    if ! _should_log "$level"; then
        return 0
    fi

    local timestamp
    timestamp=$(_log_timestamp)

    local prefix=""
    if [[ -n "$timestamp" ]]; then
        prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
    fi

    echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}

# Public logging functions
log_debug() {
    _log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}

log_info() {
    _log "INFO" "${LOG_COLOR_GREEN}" "$@"
}

log_warn() {
    _log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}

log_error() {
    _log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}

# Step logging (for workflow stages)
log_step() {
    _log "STEP" "${LOG_COLOR_BLUE}" "$@"
}

# Success message
log_success() {
    _log "OK" "${LOG_COLOR_GREEN}" "$@"
}

# GitHub Actions annotations
log_gh_notice() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::notice::$*"
    else
        log_info "$@"
    fi
}

log_gh_warning() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::warning::$*"
    else
        log_warn "$@"
    fi
}

log_gh_error() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::error::$*"
    else
        log_error "$@"
    fi
}

# Group logging (for GitHub Actions)
log_group_start() {
    local title="$1"
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::group::$title"
    else
        log_step "=== $title ==="
    fi
}

log_group_end() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::endgroup::"
    fi
}

# Masked logging (for secrets)
log_masked() {
    local value="$1"
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::add-mask::$value"
    fi
}

# Die with error message
die() {
    log_error "$@"
    exit 1
}

# Conditional die
die_if() {
    local condition="$1"
    shift
    if eval "$condition"; then
        die "$@"
    fi
}
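
# Example (hypothetical caller sketch; LOG_LEVEL, NO_COLOR, and LOG_TIMESTAMPS
# are the knobs read by this library):
#   LOG_LEVEL=DEBUG ./devops/scripts/local-ci.sh smoke   # include log_debug output
#   NO_COLOR=1 LOG_TIMESTAMPS=false some-script.sh       # plain, timestamp-free logs
#   log_group_start "Restore"; dotnet restore; log_group_end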
274
devops/scripts/lib/path-utils.sh
Normal file
@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"

# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1

# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true

# ============================================================================
# Path Normalization
# ============================================================================

# Normalize path (resolve .., ., symlinks)
normalize_path() {
    local path="$1"

    # Handle empty path
    if [[ -z "$path" ]]; then
        echo "."
        return 0
    fi

    # Try realpath first (most reliable)
    if command -v realpath >/dev/null 2>&1; then
        realpath -m "$path" 2>/dev/null && return 0
    fi

    # Fallback to Python
    if command -v python3 >/dev/null 2>&1; then
        python3 -c "import os; print(os.path.normpath('$path'))" 2>/dev/null && return 0
    fi

    # Manual normalization (basic)
    echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}

# Get absolute path
absolute_path() {
    local path="$1"

    if [[ "$path" == /* ]]; then
        normalize_path "$path"
    else
        normalize_path "$(pwd)/$path"
    fi
}

# Get relative path from one path to another
relative_path() {
    local from="$1"
    local to="$2"

    if command -v realpath >/dev/null 2>&1; then
        realpath --relative-to="$from" "$to" 2>/dev/null && return 0
    fi

    if command -v python3 >/dev/null 2>&1; then
        python3 -c "import os.path; print(os.path.relpath('$to', '$from'))" 2>/dev/null && return 0
    fi

    # Fallback: just return absolute path
    absolute_path "$to"
}

# ============================================================================
# Path Components
# ============================================================================

# Get directory name
dir_name() {
    dirname "$1"
}

# Get base name
base_name() {
    basename "$1"
}

# Get file extension
file_extension() {
    local path="$1"
    local base
    base=$(basename "$path")

    if [[ "$base" == *.* ]]; then
        echo "${base##*.}"
    else
        echo ""
    fi
}

# Get file name without extension
file_stem() {
    local path="$1"
    local base
    base=$(basename "$path")

    if [[ "$base" == *.* ]]; then
        echo "${base%.*}"
    else
        echo "$base"
    fi
}

# ============================================================================
# Directory Operations
# ============================================================================

# Ensure directory exists
ensure_directory() {
    local dir="$1"
    if [[ ! -d "$dir" ]]; then
        mkdir -p "$dir"
    fi
}

# Create temporary directory
create_temp_dir() {
    local prefix="${1:-stellaops}"
    mktemp -d "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}

# Create temporary file
create_temp_file() {
    local prefix="${1:-stellaops}"
    local suffix="${2:-}"
    mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX${suffix}"
}

# Clean temporary directory
clean_temp() {
    local path="$1"
    if [[ -d "$path" ]] && [[ "$path" == *stellaops* ]]; then
        rm -rf "$path"
    fi
}

# ============================================================================
# File Existence Checks
# ============================================================================

# Check if file exists
file_exists() {
    [[ -f "$1" ]]
}

# Check if directory exists
dir_exists() {
    [[ -d "$1" ]]
}

# Check if path exists (file or directory)
path_exists() {
    [[ -e "$1" ]]
}

# Check if file is readable
file_readable() {
    [[ -r "$1" ]]
}

# Check if file is writable
file_writable() {
    [[ -w "$1" ]]
}

# Check if file is executable
file_executable() {
    [[ -x "$1" ]]
}

# ============================================================================
# File Discovery
# ============================================================================

# Find files by pattern
find_files() {
    local dir="${1:-.}"
    local pattern="${2:-*}"
    find "$dir" -type f -name "$pattern" 2>/dev/null
}

# Find files by extension
find_by_extension() {
    local dir="${1:-.}"
    local ext="${2:-}"
    find "$dir" -type f -name "*.${ext}" 2>/dev/null
}

# Find project files (csproj, package.json, etc.)
find_project_files() {
    local dir="${1:-.}"
    find "$dir" -type f \( \
        -name "*.csproj" -o \
        -name "*.fsproj" -o \
        -name "package.json" -o \
        -name "Cargo.toml" -o \
        -name "go.mod" -o \
        -name "pom.xml" -o \
        -name "build.gradle" \
        \) 2>/dev/null | grep -v node_modules | grep -v bin | grep -v obj
}

# Find test projects
find_test_projects() {
    local dir="${1:-.}"
    find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -v bin | grep -v obj
}

# ============================================================================
# Path Validation
# ============================================================================

# Check if path is under directory
path_under() {
    local path="$1"
    local dir="$2"

    local abs_path abs_dir
    abs_path=$(absolute_path "$path")
    abs_dir=$(absolute_path "$dir")

    [[ "$abs_path" == "$abs_dir"* ]]
}

# Validate path is safe (no directory traversal)
path_is_safe() {
    local path="$1"
    local base="${2:-.}"

    # Reject obvious traversal attempts (.. segments) and absolute paths
    if [[ "$path" == *".."* ]] || [[ "$path" == /* ]]; then
        return 1
    fi

    # Verify resolved path is under base
    path_under "$path" "$base"
}

# ============================================================================
# CI/CD Helpers
# ============================================================================

# Get artifact output directory
get_artifact_dir() {
    local name="${1:-artifacts}"
    local base="${GITHUB_WORKSPACE:-$(pwd)}"
    echo "${base}/out/${name}"
}

# Get test results directory
get_test_results_dir() {
    local base="${GITHUB_WORKSPACE:-$(pwd)}"
    echo "${base}/TestResults"
}

# Ensure artifact directory exists and return path
ensure_artifact_dir() {
    local name="${1:-artifacts}"
    local dir
    dir=$(get_artifact_dir "$name")
    ensure_directory "$dir"
    echo "$dir"
}
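
# Example (hypothetical caller sketch; $user_input is illustrative):
#   path_is_safe "$user_input" "$REPO_ROOT" || die "unsafe path: $user_input"
#   sbom_dir=$(ensure_artifact_dir "sbom")   # <workspace>/out/sbom, created on demand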
264
devops/scripts/local-ci.ps1
Normal file
@@ -0,0 +1,264 @@
<#
.SYNOPSIS
    Local CI Runner for Windows
    PowerShell wrapper for local-ci.sh

.DESCRIPTION
    Unified local CI/CD testing runner for StellaOps on Windows.
    This script wraps the Bash implementation via WSL2 or Git Bash.

.PARAMETER Mode
    The testing mode to run:
    - smoke    : Quick smoke test (unit tests only, ~2 min)
    - pr       : Full PR-gating suite (all required checks, ~15 min)
    - module   : Module-specific tests (auto-detect or specified)
    - workflow : Simulate specific workflow via act
    - release  : Release simulation (dry-run)
    - full     : All tests including extended categories (~45 min)

.PARAMETER Category
    Specific test category to run (Unit, Architecture, Contract, Integration, Security, Golden)

.PARAMETER Module
    Specific module to test (Scanner, Concelier, Authority, etc.)

.PARAMETER Workflow
    Specific workflow to simulate (for workflow mode)

.PARAMETER Docker
    Force Docker execution mode

.PARAMETER Native
    Force native execution mode

.PARAMETER Act
    Force act execution mode

.PARAMETER Parallel
    Number of parallel test runners (default: auto-detect)

.PARAMETER VerboseOutput
    Enable verbose output (passed to the Bash runner as --verbose)

.PARAMETER DryRun
    Show what would run without executing

.PARAMETER Rebuild
    Force rebuild of CI Docker image

.PARAMETER NoServices
    Skip starting CI services

.PARAMETER KeepServices
    Don't stop services after tests

.EXAMPLE
    .\local-ci.ps1 smoke
    Quick validation before push

.EXAMPLE
    .\local-ci.ps1 pr
    Full PR check

.EXAMPLE
    .\local-ci.ps1 module -Module Scanner
    Test specific module

.EXAMPLE
    .\local-ci.ps1 workflow -Workflow test-matrix
    Simulate specific workflow

.NOTES
    Requires WSL2 or Git Bash to execute the underlying Bash script.
    For full feature support, use WSL2 with Ubuntu.
#>

[CmdletBinding()]
param(
    [Parameter(Position = 0)]
    [ValidateSet('smoke', 'pr', 'module', 'workflow', 'release', 'full')]
    [string]$Mode = 'smoke',

    [string]$Category,
    [string]$Module,
    [string]$Workflow,

    [switch]$Docker,
    [switch]$Native,
    [switch]$Act,

    [int]$Parallel,

    # Named VerboseOutput because [CmdletBinding()] reserves the common -Verbose parameter
    [switch]$VerboseOutput,
    [switch]$DryRun,
    [switch]$Rebuild,
    [switch]$NoServices,
    [switch]$KeepServices,

    [switch]$Help
)

# Script location
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)

# Show help if requested
if ($Help) {
    Get-Help $MyInvocation.MyCommand.Path -Detailed
    exit 0
}

function Write-ColoredOutput {
    param(
        [string]$Message,
        [ConsoleColor]$Color = [ConsoleColor]::White
    )
    $originalColor = $Host.UI.RawUI.ForegroundColor
    $Host.UI.RawUI.ForegroundColor = $Color
    Write-Host $Message
    $Host.UI.RawUI.ForegroundColor = $originalColor
}

function Write-Info { Write-ColoredOutput "[INFO] $args" -Color Cyan }
function Write-Success { Write-ColoredOutput "[OK] $args" -Color Green }
function Write-Warning { Write-ColoredOutput "[WARN] $args" -Color Yellow }
function Write-Error { Write-ColoredOutput "[ERROR] $args" -Color Red }

# Find Bash executable
function Find-BashExecutable {
    # Priority: WSL2 > Git Bash > any bash on PATH

    # Check for WSL
    $wsl = Get-Command wsl -ErrorAction SilentlyContinue
    if ($wsl) {
        # Verify WSL is working
        $wslCheck = & wsl --status 2>&1
        if ($LASTEXITCODE -eq 0) {
            Write-Info "Using WSL2 for Bash execution"
            return @{ Type = 'wsl'; Path = 'wsl' }
        }
    }

    # Check for Git Bash
    $gitBashPaths = @(
        "C:\Program Files\Git\bin\bash.exe",
        "C:\Program Files (x86)\Git\bin\bash.exe",
        "$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
    )

    foreach ($path in $gitBashPaths) {
        if (Test-Path $path) {
            Write-Info "Using Git Bash for execution"
            return @{ Type = 'gitbash'; Path = $path }
        }
    }

    # Check PATH for bash
    $bashInPath = Get-Command bash -ErrorAction SilentlyContinue
    if ($bashInPath) {
        Write-Info "Using Bash from PATH"
        return @{ Type = 'path'; Path = $bashInPath.Source }
    }

    return $null
}

# Convert Windows path to Unix path for WSL
function Convert-ToUnixPath {
    param([string]$WindowsPath)

    if ($WindowsPath -match '^([A-Za-z]):(.*)$') {
        $drive = $Matches[1].ToLower()
        $rest = $Matches[2] -replace '\\', '/'
        return "/mnt/$drive$rest"
    }
    return $WindowsPath -replace '\\', '/'
}

# Build argument list
function Build-Arguments {
    $argList = @($Mode)

    if ($Category) { $argList += "--category"; $argList += $Category }
    if ($Module) { $argList += "--module"; $argList += $Module }
    if ($Workflow) { $argList += "--workflow"; $argList += $Workflow }
    if ($Docker) { $argList += "--docker" }
    if ($Native) { $argList += "--native" }
    if ($Act) { $argList += "--act" }
    if ($Parallel) { $argList += "--parallel"; $argList += $Parallel }
    if ($VerboseOutput) { $argList += "--verbose" }
    if ($DryRun) { $argList += "--dry-run" }
    if ($Rebuild) { $argList += "--rebuild" }
    if ($NoServices) { $argList += "--no-services" }
    if ($KeepServices) { $argList += "--keep-services" }

    return $argList
}

# Main execution
Write-Host ""
Write-Host "=========================================" -ForegroundColor Magenta
Write-Host " StellaOps Local CI Runner (Windows)    " -ForegroundColor Magenta
Write-Host "=========================================" -ForegroundColor Magenta
Write-Host ""

# Find Bash
$bash = Find-BashExecutable
if (-not $bash) {
    Write-Error "Bash not found. Please install one of the following:"
    Write-Host "  - WSL2: https://docs.microsoft.com/en-us/windows/wsl/install"
    Write-Host "  - Git for Windows: https://git-scm.com/download/win"
    exit 1
}

# Build script path
$scriptPath = Join-Path $ScriptDir "local-ci.sh"
if (-not (Test-Path $scriptPath)) {
    Write-Error "Script not found: $scriptPath"
    exit 1
}

# Build arguments
$bashArgs = Build-Arguments

Write-Info "Mode: $Mode"
Write-Info "Bash: $($bash.Type)"
Write-Info "Repository: $RepoRoot"
Write-Host ""

# Execute based on Bash type
try {
    switch ($bash.Type) {
        'wsl' {
            $unixScript = Convert-ToUnixPath $scriptPath
            Write-Info "Executing: wsl bash $unixScript $($bashArgs -join ' ')"
            & wsl bash $unixScript @bashArgs
        }
        'gitbash' {
            # Git Bash uses its own path conversion
            $unixScript = $scriptPath -replace '\\', '/'
            Write-Info "Executing: $($bash.Path) $unixScript $($bashArgs -join ' ')"
            & $bash.Path $unixScript @bashArgs
        }
        'path' {
            Write-Info "Executing: bash $scriptPath $($bashArgs -join ' ')"
            & bash $scriptPath @bashArgs
        }
    }

    $exitCode = $LASTEXITCODE
}
catch {
    Write-Error "Execution failed: $_"
    $exitCode = 1
}

# Report result
Write-Host ""
if ($exitCode -eq 0) {
    Write-Success "Local CI completed successfully!"
} else {
    Write-Error "Local CI failed with exit code: $exitCode"
}

exit $exitCode
818
devops/scripts/local-ci.sh
Normal file
@@ -0,0 +1,818 @@
#!/usr/bin/env bash
|
||||
# =============================================================================
|
||||
# LOCAL CI RUNNER
|
||||
# =============================================================================
|
||||
# Unified local CI/CD testing runner for StellaOps.
|
||||
#
|
||||
# Usage:
|
||||
# ./devops/scripts/local-ci.sh [mode] [options]
|
||||
#
|
||||
# Modes:
|
||||
# smoke - Quick smoke test (unit tests only, ~2 min)
|
||||
# pr - Full PR-gating suite (all required checks, ~15 min)
|
||||
# module - Module-specific tests (auto-detect or specified)
|
||||
# workflow - Simulate specific workflow via act
|
||||
# release - Release simulation (dry-run)
|
||||
# full - All tests including extended categories (~45 min)
|
||||
#
|
||||
# Options:
|
||||
# --category <cat> Run specific test category
|
||||
# --workflow <name> Specific workflow to simulate
|
||||
# --module <name> Specific module to test
|
||||
# --docker Force Docker execution
|
||||
# --native Force native execution
|
||||
# --act Force act execution
|
||||
# --parallel <n> Parallel test runners (default: CPU count)
|
||||
# --verbose Verbose output
|
||||
# --dry-run Show what would run without executing
|
||||
# --rebuild Force rebuild of CI Docker image
|
||||
# --no-services Skip starting CI services
|
||||
# --keep-services Don't stop services after tests
|
||||
# --help Show this help message
|
||||
#
|
||||
# Examples:
|
||||
# ./local-ci.sh smoke # Quick validation
|
||||
# ./local-ci.sh pr # Full PR check
|
||||
# ./local-ci.sh module --module Scanner # Test Scanner module
|
||||
# ./local-ci.sh workflow --workflow test-matrix
|
||||
# ./local-ci.sh release --dry-run
|
||||
#
|
||||
# =============================================================================
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# =============================================================================
|
||||
# SCRIPT INITIALIZATION
|
||||
# =============================================================================
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
export REPO_ROOT
|
||||
|
||||
# Source libraries
|
||||
source "$SCRIPT_DIR/lib/ci-common.sh"
|
||||
source "$SCRIPT_DIR/lib/ci-docker.sh"
|
||||
source "$SCRIPT_DIR/lib/ci-web.sh" 2>/dev/null || true # Web testing utilities
|
||||
|
||||
# =============================================================================
|
||||
# CONSTANTS
|
||||
# =============================================================================
|
||||
|
||||
# Modes
|
||||
MODE_SMOKE="smoke"
|
||||
MODE_PR="pr"
|
||||
MODE_MODULE="module"
|
||||
MODE_WORKFLOW="workflow"
|
||||
MODE_RELEASE="release"
|
||||
MODE_FULL="full"
|
||||
|
||||
# Test categories
|
||||
PR_GATING_CATEGORIES=(Unit Architecture Contract Integration Security Golden)
|
||||
EXTENDED_CATEGORIES=(Performance Benchmark AirGap Chaos Determinism Resilience Observability)
|
||||
ALL_CATEGORIES=("${PR_GATING_CATEGORIES[@]}" "${EXTENDED_CATEGORIES[@]}")
|
||||
|
||||
# Default configuration
|
||||
RESULTS_DIR="$REPO_ROOT/out/local-ci"
|
||||
TRX_DIR="$RESULTS_DIR/trx"
|
||||
LOGS_DIR="$RESULTS_DIR/logs"
|
||||
|
||||
# =============================================================================
|
||||
# CONFIGURATION
|
||||
# =============================================================================
|
||||
|
||||
MODE=""
|
||||
EXECUTION_ENGINE="" # docker, native, act
|
||||
SPECIFIC_CATEGORY=""
|
||||
SPECIFIC_MODULE=""
|
||||
SPECIFIC_WORKFLOW=""
|
||||
PARALLEL_JOBS=""
|
||||
VERBOSE=false
|
||||
DRY_RUN=false
|
||||
REBUILD_IMAGE=false
|
||||
SKIP_SERVICES=false
|
||||
KEEP_SERVICES=false
|
||||
|
||||
# =============================================================================
|
||||
# USAGE
|
||||
# =============================================================================
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
Usage: $(basename "$0") [mode] [options]
|
||||
|
||||
Modes:
|
||||
smoke Quick smoke test (unit tests only, ~2 min)
|
||||
pr Full PR-gating suite (all required checks, ~15 min)
|
||||
module Module-specific tests (auto-detect or specified)
|
||||
workflow Simulate specific workflow via act
|
||||
release Release simulation (dry-run)
|
||||
full All tests including extended categories (~45 min)
|
||||
|
||||
Options:
|
||||
--category <cat> Run specific test category (${ALL_CATEGORIES[*]})
|
||||
--workflow <name> Specific workflow to simulate (for workflow mode)
|
||||
--module <name> Specific module to test (for module mode)
|
||||
--docker Force Docker execution
|
||||
--native Force native execution
|
||||
--act Force act execution
|
||||
--parallel <n> Parallel test runners (default: auto-detect)
|
||||
--verbose Verbose output
|
||||
--dry-run Show what would run without executing
|
||||
--rebuild Force rebuild of CI Docker image
|
||||
--no-services Skip starting CI services
|
||||
--keep-services Don't stop services after tests
|
||||
--help Show this help message
|
||||
|
||||
Examples:
|
||||
$(basename "$0") smoke # Quick validation before push
|
||||
$(basename "$0") pr # Full PR check
|
||||
$(basename "$0") pr --category Unit # Only run Unit tests
|
||||
$(basename "$0") module # Auto-detect changed modules
|
||||
$(basename "$0") module --module Scanner # Test specific module
|
||||
$(basename "$0") workflow --workflow test-matrix
|
||||
$(basename "$0") release --dry-run
|
||||
$(basename "$0") pr --verbose --docker
|
||||
|
||||
Test Categories:
|
||||
PR-Gating: ${PR_GATING_CATEGORIES[*]}
|
||||
Extended: ${EXTENDED_CATEGORIES[*]}
|
||||
EOF
|
||||
}
|
||||
|
# =============================================================================
# ARGUMENT PARSING
# =============================================================================

parse_args() {
    while [[ $# -gt 0 ]]; do
        case $1 in
            smoke|pr|module|workflow|release|full)
                MODE="$1"
                shift
                ;;
            --category)
                SPECIFIC_CATEGORY="$2"
                shift 2
                ;;
            --workflow)
                SPECIFIC_WORKFLOW="$2"
                shift 2
                ;;
            --module)
                SPECIFIC_MODULE="$2"
                shift 2
                ;;
            --docker)
                EXECUTION_ENGINE="docker"
                shift
                ;;
            --native)
                EXECUTION_ENGINE="native"
                shift
                ;;
            --act)
                EXECUTION_ENGINE="act"
                shift
                ;;
            --parallel)
                PARALLEL_JOBS="$2"
                shift 2
                ;;
            --verbose|-v)
                VERBOSE=true
                shift
                ;;
            --dry-run)
                DRY_RUN=true
                shift
                ;;
            --rebuild)
                REBUILD_IMAGE=true
                shift
                ;;
            --no-services)
                SKIP_SERVICES=true
                shift
                ;;
            --keep-services)
                KEEP_SERVICES=true
                shift
                ;;
            --help|-h)
                usage
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                usage
                exit 1
                ;;
        esac
    done

    # Default mode is smoke
    if [[ -z "$MODE" ]]; then
        MODE="$MODE_SMOKE"
    fi

    # Default execution engine based on mode
    if [[ -z "$EXECUTION_ENGINE" ]]; then
        case "$MODE" in
            workflow)
                EXECUTION_ENGINE="act"
                ;;
            *)
                EXECUTION_ENGINE="native"
                ;;
        esac
    fi

    # Auto-detect parallel jobs
    if [[ -z "$PARALLEL_JOBS" ]]; then
        PARALLEL_JOBS=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4)
    fi

    export VERBOSE
}

# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================

check_dependencies() {
    log_subsection "Checking Dependencies"

    local missing=0

    # Always required
    if ! require_command "dotnet" "https://dot.net/download"; then
        missing=1
    else
        local dotnet_version
        dotnet_version=$(dotnet --version 2>/dev/null || echo "unknown")
        log_debug "dotnet version: $dotnet_version"
    fi

    if ! require_command "git"; then
        missing=1
    fi

    # Docker required for docker mode
    if [[ "$EXECUTION_ENGINE" == "docker" ]]; then
        if ! check_docker; then
            missing=1
        fi
    fi

    # Act required for workflow mode
    if [[ "$EXECUTION_ENGINE" == "act" ]] || [[ "$MODE" == "$MODE_WORKFLOW" ]]; then
        if ! require_command "act" "brew install act (macOS) or https://github.com/nektos/act"; then
            log_warn "act not found - workflow simulation will be limited"
        fi
    fi

    # Check for solution file
    if ! require_file "$REPO_ROOT/src/StellaOps.sln"; then
        missing=1
    fi

    return $missing
}

# =============================================================================
# RESULT INITIALIZATION
# =============================================================================

init_results() {
    ensure_dir "$RESULTS_DIR"
    ensure_dir "$TRX_DIR"
    ensure_dir "$LOGS_DIR"

    # Create run metadata
    local run_id
    run_id=$(date +%Y%m%d_%H%M%S)
    export RUN_ID="$run_id"

    log_debug "Results directory: $RESULTS_DIR"
    log_debug "Run ID: $RUN_ID"
}

# =============================================================================
# TEST EXECUTION
# =============================================================================

run_dotnet_tests() {
    local category="$1"
    local filter="Category=$category"

    log_subsection "Running $category Tests"

    local trx_file="$TRX_DIR/${category}-${RUN_ID}.trx"
    local log_file="$LOGS_DIR/${category}-${RUN_ID}.log"

    local test_cmd=(
        dotnet test "$REPO_ROOT/src/StellaOps.sln"
        --filter "$filter"
        --configuration Release
        --no-build
        --logger "trx;LogFileName=$trx_file"
        --results-directory "$TRX_DIR"
        --verbosity minimal
    )

    if [[ "$DRY_RUN" == "true" ]]; then
        log_info "[DRY-RUN] Would execute: ${test_cmd[*]}"
        return 0
    fi

    local start_time
    start_time=$(start_timer)

    local result=0
    if [[ "$VERBOSE" == "true" ]]; then
        "${test_cmd[@]}" 2>&1 | tee "$log_file"
        # Capture the test run's status, not tee's
        result=${PIPESTATUS[0]}
    else
        "${test_cmd[@]}" > "$log_file" 2>&1
        result=$?
    fi

    stop_timer "$start_time" "$category tests"

    if [[ $result -eq 0 ]]; then
        log_success "$category tests passed"
    else
        log_error "$category tests failed (see $log_file)"
    fi

    return $result
}
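
# Note: the "Category=<name>" filter above selects tests by trait; e.g. with
# xUnit a test opts in via [Trait("Category", "Unit")] (illustrative C#
# attribute, not taken from this repo).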

run_dotnet_build() {
    log_subsection "Building Solution"

    local build_cmd=(
        dotnet build "$REPO_ROOT/src/StellaOps.sln"
        --configuration Release
    )

    if [[ "$DRY_RUN" == "true" ]]; then
        log_info "[DRY-RUN] Would execute: ${build_cmd[*]}"
        return 0
    fi

    local start_time
    start_time=$(start_timer)

    "${build_cmd[@]}"

    local result=$?
    stop_timer "$start_time" "Build"

    if [[ $result -eq 0 ]]; then
        log_success "Build completed successfully"
    else
        log_error "Build failed"
    fi

    return $result
}

# =============================================================================
# MODE IMPLEMENTATIONS
# =============================================================================

run_smoke_mode() {
    log_section "Smoke Test Mode"
    log_info "Running quick validation (Unit tests only)"

    local start_time
    start_time=$(start_timer)

    # Build
    run_dotnet_build || return 1

    # Run Unit tests only
    run_dotnet_tests "Unit"
    local result=$?

    stop_timer "$start_time" "Smoke test"
    return $result
}

run_pr_mode() {
    log_section "PR-Gating Mode"
    log_info "Running full PR-gating suite"
    log_info "Categories: ${PR_GATING_CATEGORIES[*]}"

    local start_time
    start_time=$(start_timer)
    local failed=0
    local results=()

    # Check if Web module has changes
    local web_changed=false
    local changed_files
    changed_files=$(get_changed_files main 2>/dev/null || echo "")
    if echo "$changed_files" | grep -q "^src/Web/"; then
        web_changed=true
        log_info "Web module changes detected - will run Web tests"
    fi

    # Start services if needed
    if [[ "$SKIP_SERVICES" != "true" ]]; then
        start_ci_services postgres-ci valkey-ci || {
            log_warn "Failed to start services, continuing anyway..."
        }
    fi

    # Build .NET solution
    run_dotnet_build || return 1

    # Run each .NET category
    if [[ -n "$SPECIFIC_CATEGORY" ]]; then
        local single_status=0
        if [[ "$SPECIFIC_CATEGORY" == "Web" ]] || [[ "$SPECIFIC_CATEGORY" == "web" ]]; then
            # Run Web tests only
            if type run_web_pr_gating &>/dev/null; then
                run_web_pr_gating
                single_status=$?
                results+=("Web:$single_status")
            fi
        else
            run_dotnet_tests "$SPECIFIC_CATEGORY"
            single_status=$?
            results+=("$SPECIFIC_CATEGORY:$single_status")
        fi
        # Propagate a single-category failure to the suite result
        if [[ $single_status -ne 0 ]]; then
            failed=1
        fi
    else
        for category in "${PR_GATING_CATEGORIES[@]}"; do
            run_dotnet_tests "$category"
            local cat_result=$?
            results+=("$category:$cat_result")
            if [[ $cat_result -ne 0 ]]; then
                failed=1
            fi
        done

        # Run Web tests if Web module changed
        if [[ "$web_changed" == "true" ]]; then
            log_subsection "Web Module Tests"
            if type run_web_pr_gating &>/dev/null; then
                run_web_pr_gating
                local web_result=$?
                results+=("Web:$web_result")
                if [[ $web_result -ne 0 ]]; then
                    failed=1
                fi
            else
                log_warn "Web testing library not loaded"
            fi
        fi
    fi

    # Stop services
    if [[ "$SKIP_SERVICES" != "true" ]] && [[ "$KEEP_SERVICES" != "true" ]]; then
        stop_ci_services
    fi

    # Print summary
    log_section "PR-Gating Results"
    for result in "${results[@]}"; do
        local name="${result%%:*}"
        local status="${result##*:}"
        if [[ "$status" == "0" ]]; then
            print_status "$name" "true"
        else
            print_status "$name" "false"
        fi
    done

    stop_timer "$start_time" "PR-gating suite"
    return $failed
}

run_module_mode() {
    log_section "Module-Specific Mode"

    local modules_to_test=()
    local has_dotnet_modules=false
    local has_node_modules=false

    if [[ -n "$SPECIFIC_MODULE" ]]; then
        modules_to_test=("$SPECIFIC_MODULE")
        log_info "Testing specified module: $SPECIFIC_MODULE"
    else
        log_info "Auto-detecting changed modules..."
        local detected
        detected=$(detect_changed_modules main)

        if [[ "$detected" == "ALL" ]]; then
            log_info "Infrastructure changes detected - running all tests"
            run_pr_mode
            return $?
        elif [[ "$detected" == "NONE" ]]; then
            log_info "No module changes detected"
            return 0
        else
            read -ra modules_to_test <<< "$detected"
            log_info "Detected changed modules: ${modules_to_test[*]}"
        fi
    fi

    # Categorize modules
    for module in "${modules_to_test[@]}"; do
        if [[ " ${NODE_MODULES[*]} " =~ " ${module} " ]]; then
            has_node_modules=true
        else
            has_dotnet_modules=true
        fi
    done

    local start_time
    start_time=$(start_timer)
    local failed=0

    # Build .NET solution if we have .NET modules
    if [[ "$has_dotnet_modules" == "true" ]]; then
        run_dotnet_build || return 1
    fi

    for module in "${modules_to_test[@]}"; do
        log_subsection "Testing Module: $module"

        # Check if this is a Node.js module (Web, DevPortal)
        if [[ " ${NODE_MODULES[*]} " =~ " ${module} " ]]; then
            log_info "Running Node.js tests for $module"

            case "$module" in
                Web)
                    if type run_web_pr_gating &>/dev/null; then
                        run_web_pr_gating || failed=1
                    else
                        log_warn "Web testing library not loaded - running basic npm test"
                        pushd "$REPO_ROOT/src/Web/StellaOps.Web" > /dev/null 2>&1 || continue
                        npm ci --prefer-offline --no-audit 2>/dev/null || npm install
                        npm run test:ci || failed=1
                        popd > /dev/null
                    fi
                    ;;
                DevPortal)
                    local portal_dir="$REPO_ROOT/src/DevPortal/StellaOps.DevPortal.Site"
                    if [[ -d "$portal_dir" ]]; then
                        pushd "$portal_dir" > /dev/null || continue
                        npm ci --prefer-offline --no-audit 2>/dev/null || npm install
                        npm test 2>/dev/null || log_warn "DevPortal tests not configured"
                        popd > /dev/null
                    fi
                    ;;
            esac
            continue
        fi

        # .NET module handling
        local test_paths="${MODULE_PATHS[$module]:-}"
        if [[ -z "$test_paths" ]]; then
            log_warn "Unknown module: $module"
            continue
        fi

        # Run tests for each path
        for path in $test_paths; do
            local test_dir="$REPO_ROOT/$path/__Tests"
            if [[ -d "$test_dir" ]]; then
                log_info "Running tests in: $test_dir"

                local test_projects
                test_projects=$(find "$test_dir" -name "*.Tests.csproj" -type f 2>/dev/null)

                for project in $test_projects; do
                    log_debug "Testing: $project"
                    dotnet test "$project" --configuration Release --no-build --verbosity minimal || {
                        failed=1
                    }
                done
            fi
        done
    done

    stop_timer "$start_time" "Module tests"
    return $failed
}

run_workflow_mode() {
    log_section "Workflow Simulation Mode"

    if [[ -z "$SPECIFIC_WORKFLOW" ]]; then
        log_error "No workflow specified. Use --workflow <name>"
        log_info "Example: --workflow test-matrix"
        return 1
    fi

    local workflow_file="$REPO_ROOT/.gitea/workflows/${SPECIFIC_WORKFLOW}.yml"
    if [[ ! -f "$workflow_file" ]]; then
        # Try without .yml extension
        workflow_file="$REPO_ROOT/.gitea/workflows/${SPECIFIC_WORKFLOW}"
        if [[ ! -f "$workflow_file" ]]; then
            log_error "Workflow not found: $SPECIFIC_WORKFLOW"
            log_info "Available workflows:"
            ls -1 "$REPO_ROOT/.gitea/workflows/"*.yml 2>/dev/null | xargs -n1 basename | head -20
            return 1
        fi
    fi

    log_info "Simulating workflow: $SPECIFIC_WORKFLOW"
    log_info "Workflow file: $workflow_file"

    if ! command -v act &>/dev/null; then
        log_error "act is required for workflow simulation"
        log_info "Install with: brew install act (macOS)"
        return 1
    fi

    # Build CI image if needed
    if [[ "$REBUILD_IMAGE" == "true" ]] || ! ci_image_exists; then
        build_ci_image "$REBUILD_IMAGE" || return 1
    fi

    local event_file="$REPO_ROOT/devops/ci-local/events/pull-request.json"
    local actrc_file="$REPO_ROOT/.actrc"

    local act_args=(
        -W "$workflow_file"
        --platform "ubuntu-22.04=$CI_IMAGE"
        --platform "ubuntu-latest=$CI_IMAGE"
        --env "DOTNET_NOLOGO=1"
        --env "DOTNET_CLI_TELEMETRY_OPTOUT=1"
        --env "TZ=UTC"
        --bind
    )

    if [[ -f "$event_file" ]]; then
        act_args+=(--eventpath "$event_file")
    fi

    if [[ -f "$REPO_ROOT/devops/ci-local/.env.local" ]]; then
        act_args+=(--env-file "$REPO_ROOT/devops/ci-local/.env.local")
    fi

    if [[ "$DRY_RUN" == "true" ]]; then
        act_args+=(-n)
    fi

    if [[ "$VERBOSE" == "true" ]]; then
        act_args+=(--verbose)
    fi

    log_info "Running: act ${act_args[*]}"
    act "${act_args[@]}"
}

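# For reference, a roughly equivalent manual invocation (workflow name is
# illustrative; the flags mirror the ones assembled above):
#   act -W .gitea/workflows/test-matrix.yml \
#       --platform "ubuntu-latest=$CI_IMAGE" --bind -n
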
run_release_mode() {
    log_section "Release Simulation Mode"
    log_info "Running release dry-run"

    if [[ "$DRY_RUN" != "true" ]]; then
        log_warn "Release mode always runs as dry-run for safety"
        DRY_RUN=true
    fi

    local start_time
    start_time=$(start_timer)

    # Build all modules
    log_subsection "Building All Modules"
    run_dotnet_build || return 1

    # Package CLI
    log_subsection "Packaging CLI"
    local cli_project="$REPO_ROOT/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
    if [[ -f "$cli_project" ]]; then
        log_info "[DRY-RUN] Would build CLI for: linux-x64, linux-arm64, osx-arm64, win-x64"
    fi

    # Validate Helm chart
    log_subsection "Validating Helm Chart"
    if command -v helm &>/dev/null; then
        local helm_chart="$REPO_ROOT/devops/helm/stellaops"
        if [[ -d "$helm_chart" ]]; then
            helm lint "$helm_chart" || log_warn "Helm lint warnings"
        fi
    else
        log_info "helm not found - skipping chart validation"
    fi

    # Generate release manifest
    log_subsection "Release Manifest"
    log_info "[DRY-RUN] Would generate:"
    log_info "  - Release notes"
    log_info "  - Changelog"
    log_info "  - Docker Compose files"
    log_info "  - SBOM"
    log_info "  - Checksums"

    stop_timer "$start_time" "Release simulation"
    return 0
}

run_full_mode() {
    log_section "Full Test Mode"
    log_info "Running all tests including extended categories"
    log_info "Categories: ${ALL_CATEGORIES[*]}"

    local start_time
    start_time=$(start_timer)
    local failed=0

    # Start all services
    if [[ "$SKIP_SERVICES" != "true" ]]; then
        start_ci_services || {
            log_warn "Failed to start services, continuing anyway..."
        }
    fi

    # Build
    run_dotnet_build || return 1

    # Run all categories
    for category in "${ALL_CATEGORIES[@]}"; do
        run_dotnet_tests "$category" || {
            failed=1
            log_warn "Continuing after $category failure..."
        }
    done

    # Stop services
    if [[ "$SKIP_SERVICES" != "true" ]] && [[ "$KEEP_SERVICES" != "true" ]]; then
        stop_ci_services
    fi

    stop_timer "$start_time" "Full test suite"
    return $failed
}

# =============================================================================
# MAIN
# =============================================================================

main() {
    parse_args "$@"

    log_section "StellaOps Local CI Runner"
    log_info "Mode: $MODE"
    log_info "Engine: $EXECUTION_ENGINE"
    log_info "Parallel: $PARALLEL_JOBS jobs"
    log_info "Repository: $REPO_ROOT"

    if [[ "$DRY_RUN" == "true" ]]; then
        log_warn "DRY-RUN MODE - No changes will be made"
    fi

    # Check dependencies
    check_dependencies || exit 1

    # Initialize results directory
    init_results

    # Load environment
    load_env_file "$REPO_ROOT/devops/ci-local/.env.local" || true

    # Run selected mode
    case "$MODE" in
        "$MODE_SMOKE")
            run_smoke_mode
            ;;
        "$MODE_PR")
            run_pr_mode
            ;;
        "$MODE_MODULE")
            run_module_mode
            ;;
        "$MODE_WORKFLOW")
            run_workflow_mode
            ;;
        "$MODE_RELEASE")
            run_release_mode
            ;;
        "$MODE_FULL")
            run_full_mode
            ;;
        *)
            log_error "Unknown mode: $MODE"
            usage
            exit 1
            ;;
    esac

    local result=$?

    log_section "Summary"
    log_info "Results saved to: $RESULTS_DIR"

    if [[ $result -eq 0 ]]; then
        log_success "All tests passed!"
    else
        log_error "Some tests failed"
    fi

    return $result
}

# Run main if executed directly
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    main "$@"
fi
244
devops/scripts/migrations-reset-pre-1.0.sql
Normal file
@@ -0,0 +1,244 @@
-- ============================================================================
-- StellaOps Migration Reset Script for Pre-1.0 Deployments
-- ============================================================================
-- This script updates schema_migrations tables to recognize the 1.0.0 compacted
-- migrations for deployments that upgraded from pre-1.0 versions.
--
-- Run via: psql -f migrations-reset-pre-1.0.sql
-- Or with connection: psql -h <host> -U <user> -d <db> -f migrations-reset-pre-1.0.sql
-- ============================================================================

BEGIN;

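-- Optional pre-flight check (illustrative, read-only; not part of the reset):
-- snapshot what is currently registered so the reset can be audited afterwards.
--   SELECT 'authority' AS module, migration_name, applied_at
--   FROM authority.schema_migrations ORDER BY applied_at;
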
-- ============================================================================
-- Authority Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_mongo_store_equivalents, 003_enable_rls,
--           004_offline_kit_audit, 005_verdict_manifests
-- New: 001_initial_schema (compacted)

DELETE FROM authority.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_mongo_store_equivalents.sql',
    '003_enable_rls.sql',
    '004_offline_kit_audit.sql',
    '005_verdict_manifests.sql'
);

INSERT INTO authority.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Scheduler Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_graph_jobs, 003_runs_policy,
--           010_generated_columns_runs, 011_enable_rls, 012_partition_audit,
--           012b_migrate_audit_data
-- New: 001_initial_schema (compacted)

DELETE FROM scheduler.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_graph_jobs.sql',
    '003_runs_policy.sql',
    '010_generated_columns_runs.sql',
    '011_enable_rls.sql',
    '012_partition_audit.sql',
    '012b_migrate_audit_data.sql'
);

INSERT INTO scheduler.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Scanner Module Reset
-- ============================================================================
-- Original: 001-034 plus various numbered files (27 total)
-- New: 001_initial_schema (compacted)

DELETE FROM scanner.schema_migrations
WHERE migration_name IN (
    '001_create_tables.sql',
    '002_proof_spine_tables.sql',
    '003_classification_history.sql',
    '004_scan_metrics.sql',
    '005_smart_diff_tables.sql',
    '006_score_replay_tables.sql',
    '007_unknowns_ranking_containment.sql',
    '008_epss_integration.sql',
    '0059_scans_table.sql',
    '0065_unknowns_table.sql',
    '0075_scan_findings_table.sql',
    '020_call_graph_tables.sql',
    '021_smart_diff_tables_search_path.sql',
    '022_reachability_drift_tables.sql',
    '023_scanner_api_ingestion.sql',
    '024_smart_diff_priority_score_widen.sql',
    '025_epss_raw_layer.sql',
    '026_epss_signal_layer.sql',
    '027_witness_storage.sql',
    '028_epss_triage_columns.sql',
    '029_vuln_surfaces.sql',
    '030_vuln_surface_triggers_update.sql',
    '031_reach_cache.sql',
    '032_idempotency_keys.sql',
    '033_binary_evidence.sql',
    '034_func_proof_tables.sql',
    'DM001_rename_scanner_migrations.sql'
);

INSERT INTO scanner.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Policy Module Reset
-- ============================================================================
-- Original: 001-013 (14 files, includes duplicate 010 prefix)
-- New: 001_initial_schema (compacted)

DELETE FROM policy.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_cvss_receipts.sql',
    '003_snapshots_violations.sql',
    '004_epss_risk_scores.sql',
    '005_cvss_multiversion.sql',
    '006_enable_rls.sql',
    '007_unknowns_registry.sql',
    '008_exception_objects.sql',
    '009_exception_applications.sql',
    '010_recheck_evidence.sql',
    '010_unknowns_blast_radius_containment.sql',
    '011_unknowns_reason_codes.sql',
    '012_budget_ledger.sql',
    '013_exception_approval.sql'
);

INSERT INTO policy.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Notify Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 010_enable_rls, 011_partition_deliveries,
--           011b_migrate_deliveries_data
-- New: 001_initial_schema (compacted)

DELETE FROM notify.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '010_enable_rls.sql',
    '011_partition_deliveries.sql',
    '011b_migrate_deliveries_data.sql'
);

INSERT INTO notify.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Concelier Module Reset
-- ============================================================================
-- Original: 17 migration files
-- New: 001_initial_schema (compacted)

DELETE FROM concelier.schema_migrations
WHERE migration_name ~ '^[0-9]{3}_.*\.sql$';
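-- The pattern above matches any migration with a three-digit numeric prefix
-- (e.g. '001_initial_schema.sql'), so the 17 originals need not be listed
-- individually.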

INSERT INTO concelier.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Attestor Module Reset (proofchain + attestor schemas)
-- ============================================================================
-- Original: 20251214000001_AddProofChainSchema.sql, 20251214000002_RollbackProofChainSchema.sql,
--           20251216_001_create_rekor_submission_queue.sql
-- New: 001_initial_schema (compacted)

DELETE FROM proofchain.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddProofChainSchema.sql',
    '20251214000002_RollbackProofChainSchema.sql',
    '20251216_001_create_rekor_submission_queue.sql'
);

INSERT INTO proofchain.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Signer Module Reset
-- ============================================================================
-- Original: 20251214000001_AddKeyManagementSchema.sql
-- New: 001_initial_schema (compacted)

DELETE FROM signer.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddKeyManagementSchema.sql'
);

INSERT INTO signer.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Signals Module Reset
-- ============================================================================
-- Original: V0000_001__extensions.sql, V1102_001__unknowns_scoring_schema.sql,
--           V1105_001__deploy_refs_graph_metrics.sql, V3102_001__callgraph_relational_tables.sql
-- New: 001_initial_schema (compacted)

DELETE FROM signals.schema_migrations
WHERE migration_name IN (
    'V0000_001__extensions.sql',
    'V1102_001__unknowns_scoring_schema.sql',
    'V1105_001__deploy_refs_graph_metrics.sql',
    'V3102_001__callgraph_relational_tables.sql'
);

INSERT INTO signals.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Verification
-- ============================================================================
-- Display current migration status per module

DO $$
DECLARE
    v_module TEXT;
    v_count INT;
BEGIN
    FOR v_module IN SELECT unnest(ARRAY['authority', 'scheduler', 'scanner', 'policy', 'notify', 'concelier', 'proofchain', 'signer', 'signals']) LOOP
        EXECUTE format('SELECT COUNT(*) FROM %I.schema_migrations', v_module) INTO v_count;
        RAISE NOTICE '% module: % migrations registered', v_module, v_count;
    END LOOP;
END $$;

COMMIT;

-- ============================================================================
-- Post-Reset Notes
-- ============================================================================
-- After running this script:
-- 1. All modules should show exactly 1 migration registered
-- 2. The schema structure should be identical to a fresh 1.0.0 deployment
-- 3. Future migrations (002+) will apply normally
--
-- To verify manually:
--   SELECT * FROM authority.schema_migrations;
--   SELECT * FROM scheduler.schema_migrations;
--   SELECT * FROM scanner.schema_migrations;
--   SELECT * FROM policy.schema_migrations;
--   SELECT * FROM notify.schema_migrations;
--   SELECT * FROM concelier.schema_migrations;
--   SELECT * FROM proofchain.schema_migrations;
--   SELECT * FROM signer.schema_migrations;
--   SELECT * FROM signals.schema_migrations;
-- ============================================================================
169
devops/scripts/regenerate-solution.ps1
Normal file
@@ -0,0 +1,169 @@
#!/usr/bin/env pwsh
# regenerate-solution.ps1 - Regenerate StellaOps.sln without duplicate projects
#
# This script:
# 1. Backs up the existing solution
# 2. Creates a new solution
# 3. Adds all .csproj files, skipping duplicates
# 4. Preserves solution folders where possible

param(
    [string]$SolutionPath = "src/StellaOps.sln",
    [switch]$DryRun
)

$ErrorActionPreference = "Stop"

# Canonical locations for test projects (in priority order)
# Later entries win when there are duplicates
$canonicalPatterns = @(
    # Module-local tests (highest priority)
    "src/*/__Tests/*/*.csproj",
    "src/*/__Libraries/__Tests/*/*.csproj",
    "src/__Libraries/__Tests/*/*.csproj",
    # Cross-module integration tests
    "src/__Tests/Integration/*/*.csproj",
    "src/__Tests/__Libraries/*/*.csproj",
    # Category-based cross-module tests
    "src/__Tests/chaos/*/*.csproj",
    "src/__Tests/security/*/*.csproj",
    "src/__Tests/interop/*/*.csproj",
    "src/__Tests/parity/*/*.csproj",
    "src/__Tests/reachability/*/*.csproj",
    # Single global tests
    "src/__Tests/*/*.csproj"
)

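# Example (illustrative invocation): preview the deduplicated project list
# without modifying the solution file:
#   pwsh devops/scripts/regenerate-solution.ps1 -DryRun
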
Write-Host "=== Solution Regeneration Script ===" -ForegroundColor Cyan
Write-Host "Solution: $SolutionPath"
Write-Host "Dry Run: $DryRun"
Write-Host ""

# Find all .csproj files
Write-Host "Finding all project files..." -ForegroundColor Yellow
$allProjects = Get-ChildItem -Path "src" -Filter "*.csproj" -Recurse |
    Where-Object { $_.FullName -notmatch "\\obj\\" -and $_.FullName -notmatch "\\bin\\" }

Write-Host "Found $($allProjects.Count) project files"

# Build a map of project name -> list of paths
$projectMap = @{}
foreach ($proj in $allProjects) {
    $name = $proj.BaseName
    if (-not $projectMap.ContainsKey($name)) {
        $projectMap[$name] = @()
    }
    $projectMap[$name] += $proj.FullName
}

# Find duplicates
$duplicates = $projectMap.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 }
Write-Host ""
Write-Host "Found $($duplicates.Count) projects with duplicate names:" -ForegroundColor Yellow
foreach ($dup in $duplicates) {
    Write-Host "  $($dup.Key):" -ForegroundColor Red
    foreach ($path in $dup.Value) {
        Write-Host "    - $path"
    }
}

# Select canonical path for each project
function Get-CanonicalPath {
    param([string[]]$Paths)

    # Prefer module-local __Tests over global __Tests
    $moduleTests = $Paths | Where-Object { $_ -match "src\\[^_][^\\]+\\__Tests\\" }
    if ($moduleTests.Count -gt 0) { return $moduleTests[0] }

    # Prefer __Libraries/__Tests
    $libTests = $Paths | Where-Object { $_ -match "__Libraries\\__Tests\\" }
    if ($libTests.Count -gt 0) { return $libTests[0] }

    # Prefer __Tests over non-__Tests location in same parent
    $testsPath = $Paths | Where-Object { $_ -match "\\__Tests\\" }
    if ($testsPath.Count -gt 0) { return $testsPath[0] }

    # Otherwise, take first
    return $Paths[0]
}

# Build final project list
$finalProjects = @()
foreach ($entry in $projectMap.GetEnumerator()) {
    $canonical = Get-CanonicalPath -Paths $entry.Value
    $finalProjects += $canonical
}

Write-Host ""
Write-Host "Final project count: $($finalProjects.Count)" -ForegroundColor Green

if ($DryRun) {
    Write-Host ""
    Write-Host "=== DRY RUN - No changes made ===" -ForegroundColor Magenta
    Write-Host "Would add the following projects to solution:"
    $finalProjects | ForEach-Object { Write-Host "  $_" }
    exit 0
}

# Backup existing solution
$backupPath = "$SolutionPath.bak"
if (Test-Path $SolutionPath) {
    Copy-Item $SolutionPath $backupPath -Force
    Write-Host "Backed up existing solution to $backupPath" -ForegroundColor Gray
}

# Create new solution
Write-Host ""
Write-Host "Creating new solution..." -ForegroundColor Yellow
$slnDir = Split-Path $SolutionPath -Parent
$slnName = [System.IO.Path]::GetFileNameWithoutExtension($SolutionPath)

# Remove old solution
if (Test-Path $SolutionPath) {
    Remove-Item $SolutionPath -Force
}

# Create fresh solution
Push-Location $slnDir
dotnet new sln -n $slnName --force 2>$null
Pop-Location

# Add projects in batches (dotnet sln add can handle multiple)
Write-Host "Adding projects to solution..." -ForegroundColor Yellow
$added = 0
$failed = 0

foreach ($proj in $finalProjects) {
    try {
        $result = dotnet sln $SolutionPath add $proj 2>&1
        if ($LASTEXITCODE -eq 0) {
            $added++
            if ($added % 50 -eq 0) {
                Write-Host "  Added $added projects..." -ForegroundColor Gray
            }
        } else {
            Write-Host "  Failed to add: $proj" -ForegroundColor Red
            $failed++
        }
    } catch {
        Write-Host "  Error adding: $proj - $_" -ForegroundColor Red
        $failed++
    }
}

Write-Host ""
Write-Host "=== Summary ===" -ForegroundColor Cyan
Write-Host "Projects added: $added" -ForegroundColor Green
Write-Host "Projects failed: $failed" -ForegroundColor $(if ($failed -gt 0) { "Red" } else { "Green" })
Write-Host ""
Write-Host "Solution regenerated at: $SolutionPath"

# Verify
Write-Host ""
Write-Host "Verifying solution..." -ForegroundColor Yellow
$verifyResult = dotnet build $SolutionPath --no-restore -t:ValidateSolutionConfiguration 2>&1
if ($LASTEXITCODE -eq 0) {
    Write-Host "Solution validation passed!" -ForegroundColor Green
} else {
    Write-Host "Solution validation had issues - check manually" -ForegroundColor Yellow
}
70
devops/scripts/remove-stale-refs.ps1
Normal file
@@ -0,0 +1,70 @@
#!/usr/bin/env pwsh
# remove-stale-refs.ps1 - Remove stale project references that don't exist

param([string]$SlnPath = "src/StellaOps.sln")

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

# Stale project paths (relative from solution location)
$staleProjects = @(
    "__Tests\AirGap\StellaOps.AirGap.Controller.Tests",
    "__Tests\AirGap\StellaOps.AirGap.Importer.Tests",
    "__Tests\AirGap\StellaOps.AirGap.Time.Tests",
    "__Tests\StellaOps.Gateway.WebService.Tests",
    "__Tests\Graph\StellaOps.Graph.Indexer.Tests",
    "Scanner\StellaOps.Scanner.Analyzers.Native",
    "__Libraries\__Tests\StellaOps.Signals.Tests",
    "__Tests\StellaOps.Audit.ReplayToken.Tests",
    "__Tests\StellaOps.Router.Gateway.Tests",
    "__Libraries\StellaOps.Cryptography"
)

$staleGuids = @()
$newLines = @()
$skipNext = $false

for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]

    if ($skipNext) {
        $skipNext = $false
        continue
    }

    $isStale = $false
    foreach ($stalePath in $staleProjects) {
        if ($line -like "*$stalePath*") {
            # Extract GUID
            if ($line -match '\{([A-F0-9-]+)\}"?$') {
                $staleGuids += $Matches[1]
            }
            Write-Host "Removing stale: $stalePath"
            $isStale = $true
            # Also drop the following EndProject line
            $skipNext = $true
            break
        }
    }

    if (-not $isStale) {
        $newLines += $line
    }
}

# Remove GlobalSection references to stale GUIDs
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $staleGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host "Removed $($staleGuids.Count) stale project references"
61
devops/scripts/restore-deleted-tests.ps1
Normal file
@@ -0,0 +1,61 @@
# Restore deleted test files from commit parent
# Maps old locations to new locations

$ErrorActionPreference = "Stop"
$parentCommit = "74c7aa250c401ee9ac332686832b256159efa604^"

# Mapping: old path -> new path
$mappings = @{
    "src/__Tests/AirGap/StellaOps.AirGap.Importer.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Importer.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Controller.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Controller.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Time.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Time.Tests"
    "src/__Tests/StellaOps.Gateway.WebService.Tests" = "src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests"
    "src/__Tests/Replay/StellaOps.Replay.Core.Tests" = "src/Replay/__Tests/StellaOps.Replay.Core.Tests"
    "src/__Tests/Provenance/StellaOps.Provenance.Attestation.Tests" = "src/Provenance/__Tests/StellaOps.Provenance.Attestation.Tests"
    "src/__Tests/Policy/StellaOps.Policy.Scoring.Tests" = "src/Policy/__Tests/StellaOps.Policy.Scoring.Tests"
}

Set-Location "E:\dev\git.stella-ops.org"

foreach ($mapping in $mappings.GetEnumerator()) {
    $oldPath = $mapping.Key
    $newPath = $mapping.Value

    Write-Host "`nProcessing: $oldPath -> $newPath" -ForegroundColor Cyan

    # Get list of files from old location in git
    $files = git ls-tree -r --name-only "$parentCommit" -- $oldPath 2>$null

    if (-not $files) {
        Write-Host "  No files found at old path" -ForegroundColor Yellow
        continue
    }

    foreach ($file in $files) {
        # Calculate relative path and new file path
        $relativePath = $file.Substring($oldPath.Length + 1)
        $newFilePath = Join-Path $newPath $relativePath

        # Create directory if needed
        $newDir = Split-Path $newFilePath -Parent
        if (-not (Test-Path $newDir)) {
            New-Item -ItemType Directory -Path $newDir -Force | Out-Null
        }

        # Check if file exists
        if (Test-Path $newFilePath) {
            Write-Host "  Exists: $relativePath" -ForegroundColor DarkGray
            continue
        }

        # Restore file
        git show "${parentCommit}:${file}" > $newFilePath 2>$null
        if ($LASTEXITCODE -eq 0) {
            Write-Host "  Restored: $relativePath" -ForegroundColor Green
        } else {
            Write-Host "  Failed: $relativePath" -ForegroundColor Red
        }
    }
}

Write-Host "`nDone!" -ForegroundColor Cyan
176
devops/scripts/validate-before-commit.ps1
Normal file
@@ -0,0 +1,176 @@
<#
.SYNOPSIS
    Pre-Commit Validation Script for Windows

.DESCRIPTION
    Run this script before committing to ensure all CI checks will pass.
    Wraps the Bash validation script via WSL2 or Git Bash.

.PARAMETER Level
    Validation level:
    - quick : Smoke test only (~2 min)
    - pr    : Full PR-gating suite (~15 min) [default]
    - full  : All tests including extended (~45 min)

.EXAMPLE
    .\validate-before-commit.ps1
    Run PR-gating validation

.EXAMPLE
    .\validate-before-commit.ps1 quick
    Run quick smoke test only

.EXAMPLE
    .\validate-before-commit.ps1 full
    Run full test suite
#>

[CmdletBinding()]
param(
    [Parameter(Position = 0)]
    [ValidateSet('quick', 'pr', 'full')]
    [string]$Level = 'pr',

    [switch]$Help
)

# Script location
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)

if ($Help) {
    Get-Help $MyInvocation.MyCommand.Path -Detailed
    exit 0
}

# Colors
function Write-ColoredOutput {
    param(
        [string]$Message,
        [ConsoleColor]$Color = [ConsoleColor]::White
    )
    $originalColor = $Host.UI.RawUI.ForegroundColor
    $Host.UI.RawUI.ForegroundColor = $Color
    Write-Host $Message
    $Host.UI.RawUI.ForegroundColor = $originalColor
}

function Write-Header {
    param([string]$Message)
    Write-Host ""
    Write-ColoredOutput "=============================================" -Color Cyan
    Write-ColoredOutput "  $Message" -Color Cyan
    Write-ColoredOutput "=============================================" -Color Cyan
    Write-Host ""
}

function Write-Step { Write-ColoredOutput ">>> $args" -Color Blue }
function Write-Pass { Write-ColoredOutput "[PASS] $args" -Color Green }
function Write-Fail { Write-ColoredOutput "[FAIL] $args" -Color Red }
function Write-Warn { Write-ColoredOutput "[WARN] $args" -Color Yellow }
function Write-Info { Write-ColoredOutput "[INFO] $args" -Color Cyan }

# Find Bash
function Find-BashExecutable {
    # Check WSL
    $wsl = Get-Command wsl -ErrorAction SilentlyContinue
    if ($wsl) {
        $wslCheck = & wsl --status 2>&1
        if ($LASTEXITCODE -eq 0) {
            return @{ Type = 'wsl'; Path = 'wsl' }
        }
    }

    # Check Git Bash
    $gitBashPaths = @(
        "C:\Program Files\Git\bin\bash.exe",
        "C:\Program Files (x86)\Git\bin\bash.exe",
        "$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
    )

    foreach ($path in $gitBashPaths) {
        if (Test-Path $path) {
            return @{ Type = 'gitbash'; Path = $path }
        }
    }

    return $null
}

function Convert-ToUnixPath {
    param([string]$WindowsPath)
    if ($WindowsPath -match '^([A-Za-z]):(.*)$') {
        $drive = $Matches[1].ToLower()
        $rest = $Matches[2] -replace '\\', '/'
        return "/mnt/$drive$rest"
    }
    return $WindowsPath -replace '\\', '/'
}

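# For example, "E:\dev\repo" becomes "/mnt/e/dev/repo" (the WSL mount convention).
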
# Main
Write-Header "Pre-Commit Validation (Windows)"
Write-Info "Level: $Level"
Write-Info "Repository: $RepoRoot"

$bash = Find-BashExecutable
if (-not $bash) {
    Write-Fail "Bash not found. Install WSL2 or Git for Windows."
    exit 1
}

Write-Info "Using: $($bash.Type)"

$scriptPath = Join-Path $ScriptDir "validate-before-commit.sh"
if (-not (Test-Path $scriptPath)) {
    Write-Fail "Script not found: $scriptPath"
    exit 1
}

$startTime = Get-Date

try {
    switch ($bash.Type) {
        'wsl' {
            $unixScript = Convert-ToUnixPath $scriptPath
            & wsl bash $unixScript $Level
        }
        'gitbash' {
            $unixScript = $scriptPath -replace '\\', '/'
            & $bash.Path $unixScript $Level
        }
    }
    $exitCode = $LASTEXITCODE
}
catch {
    Write-Fail "Execution failed: $_"
    $exitCode = 1
}

$duration = (Get-Date) - $startTime
$minutes = [math]::Floor($duration.TotalMinutes)
$seconds = $duration.Seconds

Write-Header "Summary"
Write-Info "Duration: ${minutes}m ${seconds}s"

if ($exitCode -eq 0) {
    Write-Host ""
    Write-ColoredOutput "=============================================" -Color Green
    Write-ColoredOutput "  ALL CHECKS PASSED - Ready to commit!" -Color Green
    Write-ColoredOutput "=============================================" -Color Green
    Write-Host ""
    Write-Host "Next steps:"
    Write-Host "  git add -A"
    Write-Host '  git commit -m "Your commit message"'
    Write-Host ""
} else {
    Write-Host ""
    Write-ColoredOutput "=============================================" -Color Red
    Write-ColoredOutput "  VALIDATION FAILED - Do not commit!" -Color Red
    Write-ColoredOutput "=============================================" -Color Red
    Write-Host ""
    Write-Host "Check the logs in: out/local-ci/logs/"
    Write-Host ""
}

exit $exitCode
318
devops/scripts/validate-before-commit.sh
Normal file
@@ -0,0 +1,318 @@
#!/usr/bin/env bash
# =============================================================================
# PRE-COMMIT VALIDATION SCRIPT
# =============================================================================
# Run this script before committing to ensure all CI checks will pass.
#
# Usage:
#   ./devops/scripts/validate-before-commit.sh [level]
#
# Levels:
#   quick - Smoke test only (~2 min)
#   pr    - Full PR-gating suite (~15 min) [default]
#   full  - All tests including extended (~45 min)
#
# Examples:
#   ./devops/scripts/validate-before-commit.sh          # PR-gating
#   ./devops/scripts/validate-before-commit.sh quick    # Smoke only
#   ./devops/scripts/validate-before-commit.sh full     # Everything
#
# =============================================================================

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'

# Validation level
LEVEL="${1:-pr}"

# =============================================================================
# UTILITIES
# =============================================================================

print_header() {
    echo ""
    echo -e "${CYAN}=============================================${NC}"
    echo -e "${CYAN}  $1${NC}"
    echo -e "${CYAN}=============================================${NC}"
    echo ""
}

print_step() {
    echo -e "${BLUE}>>> $1${NC}"
}

print_success() {
    echo -e "${GREEN}[PASS] $1${NC}"
}

print_fail() {
    echo -e "${RED}[FAIL] $1${NC}"
}

print_warn() {
    echo -e "${YELLOW}[WARN] $1${NC}"
}

print_info() {
    echo -e "${CYAN}[INFO] $1${NC}"
}

# =============================================================================
# CHECKS
# =============================================================================

check_git_status() {
    print_step "Checking git status..."

    # Check for uncommitted changes
    if ! git diff --quiet 2>/dev/null; then
        print_warn "You have unstaged changes"
    fi

    # Check for untracked files
    local untracked
    untracked=$(git ls-files --others --exclude-standard 2>/dev/null | wc -l)
    if [[ "$untracked" -gt 0 ]]; then
        print_warn "You have $untracked untracked file(s)"
    fi

    # Show current branch
    local branch
    branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
    print_info "Current branch: $branch"
}

check_dependencies() {
    print_step "Checking dependencies..."

    local missing=0

    # Check .NET
    if ! command -v dotnet &>/dev/null; then
        print_fail ".NET SDK not found"
        missing=1
    else
        local version
        version=$(dotnet --version)
        print_success ".NET SDK: $version"
    fi

    # Check Docker
    if ! command -v docker &>/dev/null; then
        print_warn "Docker not found (some tests may fail)"
    else
        if docker info &>/dev/null; then
            print_success "Docker: running"
        else
            print_warn "Docker: not running"
        fi
    fi

    # Check Git
    if ! command -v git &>/dev/null; then
        print_fail "Git not found"
        missing=1
    else
        print_success "Git: installed"
    fi

    return $missing
}

run_smoke_tests() {
    print_step "Running smoke tests..."

    if "$SCRIPT_DIR/local-ci.sh" smoke; then
        print_success "Smoke tests passed"
        return 0
    else
        print_fail "Smoke tests failed"
        return 1
    fi
}

run_pr_tests() {
    print_step "Running PR-gating suite..."

    if "$SCRIPT_DIR/local-ci.sh" pr; then
        print_success "PR-gating suite passed"
        return 0
    else
        print_fail "PR-gating suite failed"
        return 1
    fi
}

run_full_tests() {
    print_step "Running full test suite..."

    if "$SCRIPT_DIR/local-ci.sh" full; then
        print_success "Full test suite passed"
        return 0
    else
        print_fail "Full test suite failed"
        return 1
    fi
}

run_module_tests() {
    print_step "Running module tests..."

    if "$SCRIPT_DIR/local-ci.sh" module; then
        print_success "Module tests passed"
        return 0
    else
        print_fail "Module tests failed"
        return 1
    fi
}

validate_helm() {
    if command -v helm &>/dev/null; then
        print_step "Validating Helm chart..."
        local chart="$REPO_ROOT/devops/helm/stellaops"
        if [[ -d "$chart" ]]; then
            if helm lint "$chart" &>/dev/null; then
                print_success "Helm chart valid"
            else
                print_warn "Helm chart has warnings"
            fi
        fi
    fi
}

validate_compose() {
    print_step "Validating Docker Compose..."
    local compose="$REPO_ROOT/devops/compose/docker-compose.ci.yaml"
    if [[ -f "$compose" ]]; then
        if docker compose -f "$compose" config &>/dev/null; then
            print_success "Docker Compose valid"
        else
            print_warn "Docker Compose has issues"
        fi
    fi
}

# =============================================================================
# MAIN
# =============================================================================

main() {
    print_header "Pre-Commit Validation"
    print_info "Level: $LEVEL"
    print_info "Repository: $REPO_ROOT"

    local start_time
    start_time=$(date +%s)
    local failed=0

    # Always run these checks
    check_git_status
    check_dependencies || failed=1

    if [[ $failed -eq 1 ]]; then
        print_fail "Dependency check failed"
        exit 1
    fi

    # Run appropriate test level
    case "$LEVEL" in
        quick|smoke)
            run_smoke_tests || failed=1
            ;;
        pr|default)
            run_smoke_tests || failed=1
            if [[ $failed -eq 0 ]]; then
                run_module_tests || failed=1
            fi
            if [[ $failed -eq 0 ]]; then
                run_pr_tests || failed=1
            fi
            validate_helm
            validate_compose
            ;;
        full|all)
            run_smoke_tests || failed=1
            if [[ $failed -eq 0 ]]; then
                run_full_tests || failed=1
            fi
            validate_helm
            validate_compose
            ;;
        *)
            print_fail "Unknown level: $LEVEL"
            echo "Valid levels: quick, pr, full"
            exit 1
            ;;
    esac

    # Calculate duration
    local end_time
    end_time=$(date +%s)
    local duration=$((end_time - start_time))
    local minutes=$((duration / 60))
    local seconds=$((duration % 60))

    # Final summary
    print_header "Summary"
    print_info "Duration: ${minutes}m ${seconds}s"

    if [[ $failed -eq 0 ]]; then
        echo ""
        echo -e "${GREEN}=============================================${NC}"
        echo -e "${GREEN}  ALL CHECKS PASSED - Ready to commit!${NC}"
        echo -e "${GREEN}=============================================${NC}"
        echo ""
        echo "Next steps:"
        echo "  git add -A"
        echo "  git commit -m \"Your commit message\""
        echo ""
        exit 0
    else
        echo ""
        echo -e "${RED}=============================================${NC}"
        echo -e "${RED}  VALIDATION FAILED - Do not commit!${NC}"
        echo -e "${RED}=============================================${NC}"
        echo ""
        echo "Check the logs in: out/local-ci/logs/"
        echo ""
        exit 1
    fi
}

# Show usage if --help
if [[ "${1:-}" == "--help" ]] || [[ "${1:-}" == "-h" ]]; then
    cat <<EOF
Pre-Commit Validation Script

Usage: $(basename "$0") [level]

Levels:
  quick   Smoke test only (~2 min)
  pr      Full PR-gating suite (~15 min) [default]
  full    All tests including extended (~45 min)

Examples:
  $(basename "$0")          # Run PR-gating validation
  $(basename "$0") quick    # Quick smoke test only
  $(basename "$0") full     # Run everything

What each level validates:
  quick: Build + Unit tests
  pr:    Build + Unit + Architecture + Contract + Integration + Security + Golden
  full:  All PR-gating + Performance + Benchmark + AirGap + Chaos + Determinism
EOF
    exit 0
fi

main "$@"