save progress

This commit is contained in:
StellaOps Bot
2025-12-26 22:03:32 +02:00
parent 9a4cd2e0f7
commit e6c47c8f50
3634 changed files with 253222 additions and 56632 deletions

View File

@@ -28,6 +28,7 @@ services:
PGDATA: /var/lib/postgresql/data/pgdata
volumes:
- postgres-data:/var/lib/postgresql/data
- ./postgres-init:/docker-entrypoint-initdb.d:ro
ports:
- "${POSTGRES_PORT:-5432}:5432"
networks:

View File

@@ -1,5 +1,7 @@
-- PostgreSQL initialization for StellaOps air-gap deployment
-- ============================================================================
-- PostgreSQL initialization for StellaOps
-- This script runs automatically on first container start
-- ============================================================================
-- Enable pg_stat_statements extension for query performance analysis
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
@@ -9,25 +11,59 @@ CREATE EXTENSION IF NOT EXISTS pg_trgm; -- Fuzzy text search
CREATE EXTENSION IF NOT EXISTS btree_gin; -- GIN indexes for scalar types
CREATE EXTENSION IF NOT EXISTS pgcrypto; -- Cryptographic functions
-- ============================================================================
-- Create schemas for all modules
-- Migrations will create tables within these schemas
CREATE SCHEMA IF NOT EXISTS authority;
CREATE SCHEMA IF NOT EXISTS vuln;
CREATE SCHEMA IF NOT EXISTS vex;
CREATE SCHEMA IF NOT EXISTS scheduler;
CREATE SCHEMA IF NOT EXISTS notify;
CREATE SCHEMA IF NOT EXISTS policy;
CREATE SCHEMA IF NOT EXISTS concelier;
CREATE SCHEMA IF NOT EXISTS audit;
CREATE SCHEMA IF NOT EXISTS unknowns;
-- ============================================================================
-- Grant usage to application user (assumes POSTGRES_USER is the app user)
GRANT USAGE ON SCHEMA authority TO PUBLIC;
GRANT USAGE ON SCHEMA vuln TO PUBLIC;
GRANT USAGE ON SCHEMA vex TO PUBLIC;
GRANT USAGE ON SCHEMA scheduler TO PUBLIC;
GRANT USAGE ON SCHEMA notify TO PUBLIC;
GRANT USAGE ON SCHEMA policy TO PUBLIC;
GRANT USAGE ON SCHEMA concelier TO PUBLIC;
GRANT USAGE ON SCHEMA audit TO PUBLIC;
GRANT USAGE ON SCHEMA unknowns TO PUBLIC;
-- Core Platform
CREATE SCHEMA IF NOT EXISTS authority; -- Authentication, authorization, OAuth/OIDC
-- Data Ingestion
CREATE SCHEMA IF NOT EXISTS vuln; -- Concelier vulnerability data
CREATE SCHEMA IF NOT EXISTS vex; -- Excititor VEX documents
-- Scanning & Analysis
CREATE SCHEMA IF NOT EXISTS scanner; -- Container scanning, SBOM generation
-- Scheduling & Orchestration
CREATE SCHEMA IF NOT EXISTS scheduler; -- Job scheduling
CREATE SCHEMA IF NOT EXISTS taskrunner; -- Task execution
-- Policy & Risk
CREATE SCHEMA IF NOT EXISTS policy; -- Policy engine
CREATE SCHEMA IF NOT EXISTS unknowns; -- Unknown component tracking
-- Artifacts & Evidence
CREATE SCHEMA IF NOT EXISTS proofchain; -- Attestor proof chains
CREATE SCHEMA IF NOT EXISTS attestor; -- Attestor submission queue
CREATE SCHEMA IF NOT EXISTS signer; -- Key management
-- Notifications
CREATE SCHEMA IF NOT EXISTS notify; -- Notification delivery
-- Signals & Observability
CREATE SCHEMA IF NOT EXISTS signals; -- Runtime signals
-- Registry
CREATE SCHEMA IF NOT EXISTS packs; -- Task packs registry
-- Audit
CREATE SCHEMA IF NOT EXISTS audit; -- System-wide audit log
-- ============================================================================
-- Grant usage to application user (for single-user mode)
-- Per-module users are created in 02-create-users.sql
-- ============================================================================
DO $$
DECLARE
schema_name TEXT;
BEGIN
FOR schema_name IN SELECT unnest(ARRAY[
'authority', 'vuln', 'vex', 'scanner', 'scheduler', 'taskrunner',
'policy', 'unknowns', 'proofchain', 'attestor', 'signer',
'notify', 'signals', 'packs', 'audit'
]) LOOP
EXECUTE format('GRANT USAGE ON SCHEMA %I TO PUBLIC', schema_name);
END LOOP;
END $$;

View File

@@ -0,0 +1,53 @@
-- ============================================================================
-- Per-Module Database Users
-- ============================================================================
-- Creates isolated database users for each StellaOps module.
-- This enables least-privilege access control and a per-module audit trail.
--
-- Password format: {module}_dev (for development only)
-- In production, use secrets management and rotate credentials.
-- ============================================================================
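-- Example production rotation (a sketch: run it through your secrets
-- manager, not this init script; the password literal is a placeholder):
--   ALTER USER authority_user WITH PASSWORD 'rotated-secret';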
-- Core Platform
CREATE USER authority_user WITH PASSWORD 'authority_dev';
-- Data Ingestion
CREATE USER concelier_user WITH PASSWORD 'concelier_dev';
CREATE USER excititor_user WITH PASSWORD 'excititor_dev';
-- Scanning & Analysis
CREATE USER scanner_user WITH PASSWORD 'scanner_dev';
-- Scheduling & Orchestration
CREATE USER scheduler_user WITH PASSWORD 'scheduler_dev';
CREATE USER taskrunner_user WITH PASSWORD 'taskrunner_dev';
-- Policy & Risk
CREATE USER policy_user WITH PASSWORD 'policy_dev';
CREATE USER unknowns_user WITH PASSWORD 'unknowns_dev';
-- Artifacts & Evidence
CREATE USER attestor_user WITH PASSWORD 'attestor_dev';
CREATE USER signer_user WITH PASSWORD 'signer_dev';
-- Notifications
CREATE USER notify_user WITH PASSWORD 'notify_dev';
-- Signals & Observability
CREATE USER signals_user WITH PASSWORD 'signals_dev';
-- Registry
CREATE USER packs_user WITH PASSWORD 'packs_dev';
-- ============================================================================
-- Log created users
-- ============================================================================
DO $$
BEGIN
RAISE NOTICE 'Created per-module database users:';
RAISE NOTICE ' - authority_user, concelier_user, excititor_user';
RAISE NOTICE ' - scanner_user, scheduler_user, taskrunner_user';
RAISE NOTICE ' - policy_user, unknowns_user';
RAISE NOTICE ' - attestor_user, signer_user';
RAISE NOTICE ' - notify_user, signals_user, packs_user';
END $$;

View File

@@ -0,0 +1,153 @@
-- ============================================================================
-- Per-Module Schema Permissions
-- ============================================================================
-- Grants each module user access to their respective schema(s).
-- Users can only access tables in their designated schemas.
-- ============================================================================
-- ============================================================================
-- Authority Module
-- ============================================================================
GRANT USAGE ON SCHEMA authority TO authority_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA authority TO authority_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA authority TO authority_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA authority GRANT ALL ON TABLES TO authority_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA authority GRANT ALL ON SEQUENCES TO authority_user;
-- ============================================================================
-- Concelier Module (uses 'vuln' schema)
-- ============================================================================
GRANT USAGE ON SCHEMA vuln TO concelier_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA vuln TO concelier_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA vuln TO concelier_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vuln GRANT ALL ON TABLES TO concelier_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vuln GRANT ALL ON SEQUENCES TO concelier_user;
-- ============================================================================
-- Excititor Module (uses 'vex' schema)
-- ============================================================================
GRANT USAGE ON SCHEMA vex TO excititor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA vex TO excititor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA vex TO excititor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vex GRANT ALL ON TABLES TO excititor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA vex GRANT ALL ON SEQUENCES TO excititor_user;
-- ============================================================================
-- Scanner Module
-- ============================================================================
GRANT USAGE ON SCHEMA scanner TO scanner_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA scanner TO scanner_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA scanner TO scanner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scanner GRANT ALL ON TABLES TO scanner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scanner GRANT ALL ON SEQUENCES TO scanner_user;
-- ============================================================================
-- Scheduler Module
-- ============================================================================
GRANT USAGE ON SCHEMA scheduler TO scheduler_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA scheduler TO scheduler_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA scheduler TO scheduler_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scheduler GRANT ALL ON TABLES TO scheduler_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA scheduler GRANT ALL ON SEQUENCES TO scheduler_user;
-- ============================================================================
-- TaskRunner Module
-- ============================================================================
GRANT USAGE ON SCHEMA taskrunner TO taskrunner_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA taskrunner TO taskrunner_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA taskrunner TO taskrunner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA taskrunner GRANT ALL ON TABLES TO taskrunner_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA taskrunner GRANT ALL ON SEQUENCES TO taskrunner_user;
-- ============================================================================
-- Policy Module
-- ============================================================================
GRANT USAGE ON SCHEMA policy TO policy_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA policy TO policy_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA policy TO policy_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA policy GRANT ALL ON TABLES TO policy_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA policy GRANT ALL ON SEQUENCES TO policy_user;
-- ============================================================================
-- Unknowns Module
-- ============================================================================
GRANT USAGE ON SCHEMA unknowns TO unknowns_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA unknowns TO unknowns_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA unknowns TO unknowns_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA unknowns GRANT ALL ON TABLES TO unknowns_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA unknowns GRANT ALL ON SEQUENCES TO unknowns_user;
-- ============================================================================
-- Attestor Module (uses 'proofchain' and 'attestor' schemas)
-- ============================================================================
GRANT USAGE ON SCHEMA proofchain TO attestor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA proofchain TO attestor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA proofchain TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA proofchain GRANT ALL ON TABLES TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA proofchain GRANT ALL ON SEQUENCES TO attestor_user;
GRANT USAGE ON SCHEMA attestor TO attestor_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA attestor TO attestor_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA attestor TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA attestor GRANT ALL ON TABLES TO attestor_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA attestor GRANT ALL ON SEQUENCES TO attestor_user;
-- ============================================================================
-- Signer Module
-- ============================================================================
GRANT USAGE ON SCHEMA signer TO signer_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA signer TO signer_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA signer TO signer_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signer GRANT ALL ON TABLES TO signer_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signer GRANT ALL ON SEQUENCES TO signer_user;
-- ============================================================================
-- Notify Module
-- ============================================================================
GRANT USAGE ON SCHEMA notify TO notify_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA notify TO notify_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA notify TO notify_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA notify GRANT ALL ON TABLES TO notify_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA notify GRANT ALL ON SEQUENCES TO notify_user;
-- ============================================================================
-- Signals Module
-- ============================================================================
GRANT USAGE ON SCHEMA signals TO signals_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA signals TO signals_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA signals TO signals_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signals GRANT ALL ON TABLES TO signals_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA signals GRANT ALL ON SEQUENCES TO signals_user;
-- ============================================================================
-- Packs Registry Module
-- ============================================================================
GRANT USAGE ON SCHEMA packs TO packs_user;
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA packs TO packs_user;
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA packs TO packs_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA packs GRANT ALL ON TABLES TO packs_user;
ALTER DEFAULT PRIVILEGES IN SCHEMA packs GRANT ALL ON SEQUENCES TO packs_user;
-- ============================================================================
-- Verification
-- ============================================================================
DO $$
DECLARE
v_user TEXT;
v_schema TEXT;
BEGIN
RAISE NOTICE 'Per-module permissions granted:';
RAISE NOTICE ' authority_user -> authority';
RAISE NOTICE ' concelier_user -> vuln';
RAISE NOTICE ' excititor_user -> vex';
RAISE NOTICE ' scanner_user -> scanner';
RAISE NOTICE ' scheduler_user -> scheduler';
RAISE NOTICE ' taskrunner_user -> taskrunner';
RAISE NOTICE ' policy_user -> policy';
RAISE NOTICE ' unknowns_user -> unknowns';
RAISE NOTICE ' attestor_user -> proofchain, attestor';
RAISE NOTICE ' signer_user -> signer';
RAISE NOTICE ' notify_user -> notify';
RAISE NOTICE ' signals_user -> signals';
RAISE NOTICE ' packs_user -> packs';
END $$;

View File

@@ -0,0 +1,318 @@
# Reproducible Build Environment Requirements
**Sprint:** SPRINT_1227_0002_0001_LB_reproducible_builders
**Task:** T12 — Document build environment requirements
---
## Overview
This document describes the environment requirements for running reproducible distro package builds. The build system supports Alpine, Debian, and RHEL package ecosystems.
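A minimal end-to-end invocation, assuming the builder images are built from the Dockerfiles in this repo (tags follow the `repro-builder-<distro>` convention used in their usage headers):

```bash
# Build the Alpine builder image for a given release
docker build -t repro-builder-alpine:3.20 --build-arg RELEASE=3.20 .

# Build a package reproducibly; artifacts land in ./output
docker run -v ./output:/output repro-builder-alpine:3.20 build openssl 3.0.7-r0
```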
---
## Hardware Requirements
### Minimum Requirements
| Resource | Minimum | Recommended |
|----------|---------|-------------|
| CPU | 4 cores | 8+ cores |
| RAM | 8 GB | 16+ GB |
| Disk | 50 GB SSD | 200+ GB NVMe |
| Network | 10 Mbps | 100+ Mbps |
### Storage Breakdown
| Directory | Purpose | Estimated Size |
|-----------|---------|----------------|
| `/var/lib/docker` | Docker images and containers | 30 GB |
| `/var/cache/stellaops/builds` | Build cache | 50 GB |
| `/var/cache/stellaops/sources` | Source package cache | 20 GB |
| `/var/cache/stellaops/artifacts` | Output artifacts | 50 GB |
---
## Software Requirements
### Host System
| Component | Version | Purpose |
|-----------|---------|---------|
| Docker | 24.0+ | Container runtime |
| Docker Compose | 2.20+ | Multi-container orchestration |
| .NET SDK | 10.0 | Worker service runtime |
| objdump | binutils 2.40+ | Binary analysis |
| readelf | binutils 2.40+ | ELF parsing |
### Container Images
The build system uses the following base images:
| Builder | Base Image | Tag |
|---------|------------|-----|
| Alpine | `alpine` | `3.19`, `3.18` |
| Debian | `debian` | `bookworm`, `bullseye` |
| RHEL | `almalinux` | `9`, `8` |
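For stronger guarantees, pin base images by digest rather than by mutable tag. A sketch (the resolved digest is whatever your registry currently serves; record it alongside the build config):

```bash
# Resolve and record the digest behind a tag
docker pull alpine:3.19
docker inspect --format '{{index .RepoDigests 0}}' alpine:3.19
# -> alpine@sha256:...  (pin this reference in the builder's FROM/BASE_IMAGE)
```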
---
## Environment Variables
### Required Variables
```bash
# Build configuration
export STELLAOPS_BUILD_CACHE=/var/cache/stellaops/builds
export STELLAOPS_SOURCE_CACHE=/var/cache/stellaops/sources
export STELLAOPS_ARTIFACT_DIR=/var/cache/stellaops/artifacts
# Reproducibility settings
export TZ=UTC
export LC_ALL=C.UTF-8
# Pin to a source-derived timestamp (e.g. the last commit of the source
# tree), not the current time, or successive builds will differ
export SOURCE_DATE_EPOCH=$(git log -1 --format=%ct)
# Docker settings
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
```
### Optional Variables
```bash
# Parallel build settings
export STELLAOPS_MAX_CONCURRENT_BUILDS=2
export STELLAOPS_BUILD_TIMEOUT=1800 # 30 minutes
# Proxy settings (if behind corporate firewall)
export HTTP_PROXY=http://proxy:8080
export HTTPS_PROXY=http://proxy:8080
export NO_PROXY=localhost,127.0.0.1
```
---
## Builder-Specific Requirements
### Alpine Builder
```sh
# Required packages in builder image
apk add --no-cache \
alpine-sdk \
abuild \
sudo \
binutils \
elfutils \
build-base
```
**Normalization requirements** (see the sketch below):
- `SOURCE_DATE_EPOCH` must be set
- Use `abuild -r` with reproducible flags
- Archive ordering: `--sort=name`
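A sketch of an abuild invocation under those constraints; deriving the timestamp from the APKBUILD's mtime mirrors what this repo's Alpine build script does:

```bash
# Pin the timestamp and locale, then build in release mode
export SOURCE_DATE_EPOCH=$(stat -c %Y APKBUILD)
export TZ=UTC LC_ALL=C.UTF-8
abuild -r
```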
### Debian Builder
```sh
# Required packages in builder image
apt-get install -y \
build-essential \
devscripts \
dpkg-dev \
fakeroot \
binutils \
elfutils \
debhelper
```
**Normalization requirements** (see the sketch below):
- Use `dpkg-buildpackage -b` with reproducible flags
- Set `DEB_BUILD_OPTIONS=reproducible=+all` (the form this repo's build script uses)
- Apply `dh_strip_nondeterminism` post-build
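A sketch mirroring the flags this repo's Debian build script exports (with recent debhelper, `dh_strip_nondeterminism` runs as part of the default sequence):

```bash
# Reproducible binary-only build, unsigned
export DEB_BUILD_OPTIONS="reproducible=+all"
export SOURCE_DATE_EPOCH=$(dpkg-parsechangelog -S Timestamp)
dpkg-buildpackage -b -us -uc
```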
### RHEL Builder
```sh
# Required packages in builder image (AlmaLinux 9)
dnf install -y \
mock \
rpm-build \
rpmdevtools \
binutils \
elfutils
```
**Normalization requirements** (see the sketch below):
- Keep mock networking disabled (`rpmbuild_networking = False`, the shipped config's default)
- Configure mock for deterministic builds
- Set `%_buildhost stellaops.build`
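A sketch using the `stellaops-repro` mock config shipped in this repo (networking disabled, `%_buildhost` pinned):

```bash
# Networkless, deterministic rebuild of an SRPM
mock -r stellaops-repro --rebuild openssl-3.0.7-1.el9.src.rpm \
    --resultdir=/build/output \
    --define "SOURCE_DATE_EPOCH ${SOURCE_DATE_EPOCH}"
```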
---
## Compiler Flags for Reproducibility
### C/C++ Flags
```bash
CFLAGS="-fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build -gno-record-gcc-switches"
CXXFLAGS="${CFLAGS}"
LDFLAGS="-Wl,--build-id=sha1"
```
### Additional Flags
```bash
# Disable date/time macros
-Wdate-time -Werror=date-time
# Normalize paths
-fmacro-prefix-map=$(pwd)=/build
-ffile-prefix-map=$(pwd)=/build
```
---
## Archive Determinism
### ar (Static Libraries)
```bash
# Use deterministic mode (D modifier, equivalent to --enable-deterministic-archives)
ar crsD libfoo.a *.o
# Or route it through make's archiver flags
export ARFLAGS=crsD
```
### tar (Package Archives)
```bash
# Deterministic tar creation
tar --sort=name \
--mtime="@${SOURCE_DATE_EPOCH}" \
--owner=0 \
--group=0 \
--numeric-owner \
-cf archive.tar directory/
```
### zip/gzip
```bash
# Use gzip -n to avoid timestamp
gzip -n file
# Use mtime for consistent timestamps
touch -d "@${SOURCE_DATE_EPOCH}" file
```
---
## Network Requirements
### Outbound Access Required
| Destination | Port | Purpose |
|-------------|------|---------|
| `dl-cdn.alpinelinux.org` | 443 | Alpine packages |
| `deb.debian.org` | 443 | Debian packages |
| `vault.centos.org` | 443 | CentOS/RHEL sources |
| `mirror.almalinux.org` | 443 | AlmaLinux packages |
| `git.*.org` | 443 | Upstream source repos |
### Air-Gapped Operation
For air-gapped environments (see the sketch after this list):
1. Pre-download source packages
2. Configure local mirrors
3. Set `STELLAOPS_OFFLINE_MODE=true`
4. Use cached build artifacts
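A sketch of the offline switches (`mirror.internal` is a placeholder for your local mirror):

```bash
# Point builders at pre-populated caches
export STELLAOPS_OFFLINE_MODE=true
export STELLAOPS_SOURCE_CACHE=/var/cache/stellaops/sources
# Inside the builder image, redirect package fetches to the local mirror
echo "https://mirror.internal/alpine/v3.20/main" > /etc/apk/repositories
```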
---
## Security Considerations
### Container Isolation
- Builders run in unprivileged containers
- No host network access
- Read-only source mounts
- Ephemeral containers (destroyed after build); see the `docker run` sketch below
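Those properties map onto `docker run` flags roughly as follows (a sketch; the image tag comes from the builder Dockerfile's usage header):

```bash
# No host network, read-only sources, ephemeral container
docker run --rm --network none \
    -v "$PWD/sources:/build/src:ro" \
    -v "$PWD/output:/output" \
    repro-builder-alpine:3.20 build openssl 3.0.7-r0
```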
### Signing Keys
- Build outputs are unsigned by default
- DSSE signing requires configured key material
- Keys stored in `/etc/stellaops/keys/` or HSM
### Build Verification
```bash
# Verify reproducibility
sha256sum build1/output/* > checksums1.txt
sha256sum build2/output/* > checksums2.txt
diff checksums1.txt checksums2.txt
```
---
## Troubleshooting
### Common Issues
| Issue | Cause | Resolution |
|-------|-------|------------|
| Build timestamp differs | `SOURCE_DATE_EPOCH` not set | Export variable before build |
| Path in debug info | Missing `-fdebug-prefix-map` | Add to CFLAGS |
| ar archive differs | Deterministic mode disabled | Use `--enable-deterministic-archives` |
| tar ordering differs | Random file order | Use `--sort=name` |
### Debugging Reproducibility
```bash
# Compare two builds byte-by-byte
diffoscope build1/output/libfoo.so build2/output/libfoo.so
# Check for timestamp differences
objdump -t binary | grep -i time
# Verify no random UUIDs
strings binary | grep -E '[0-9a-f]{8}-[0-9a-f]{4}'
```
---
## Monitoring and Metrics
### Key Metrics
| Metric | Description | Target |
|--------|-------------|--------|
| `build_reproducibility_rate` | % of reproducible builds | > 95% |
| `build_duration_seconds` | Time to complete build | < 1800 |
| `fingerprint_extraction_rate` | Functions per second | > 1000 |
| `build_cache_hit_rate` | Cache effectiveness | > 80% |
### Health Checks
```bash
# Verify builder containers are ready
docker ps --filter "name=repro-builder"
# Check cache disk usage
df -h /var/cache/stellaops/
# Verify build queue
curl -s http://localhost:9090/metrics | grep stellaops_build
```
---
## References
- [Reproducible Builds](https://reproducible-builds.org/)
- [Debian Reproducible Builds](https://wiki.debian.org/ReproducibleBuilds)
- [Alpine Reproducibility](https://wiki.alpinelinux.org/wiki/Reproducible_Builds)
- [RPM Reproducibility](https://rpm-software-management.github.io/rpm/manual/reproducibility.html)

View File

@@ -0,0 +1,62 @@
# Alpine Reproducible Builder
# Creates deterministic builds of Alpine packages for fingerprint diffing
#
# Usage:
# docker build -t repro-builder-alpine:3.20 --build-arg RELEASE=3.20 .
# docker run -v ./output:/output repro-builder-alpine:3.20 build openssl 3.0.7-r0
ARG RELEASE=3.20
FROM alpine:${RELEASE}
ARG RELEASE
ENV ALPINE_RELEASE=${RELEASE}
# Install build tools and dependencies
RUN apk add --no-cache \
alpine-sdk \
abuild \
sudo \
git \
curl \
binutils \
elfutils \
coreutils \
tar \
gzip \
xz \
patch \
diffutils \
file \
jq \
&& rm -rf /var/cache/apk/*
# Create build user (abuild requires non-root)
RUN adduser -D -G abuild builder \
&& echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers \
&& mkdir -p /var/cache/distfiles \
&& chown -R builder:abuild /var/cache/distfiles
# Setup abuild
USER builder
WORKDIR /home/builder
# Generate abuild keys
RUN abuild-keygen -a -i -n
# Copy normalization and build scripts
COPY --chown=builder:abuild scripts/normalize.sh /usr/local/bin/normalize.sh
COPY --chown=builder:abuild scripts/build.sh /usr/local/bin/build.sh
COPY --chown=builder:abuild scripts/extract-functions.sh /usr/local/bin/extract-functions.sh
RUN chmod +x /usr/local/bin/*.sh
# Environment for reproducibility
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
# Build output directory
VOLUME /output
WORKDIR /build
ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]

View File

@@ -0,0 +1,226 @@
#!/bin/sh
# Alpine Reproducible Build Script
# Builds packages with deterministic settings for fingerprint generation
#
# Usage: build.sh [build|diff] <package> <version> [patch_url...]
#
# Examples:
# build.sh build openssl 3.0.7-r0
# build.sh diff openssl 3.0.7-r0 3.0.8-r0
# build.sh build openssl 3.0.7-r0 https://patch.url/CVE-2023-1234.patch
set -eu
COMMAND="${1:-help}"
PACKAGE="${2:-}"
VERSION="${3:-}"
OUTPUT_DIR="${OUTPUT_DIR:-/output}"
log() {
echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*" >&2
}
show_help() {
cat <<EOF
Alpine Reproducible Builder
Usage:
build.sh build <package> <version> [patch_urls...]
Build a package with reproducible settings
build.sh diff <package> <vuln_version> <patched_version>
Build two versions and compute fingerprint diff
build.sh --help
Show this help message
Environment:
SOURCE_DATE_EPOCH Override timestamp (extracted from APKBUILD if not set)
OUTPUT_DIR Output directory (default: /output)
CFLAGS Additional compiler flags
LDFLAGS Additional linker flags
Examples:
build.sh build openssl 3.0.7-r0
build.sh build curl 8.1.0-r0 https://patch/CVE-2023-1234.patch
build.sh diff openssl 3.0.7-r0 3.0.8-r0
EOF
}
setup_reproducible_env() {
local pkg="$1"
local ver="$2"
# Extract SOURCE_DATE_EPOCH from APKBUILD if not set
if [ -z "${SOURCE_DATE_EPOCH:-}" ]; then
if [ -f "aports/main/$pkg/APKBUILD" ]; then
# Use pkgrel date or fallback to current
SOURCE_DATE_EPOCH=$(stat -c %Y "aports/main/$pkg/APKBUILD" 2>/dev/null || date +%s)
else
SOURCE_DATE_EPOCH=$(date +%s)
fi
export SOURCE_DATE_EPOCH
fi
log "SOURCE_DATE_EPOCH=$SOURCE_DATE_EPOCH"
# Reproducible compiler flags
export CFLAGS="${CFLAGS:-} -fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build"
export CXXFLAGS="${CXXFLAGS:-} ${CFLAGS}"
export LDFLAGS="${LDFLAGS:-}"
# Locale for deterministic sorting
export LC_ALL=C.UTF-8
export TZ=UTC
}
fetch_source() {
local pkg="$1"
local ver="$2"
log "Fetching source for $pkg-$ver"
# Clone aports if needed
if [ ! -d "aports" ]; then
git clone --depth 1 https://gitlab.alpinelinux.org/alpine/aports.git
fi
# Find package
local pkg_dir=""
for repo in main community testing; do
if [ -d "aports/$repo/$pkg" ]; then
pkg_dir="aports/$repo/$pkg"
break
fi
done
if [ -z "$pkg_dir" ]; then
log "ERROR: Package $pkg not found in aports"
return 1
fi
# Checkout specific version if needed
cd "$pkg_dir"
abuild fetch
abuild unpack
}
apply_patches() {
local src_dir="$1"
shift
for patch_url in "$@"; do
log "Applying patch: $patch_url"
curl -sSL "$patch_url" | patch -d "$src_dir" -p1
done
}
build_package() {
local pkg="$1"
local ver="$2"
shift 2
log "Building $pkg-$ver"
setup_reproducible_env "$pkg" "$ver"
cd /build
fetch_source "$pkg" "$ver"
if [ "$#" -gt 0 ]; then
# Leave the glob unquoted so it expands to the unpacked source directory
apply_patches src/"$pkg"-* "$@"
fi
# Build with reproducible settings
abuild -r
# Copy output
local out_dir="$OUTPUT_DIR/$pkg-$ver"
mkdir -p "$out_dir"
cp -r ~/packages/*/*.apk "$out_dir/" 2>/dev/null || true
# Extract binaries and fingerprints
for apk in "$out_dir"/*.apk; do
[ -f "$apk" ] || continue
local apk_name=$(basename "$apk" .apk)
mkdir -p "$out_dir/extracted/$apk_name"
tar -xzf "$apk" -C "$out_dir/extracted/$apk_name"
# Extract function fingerprints
/usr/local/bin/extract-functions.sh "$out_dir/extracted/$apk_name" > "$out_dir/$apk_name.functions.json"
done
log "Build complete: $out_dir"
}
diff_versions() {
local pkg="$1"
local vuln_ver="$2"
local patched_ver="$3"
log "Building and diffing $pkg: $vuln_ver vs $patched_ver"
# Build vulnerable version
build_package "$pkg" "$vuln_ver"
# Build patched version
build_package "$pkg" "$patched_ver"
# Compute diff
local diff_out="$OUTPUT_DIR/$pkg-diff-$vuln_ver-vs-$patched_ver.json"
# Simple diff of function fingerprints
jq -s '
.[0] as $vuln |
.[1] as $patched |
{
package: "'"$pkg"'",
vulnerable_version: "'"$vuln_ver"'",
patched_version: "'"$patched_ver"'",
vulnerable_functions: ($vuln | length),
patched_functions: ($patched | length),
added: [($patched[] | select(.name as $n | ($vuln | map(.name) | index($n)) == null))],
removed: [($vuln[] | select(.name as $n | ($patched | map(.name) | index($n)) == null))],
modified: [
$vuln[] | .name as $n | .hash as $h |
($patched[] | select(.name == $n and .hash != $h)) |
{name: $n, vuln_hash: $h, patched_hash: .hash}
]
}
' \
"$OUTPUT_DIR/$pkg-$vuln_ver"/*.functions.json \
"$OUTPUT_DIR/$pkg-$patched_ver"/*.functions.json \
> "$diff_out"
log "Diff complete: $diff_out"
}
case "$COMMAND" in
build)
if [ -z "$PACKAGE" ] || [ -z "$VERSION" ]; then
log "ERROR: Package and version required"
show_help
exit 1
fi
shift 3 # Remove command, package, version
build_package "$PACKAGE" "$VERSION" "$@"
;;
diff)
PATCHED_VERSION="${4:-}"
if [ -z "$PACKAGE" ] || [ -z "$VERSION" ] || [ -z "$PATCHED_VERSION" ]; then
log "ERROR: Package, vulnerable version, and patched version required"
show_help
exit 1
fi
diff_versions "$PACKAGE" "$VERSION" "$PATCHED_VERSION"
;;
--help|help)
show_help
;;
*)
log "ERROR: Unknown command: $COMMAND"
show_help
exit 1
;;
esac

View File

@@ -0,0 +1,71 @@
#!/bin/sh
# Extract function fingerprints from ELF binaries
# Outputs JSON array with function name, offset, size, and hashes
#
# Usage: extract-functions.sh <directory>
#
# Dependencies: objdump, readelf, sha256sum, jq
set -eu
DIR="${1:-.}"
extract_functions_from_binary() {
local binary="$1"
# Skip non-ELF files
file "$binary" | grep -q "ELF" || return 0
# Get function symbols
objdump -t "$binary" 2>/dev/null | \
awk '/\.text.*[0-9a-f]+.*F/ {
# Fields: addr flags section size name
gsub(/\*.*\*/, "", $1) # Clean address
if ($5 != "" && $4 != "00000000" && $4 != "0000000000000000") {
printf "%s %s %s\n", $1, $4, $NF
}
}' | while read -r offset size name; do
# Skip compiler-generated symbols
case "$name" in
__*|_GLOBAL_*|.plt*|.text*|frame_dummy|register_tm_clones|deregister_tm_clones)
continue
;;
esac
# Convert hex size to decimal (0x works in POSIX sh; 16# is bash-only)
dec_size=$((0x$size))
# Skip tiny functions (likely padding)
[ "$dec_size" -lt 16 ] && continue
# Extract function bytes and compute hash:
# objdump disassembles the range, then we hash the raw opcode bytes
local hash=$(objdump -d --start-address="0x$offset" --stop-address="$((0x$offset + dec_size))" "$binary" 2>/dev/null | \
grep "^[[:space:]]*[0-9a-f]*:" | \
awk '{for(i=2;i<=NF;i++){if($i~/^[0-9a-f]{2}$/){printf "%s", $i}}}' | \
sha256sum | cut -d' ' -f1)
# Output JSON object
printf '{"name":"%s","offset":"0x%s","size":%d,"hash":"%s"}\n' \
"$name" "$offset" "$dec_size" "${hash:-unknown}"
done
}
# Find all ELF binaries and emit one JSON object per function; jq
# assembles the final array (a shell "first item" flag would be lost in
# the pipeline subshells and yield malformed JSON)
find "$DIR" -type f -executable 2>/dev/null | while read -r binary; do
file "$binary" 2>/dev/null | grep -q "ELF" || continue
extract_functions_from_binary "$binary"
done | jq -s '.'

View File

@@ -0,0 +1,65 @@
#!/bin/sh
# Normalization scripts for reproducible builds
# Strips non-deterministic content from build artifacts
#
# Usage: normalize.sh <directory>
set -eu
DIR="${1:-.}"
log() {
echo "[normalize] $*" >&2
}
# Strip timestamps from __DATE__ and __TIME__ macros
strip_date_time() {
log "Stripping date/time macros..."
# Already handled by SOURCE_DATE_EPOCH in modern GCC
}
# Normalize build paths
normalize_paths() {
log "Normalizing build paths..."
# Handled by -fdebug-prefix-map
}
# Normalize ar archives for deterministic member headers and index
normalize_archives() {
log "Normalizing ar archives..."
find "$DIR" -name "*.a" -type f | while read -r archive; do
if ar --version 2>&1 | grep -q "GNU ar"; then
# Rewrite members in place with zeroed timestamps/uids (binutils -D
# mode); repacking into a new archive would nest the old one instead
objcopy --enable-deterministic-archives "$archive" 2>/dev/null || true
fi
done
}
# Strip debug sections that contain non-deterministic info
strip_debug_timestamps() {
log "Stripping debug timestamps..."
find "$DIR" -type f \( -name "*.o" -o -name "*.so" -o -name "*.so.*" -o -executable \) | while read -r obj; do
# Check if ELF
file "$obj" 2>/dev/null | grep -q "ELF" || continue
# Strip build-id if not needed (we regenerate it)
# objcopy --remove-section=.note.gnu.build-id "$obj" 2>/dev/null || true
# Remove timestamps from DWARF debug info
# This is typically handled by SOURCE_DATE_EPOCH
done
}
# Normalize tar archives
normalize_tars() {
log "Normalizing tar archives..."
# When creating tars, use:
# tar --sort=name --mtime="@${SOURCE_DATE_EPOCH}" --owner=0 --group=0 --numeric-owner
}
# Run all normalizations
normalize_paths
normalize_archives
strip_debug_timestamps
log "Normalization complete"

View File

@@ -0,0 +1,59 @@
# Debian Reproducible Builder
# Creates deterministic builds of Debian packages for fingerprint diffing
#
# Usage:
# docker build -t repro-builder-debian:bookworm --build-arg RELEASE=bookworm .
# docker run -v ./output:/output repro-builder-debian:bookworm build openssl 3.0.7-1
ARG RELEASE=bookworm
FROM debian:${RELEASE}
ARG RELEASE
ENV DEBIAN_RELEASE=${RELEASE}
ENV DEBIAN_FRONTEND=noninteractive
# Install build tools
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
devscripts \
dpkg-dev \
equivs \
fakeroot \
sudo \
git \
curl \
ca-certificates \
binutils \
elfutils \
coreutils \
patch \
diffutils \
file \
jq \
&& rm -rf /var/lib/apt/lists/*
# Create build user
RUN useradd -m -s /bin/bash builder \
&& echo "builder ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
USER builder
WORKDIR /home/builder
# Copy scripts
COPY --chown=builder:builder scripts/build.sh /usr/local/bin/build.sh
COPY --chown=builder:builder scripts/extract-functions.sh /usr/local/bin/extract-functions.sh
COPY --chown=builder:builder scripts/normalize.sh /usr/local/bin/normalize.sh
USER root
RUN chmod +x /usr/local/bin/*.sh
USER builder
# Environment for reproducibility
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
VOLUME /output
WORKDIR /build
ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]

View File

@@ -0,0 +1,233 @@
#!/bin/bash
# Debian Reproducible Build Script
# Builds packages with deterministic settings for fingerprint generation
#
# Usage: build.sh [build|diff] <package> <version> [patch_url...]
set -euo pipefail
COMMAND="${1:-help}"
PACKAGE="${2:-}"
VERSION="${3:-}"
OUTPUT_DIR="${OUTPUT_DIR:-/output}"
log() {
echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*" >&2
}
show_help() {
cat <<EOF
Debian Reproducible Builder
Usage:
build.sh build <package> <version> [patch_urls...]
Build a package with reproducible settings
build.sh diff <package> <vuln_version> <patched_version>
Build two versions and compute fingerprint diff
build.sh --help
Show this help message
Environment:
SOURCE_DATE_EPOCH Override timestamp (extracted from changelog if not set)
OUTPUT_DIR Output directory (default: /output)
DEB_BUILD_OPTIONS Additional build options
Examples:
build.sh build openssl 3.0.7-1
build.sh diff curl 8.1.0-1 8.1.0-2
EOF
}
setup_reproducible_env() {
local pkg="$1"
# Reproducible build flags
export DEB_BUILD_OPTIONS="${DEB_BUILD_OPTIONS:-} reproducible=+all"
# Compiler flags for reproducibility
export CFLAGS="${CFLAGS:-} -fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/build"
export CXXFLAGS="${CXXFLAGS:-} ${CFLAGS}"
export LC_ALL=C.UTF-8
export TZ=UTC
# SOURCE_DATE_EPOCH is resolved after the source (and its changelog)
# has been fetched; see build_package
}
fetch_source() {
local pkg="$1"
local ver="$2"
log "Fetching source for $pkg=$ver"
mkdir -p /build/src
cd /build/src
# Enable source repositories
sudo sed -i 's/^# deb-src/deb-src/' /etc/apt/sources.list.d/*.sources 2>/dev/null || \
sudo sed -i 's/^# deb-src/deb-src/' /etc/apt/sources.list 2>/dev/null || true
sudo apt-get update >&2
# Fetch source; command output goes to stderr because stdout carries
# the directory name back to the caller
if [ -n "$ver" ]; then
apt-get source "${pkg}=${ver}" >&2 || apt-get source "$pkg" >&2
else
apt-get source "$pkg" >&2
fi
# Find extracted directory
local src_dir=$(ls -d "${pkg}"*/ 2>/dev/null | head -1)
if [ -z "$src_dir" ]; then
log "ERROR: Could not find source directory for $pkg"
return 1
fi
# This function runs in a command substitution (subshell), so
# SOURCE_DATE_EPOCH is derived from the changelog in build_package
echo "$src_dir"
}
install_build_deps() {
local src_dir="$1"
log "Installing build dependencies"
cd "$src_dir"
sudo apt-get build-dep -y . || true
}
apply_patches() {
local src_dir="$1"
shift
cd "$src_dir"
for patch_url in "$@"; do
log "Applying patch: $patch_url"
curl -sSL "$patch_url" | patch -p1
done
}
build_package() {
local pkg="$1"
local ver="$2"
shift 2
local patches="${@:-}"
log "Building $pkg version $ver"
setup_reproducible_env "$pkg"
cd /build
local src_dir=$(fetch_source "$pkg" "$ver")
install_build_deps "$src_dir"
if [ -n "$patches" ]; then
apply_patches "$src_dir" $patches
fi
cd "$src_dir"
# Build with reproducible settings
dpkg-buildpackage -b -us -uc
# Copy output
local out_dir="$OUTPUT_DIR/$pkg-$ver"
mkdir -p "$out_dir"
cp -r /build/src/*.deb "$out_dir/" 2>/dev/null || true
# Extract and fingerprint
for deb in "$out_dir"/*.deb; do
[ -f "$deb" ] || continue
local deb_name=$(basename "$deb" .deb)
mkdir -p "$out_dir/extracted/$deb_name"
dpkg-deb -x "$deb" "$out_dir/extracted/$deb_name"
# Extract function fingerprints
/usr/local/bin/extract-functions.sh "$out_dir/extracted/$deb_name" > "$out_dir/$deb_name.functions.json"
done
log "Build complete: $out_dir"
}
diff_versions() {
local pkg="$1"
local vuln_ver="$2"
local patched_ver="$3"
log "Building and diffing $pkg: $vuln_ver vs $patched_ver"
# Build vulnerable version
build_package "$pkg" "$vuln_ver"
# Clean build environment
rm -rf /build/src/*
# Build patched version
build_package "$pkg" "$patched_ver"
# Compute diff
local diff_out="$OUTPUT_DIR/$pkg-diff-$vuln_ver-vs-$patched_ver.json"
jq -s '
.[0] as $vuln |
.[1] as $patched |
{
package: "'"$pkg"'",
vulnerable_version: "'"$vuln_ver"'",
patched_version: "'"$patched_ver"'",
vulnerable_functions: ($vuln | length),
patched_functions: ($patched | length),
added: [($patched[] | select(.name as $n | ($vuln | map(.name) | index($n)) == null))],
removed: [($vuln[] | select(.name as $n | ($patched | map(.name) | index($n)) == null))],
modified: [
$vuln[] | .name as $n | .hash as $h |
($patched[] | select(.name == $n and .hash != $h)) |
{name: $n, vuln_hash: $h, patched_hash: .hash}
]
}
' \
"$OUTPUT_DIR/$pkg-$vuln_ver"/*.functions.json \
"$OUTPUT_DIR/$pkg-$patched_ver"/*.functions.json \
> "$diff_out" 2>/dev/null || log "Warning: Could not compute diff"
log "Diff complete: $diff_out"
}
case "$COMMAND" in
build)
if [ -z "$PACKAGE" ]; then
log "ERROR: Package required"
show_help
exit 1
fi
shift 2 # Remove command, package
[ -n "${VERSION:-}" ] && shift # Remove version if present
build_package "$PACKAGE" "${VERSION:-}" "$@"
;;
diff)
PATCHED_VERSION="${4:-}"
if [ -z "$PACKAGE" ] || [ -z "$VERSION" ] || [ -z "$PATCHED_VERSION" ]; then
log "ERROR: Package, vulnerable version, and patched version required"
show_help
exit 1
fi
diff_versions "$PACKAGE" "$VERSION" "$PATCHED_VERSION"
;;
--help|help)
show_help
;;
*)
log "ERROR: Unknown command: $COMMAND"
show_help
exit 1
;;
esac

View File

@@ -0,0 +1,67 @@
#!/bin/bash
# Extract function fingerprints from ELF binaries
# Outputs JSON array with function name, offset, size, and hashes
set -euo pipefail
DIR="${1:-.}"
extract_functions_from_binary() {
local binary="$1"
# Skip non-ELF files
file "$binary" 2>/dev/null | grep -q "ELF" || return 0
# Get function symbols with objdump (avoid strtonum: it is gawk-only,
# and Debian's default awk is mawk); size filtering happens in the shell
objdump -t "$binary" 2>/dev/null | \
awk '/\.text.*[0-9a-f]+.*F/ {
gsub(/\*.*\*/, "", $1)
if ($5 != "" && length($4) > 0) {
print $1, $4, $NF
}
}' | while read -r offset size name; do
# Skip compiler-generated symbols
case "$name" in
__*|_GLOBAL_*|.plt*|.text*|frame_dummy|register_tm_clones|deregister_tm_clones|_start|_init|_fini)
continue
;;
esac
# Convert hex size; skip tiny functions (likely padding)
dec_size=$((16#$size))
[ "$dec_size" -lt 16 ] && continue
# Compute hash of function bytes
local hash=$(objdump -d --start-address="0x$offset" --stop-address="$((16#$offset + dec_size))" "$binary" 2>/dev/null | \
grep -E "^[[:space:]]*[0-9a-f]+:" | \
awk '{for(i=2;i<=NF;i++){if($i~/^[0-9a-f]{2}$/){printf "%s", $i}}}' | \
sha256sum | cut -d' ' -f1)
[ -n "$hash" ] || hash="unknown"
printf '{"name":"%s","offset":"0x%s","size":%d,"hash":"%s"}\n' \
"$name" "$offset" "$dec_size" "$hash"
done
}
# Emit one JSON object per function and let jq assemble the array
# (a "first item" flag does not survive the pipeline subshells and
# produces malformed JSON in the naive comma-tracking approach)
find "$DIR" -type f \( -executable -o -name "*.so" -o -name "*.so.*" \) 2>/dev/null | while read -r binary; do
file "$binary" 2>/dev/null | grep -q "ELF" || continue
extract_functions_from_binary "$binary"
done | jq -s '.'

View File

@@ -0,0 +1,29 @@
#!/bin/bash
# Normalization scripts for Debian reproducible builds
set -euo pipefail
DIR="${1:-.}"
log() {
echo "[normalize] $*" >&2
}
normalize_archives() {
log "Normalizing ar archives..."
find "$DIR" -name "*.a" -type f | while read -r archive; do
if ar --version 2>&1 | grep -q "GNU ar"; then
# Rewrite members in place with zeroed timestamps/uids; repacking
# into a new archive would nest the old one instead
objcopy --enable-deterministic-archives "$archive" 2>/dev/null || true
fi
done
}
strip_debug_timestamps() {
log "Stripping debug timestamps..."
# Handled by SOURCE_DATE_EPOCH and DEB_BUILD_OPTIONS
}
normalize_archives
strip_debug_timestamps
log "Normalization complete"

View File

@@ -0,0 +1,85 @@
# RHEL-compatible Reproducible Build Container
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
# Task: T3 - RHEL builder with mock-based package building
#
# Uses AlmaLinux 9 as RHEL-compatible base for open source builds.
# Production RHEL builds require valid subscription.
ARG BASE_IMAGE=almalinux:9
FROM ${BASE_IMAGE} AS builder
LABEL org.opencontainers.image.title="StellaOps RHEL Reproducible Builder"
LABEL org.opencontainers.image.description="RHEL-compatible reproducible build environment for security patching"
LABEL org.opencontainers.image.vendor="StellaOps"
LABEL org.opencontainers.image.source="https://github.com/stellaops/stellaops"
# Install build dependencies (mock and diffoscope ship in EPEL on EL9)
RUN dnf -y install epel-release && \
dnf -y update && \
dnf -y install \
# Core build tools
rpm-build \
rpmdevtools \
rpmlint \
mock \
# Compiler toolchain
gcc \
gcc-c++ \
make \
cmake \
autoconf \
automake \
libtool \
# Package management
dnf-plugins-core \
yum-utils \
createrepo_c \
# Binary analysis
binutils \
elfutils \
gdb \
# Reproducibility
diffoscope \
# Source control
git \
patch \
# Utilities
wget \
curl \
jq \
python3 \
python3-pip && \
dnf clean all
# Create mock user (mock requires non-root)
RUN useradd -m mockbuild && \
usermod -a -G mock mockbuild
# Set up rpmbuild directories
RUN mkdir -p /build/{BUILD,RPMS,SOURCES,SPECS,SRPMS} && \
chown -R mockbuild:mockbuild /build
# Copy build scripts
COPY scripts/build.sh /usr/local/bin/build.sh
COPY scripts/extract-functions.sh /usr/local/bin/extract-functions.sh
COPY scripts/normalize.sh /usr/local/bin/normalize.sh
COPY scripts/mock-build.sh /usr/local/bin/mock-build.sh
RUN chmod +x /usr/local/bin/*.sh
# Set reproducibility environment
ENV TZ=UTC
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
# Deterministic compiler flags
ENV CFLAGS="-fno-record-gcc-switches -fdebug-prefix-map=/build=/buildroot -O2 -g"
ENV CXXFLAGS="${CFLAGS}"
# Mock configuration for reproducible builds
COPY mock/stellaops-repro.cfg /etc/mock/stellaops-repro.cfg
WORKDIR /build
USER mockbuild
ENTRYPOINT ["/usr/local/bin/build.sh"]
CMD ["--help"]

View File

@@ -0,0 +1,71 @@
# StellaOps Reproducible Build Mock Configuration
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Mock configuration optimized for reproducible RHEL/AlmaLinux builds
config_opts['root'] = 'stellaops-repro'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el9'
config_opts['releasever'] = '9'
# Reproducibility settings
config_opts['use_host_resolv'] = False
config_opts['rpmbuild_networking'] = False
config_opts['cleanup_on_success'] = True
config_opts['cleanup_on_failure'] = True
# Deterministic build settings (mock macro keys carry the leading %)
config_opts['macros']['%SOURCE_DATE_EPOCH'] = '%{getenv:SOURCE_DATE_EPOCH}'
config_opts['macros']['%_buildhost'] = 'stellaops.build'
config_opts['macros']['%debug_package'] = '%{nil}'
config_opts['macros']['%_default_patch_fuzz'] = '0'
# Compiler flags for reproducibility
config_opts['macros']['%optflags'] = '-O2 -g -fno-record-gcc-switches -fdebug-prefix-map=%{_builddir}=/buildroot'
# Environment normalization
config_opts['environment']['TZ'] = 'UTC'
config_opts['environment']['LC_ALL'] = 'C.UTF-8'
config_opts['environment']['LANG'] = 'C.UTF-8'
# Use AlmaLinux as RHEL-compatible base
config_opts['dnf.conf'] = """
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
metadata_expire=0
mdpolicy=group:primary
best=1
install_weak_deps=0
protected_packages=
module_platform_id=platform:el9
user_agent={{ user_agent }}
[baseos]
name=AlmaLinux $releasever - BaseOS
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/baseos
enabled=1
gpgcheck=0
[appstream]
name=AlmaLinux $releasever - AppStream
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/appstream
enabled=1
gpgcheck=0
[crb]
name=AlmaLinux $releasever - CRB
mirrorlist=https://mirrors.almalinux.org/mirrorlist/$releasever/crb
enabled=1
gpgcheck=0
"""

View File

@@ -0,0 +1,213 @@
#!/bin/bash
# RHEL Reproducible Build Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Usage: build.sh --srpm <url_or_path> [--patch <patch_file>] [--output <dir>]
set -euo pipefail
# Default values
OUTPUT_DIR="/build/output"
WORK_DIR="/build/work"
SRPM=""
PATCH_FILE=""
SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH:-}"
usage() {
cat <<EOF
RHEL Reproducible Build Script
Usage: $0 [OPTIONS]
Options:
--srpm <path> Path or URL to SRPM file (required)
--patch <path> Path to security patch file (optional)
--output <dir> Output directory (default: /build/output)
--epoch <timestamp> SOURCE_DATE_EPOCH value (default: from changelog)
--help Show this help message
Examples:
$0 --srpm openssl-3.0.7-1.el9.src.rpm --patch CVE-2023-0286.patch
$0 --srpm https://mirror/srpms/curl-8.0.1-1.el9.src.rpm
EOF
exit 0
}
log() {
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*"
}
error() {
log "ERROR: $*" >&2
exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--srpm)
SRPM="$2"
shift 2
;;
--patch)
PATCH_FILE="$2"
shift 2
;;
--output)
OUTPUT_DIR="$2"
shift 2
;;
--epoch)
SOURCE_DATE_EPOCH="$2"
shift 2
;;
--help)
usage
;;
*)
error "Unknown option: $1"
;;
esac
done
[[ -z "${SRPM}" ]] && error "SRPM path required. Use --srpm <path>"
# Create directories
mkdir -p "${OUTPUT_DIR}" "${WORK_DIR}"
cd "${WORK_DIR}"
log "Starting RHEL reproducible build"
log "SRPM: ${SRPM}"
# Download or copy SRPM
if [[ "${SRPM}" =~ ^https?:// ]]; then
log "Downloading SRPM..."
curl -fsSL -o source.src.rpm "${SRPM}"
SRPM="source.src.rpm"
elif [[ ! -f "${SRPM}" ]]; then
error "SRPM file not found: ${SRPM}"
fi
# Unpack the SRPM
log "Unpacking SRPM..."
rpm2cpio "${SRPM}" | cpio -idmv
# Extract SOURCE_DATE_EPOCH from changelog if not provided
if [[ -z "${SOURCE_DATE_EPOCH}" ]]; then
SPEC_FILE=$(find . -name "*.spec" | head -1)
if [[ -n "${SPEC_FILE}" ]]; then
# Extract date from first changelog entry ("* Mon Jan 01 2024 ...")
CHANGELOG_DATE=$(grep -m1 '^\*' "${SPEC_FILE}" | sed 's/^\* //' | cut -d' ' -f1-4)
if [[ -n "${CHANGELOG_DATE}" ]]; then
SOURCE_DATE_EPOCH=$(date -d "${CHANGELOG_DATE}" +%s 2>/dev/null || echo "")
fi
fi
if [[ -z "${SOURCE_DATE_EPOCH}" ]]; then
SOURCE_DATE_EPOCH=$(date +%s)
log "Warning: Using current time for SOURCE_DATE_EPOCH"
fi
fi
export SOURCE_DATE_EPOCH
log "SOURCE_DATE_EPOCH: ${SOURCE_DATE_EPOCH}"
# Apply security patch if provided
if [[ -n "${PATCH_FILE}" ]]; then
if [[ ! -f "${PATCH_FILE}" ]]; then
error "Patch file not found: ${PATCH_FILE}"
fi
log "Applying security patch: ${PATCH_FILE}"
# Copy patch to SOURCES
PATCH_NAME=$(basename "${PATCH_FILE}")
cp "${PATCH_FILE}" SOURCES/
# Add patch to spec file
SPEC_FILE=$(find . -name "*.spec" | head -1)
if [[ -n "${SPEC_FILE}" ]]; then
# Find last Patch line or Source line
LAST_PATCH=$(grep -n '^Patch[0-9]*:' "${SPEC_FILE}" | tail -1 | cut -d: -f1)
if [[ -z "${LAST_PATCH}" ]]; then
LAST_PATCH=$(grep -n '^Source[0-9]*:' "${SPEC_FILE}" | tail -1 | cut -d: -f1)
fi
# Calculate next patch number
PATCH_NUM=$(grep -c '^Patch[0-9]*:' "${SPEC_FILE}" || echo 0)
PATCH_NUM=$((PATCH_NUM + 100)) # Use 100+ for security patches
# Insert patch declaration
sed -i "${LAST_PATCH}a Patch${PATCH_NUM}: ${PATCH_NAME}" "${SPEC_FILE}"
# Add %patch to %prep if not using autosetup
if ! grep -q '%autosetup' "${SPEC_FILE}"; then
PREP_LINE=$(grep -n '^%prep' "${SPEC_FILE}" | head -1 | cut -d: -f1)
if [[ -n "${PREP_LINE}" ]]; then
# Find last %patch line in %prep
LAST_PATCH_LINE=$(sed -n "${PREP_LINE},\$p" "${SPEC_FILE}" | grep -n '^%patch' | tail -1 | cut -d: -f1)
if [[ -n "${LAST_PATCH_LINE}" ]]; then
INSERT_LINE=$((PREP_LINE + LAST_PATCH_LINE))
else
INSERT_LINE=$((PREP_LINE + 1))
fi
sed -i "${INSERT_LINE}a %patch${PATCH_NUM} -p1" "${SPEC_FILE}"
fi
fi
fi
fi
# Set up rpmbuild tree
log "Setting up rpmbuild tree..."
rpmdev-setuptree || true
# Copy sources and spec
cp -r SOURCES/* ~/rpmbuild/SOURCES/ 2>/dev/null || true
cp *.spec ~/rpmbuild/SPECS/ 2>/dev/null || true
# Build using mock for isolation and reproducibility
log "Building with mock (stellaops-repro config)..."
SPEC_FILE=$(find ~/rpmbuild/SPECS -name "*.spec" | head -1)
if [[ -n "${SPEC_FILE}" ]]; then
# Build SRPM first
rpmbuild -bs "${SPEC_FILE}"
BUILT_SRPM=$(find ~/rpmbuild/SRPMS -name "*.src.rpm" | head -1)
if [[ -n "${BUILT_SRPM}" ]]; then
# Build with mock
mock -r stellaops-repro --rebuild "${BUILT_SRPM}" --resultdir="${OUTPUT_DIR}/rpms"
else
error "SRPM build failed"
fi
else
error "No spec file found"
fi
# Extract function fingerprints from built RPMs
log "Extracting function fingerprints..."
for rpm in "${OUTPUT_DIR}/rpms"/*.rpm; do
if [[ -f "${rpm}" ]] && [[ ! "${rpm}" =~ \.src\.rpm$ ]]; then
/usr/local/bin/extract-functions.sh "${rpm}" "${OUTPUT_DIR}/fingerprints"
fi
done
# Generate build manifest
log "Generating build manifest..."
cat > "${OUTPUT_DIR}/manifest.json" <<EOF
{
"builder": "rhel",
"base_image": "${BASE_IMAGE:-almalinux:9}",
"source_date_epoch": ${SOURCE_DATE_EPOCH},
"build_timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"srpm": "${SRPM}",
"patch_applied": $(if [[ -n "${PATCH_FILE}" ]]; then echo "\"${PATCH_FILE}\""; else echo "null"; fi),
"rpm_outputs": $(find "${OUTPUT_DIR}/rpms" -name "*.rpm" ! -name "*.src.rpm" -printf '"%f",' 2>/dev/null | sed 's/,$//' | sed 's/^/[/' | sed 's/$/]/'),
"fingerprint_files": $(find "${OUTPUT_DIR}/fingerprints" -name "*.json" -printf '"%f",' 2>/dev/null | sed 's/,$//' | sed 's/^/[/' | sed 's/$/]/')
}
EOF
log "Build complete. Output in: ${OUTPUT_DIR}"
log "Manifest: ${OUTPUT_DIR}/manifest.json"

View File

@@ -0,0 +1,73 @@
#!/bin/bash
# RHEL Function Extraction Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Extracts function-level fingerprints from RPM packages
set -euo pipefail
RPM_PATH="${1:-}"
OUTPUT_DIR="${2:-/build/fingerprints}"
[[ -z "${RPM_PATH}" ]] && { echo "Usage: $0 <rpm_path> [output_dir]"; exit 1; }
[[ ! -f "${RPM_PATH}" ]] && { echo "RPM not found: ${RPM_PATH}"; exit 1; }
mkdir -p "${OUTPUT_DIR}"
RPM_NAME=$(rpm -qp --qf '%{NAME}' "${RPM_PATH}" 2>/dev/null)
RPM_VERSION=$(rpm -qp --qf '%{VERSION}-%{RELEASE}' "${RPM_PATH}" 2>/dev/null)
WORK_DIR=$(mktemp -d)
trap "rm -rf ${WORK_DIR}" EXIT
cd "${WORK_DIR}"
# Extract RPM contents
rpm2cpio "${RPM_PATH}" | cpio -idmv 2>/dev/null
# Find ELF binaries
find . -type f -exec file {} \; | grep -E 'ELF.*(executable|shared object)' | cut -d: -f1 | while read -r binary; do
BINARY_NAME=$(basename "${binary}")
BINARY_PATH="${binary#./}"
# Get build-id if present
BUILD_ID=$(readelf -n "${binary}" 2>/dev/null | grep 'Build ID:' | awk '{print $3}' || echo "")
# Extract function symbols
OUTPUT_FILE="${OUTPUT_DIR}/${RPM_NAME}_${BINARY_NAME}.json"
{
echo "{"
echo " \"package\": \"${RPM_NAME}\","
echo " \"version\": \"${RPM_VERSION}\","
echo " \"binary\": \"${BINARY_PATH}\","
echo " \"build_id\": \"${BUILD_ID}\","
echo " \"extracted_at\": \"$(date -u '+%Y-%m-%dT%H:%M:%SZ')\","
echo " \"functions\": ["
# Extract function addresses and sizes using nm and objdump
FIRST=true
nm -S --defined-only "${binary}" 2>/dev/null | grep -E '^[0-9a-f]+ [0-9a-f]+ [Tt]' | while read -r addr size type name; do
if [[ "${FIRST}" == "true" ]]; then
FIRST=false
else
echo ","
fi
# Calculate function hash from disassembly
FUNC_HASH=$(objdump -d --start-address=0x${addr} --stop-address=$((0x${addr} + 0x${size})) "${binary}" 2>/dev/null | \
grep -E '^\s+[0-9a-f]+:' | awk '{$1=""; print}' | sha256sum | cut -d' ' -f1)
printf ' {"name": "%s", "address": "0x%s", "size": %d, "hash": "%s"}' \
"${name}" "${addr}" "$((0x${size}))" "${FUNC_HASH}"
done || true
echo ""
echo " ]"
echo "}"
} > "${OUTPUT_FILE}"
echo "Extracted: ${OUTPUT_FILE}"
done
echo "Function extraction complete for: ${RPM_NAME}"

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# RHEL Mock Build Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Builds SRPMs using mock for isolation and reproducibility
set -euo pipefail
SRPM="${1:-}"
RESULT_DIR="${2:-/build/output}"
CONFIG="${3:-stellaops-repro}"
[[ -z "${SRPM}" ]] && { echo "Usage: $0 <srpm> [result_dir] [mock_config]"; exit 1; }
[[ ! -f "${SRPM}" ]] && { echo "SRPM not found: ${SRPM}"; exit 1; }
mkdir -p "${RESULT_DIR}"
echo "Building SRPM with mock: ${SRPM}"
echo "Config: ${CONFIG}"
echo "Output: ${RESULT_DIR}"
# Initialize mock if needed
mock -r "${CONFIG}" --init
# Build with reproducibility settings
mock -r "${CONFIG}" \
--rebuild "${SRPM}" \
--resultdir="${RESULT_DIR}" \
--define "SOURCE_DATE_EPOCH ${SOURCE_DATE_EPOCH:-$(date +%s)}" \
--define "_buildhost stellaops.build" \
--define "debug_package %{nil}"
echo "Build complete. Results in: ${RESULT_DIR}"
ls -la "${RESULT_DIR}"

View File

@@ -0,0 +1,83 @@
#!/bin/bash
# RHEL Build Normalization Script
# Sprint: SPRINT_1227_0002_0001 (Reproducible Builders)
#
# Normalizes RPM build environment for reproducibility
set -euo pipefail
# Normalize environment
export TZ=UTC
export LC_ALL=C.UTF-8
export LANG=C.UTF-8
# Deterministic compiler flags
export CFLAGS="${CFLAGS:--fno-record-gcc-switches -fdebug-prefix-map=$(pwd)=/buildroot -O2 -g}"
export CXXFLAGS="${CXXFLAGS:-${CFLAGS}}"
# (Debug-info variance is handled via the rpm macros in the mock config;
# DEB_BUILD_OPTIONS is a Debian knob and has no effect on RPM builds)
# RPM-specific reproducibility
export RPM_BUILD_NCPUS=1
# Normalize timestamps in ar archives
normalize_ar() {
local archive="$1"
if command -v objcopy &>/dev/null; then
# Rewrite members in place with zeroed timestamps/uids; packing the
# old archive into a new one would nest it instead
objcopy --enable-deterministic-archives "${archive}" 2>/dev/null || true
fi
}
# Normalize timestamps in tar archives
normalize_tar() {
local archive="$1"
local mtime="${SOURCE_DATE_EPOCH:-0}"
# Repack with deterministic settings
local tmp_dir=$(mktemp -d)
tar -xf "${archive}" -C "${tmp_dir}"
tar --sort=name \
--mtime="@${mtime}" \
--owner=0 --group=0 \
--numeric-owner \
-cf "${archive}.new" -C "${tmp_dir}" .
mv "${archive}.new" "${archive}"
rm -rf "${tmp_dir}"
}
# Normalize __pycache__ timestamps
normalize_python() {
find . -name '__pycache__' -type d -exec rm -rf {} + 2>/dev/null || true
find . -name '*.pyc' -delete 2>/dev/null || true
}
# Strip build paths from binaries
strip_build_paths() {
local binary="$1"
if command -v objcopy &>/dev/null; then
# Remove .note.gnu.build-id if it contains build path
objcopy --remove-section=.note.gnu.build-id "${binary}" 2>/dev/null || true
fi
}
# Main normalization
normalize_build() {
echo "Normalizing build environment..."
# Normalize Python bytecode
normalize_python
# Find and normalize archives
find . -name '*.a' -type f | while read -r ar; do
normalize_ar "${ar}"
done
echo "Normalization complete"
}
# If sourced, export functions; if executed, run normalization
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
normalize_build
fi

View File

@@ -0,0 +1,143 @@
{
"$schema": "./service-versions.schema.json",
"schemaVersion": "1.0.0",
"lastUpdated": "2025-01-01T00:00:00Z",
"registry": "git.stella-ops.org/stella-ops.org",
"services": {
"authority": {
"name": "Authority",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"attestor": {
"name": "Attestor",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"concelier": {
"name": "Concelier",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"scanner": {
"name": "Scanner",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"policy": {
"name": "Policy",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"signer": {
"name": "Signer",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"excititor": {
"name": "Excititor",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"gateway": {
"name": "Gateway",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"scheduler": {
"name": "Scheduler",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"cli": {
"name": "CLI",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"orchestrator": {
"name": "Orchestrator",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"notify": {
"name": "Notify",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"sbomservice": {
"name": "SbomService",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"vexhub": {
"name": "VexHub",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
},
"evidencelocker": {
"name": "EvidenceLocker",
"version": "1.0.0",
"dockerTag": null,
"releasedAt": null,
"gitSha": null,
"sbomDigest": null,
"signatureDigest": null
}
}
}

View File

@@ -0,0 +1,93 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models for all StellaOps modules.
.DESCRIPTION
Iterates through all configured modules and runs Scaffold-Module.ps1 for each.
Use this after schema changes or for initial setup.
.PARAMETER SkipMissing
Skip modules whose projects don't exist yet (default: true)
.EXAMPLE
.\Scaffold-AllModules.ps1
.EXAMPLE
.\Scaffold-AllModules.ps1 -SkipMissing:$false
#>
param(
[bool]$SkipMissing = $true
)
$ErrorActionPreference = "Stop"
# Module definitions: Module name -> Schema name
$modules = @(
@{ Module = "Unknowns"; Schema = "unknowns" },
@{ Module = "PacksRegistry"; Schema = "packs" },
@{ Module = "Authority"; Schema = "authority" },
@{ Module = "Scanner"; Schema = "scanner" },
@{ Module = "Scheduler"; Schema = "scheduler" },
@{ Module = "TaskRunner"; Schema = "taskrunner" },
@{ Module = "Policy"; Schema = "policy" },
@{ Module = "Notify"; Schema = "notify" },
@{ Module = "Concelier"; Schema = "vuln" },
@{ Module = "Excititor"; Schema = "vex" },
@{ Module = "Signals"; Schema = "signals" },
@{ Module = "Attestor"; Schema = "proofchain" },
@{ Module = "Signer"; Schema = "signer" }
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.Parent.FullName
Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for All Modules" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host ""
$successCount = 0
$skipCount = 0
$failCount = 0
foreach ($m in $modules) {
$projectPath = Join-Path $RepoRoot "src" $m.Module "__Libraries" "StellaOps.$($m.Module).Persistence.EfCore"
if (-not (Test-Path "$projectPath\*.csproj")) {
if ($SkipMissing) {
Write-Host "SKIP: $($m.Module) - Project not found" -ForegroundColor DarkGray
$skipCount++
continue
} else {
Write-Host "FAIL: $($m.Module) - Project not found at: $projectPath" -ForegroundColor Red
$failCount++
continue
}
}
Write-Host ""
Write-Host ">>> Scaffolding $($m.Module)..." -ForegroundColor Magenta
    try {
        & "$ScriptDir\Scaffold-Module.ps1" -Module $m.Module -Schema $m.Schema
        # A child script exiting with a non-zero code does not throw; check it explicitly
        if ($LASTEXITCODE -ne 0) {
            throw "Scaffold-Module.ps1 exited with code $LASTEXITCODE"
        }
        $successCount++
    }
catch {
Write-Host "FAIL: $($m.Module) - $($_.Exception.Message)" -ForegroundColor Red
$failCount++
}
}
Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Summary" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Success: $successCount"
Write-Host " Skipped: $skipCount"
Write-Host " Failed: $failCount"
Write-Host ""
if ($failCount -gt 0) {
exit 1
}

View File

@@ -0,0 +1,162 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models from PostgreSQL schema.
.DESCRIPTION
This script performs database-first scaffolding for a StellaOps module:
1. Cleans existing generated files (Entities, CompiledModels, DbContext)
2. Scaffolds DbContext and entities from live PostgreSQL schema
3. Generates compiled models for startup performance
.PARAMETER Module
The module name (e.g., Unknowns, PacksRegistry, Authority)
.PARAMETER Schema
The PostgreSQL schema name (defaults to lowercase module name)
.PARAMETER ConnectionString
PostgreSQL connection string. If not provided, uses default dev connection.
.PARAMETER ProjectPath
Optional custom project path. Defaults to src/{Module}/__Libraries/StellaOps.{Module}.Persistence.EfCore
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns -Schema unknowns -ConnectionString "Host=localhost;Database=stellaops_platform;Username=unknowns_user;Password=unknowns_dev"
.EXAMPLE
.\Scaffold-Module.ps1 -Module PacksRegistry -Schema packs
#>
param(
[Parameter(Mandatory=$true)]
[string]$Module,
[string]$Schema,
[string]$ConnectionString,
[string]$ProjectPath
)
$ErrorActionPreference = "Stop"
# Resolve repository root
$RepoRoot = (Get-Item $PSScriptRoot).Parent.Parent.Parent.FullName
# Default schema to lowercase module name
if (-not $Schema) {
$Schema = $Module.ToLower()
}
# Default connection string
if (-not $ConnectionString) {
$user = "${Schema}_user"
$password = "${Schema}_dev"
$ConnectionString = "Host=localhost;Port=5432;Database=stellaops_platform;Username=$user;Password=$password;SearchPath=$Schema"
}
# Default project path
if (-not $ProjectPath) {
$ProjectPath = Join-Path $RepoRoot "src" $Module "__Libraries" "StellaOps.$Module.Persistence.EfCore"
}
$ContextDir = "Context"
$EntitiesDir = "Entities"
$CompiledModelsDir = "CompiledModels"
Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for Module: $Module" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Schema: $Schema"
Write-Host " Project: $ProjectPath"
Write-Host " Connection: Host=localhost;Database=stellaops_platform;Username=${Schema}_user;..."
Write-Host ""
# Verify project exists
if (-not (Test-Path "$ProjectPath\*.csproj")) {
Write-Error "Project not found at: $ProjectPath"
Write-Host "Create the project first with: dotnet new classlib -n StellaOps.$Module.Persistence.EfCore"
exit 1
}
# Step 1: Clean existing generated files
Write-Host "[1/4] Cleaning existing generated files..." -ForegroundColor Yellow
$paths = @(
(Join-Path $ProjectPath $EntitiesDir),
(Join-Path $ProjectPath $CompiledModelsDir),
(Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs")
)
foreach ($path in $paths) {
if (Test-Path $path) {
Remove-Item -Recurse -Force $path
Write-Host " Removed: $path" -ForegroundColor DarkGray
}
}
# Recreate directories
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $EntitiesDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $CompiledModelsDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $ContextDir) | Out-Null
# Step 2: Scaffold DbContext and entities
Write-Host "[2/4] Scaffolding DbContext and entities from schema '$Schema'..." -ForegroundColor Yellow
$scaffoldArgs = @(
"ef", "dbcontext", "scaffold",
"`"$ConnectionString`"",
"Npgsql.EntityFrameworkCore.PostgreSQL",
"--project", "`"$ProjectPath`"",
"--schema", $Schema,
"--context", "${Module}DbContext",
"--context-dir", $ContextDir,
"--output-dir", $EntitiesDir,
"--namespace", "StellaOps.$Module.Persistence.EfCore.Entities",
"--context-namespace", "StellaOps.$Module.Persistence.EfCore.Context",
"--data-annotations",
"--no-onconfiguring",
"--force"
)
$process = Start-Process -FilePath "dotnet" -ArgumentList $scaffoldArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
Write-Error "Scaffold failed with exit code: $($process.ExitCode)"
exit 1
}
Write-Host " Scaffolded entities to: $EntitiesDir" -ForegroundColor DarkGray
# Step 3: Generate compiled models
Write-Host "[3/4] Generating compiled models..." -ForegroundColor Yellow
$optimizeArgs = @(
"ef", "dbcontext", "optimize",
"--project", "`"$ProjectPath`"",
"--context", "StellaOps.$Module.Persistence.EfCore.Context.${Module}DbContext",
"--output-dir", $CompiledModelsDir,
"--namespace", "StellaOps.$Module.Persistence.EfCore.CompiledModels"
)
$process = Start-Process -FilePath "dotnet" -ArgumentList $optimizeArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
Write-Error "Compiled model generation failed with exit code: $($process.ExitCode)"
exit 1
}
Write-Host " Generated compiled models to: $CompiledModelsDir" -ForegroundColor DarkGray
# Step 4: Summary
Write-Host "[4/4] Scaffolding complete!" -ForegroundColor Green
Write-Host ""
Write-Host "Generated files:" -ForegroundColor Cyan
$contextFile = Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs"
$entityFiles = Get-ChildItem -Path (Join-Path $ProjectPath $EntitiesDir) -Filter "*.cs" -ErrorAction SilentlyContinue
$compiledFiles = Get-ChildItem -Path (Join-Path $ProjectPath $CompiledModelsDir) -Filter "*.cs" -ErrorAction SilentlyContinue
Write-Host " Context: $(if (Test-Path $contextFile) { $contextFile } else { 'Not found' })"
Write-Host " Entities: $($entityFiles.Count) files"
Write-Host " Compiled Models: $($compiledFiles.Count) files"
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Yellow
Write-Host " 1. Review generated entities for any customization needs"
Write-Host " 2. Create repository implementations in Repositories/"
Write-Host " 3. Add DI registration in Extensions/"
Write-Host ""

View File

@@ -0,0 +1,88 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding for All StellaOps Modules
# ============================================================================
# Iterates through all configured modules and runs scaffold-module.sh for each.
# Use this after schema changes or for initial setup.
#
# Usage: ./scaffold-all-modules.sh [--no-skip-missing]
# ============================================================================
set -e
SKIP_MISSING=true
if [ "$1" = "--no-skip-missing" ]; then
SKIP_MISSING=false
fi
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
# Module definitions: "Module:Schema"
MODULES=(
"Unknowns:unknowns"
"PacksRegistry:packs"
"Authority:authority"
"Scanner:scanner"
"Scheduler:scheduler"
"TaskRunner:taskrunner"
"Policy:policy"
"Notify:notify"
"Concelier:vuln"
"Excititor:vex"
"Signals:signals"
"Attestor:proofchain"
"Signer:signer"
)
echo ""
echo "============================================================================"
echo " EF Core Scaffolding for All Modules"
echo "============================================================================"
echo ""
SUCCESS_COUNT=0
SKIP_COUNT=0
FAIL_COUNT=0
for entry in "${MODULES[@]}"; do
MODULE="${entry%%:*}"
SCHEMA="${entry##*:}"
PROJECT_PATH="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
if [ ! -f "$PROJECT_PATH"/*.csproj ]; then
if [ "$SKIP_MISSING" = true ]; then
echo "SKIP: $MODULE - Project not found"
            SKIP_COUNT=$((SKIP_COUNT + 1))  # avoid ((i++)): it returns 1 when i was 0, tripping set -e
continue
else
echo "FAIL: $MODULE - Project not found at: $PROJECT_PATH"
            FAIL_COUNT=$((FAIL_COUNT + 1))
continue
fi
fi
echo ""
echo ">>> Scaffolding $MODULE..."
if "$SCRIPT_DIR/scaffold-module.sh" "$MODULE" "$SCHEMA"; then
        SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
else
echo "FAIL: $MODULE - Scaffolding failed"
        FAIL_COUNT=$((FAIL_COUNT + 1))
fi
done
echo ""
echo "============================================================================"
echo " Summary"
echo "============================================================================"
echo " Success: $SUCCESS_COUNT"
echo " Skipped: $SKIP_COUNT"
echo " Failed: $FAIL_COUNT"
echo ""
if [ "$FAIL_COUNT" -gt 0 ]; then
exit 1
fi

View File

@@ -0,0 +1,113 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding Script for StellaOps Modules
# ============================================================================
# Usage: ./scaffold-module.sh <Module> [Schema] [ConnectionString]
#
# Examples:
# ./scaffold-module.sh Unknowns
# ./scaffold-module.sh Unknowns unknowns
# ./scaffold-module.sh PacksRegistry packs "Host=localhost;..."
# ============================================================================
set -e
MODULE=$1
SCHEMA=${2:-$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')}
CONNECTION_STRING=$3
if [ -z "$MODULE" ]; then
echo "Usage: $0 <Module> [Schema] [ConnectionString]"
echo ""
echo "Examples:"
echo " $0 Unknowns"
echo " $0 Unknowns unknowns"
echo " $0 PacksRegistry packs \"Host=localhost;Database=stellaops_platform;Username=packs_user;Password=packs_dev\""
exit 1
fi
# Resolve repository root
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
# Default connection string
if [ -z "$CONNECTION_STRING" ]; then
USER="${SCHEMA}_user"
PASSWORD="${SCHEMA}_dev"
CONNECTION_STRING="Host=localhost;Port=5432;Database=stellaops_platform;Username=$USER;Password=$PASSWORD;SearchPath=$SCHEMA"
fi
PROJECT_DIR="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
CONTEXT_DIR="Context"
ENTITIES_DIR="Entities"
COMPILED_DIR="CompiledModels"
echo ""
echo "============================================================================"
echo " EF Core Scaffolding for Module: $MODULE"
echo "============================================================================"
echo " Schema: $SCHEMA"
echo " Project: $PROJECT_DIR"
echo " Connection: Host=localhost;Database=stellaops_platform;Username=${SCHEMA}_user;..."
echo ""
# Verify project exists
if [ ! -f "$PROJECT_DIR"/*.csproj ]; then
echo "ERROR: Project not found at: $PROJECT_DIR"
echo "Create the project first with: dotnet new classlib -n StellaOps.$MODULE.Persistence.EfCore"
exit 1
fi
# Step 1: Clean existing generated files
echo "[1/4] Cleaning existing generated files..."
rm -rf "$PROJECT_DIR/$ENTITIES_DIR"
rm -rf "$PROJECT_DIR/$COMPILED_DIR"
rm -f "$PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
mkdir -p "$PROJECT_DIR/$ENTITIES_DIR"
mkdir -p "$PROJECT_DIR/$COMPILED_DIR"
mkdir -p "$PROJECT_DIR/$CONTEXT_DIR"
echo " Cleaned: $ENTITIES_DIR, $COMPILED_DIR, ${MODULE}DbContext.cs"
# Step 2: Scaffold DbContext and entities
echo "[2/4] Scaffolding DbContext and entities from schema '$SCHEMA'..."
dotnet ef dbcontext scaffold \
"$CONNECTION_STRING" \
Npgsql.EntityFrameworkCore.PostgreSQL \
--project "$PROJECT_DIR" \
--schema "$SCHEMA" \
--context "${MODULE}DbContext" \
--context-dir "$CONTEXT_DIR" \
--output-dir "$ENTITIES_DIR" \
--namespace "StellaOps.$MODULE.Persistence.EfCore.Entities" \
--context-namespace "StellaOps.$MODULE.Persistence.EfCore.Context" \
--data-annotations \
--no-onconfiguring \
--force
echo " Scaffolded entities to: $ENTITIES_DIR"
# Step 3: Generate compiled models
echo "[3/4] Generating compiled models..."
dotnet ef dbcontext optimize \
--project "$PROJECT_DIR" \
--context "StellaOps.$MODULE.Persistence.EfCore.Context.${MODULE}DbContext" \
--output-dir "$COMPILED_DIR" \
--namespace "StellaOps.$MODULE.Persistence.EfCore.CompiledModels"
echo " Generated compiled models to: $COMPILED_DIR"
# Step 4: Summary
echo "[4/4] Scaffolding complete!"
echo ""
echo "Generated files:"
echo " Context: $PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
echo " Entities: $(ls -1 "$PROJECT_DIR/$ENTITIES_DIR"/*.cs 2>/dev/null | wc -l) files"
echo " Compiled Models: $(ls -1 "$PROJECT_DIR/$COMPILED_DIR"/*.cs 2>/dev/null | wc -l) files"
echo ""
echo "Next steps:"
echo " 1. Review generated entities for any customization needs"
echo " 2. Create repository implementations in Repositories/"
echo " 3. Add DI registration in Extensions/"
echo ""

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env pwsh
# fix-duplicate-packages.ps1 - Remove duplicate PackageReference items from test projects
# These are already provided by Directory.Build.props
param([switch]$DryRun)
$packagesToRemove = @(
"coverlet.collector",
"Microsoft.NET.Test.Sdk",
"Microsoft.AspNetCore.Mvc.Testing",
"xunit",
"xunit.runner.visualstudio",
"Microsoft.Extensions.TimeProvider.Testing"
)
$sharpCompressPackage = "SharpCompress"
# Find all test project files
$testProjects = Get-ChildItem -Path "src" -Filter "*.Tests.csproj" -Recurse
$corpusProjects = Get-ChildItem -Path "src" -Filter "*.Corpus.*.csproj" -Recurse
Write-Host "=== Fix Duplicate Package References ===" -ForegroundColor Cyan
Write-Host "Found $($testProjects.Count) test projects" -ForegroundColor Yellow
Write-Host "Found $($corpusProjects.Count) corpus projects (SharpCompress)" -ForegroundColor Yellow
$fixedCount = 0
foreach ($proj in $testProjects) {
$content = Get-Content $proj.FullName -Raw
$modified = $false
# Skip projects that opt out of common test infrastructure
if ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") {
Write-Host " Skipped (UseConcelierTestInfra=false): $($proj.Name)" -ForegroundColor DarkGray
continue
}
foreach ($pkg in $packagesToRemove) {
# Match PackageReference for this package (various formats)
$patterns = @(
"(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
"(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
)
foreach ($pattern in $patterns) {
if ($content -match $pattern) {
$content = $content -replace $pattern, ""
$modified = $true
}
}
}
# Clean up empty ItemGroups
$content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
# Clean up ItemGroups with only whitespace/comments
$content = $content -replace "(?s)<ItemGroup>\s*<!--[^-]*-->\s*</ItemGroup>", ""
if ($modified) {
$fixedCount++
Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
if (-not $DryRun) {
$content | Set-Content $proj.FullName -NoNewline
}
}
}
# Fix SharpCompress in corpus projects
foreach ($proj in $corpusProjects) {
$content = Get-Content $proj.FullName -Raw
$modified = $false
$patterns = @(
"(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
"(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
)
foreach ($pattern in $patterns) {
if ($content -match $pattern) {
$content = $content -replace $pattern, ""
$modified = $true
}
}
# Clean up empty ItemGroups
$content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
if ($modified) {
$fixedCount++
Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
if (-not $DryRun) {
$content | Set-Content $proj.FullName -NoNewline
}
}
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
if ($DryRun) {
Write-Host "(Dry run - no changes made)" -ForegroundColor Yellow
}
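# Example: preview which projects would change without writing anything
# (path is illustrative; run from the repository root so the "src" glob resolves):
#   pwsh devops/scripts/fix-duplicate-packages.ps1 -DryRun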

View File

@@ -0,0 +1,55 @@
# Fix duplicate "using StellaOps.TestKit;" statements in C# files
# The pattern shows files have this statement both at top (correct) and in middle (wrong)
# This script removes all occurrences AFTER the first one
$ErrorActionPreference = "Stop"
$srcPath = Join-Path $PSScriptRoot "..\..\src"
$pattern = "using StellaOps.TestKit;"
# Find all .cs files containing the pattern
$files = Get-ChildItem -Path $srcPath -Recurse -Filter "*.cs" |
Where-Object { (Get-Content $_.FullName -Raw) -match [regex]::Escape($pattern) }
Write-Host "Found $($files.Count) files with 'using StellaOps.TestKit;'" -ForegroundColor Cyan
$fixedCount = 0
$errorCount = 0
foreach ($file in $files) {
try {
$lines = Get-Content $file.FullName
$newLines = @()
$foundFirst = $false
$removedAny = $false
foreach ($line in $lines) {
if ($line.Trim() -eq $pattern) {
if (-not $foundFirst) {
# Keep the first occurrence
$newLines += $line
$foundFirst = $true
} else {
# Skip subsequent occurrences
$removedAny = $true
}
} else {
$newLines += $line
}
}
if ($removedAny) {
$newLines | Set-Content -Path $file.FullName -Encoding UTF8
Write-Host "Fixed: $($file.Name)" -ForegroundColor Green
$fixedCount++
}
} catch {
Write-Host "Error processing $($file.FullName): $_" -ForegroundColor Red
$errorCount++
}
}
Write-Host ""
Write-Host "Summary:" -ForegroundColor Cyan
Write-Host " Files fixed: $fixedCount" -ForegroundColor Green
Write-Host " Errors: $errorCount" -ForegroundColor $(if ($errorCount -gt 0) { "Red" } else { "Green" })

View File

@@ -0,0 +1,51 @@
# Fix projects with UseConcelierTestInfra=false that don't have xunit
# These projects relied on TestKit for xunit, but now need their own reference
$ErrorActionPreference = "Stop"
$srcPath = "E:\dev\git.stella-ops.org\src"
# Find test projects with UseConcelierTestInfra=false
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
Where-Object {
$content = Get-Content $_.FullName -Raw
($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
(-not ($content -match "xunit\.v3")) -and # Skip xunit.v3 projects
(-not ($content -match '<PackageReference\s+Include="xunit"')) # Skip projects that already have xunit
}
Write-Host "Found $($projects.Count) projects needing xunit" -ForegroundColor Cyan
$xunitPackages = @'
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
'@
$fixedCount = 0
foreach ($proj in $projects) {
$content = Get-Content $proj.FullName -Raw
# Check if it has an ItemGroup with PackageReference
if ($content -match '(<ItemGroup>[\s\S]*?<PackageReference)') {
        # Add xunit packages after the first PackageReference ItemGroup opening only
        $regex = [regex]'(<ItemGroup>\s*\r?\n)(\s*<PackageReference)'
        $newContent = $regex.Replace($content, "`$1$xunitPackages`n`$2", 1)
} else {
# No PackageReference ItemGroup, add one before </Project>
$itemGroup = @"
<ItemGroup>
$xunitPackages
</ItemGroup>
"@
$newContent = $content -replace '</Project>', "$itemGroup`n</Project>"
}
if ($newContent -ne $content) {
Set-Content -Path $proj.FullName -Value $newContent -NoNewline
Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
$fixedCount++
}
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,44 @@
# Fix project references in src/__Tests/** that point to wrong relative paths
# Pattern: ../../<Module>/... should be ../../../<Module>/...
$ErrorActionPreference = "Stop"
$testsPath = "E:\dev\git.stella-ops.org\src\__Tests"
# Known module prefixes that exist at src/<Module>/
$modules = @("Signals", "Scanner", "Concelier", "Scheduler", "Authority", "Attestor",
"BinaryIndex", "EvidenceLocker", "Excititor", "ExportCenter", "Gateway",
"Graph", "IssuerDirectory", "Notify", "Orchestrator", "Policy", "AirGap",
"Provenance", "Replay", "RiskEngine", "SbomService", "Signer", "TaskRunner",
"Telemetry", "TimelineIndexer", "Unknowns", "VexHub", "VexLens", "VulnExplorer",
"Zastava", "Cli", "Aoc", "Web", "Bench", "Cryptography", "PacksRegistry",
"Notifier", "Findings")
$fixedCount = 0
Get-ChildItem -Path $testsPath -Recurse -Filter "*.csproj" | ForEach-Object {
$proj = $_
$content = Get-Content $proj.FullName -Raw
$originalContent = $content
foreach ($module in $modules) {
# Fix ../../<Module>/ to ../../../<Module>/
# But not ../../../<Module> (already correct)
$pattern = "Include=`"../../$module/"
$replacement = "Include=`"../../../$module/"
if ($content -match [regex]::Escape($pattern) -and $content -notmatch [regex]::Escape("Include=`"../../../$module/")) {
$content = $content -replace [regex]::Escape($pattern), $replacement
}
}
# Fix __Libraries references that are one level short
$content = $content -replace 'Include="../../__Libraries/', 'Include="../../../__Libraries/'
if ($content -ne $originalContent) {
Set-Content -Path $proj.FullName -Value $content -NoNewline
Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
$fixedCount++
}
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,68 @@
#!/usr/bin/env pwsh
# fix-sln-duplicates.ps1 - Remove duplicate project entries from solution file
param(
[string]$SlnPath = "src/StellaOps.sln"
)
$ErrorActionPreference = "Stop"
Write-Host "=== Solution Duplicate Cleanup ===" -ForegroundColor Cyan
Write-Host "Solution: $SlnPath"
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Track seen project names
$seenProjects = @{}
$duplicateGuids = @()
$newLines = @()
$skipNext = $false
for ($i = 0; $i -lt $lines.Count; $i++) {
$line = $lines[$i]
if ($skipNext) {
$skipNext = $false
continue
}
# Check for project declaration
if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
$name = $Matches[1]
$guid = $Matches[2]
if ($seenProjects.ContainsKey($name)) {
Write-Host "Removing duplicate: $name ($guid)" -ForegroundColor Yellow
$duplicateGuids += $guid
# Skip this line and the next EndProject line
$skipNext = $true
continue
} else {
$seenProjects[$name] = $true
}
}
$newLines += $line
}
# Remove GlobalSection references to duplicate GUIDs
$finalLines = @()
foreach ($line in $newLines) {
$skip = $false
foreach ($guid in $duplicateGuids) {
if ($line -match $guid) {
$skip = $true
break
}
}
if (-not $skip) {
$finalLines += $line
}
}
# Write back
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host ""
Write-Host "Removed $($duplicateGuids.Count) duplicate projects" -ForegroundColor Green

View File

@@ -0,0 +1,40 @@
# Add <Using Include="Xunit" /> to test projects with UseConcelierTestInfra=false
# that have xunit but don't have the global using
$ErrorActionPreference = "Stop"
$srcPath = "E:\dev\git.stella-ops.org\src"
# Find test projects with UseConcelierTestInfra=false that have xunit but no Using Include="Xunit"
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
Where-Object {
$content = Get-Content $_.FullName -Raw
($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
($content -match '<PackageReference\s+Include="xunit"') -and
(-not ($content -match '<Using\s+Include="Xunit"'))
}
Write-Host "Found $($projects.Count) projects needing Xunit using" -ForegroundColor Cyan
$fixedCount = 0
foreach ($proj in $projects) {
$content = Get-Content $proj.FullName -Raw
# Add Using Include="Xunit" before first ProjectReference ItemGroup or at end
if ($content -match '(<ItemGroup>\s*\r?\n\s*<ProjectReference)') {
$usingBlock = " <ItemGroup>`n <Using Include=`"Xunit`" />`n </ItemGroup>`n`n"
        $regex = [regex]'(\s*)(<ItemGroup>\s*\r?\n\s*<ProjectReference)'
        $newContent = $regex.Replace($content, "$usingBlock`$1`$2", 1)
} else {
# Add before </Project>
$usingBlock = "`n <ItemGroup>`n <Using Include=`"Xunit`" />`n </ItemGroup>`n"
$newContent = $content -replace '</Project>', "$usingBlock</Project>"
}
if ($newContent -ne $content) {
Set-Content -Path $proj.FullName -Value $newContent -NoNewline
Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
$fixedCount++
}
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,37 @@
# Fix xunit.v3 projects that conflict with Directory.Build.props xunit 2.x
# Add UseConcelierTestInfra=false to exclude them from common test infrastructure
$ErrorActionPreference = "Stop"
$srcPath = Join-Path $PSScriptRoot "..\..\src"
# Find all csproj files that reference xunit.v3
$xunitV3Projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
Where-Object { (Get-Content $_.FullName -Raw) -match "xunit\.v3" }
Write-Host "Found $($xunitV3Projects.Count) projects with xunit.v3" -ForegroundColor Cyan
$fixedCount = 0
foreach ($proj in $xunitV3Projects) {
$content = Get-Content $proj.FullName -Raw
# Check if already has UseConcelierTestInfra set
if ($content -match "<UseConcelierTestInfra>") {
Write-Host " Skipped (already configured): $($proj.Name)" -ForegroundColor DarkGray
continue
}
    # Add UseConcelierTestInfra=false after the first <PropertyGroup> only
    $regex = [regex]'(<PropertyGroup>)'
    $newContent = $regex.Replace($content, "`$1`n    <UseConcelierTestInfra>false</UseConcelierTestInfra>", 1)
# Only write if changed
if ($newContent -ne $content) {
Set-Content -Path $proj.FullName -Value $newContent -NoNewline
Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
$fixedCount++
}
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,247 @@
<#
.SYNOPSIS
Generates plugin configuration files for StellaOps modules.
.DESCRIPTION
This script generates plugin.json manifests and config.yaml files for all
plugins based on the plugin catalog definition.
.PARAMETER RepoRoot
Path to the repository root. Defaults to the parent of the devops folder.
.PARAMETER OutputDir
Output directory for generated configs. Defaults to etc/plugins/.
.PARAMETER Force
Overwrite existing configuration files.
.EXAMPLE
.\generate-plugin-configs.ps1
.\generate-plugin-configs.ps1 -Force
#>
param(
[string]$RepoRoot = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
[string]$OutputDir = "",
[switch]$Force
)
if (-not $OutputDir) {
$OutputDir = Join-Path $RepoRoot "etc/plugins"
}
# Plugin catalog - defines all plugins and their metadata
$PluginCatalog = @{
# Router transports
"router/transports" = @{
category = "router.transports"
plugins = @(
@{ id = "tcp"; name = "TCP Transport"; assembly = "StellaOps.Router.Transport.Tcp.dll"; enabled = $true; priority = 50 }
@{ id = "tls"; name = "TLS Transport"; assembly = "StellaOps.Router.Transport.Tls.dll"; enabled = $true; priority = 60 }
@{ id = "udp"; name = "UDP Transport"; assembly = "StellaOps.Router.Transport.Udp.dll"; enabled = $false; priority = 40 }
@{ id = "rabbitmq"; name = "RabbitMQ Transport"; assembly = "StellaOps.Router.Transport.RabbitMq.dll"; enabled = $false; priority = 30 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Router.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
# Excititor connectors
"excititor" = @{
category = "excititor.connectors"
plugins = @(
@{ id = "redhat-csaf"; name = "Red Hat CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.RedHat.CSAF.dll"; enabled = $true; priority = 100; vendor = "Red Hat" }
@{ id = "cisco-csaf"; name = "Cisco CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Cisco.CSAF.dll"; enabled = $false; priority = 90; vendor = "Cisco" }
@{ id = "msrc-csaf"; name = "Microsoft CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.MSRC.CSAF.dll"; enabled = $false; priority = 85; vendor = "Microsoft" }
@{ id = "oracle-csaf"; name = "Oracle CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Oracle.CSAF.dll"; enabled = $false; priority = 80; vendor = "Oracle" }
@{ id = "ubuntu-csaf"; name = "Ubuntu CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.dll"; enabled = $false; priority = 75; vendor = "Canonical" }
@{ id = "suse-rancher"; name = "SUSE Rancher VEX Hub"; assembly = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.dll"; enabled = $false; priority = 70; vendor = "SUSE" }
@{ id = "oci-openvex"; name = "OCI OpenVEX Connector"; assembly = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.dll"; enabled = $false; priority = 60 }
)
}
# Scanner language analyzers
"scanner/analyzers/lang" = @{
category = "scanner.analyzers.lang"
plugins = @(
@{ id = "dotnet"; name = ".NET Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.DotNet.dll"; enabled = $true; priority = 100 }
@{ id = "go"; name = "Go Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Go.dll"; enabled = $true; priority = 95 }
@{ id = "node"; name = "Node.js Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Node.dll"; enabled = $true; priority = 90 }
@{ id = "python"; name = "Python Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll"; enabled = $true; priority = 85 }
@{ id = "java"; name = "Java Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Java.dll"; enabled = $true; priority = 80 }
@{ id = "rust"; name = "Rust Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Rust.dll"; enabled = $false; priority = 75 }
@{ id = "ruby"; name = "Ruby Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Ruby.dll"; enabled = $false; priority = 70 }
@{ id = "php"; name = "PHP Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Php.dll"; enabled = $false; priority = 65 }
@{ id = "swift"; name = "Swift Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Swift.dll"; enabled = $false; priority = 60 }
@{ id = "cpp"; name = "C/C++ Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Cpp.dll"; enabled = $false; priority = 55 }
)
}
# Scanner OS analyzers
"scanner/analyzers/os" = @{
category = "scanner.analyzers.os"
plugins = @(
@{ id = "apk"; name = "Alpine APK Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Apk.dll"; enabled = $true; priority = 100 }
@{ id = "dpkg"; name = "Debian DPKG Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Dpkg.dll"; enabled = $true; priority = 95 }
@{ id = "rpm"; name = "RPM Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Rpm.dll"; enabled = $true; priority = 90 }
@{ id = "pacman"; name = "Arch Pacman Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Pacman.dll"; enabled = $false; priority = 80 }
@{ id = "homebrew"; name = "Homebrew Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Homebrew.dll"; enabled = $false; priority = 70 }
@{ id = "chocolatey"; name = "Chocolatey Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Chocolatey.dll"; enabled = $false; priority = 65 }
)
}
# Notify channels
"notify" = @{
category = "notify.channels"
plugins = @(
@{ id = "email"; name = "Email Notifier"; assembly = "StellaOps.Notify.Connectors.Email.dll"; enabled = $true; priority = 100 }
@{ id = "slack"; name = "Slack Notifier"; assembly = "StellaOps.Notify.Connectors.Slack.dll"; enabled = $true; priority = 90 }
@{ id = "webhook"; name = "Webhook Notifier"; assembly = "StellaOps.Notify.Connectors.Webhook.dll"; enabled = $true; priority = 80 }
@{ id = "teams"; name = "Microsoft Teams Notifier"; assembly = "StellaOps.Notify.Connectors.Teams.dll"; enabled = $false; priority = 85 }
@{ id = "pagerduty"; name = "PagerDuty Notifier"; assembly = "StellaOps.Notify.Connectors.PagerDuty.dll"; enabled = $false; priority = 75 }
@{ id = "opsgenie"; name = "OpsGenie Notifier"; assembly = "StellaOps.Notify.Connectors.OpsGenie.dll"; enabled = $false; priority = 70 }
@{ id = "telegram"; name = "Telegram Notifier"; assembly = "StellaOps.Notify.Connectors.Telegram.dll"; enabled = $false; priority = 65 }
@{ id = "discord"; name = "Discord Notifier"; assembly = "StellaOps.Notify.Connectors.Discord.dll"; enabled = $false; priority = 60 }
)
}
# Messaging transports
"messaging" = @{
category = "messaging.transports"
plugins = @(
@{ id = "valkey"; name = "Valkey Transport"; assembly = "StellaOps.Messaging.Transport.Valkey.dll"; enabled = $true; priority = 100 }
@{ id = "postgres"; name = "PostgreSQL Transport"; assembly = "StellaOps.Messaging.Transport.Postgres.dll"; enabled = $false; priority = 90 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Messaging.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
}
function New-PluginManifest {
param(
[string]$ModulePath,
[hashtable]$Plugin,
[string]$Category
)
$fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"
$manifest = @{
'$schema' = "https://schema.stella-ops.org/plugin-manifest/v2.json"
schemaVersion = "2.0"
id = $fullId
name = $Plugin.name
version = "1.0.0"
assembly = @{
path = $Plugin.assembly
}
capabilities = @()
platforms = @("linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64")
compliance = @("NIST")
jurisdiction = "world"
priority = $Plugin.priority
enabled = $Plugin.enabled
metadata = @{
author = "StellaOps"
license = "AGPL-3.0-or-later"
}
}
if ($Plugin.vendor) {
$manifest.metadata["vendor"] = $Plugin.vendor
}
return $manifest | ConvertTo-Json -Depth 10
}
function New-PluginConfig {
param(
[string]$ModulePath,
[hashtable]$Plugin,
[string]$Category
)
$fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"
$config = @"
id: $fullId
name: $($Plugin.name)
enabled: $($Plugin.enabled.ToString().ToLower())
priority: $($Plugin.priority)
config:
# Plugin-specific configuration
# Add settings here as needed
"@
return $config
}
function New-RegistryFile {
param(
[string]$Category,
[array]$Plugins
)
$entries = $Plugins | ForEach-Object {
" $($_.id):`n enabled: $($_.enabled.ToString().ToLower())`n priority: $($_.priority)`n config: $($_.id)/config.yaml"
}
$registry = @"
version: "1.0"
category: $Category
defaults:
enabled: false
timeout: "00:05:00"
plugins:
$($entries -join "`n")
"@
return $registry
}
# Main generation logic
Write-Host "Generating plugin configurations to: $OutputDir" -ForegroundColor Cyan
foreach ($modulePath in $PluginCatalog.Keys) {
$moduleConfig = $PluginCatalog[$modulePath]
$moduleDir = Join-Path $OutputDir $modulePath
Write-Host "Processing module: $modulePath" -ForegroundColor Yellow
# Create module directory
if (-not (Test-Path $moduleDir)) {
New-Item -ItemType Directory -Path $moduleDir -Force | Out-Null
}
# Generate registry.yaml
$registryPath = Join-Path $moduleDir "registry.yaml"
if ($Force -or -not (Test-Path $registryPath)) {
$registryContent = New-RegistryFile -Category $moduleConfig.category -Plugins $moduleConfig.plugins
Set-Content -Path $registryPath -Value $registryContent -Encoding utf8
Write-Host " Created: registry.yaml" -ForegroundColor Green
}
# Generate plugin configs
foreach ($plugin in $moduleConfig.plugins) {
$pluginDir = Join-Path $moduleDir $plugin.id
if (-not (Test-Path $pluginDir)) {
New-Item -ItemType Directory -Path $pluginDir -Force | Out-Null
}
# plugin.json
$manifestPath = Join-Path $pluginDir "plugin.json"
if ($Force -or -not (Test-Path $manifestPath)) {
$manifestContent = New-PluginManifest -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
Set-Content -Path $manifestPath -Value $manifestContent -Encoding utf8
Write-Host " Created: $($plugin.id)/plugin.json" -ForegroundColor Green
}
# config.yaml
$configPath = Join-Path $pluginDir "config.yaml"
if ($Force -or -not (Test-Path $configPath)) {
$configContent = New-PluginConfig -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
Set-Content -Path $configPath -Value $configContent -Encoding utf8
Write-Host " Created: $($plugin.id)/config.yaml" -ForegroundColor Green
}
}
}
Write-Host "`nPlugin configuration generation complete!" -ForegroundColor Cyan

View File

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1
# ============================================================================
# Standard Exit Codes
# ============================================================================
# Success
export EXIT_SUCCESS=0
# General errors (1-9)
export EXIT_ERROR=1 # Generic error
export EXIT_USAGE=2 # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3 # Configuration error
export EXIT_NOT_FOUND=4 # File/resource not found
export EXIT_PERMISSION=5 # Permission denied
export EXIT_IO_ERROR=6 # I/O error
export EXIT_NETWORK_ERROR=7 # Network error
export EXIT_TIMEOUT=8 # Operation timed out
export EXIT_INTERRUPTED=9 # User interrupted (Ctrl+C)
# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10 # Required tool not installed
export EXIT_TOOL_ERROR=11 # Tool execution failed
export EXIT_VERSION_MISMATCH=12 # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13 # Dependency resolution failed
# Build errors (20-29)
export EXIT_BUILD_FAILED=20 # Build compilation failed
export EXIT_RESTORE_FAILED=21 # Package restore failed
export EXIT_PUBLISH_FAILED=22 # Publish failed
export EXIT_PACKAGING_FAILED=23 # Packaging failed
# Test errors (30-39)
export EXIT_TEST_FAILED=30 # Tests failed
export EXIT_TEST_TIMEOUT=31 # Test timed out
export EXIT_FIXTURE_ERROR=32 # Test fixture error
export EXIT_DETERMINISM_FAIL=33 # Determinism check failed
# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40 # Deployment failed
export EXIT_ROLLBACK_FAILED=41 # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43 # Container registry error
# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51 # Schema validation failed
export EXIT_LINT_ERROR=52 # Lint check failed
export EXIT_FORMAT_ERROR=53 # Format check failed
export EXIT_LICENSE_ERROR=54 # License compliance failed
# Security errors (60-69)
export EXIT_SECURITY_ERROR=60 # Security check failed
export EXIT_SECRETS_FOUND=61 # Secrets detected in code
export EXIT_VULN_FOUND=62 # Vulnerabilities found
export EXIT_SIGN_FAILED=63 # Signing failed
export EXIT_VERIFY_FAILED=64 # Verification failed
# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70 # Git operation failed
export EXIT_DIRTY_WORKTREE=71 # Uncommitted changes
export EXIT_MERGE_CONFLICT=72 # Merge conflict
export EXIT_BRANCH_ERROR=73 # Branch operation failed
# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80 # .NET specific error
export EXIT_DOCKER_ERROR=81 # Docker specific error
export EXIT_HELM_ERROR=82 # Helm specific error
export EXIT_KUBECTL_ERROR=83 # kubectl specific error
export EXIT_NPM_ERROR=84 # npm specific error
export EXIT_PYTHON_ERROR=85 # Python specific error
# Legacy compatibility
export EXIT_TOOLCHAIN=69 # Tool not found (legacy, use EXIT_MISSING_TOOL)
# ============================================================================
# Helper Functions
# ============================================================================
# Get exit code name from number
exit_code_name() {
local code="${1:-}"
case "$code" in
0) echo "SUCCESS" ;;
1) echo "ERROR" ;;
2) echo "USAGE" ;;
3) echo "CONFIG_ERROR" ;;
4) echo "NOT_FOUND" ;;
5) echo "PERMISSION" ;;
6) echo "IO_ERROR" ;;
7) echo "NETWORK_ERROR" ;;
8) echo "TIMEOUT" ;;
9) echo "INTERRUPTED" ;;
10) echo "MISSING_TOOL" ;;
11) echo "TOOL_ERROR" ;;
12) echo "VERSION_MISMATCH" ;;
13) echo "DEPENDENCY_ERROR" ;;
20) echo "BUILD_FAILED" ;;
21) echo "RESTORE_FAILED" ;;
22) echo "PUBLISH_FAILED" ;;
23) echo "PACKAGING_FAILED" ;;
30) echo "TEST_FAILED" ;;
31) echo "TEST_TIMEOUT" ;;
32) echo "FIXTURE_ERROR" ;;
33) echo "DETERMINISM_FAIL" ;;
40) echo "DEPLOY_FAILED" ;;
41) echo "ROLLBACK_FAILED" ;;
42) echo "HEALTH_CHECK_FAIL" ;;
43) echo "REGISTRY_ERROR" ;;
50) echo "VALIDATION_FAILED" ;;
51) echo "SCHEMA_ERROR" ;;
52) echo "LINT_ERROR" ;;
53) echo "FORMAT_ERROR" ;;
54) echo "LICENSE_ERROR" ;;
60) echo "SECURITY_ERROR" ;;
61) echo "SECRETS_FOUND" ;;
62) echo "VULN_FOUND" ;;
63) echo "SIGN_FAILED" ;;
64) echo "VERIFY_FAILED" ;;
69) echo "TOOLCHAIN (legacy)" ;;
70) echo "GIT_ERROR" ;;
71) echo "DIRTY_WORKTREE" ;;
72) echo "MERGE_CONFLICT" ;;
73) echo "BRANCH_ERROR" ;;
80) echo "DOTNET_ERROR" ;;
81) echo "DOCKER_ERROR" ;;
82) echo "HELM_ERROR" ;;
83) echo "KUBECTL_ERROR" ;;
84) echo "NPM_ERROR" ;;
85) echo "PYTHON_ERROR" ;;
126) echo "COMMAND_NOT_EXECUTABLE" ;;
127) echo "COMMAND_NOT_FOUND" ;;
*)
if [[ $code -ge 128 ]] && [[ $code -le 255 ]]; then
local signal=$((code - 128))
echo "SIGNAL_${signal}"
else
echo "UNKNOWN_${code}"
fi
;;
esac
}
# Check if exit code indicates success
is_success() {
[[ "${1:-1}" -eq 0 ]]
}
# Check if exit code indicates error
is_error() {
[[ "${1:-0}" -ne 0 ]]
}
# Exit with message and code
exit_with() {
local code="${1:-1}"
shift
if [[ $# -gt 0 ]]; then
echo "$@" >&2
fi
exit "$code"
}
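
# Example (sketch): a consumer script using this registry. Paths are illustrative.
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#   command -v dotnet >/dev/null 2>&1 || exit_with "$EXIT_MISSING_TOOL" "dotnet not found"
#   dotnet build || exit_with "$EXIT_BUILD_FAILED" "build failed ($(exit_code_name $EXIT_BUILD_FAILED))"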

View File

@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Repository Information
# ============================================================================
# Get repository root directory
git_root() {
git rev-parse --show-toplevel 2>/dev/null || echo "."
}
# Check if current directory is a git repository
is_git_repo() {
git rev-parse --git-dir >/dev/null 2>&1
}
# Get current commit SHA (full)
git_sha() {
git rev-parse HEAD 2>/dev/null
}
# Get current commit SHA (short)
git_sha_short() {
git rev-parse --short HEAD 2>/dev/null
}
# Get current branch name
git_branch() {
git rev-parse --abbrev-ref HEAD 2>/dev/null
}
# Get current tag (if HEAD is tagged)
git_tag() {
git describe --tags --exact-match HEAD 2>/dev/null || echo ""
}
# Get latest tag
git_latest_tag() {
git describe --tags --abbrev=0 2>/dev/null || echo ""
}
# Get remote URL
git_remote_url() {
local remote="${1:-origin}"
git remote get-url "$remote" 2>/dev/null
}
# Get repository name from remote URL
git_repo_name() {
local url
url=$(git_remote_url "${1:-origin}")
basename "$url" .git
}
# ============================================================================
# Commit Information
# ============================================================================
# Get commit message
git_commit_message() {
local sha="${1:-HEAD}"
git log -1 --format="%s" "$sha" 2>/dev/null
}
# Get commit author
git_commit_author() {
local sha="${1:-HEAD}"
git log -1 --format="%an" "$sha" 2>/dev/null
}
# Get commit author email
git_commit_author_email() {
local sha="${1:-HEAD}"
git log -1 --format="%ae" "$sha" 2>/dev/null
}
# Get commit timestamp (ISO 8601)
git_commit_timestamp() {
local sha="${1:-HEAD}"
git log -1 --format="%aI" "$sha" 2>/dev/null
}
# Get commit timestamp (Unix epoch)
git_commit_epoch() {
local sha="${1:-HEAD}"
git log -1 --format="%at" "$sha" 2>/dev/null
}
# ============================================================================
# Working Tree State
# ============================================================================
# Check if working tree is clean
git_is_clean() {
[[ -z "$(git status --porcelain 2>/dev/null)" ]]
}
# Check if working tree is dirty
git_is_dirty() {
! git_is_clean
}
# Get list of changed files
git_changed_files() {
git status --porcelain 2>/dev/null | awk '{print $2}'
}
# Get list of staged files
git_staged_files() {
git diff --cached --name-only 2>/dev/null
}
# Get list of untracked files
git_untracked_files() {
git ls-files --others --exclude-standard 2>/dev/null
}
# ============================================================================
# Diff and History
# ============================================================================
# Get files changed between two refs
git_diff_files() {
local from="${1:-HEAD~1}"
local to="${2:-HEAD}"
git diff --name-only "$from" "$to" 2>/dev/null
}
# Get files changed in last N commits
git_recent_files() {
local count="${1:-1}"
git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}
# Check if file was changed between two refs
git_file_changed() {
local file="$1"
local from="${2:-HEAD~1}"
local to="${3:-HEAD}"
git diff --name-only "$from" "$to" -- "$file" 2>/dev/null | grep -q "$file"
}
# Get commits between two refs
git_commits_between() {
local from="${1:-HEAD~10}"
local to="${2:-HEAD}"
git log --oneline "$from".."$to" 2>/dev/null
}
# ============================================================================
# Tag Operations
# ============================================================================
# Create a tag
git_create_tag() {
local tag="$1"
local message="${2:-}"
if [[ -n "$message" ]]; then
git tag -a "$tag" -m "$message"
else
git tag "$tag"
fi
}
# Delete a tag
git_delete_tag() {
local tag="$1"
git tag -d "$tag" 2>/dev/null
}
# Push tag to remote
git_push_tag() {
local tag="$1"
local remote="${2:-origin}"
git push "$remote" "$tag"
}
# List tags matching pattern
git_list_tags() {
local pattern="${1:-*}"
git tag -l "$pattern" 2>/dev/null
}
# ============================================================================
# Branch Operations
# ============================================================================
# Check if branch exists
git_branch_exists() {
local branch="$1"
git show-ref --verify --quiet "refs/heads/$branch" 2>/dev/null
}
# Check if remote branch exists
git_remote_branch_exists() {
local branch="$1"
local remote="${2:-origin}"
git show-ref --verify --quiet "refs/remotes/$remote/$branch" 2>/dev/null
}
# Get default branch
git_default_branch() {
local remote="${1:-origin}"
git remote show "$remote" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get version string for CI builds
git_ci_version() {
local tag
tag=$(git_tag)
if [[ -n "$tag" ]]; then
echo "$tag"
else
local branch sha
branch=$(git_branch | tr '/' '-')
sha=$(git_sha_short)
echo "${branch}-${sha}"
fi
}
# Check if current commit is on default branch
git_is_default_branch() {
local current default
current=$(git_branch)
default=$(git_default_branch)
[[ "$current" == "$default" ]]
}
# Check if running in CI environment
git_is_ci() {
[[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]] || [[ -n "${GITLAB_CI:-}" ]]
}
# Ensure clean worktree or fail
git_require_clean() {
if git_is_dirty; then
log_error "Working tree is dirty. Commit or stash changes first."
return "${EXIT_DIRTY_WORKTREE:-71}"
fi
}
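
# Example (sketch): typical release-script usage, assuming the library layout above.
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
#   git_require_clean || exit $?
#   VERSION="$(git_ci_version)"   # tag if HEAD is tagged, else <branch>-<short-sha>
#   echo "Building ${VERSION} from $(git_sha_short)"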

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Hash Computation
# ============================================================================
# Compute SHA-256 hash of a file
compute_sha256() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v sha256sum >/dev/null 2>&1; then
sha256sum "$file" | awk '{print $1}'
elif command -v shasum >/dev/null 2>&1; then
shasum -a 256 "$file" | awk '{print $1}'
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -sha256 "$file" | awk '{print $NF}'
else
log_error "No SHA-256 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute SHA-512 hash of a file
compute_sha512() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v sha512sum >/dev/null 2>&1; then
sha512sum "$file" | awk '{print $1}'
elif command -v shasum >/dev/null 2>&1; then
shasum -a 512 "$file" | awk '{print $1}'
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -sha512 "$file" | awk '{print $NF}'
else
log_error "No SHA-512 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute MD5 hash of a file (for compatibility, not security)
compute_md5() {
local file="$1"
if [[ ! -f "$file" ]]; then
log_error "File not found: $file"
return "${EXIT_NOT_FOUND:-4}"
fi
if command -v md5sum >/dev/null 2>&1; then
md5sum "$file" | awk '{print $1}'
elif command -v md5 >/dev/null 2>&1; then
md5 -q "$file"
elif command -v openssl >/dev/null 2>&1; then
openssl dgst -md5 "$file" | awk '{print $NF}'
else
log_error "No MD5 tool available"
return "${EXIT_MISSING_TOOL:-10}"
fi
}
# Compute hash of string
compute_string_hash() {
local string="$1"
local algorithm="${2:-sha256}"
case "$algorithm" in
sha256)
echo -n "$string" | sha256sum 2>/dev/null | awk '{print $1}' || \
echo -n "$string" | shasum -a 256 2>/dev/null | awk '{print $1}'
;;
sha512)
echo -n "$string" | sha512sum 2>/dev/null | awk '{print $1}' || \
echo -n "$string" | shasum -a 512 2>/dev/null | awk '{print $1}'
;;
md5)
echo -n "$string" | md5sum 2>/dev/null | awk '{print $1}' || \
echo -n "$string" | md5 2>/dev/null
;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
}
# ============================================================================
# Checksum Files
# ============================================================================
# Write checksum file for a single file
write_checksum() {
local file="$1"
local checksum_file="${2:-${file}.sha256}"
local algorithm="${3:-sha256}"
local hash
case "$algorithm" in
sha256) hash=$(compute_sha256 "$file") ;;
sha512) hash=$(compute_sha512 "$file") ;;
md5) hash=$(compute_md5 "$file") ;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
if [[ -z "$hash" ]]; then
return "${EXIT_ERROR:-1}"
fi
local basename
basename=$(basename "$file")
echo "$hash $basename" > "$checksum_file"
log_debug "Wrote checksum to $checksum_file"
}
# Write checksums for multiple files
write_checksums() {
local output_file="$1"
shift
local files=("$@")
: > "$output_file"
for file in "${files[@]}"; do
if [[ -f "$file" ]]; then
local hash basename
hash=$(compute_sha256 "$file")
basename=$(basename "$file")
echo "$hash $basename" >> "$output_file"
fi
done
log_debug "Wrote checksums to $output_file"
}
# ============================================================================
# Checksum Verification
# ============================================================================
# Verify checksum of a file
verify_checksum() {
local file="$1"
local expected_hash="$2"
local algorithm="${3:-sha256}"
local actual_hash
case "$algorithm" in
sha256) actual_hash=$(compute_sha256 "$file") ;;
sha512) actual_hash=$(compute_sha512 "$file") ;;
md5) actual_hash=$(compute_md5 "$file") ;;
*)
log_error "Unknown algorithm: $algorithm"
return "${EXIT_USAGE:-2}"
;;
esac
if [[ "$actual_hash" == "$expected_hash" ]]; then
log_debug "Checksum verified: $file"
return 0
else
log_error "Checksum mismatch for $file"
log_error " Expected: $expected_hash"
log_error " Actual: $actual_hash"
return "${EXIT_VERIFY_FAILED:-64}"
fi
}
# Verify checksums from file (sha256sum -c style)
verify_checksums_file() {
local checksum_file="$1"
local base_dir="${2:-.}"
if [[ ! -f "$checksum_file" ]]; then
log_error "Checksum file not found: $checksum_file"
return "${EXIT_NOT_FOUND:-4}"
fi
local failures=0
while IFS= read -r line; do
# Skip empty lines and comments
[[ -z "$line" ]] && continue
[[ "$line" == \#* ]] && continue
local hash filename
hash=$(echo "$line" | awk '{print $1}')
filename=$(echo "$line" | awk '{print $2}')
local filepath="${base_dir}/${filename}"
if [[ ! -f "$filepath" ]]; then
log_error "File not found: $filepath"
            failures=$((failures + 1))  # avoid ((i++)): it returns 1 when i was 0, tripping callers under set -e
continue
fi
if ! verify_checksum "$filepath" "$hash"; then
            failures=$((failures + 1))
fi
done < "$checksum_file"
if [[ $failures -gt 0 ]]; then
log_error "$failures checksum verification(s) failed"
return "${EXIT_VERIFY_FAILED:-64}"
fi
log_info "All checksums verified"
return 0
}
# ============================================================================
# Helpers
# ============================================================================
# Check if two files have the same content
files_identical() {
local file1="$1"
local file2="$2"
[[ -f "$file1" ]] && [[ -f "$file2" ]] || return 1
local hash1 hash2
hash1=$(compute_sha256 "$file1")
hash2=$(compute_sha256 "$file2")
[[ "$hash1" == "$hash2" ]]
}
# Get short hash for display
short_hash() {
local hash="$1"
local length="${2:-8}"
echo "${hash:0:$length}"
}
# Generate deterministic ID from inputs
generate_id() {
local inputs="$*"
compute_string_hash "$inputs" sha256 | head -c 16
}
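
# Example (sketch): checksum round trip for release artifacts (paths illustrative).
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
#   write_checksums "out/SHA256SUMS" out/*.tar.gz
#   verify_checksums_file "out/SHA256SUMS" "out"   # returns EXIT_VERIFY_FAILED on mismatch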

View File

@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_LOGGING_LOADED=1
# Colors (disable with NO_COLOR=1)
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
export LOG_COLOR_RED='\033[0;31m'
export LOG_COLOR_GREEN='\033[0;32m'
export LOG_COLOR_YELLOW='\033[1;33m'
export LOG_COLOR_BLUE='\033[0;34m'
export LOG_COLOR_MAGENTA='\033[0;35m'
export LOG_COLOR_CYAN='\033[0;36m'
export LOG_COLOR_GRAY='\033[0;90m'
export LOG_COLOR_RESET='\033[0m'
else
export LOG_COLOR_RED=''
export LOG_COLOR_GREEN=''
export LOG_COLOR_YELLOW=''
export LOG_COLOR_BLUE=''
export LOG_COLOR_MAGENTA=''
export LOG_COLOR_CYAN=''
export LOG_COLOR_GRAY=''
export LOG_COLOR_RESET=''
fi
# Log level configuration
export LOG_LEVEL="${LOG_LEVEL:-INFO}"
# Convert log level to numeric for comparison
_log_level_to_num() {
case "$1" in
DEBUG) echo 0 ;;
INFO) echo 1 ;;
WARN) echo 2 ;;
ERROR) echo 3 ;;
*) echo 1 ;;
esac
}
# Check if message should be logged based on level
_should_log() {
local msg_level="$1"
local current_level="${LOG_LEVEL:-INFO}"
local msg_num current_num
msg_num=$(_log_level_to_num "$msg_level")
current_num=$(_log_level_to_num "$current_level")
[[ $msg_num -ge $current_num ]]
}
# Format timestamp
_log_timestamp() {
if [[ "${LOG_TIMESTAMPS:-true}" == "true" ]]; then
date -u +"%Y-%m-%dT%H:%M:%SZ"
fi
}
# Core logging function
_log() {
local level="$1"
local color="$2"
shift 2
if ! _should_log "$level"; then
return 0
fi
local timestamp
timestamp=$(_log_timestamp)
local prefix=""
if [[ -n "$timestamp" ]]; then
prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
fi
echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}
# Public logging functions
log_debug() {
_log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}
log_info() {
_log "INFO" "${LOG_COLOR_GREEN}" "$@"
}
log_warn() {
_log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}
log_error() {
_log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}
# Step logging (for workflow stages)
log_step() {
_log "STEP" "${LOG_COLOR_BLUE}" "$@"
}
# Success message
log_success() {
_log "OK" "${LOG_COLOR_GREEN}" "$@"
}
# GitHub Actions annotations
log_gh_notice() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::notice::$*"
else
log_info "$@"
fi
}
log_gh_warning() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::warning::$*"
else
log_warn "$@"
fi
}
log_gh_error() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::error::$*"
else
log_error "$@"
fi
}
# Group logging (for GitHub Actions)
log_group_start() {
local title="$1"
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::group::$title"
else
log_step "=== $title ==="
fi
}
log_group_end() {
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::endgroup::"
fi
}
# Register a secret so GitHub Actions masks it in subsequent log output
log_masked() {
local value="$1"
if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
echo "::add-mask::$value"
fi
}
# Die with error message
die() {
log_error "$@"
exit 1
}
# Conditional die
die_if() {
local condition="$1"
shift
if eval "$condition"; then
die "$@"
fi
}
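# Example (a minimal consumer sketch; the relative lib path is an assumption):
#   source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#   LOG_LEVEL=DEBUG                                   # show debug output for this run
#   log_group_start "Restore packages"
#   log_debug "cache key: $cache_key"
#   command -v dotnet >/dev/null 2>&1 || die "dotnet SDK not found"
#   log_group_end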

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"
# Prevent multiple sourcing
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1
# Source dependencies
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Path Normalization
# ============================================================================
# Normalize path (resolve .., ., symlinks)
normalize_path() {
local path="$1"
# Handle empty path
if [[ -z "$path" ]]; then
echo "."
return 0
fi
# Try realpath first (most reliable)
if command -v realpath >/dev/null 2>&1; then
realpath -m "$path" 2>/dev/null && return 0
fi
# Fallback to Python
if command -v python3 >/dev/null 2>&1; then
python3 -c "import os; print(os.path.normpath('$path'))" 2>/dev/null && return 0
fi
# Manual normalization (basic)
echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}
# Get absolute path
absolute_path() {
local path="$1"
if [[ "$path" == /* ]]; then
normalize_path "$path"
else
normalize_path "$(pwd)/$path"
fi
}
# Get relative path from one path to another
relative_path() {
local from="$1"
local to="$2"
if command -v realpath >/dev/null 2>&1; then
realpath --relative-to="$from" "$to" 2>/dev/null && return 0
fi
if command -v python3 >/dev/null 2>&1; then
python3 -c "import os.path; print(os.path.relpath('$to', '$from'))" 2>/dev/null && return 0
fi
# Fallback: just return absolute path
absolute_path "$to"
}
# ============================================================================
# Path Components
# ============================================================================
# Get directory name
dir_name() {
dirname "$1"
}
# Get base name
base_name() {
basename "$1"
}
# Get file extension
file_extension() {
local path="$1"
local base
base=$(basename "$path")
if [[ "$base" == *.* ]]; then
echo "${base##*.}"
else
echo ""
fi
}
# Get file name without extension
file_stem() {
local path="$1"
local base
base=$(basename "$path")
if [[ "$base" == *.* ]]; then
echo "${base%.*}"
else
echo "$base"
fi
}
# ============================================================================
# Directory Operations
# ============================================================================
# Ensure directory exists
ensure_directory() {
local dir="$1"
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
fi
}
# Create temporary directory
create_temp_dir() {
local prefix="${1:-stellaops}"
mktemp -d "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}
# Create temporary file
create_temp_file() {
local prefix="${1:-stellaops}"
local suffix="${2:-}"
mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX${suffix}"
}
# Clean temporary directory
clean_temp() {
local path="$1"
if [[ -d "$path" ]] && [[ "$path" == *stellaops* ]]; then
rm -rf "$path"
fi
}
# ============================================================================
# File Existence Checks
# ============================================================================
# Check if file exists
file_exists() {
[[ -f "$1" ]]
}
# Check if directory exists
dir_exists() {
[[ -d "$1" ]]
}
# Check if path exists (file or directory)
path_exists() {
[[ -e "$1" ]]
}
# Check if file is readable
file_readable() {
[[ -r "$1" ]]
}
# Check if file is writable
file_writable() {
[[ -w "$1" ]]
}
# Check if file is executable
file_executable() {
[[ -x "$1" ]]
}
# ============================================================================
# File Discovery
# ============================================================================
# Find files by pattern
find_files() {
local dir="${1:-.}"
local pattern="${2:-*}"
find "$dir" -type f -name "$pattern" 2>/dev/null
}
# Find files by extension
find_by_extension() {
local dir="${1:-.}"
local ext="${2:-}"
find "$dir" -type f -name "*.${ext}" 2>/dev/null
}
# Find project files (csproj, package.json, etc.)
find_project_files() {
local dir="${1:-.}"
find "$dir" -type f \( \
-name "*.csproj" -o \
-name "*.fsproj" -o \
-name "package.json" -o \
-name "Cargo.toml" -o \
-name "go.mod" -o \
-name "pom.xml" -o \
-name "build.gradle" \
\) 2>/dev/null | grep -vE '/(node_modules|bin|obj)/' # match path segments, not substrings
}
# Find test projects
find_test_projects() {
local dir="${1:-.}"
find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -v bin | grep -v obj
}
# ============================================================================
# Path Validation
# ============================================================================
# Check if path is under directory
path_under() {
local path="$1"
local dir="$2"
local abs_path abs_dir
abs_path=$(absolute_path "$path")
abs_dir=$(absolute_path "$dir")
[[ "$abs_path" == "$abs_dir"* ]]
}
# Validate path is safe (no directory traversal)
path_is_safe() {
local path="$1"
local base="${2:-.}"
# Check for obvious traversal attempts
if [[ "$path" == *".."* ]] || [[ "$path" == "/*" ]]; then
return 1
fi
# Verify resolved path is under base
path_under "$path" "$base"
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get artifact output directory
get_artifact_dir() {
local name="${1:-artifacts}"
local base="${GITHUB_WORKSPACE:-$(pwd)}"
echo "${base}/out/${name}"
}
# Get test results directory
get_test_results_dir() {
local base="${GITHUB_WORKSPACE:-$(pwd)}"
echo "${base}/TestResults"
}
# Ensure artifact directory exists and return path
ensure_artifact_dir() {
local name="${1:-artifacts}"
local dir
dir=$(get_artifact_dir "$name")
ensure_directory "$dir"
echo "$dir"
}
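# Example (a minimal sketch; names are hypothetical and logging.sh is assumed sourced):
#   out_dir=$(ensure_artifact_dir "sbom")                        # <workspace>/out/sbom
#   path_is_safe "$user_input" "$out_dir" || die "unsafe path: $user_input"
#   cp "$user_input" "$out_dir/"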

View File

@@ -0,0 +1,244 @@
-- ============================================================================
-- StellaOps Migration Reset Script for Pre-1.0 Deployments
-- ============================================================================
-- This script updates schema_migrations tables to recognize the 1.0.0 compacted
-- migrations for deployments that upgraded from pre-1.0 versions.
--
-- Run via: psql -f migrations-reset-pre-1.0.sql
-- Or with connection: psql -h <host> -U <user> -d <db> -f migrations-reset-pre-1.0.sql
-- ============================================================================
BEGIN;
-- ============================================================================
-- Authority Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_mongo_store_equivalents, 003_enable_rls,
-- 004_offline_kit_audit, 005_verdict_manifests
-- New: 001_initial_schema (compacted)
DELETE FROM authority.schema_migrations
WHERE migration_name IN (
'001_initial_schema.sql',
'002_mongo_store_equivalents.sql',
'003_enable_rls.sql',
'004_offline_kit_audit.sql',
'005_verdict_manifests.sql'
);
INSERT INTO authority.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Scheduler Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_graph_jobs, 003_runs_policy,
-- 010_generated_columns_runs, 011_enable_rls, 012_partition_audit,
-- 012b_migrate_audit_data
-- New: 001_initial_schema (compacted)
DELETE FROM scheduler.schema_migrations
WHERE migration_name IN (
'001_initial_schema.sql',
'002_graph_jobs.sql',
'003_runs_policy.sql',
'010_generated_columns_runs.sql',
'011_enable_rls.sql',
'012_partition_audit.sql',
'012b_migrate_audit_data.sql'
);
INSERT INTO scheduler.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Scanner Module Reset
-- ============================================================================
-- Original: 001-034 plus various numbered files (27 total)
-- New: 001_initial_schema (compacted)
DELETE FROM scanner.schema_migrations
WHERE migration_name IN (
'001_create_tables.sql',
'002_proof_spine_tables.sql',
'003_classification_history.sql',
'004_scan_metrics.sql',
'005_smart_diff_tables.sql',
'006_score_replay_tables.sql',
'007_unknowns_ranking_containment.sql',
'008_epss_integration.sql',
'0059_scans_table.sql',
'0065_unknowns_table.sql',
'0075_scan_findings_table.sql',
'020_call_graph_tables.sql',
'021_smart_diff_tables_search_path.sql',
'022_reachability_drift_tables.sql',
'023_scanner_api_ingestion.sql',
'024_smart_diff_priority_score_widen.sql',
'025_epss_raw_layer.sql',
'026_epss_signal_layer.sql',
'027_witness_storage.sql',
'028_epss_triage_columns.sql',
'029_vuln_surfaces.sql',
'030_vuln_surface_triggers_update.sql',
'031_reach_cache.sql',
'032_idempotency_keys.sql',
'033_binary_evidence.sql',
'034_func_proof_tables.sql',
'DM001_rename_scanner_migrations.sql'
);
INSERT INTO scanner.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Policy Module Reset
-- ============================================================================
-- Original: 001-013 (14 files, includes duplicate 010 prefix)
-- New: 001_initial_schema (compacted)
DELETE FROM policy.schema_migrations
WHERE migration_name IN (
'001_initial_schema.sql',
'002_cvss_receipts.sql',
'003_snapshots_violations.sql',
'004_epss_risk_scores.sql',
'005_cvss_multiversion.sql',
'006_enable_rls.sql',
'007_unknowns_registry.sql',
'008_exception_objects.sql',
'009_exception_applications.sql',
'010_recheck_evidence.sql',
'010_unknowns_blast_radius_containment.sql',
'011_unknowns_reason_codes.sql',
'012_budget_ledger.sql',
'013_exception_approval.sql'
);
INSERT INTO policy.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Notify Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 010_enable_rls, 011_partition_deliveries,
-- 011b_migrate_deliveries_data
-- New: 001_initial_schema (compacted)
DELETE FROM notify.schema_migrations
WHERE migration_name IN (
'001_initial_schema.sql',
'010_enable_rls.sql',
'011_partition_deliveries.sql',
'011b_migrate_deliveries_data.sql'
);
INSERT INTO notify.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Concelier Module Reset
-- ============================================================================
-- Original: 17 migration files
-- New: 001_initial_schema (compacted)
DELETE FROM concelier.schema_migrations
WHERE migration_name ~ '^[0-9]{3}_.*\.sql$';
INSERT INTO concelier.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Attestor Module Reset (proofchain + attestor schemas)
-- ============================================================================
-- Original: 20251214000001_AddProofChainSchema.sql, 20251216_001_create_rekor_submission_queue.sql
-- New: 001_initial_schema (compacted)
DELETE FROM proofchain.schema_migrations
WHERE migration_name IN (
'20251214000001_AddProofChainSchema.sql',
'20251214000002_RollbackProofChainSchema.sql',
'20251216_001_create_rekor_submission_queue.sql'
);
INSERT INTO proofchain.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Signer Module Reset
-- ============================================================================
-- Original: 20251214000001_AddKeyManagementSchema.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signer.schema_migrations
WHERE migration_name IN (
'20251214000001_AddKeyManagementSchema.sql'
);
INSERT INTO signer.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Signals Module Reset
-- ============================================================================
-- Original: V0000_001__extensions.sql, V1102_001__unknowns_scoring_schema.sql,
-- V1105_001__deploy_refs_graph_metrics.sql, V3102_001__callgraph_relational_tables.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signals.schema_migrations
WHERE migration_name IN (
'V0000_001__extensions.sql',
'V1102_001__unknowns_scoring_schema.sql',
'V1105_001__deploy_refs_graph_metrics.sql',
'V3102_001__callgraph_relational_tables.sql'
);
INSERT INTO signals.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Verification
-- ============================================================================
-- Display current migration status per module
DO $$
DECLARE
v_module TEXT;
v_count INT;
BEGIN
FOR v_module IN SELECT unnest(ARRAY['authority', 'scheduler', 'scanner', 'policy', 'notify', 'concelier', 'proofchain', 'signer', 'signals']) LOOP
EXECUTE format('SELECT COUNT(*) FROM %I.schema_migrations', v_module) INTO v_count;
RAISE NOTICE '% module: % migrations registered', v_module, v_count;
END LOOP;
END $$;
COMMIT;
-- ============================================================================
-- Post-Reset Notes
-- ============================================================================
-- After running this script:
-- 1. All modules should show exactly 1 migration registered
-- 2. The schema structure should be identical to a fresh 1.0.0 deployment
-- 3. Future migrations (002+) will apply normally
--
-- To verify manually:
-- SELECT * FROM authority.schema_migrations;
-- SELECT * FROM scheduler.schema_migrations;
-- SELECT * FROM scanner.schema_migrations;
-- SELECT * FROM policy.schema_migrations;
-- SELECT * FROM notify.schema_migrations;
-- SELECT * FROM concelier.schema_migrations;
-- SELECT * FROM proofchain.schema_migrations;
-- SELECT * FROM signer.schema_migrations;
-- SELECT * FROM signals.schema_migrations;
-- ============================================================================
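-- Suggested pre-flight (connection values are placeholders): snapshot the
-- affected schemas before rewriting migration bookkeeping, e.g.:
--   pg_dump -h <host> -U <user> -d <db> \
--     -n authority -n scheduler -n scanner -n policy -n notify \
--     -n concelier -n proofchain -n signer -n signals \
--     > pre-reset-backup.sql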

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env pwsh
# regenerate-solution.ps1 - Regenerate StellaOps.sln without duplicate projects
#
# This script:
# 1. Backs up the existing solution
# 2. Creates a new solution
# 3. Adds all .csproj files, skipping duplicates
# 4. Preserves solution folders where possible
param(
[string]$SolutionPath = "src/StellaOps.sln",
[switch]$DryRun
)
$ErrorActionPreference = "Stop"
# Canonical locations for test projects, in priority order (documents the intent;
# the actual selection logic lives in Get-CanonicalPath below)
$canonicalPatterns = @(
# Module-local tests (highest priority)
"src/*/__Tests/*/*.csproj",
"src/*/__Libraries/__Tests/*/*.csproj",
"src/__Libraries/__Tests/*/*.csproj",
# Cross-module integration tests
"src/__Tests/Integration/*/*.csproj",
"src/__Tests/__Libraries/*/*.csproj",
# Category-based cross-module tests
"src/__Tests/chaos/*/*.csproj",
"src/__Tests/security/*/*.csproj",
"src/__Tests/interop/*/*.csproj",
"src/__Tests/parity/*/*.csproj",
"src/__Tests/reachability/*/*.csproj",
# Single global tests
"src/__Tests/*/*.csproj"
)
Write-Host "=== Solution Regeneration Script ===" -ForegroundColor Cyan
Write-Host "Solution: $SolutionPath"
Write-Host "Dry Run: $DryRun"
Write-Host ""
# Find all .csproj files
Write-Host "Finding all project files..." -ForegroundColor Yellow
$allProjects = Get-ChildItem -Path "src" -Filter "*.csproj" -Recurse |
Where-Object { $_.FullName -notmatch "\\obj\\" -and $_.FullName -notmatch "\\bin\\" }
Write-Host "Found $($allProjects.Count) project files"
# Build a map of project name -> list of paths
$projectMap = @{}
foreach ($proj in $allProjects) {
$name = $proj.BaseName
if (-not $projectMap.ContainsKey($name)) {
$projectMap[$name] = @()
}
$projectMap[$name] += $proj.FullName
}
# Find duplicates
$duplicates = @($projectMap.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 })  # @() keeps .Count valid for 0 or 1 matches
Write-Host ""
Write-Host "Found $($duplicates.Count) projects with duplicate names:" -ForegroundColor Yellow
foreach ($dup in $duplicates) {
Write-Host " $($dup.Key):" -ForegroundColor Red
foreach ($path in $dup.Value) {
Write-Host " - $path"
}
}
# Select canonical path for each project
function Get-CanonicalPath {
param([string[]]$Paths)
# Prefer module-local __Tests over global __Tests
$moduleTests = $Paths | Where-Object { $_ -match "src\\[^_][^\\]+\\__Tests\\" }
if ($moduleTests.Count -gt 0) { return $moduleTests[0] }
# Prefer __Libraries/__Tests
$libTests = $Paths | Where-Object { $_ -match "__Libraries\\__Tests\\" }
if ($libTests.Count -gt 0) { return $libTests[0] }
# Prefer __Tests over non-__Tests location in same parent
$testsPath = $Paths | Where-Object { $_ -match "\\__Tests\\" }
if ($testsPath.Count -gt 0) { return $testsPath[0] }
# Otherwise, take first
return $Paths[0]
}
# Build final project list
$finalProjects = @()
foreach ($entry in $projectMap.GetEnumerator()) {
$canonical = Get-CanonicalPath -Paths $entry.Value
$finalProjects += $canonical
}
Write-Host ""
Write-Host "Final project count: $($finalProjects.Count)" -ForegroundColor Green
if ($DryRun) {
Write-Host ""
Write-Host "=== DRY RUN - No changes made ===" -ForegroundColor Magenta
Write-Host "Would add the following projects to solution:"
$finalProjects | ForEach-Object { Write-Host " $_" }
exit 0
}
# Backup existing solution
$backupPath = "$SolutionPath.bak"
if (Test-Path $SolutionPath) {
Copy-Item $SolutionPath $backupPath -Force
Write-Host "Backed up existing solution to $backupPath" -ForegroundColor Gray
}
# Create new solution
Write-Host ""
Write-Host "Creating new solution..." -ForegroundColor Yellow
$slnDir = Split-Path $SolutionPath -Parent
$slnName = [System.IO.Path]::GetFileNameWithoutExtension($SolutionPath)
# Remove old solution
if (Test-Path $SolutionPath) {
Remove-Item $SolutionPath -Force
}
# Create fresh solution
Push-Location $slnDir
dotnet new sln -n $slnName --force 2>$null
Pop-Location
# Add projects in batches (dotnet sln add can handle multiple)
Write-Host "Adding projects to solution..." -ForegroundColor Yellow
$added = 0
$failed = 0
foreach ($proj in $finalProjects) {
try {
$result = dotnet sln $SolutionPath add $proj 2>&1
if ($LASTEXITCODE -eq 0) {
$added++
if ($added % 50 -eq 0) {
Write-Host " Added $added projects..." -ForegroundColor Gray
}
} else {
Write-Host " Failed to add: $proj" -ForegroundColor Red
$failed++
}
} catch {
Write-Host " Error adding: $proj - $_" -ForegroundColor Red
$failed++
}
}
Write-Host ""
Write-Host "=== Summary ===" -ForegroundColor Cyan
Write-Host "Projects added: $added" -ForegroundColor Green
Write-Host "Projects failed: $failed" -ForegroundColor $(if ($failed -gt 0) { "Red" } else { "Green" })
Write-Host ""
Write-Host "Solution regenerated at: $SolutionPath"
# Verify
Write-Host ""
Write-Host "Verifying solution..." -ForegroundColor Yellow
$verifyResult = dotnet build $SolutionPath --no-restore -t:ValidateSolutionConfiguration 2>&1
if ($LASTEXITCODE -eq 0) {
Write-Host "Solution validation passed!" -ForegroundColor Green
} else {
Write-Host "Solution validation had issues - check manually" -ForegroundColor Yellow
}
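# Example usage (script location is an assumption; run from the repository root):
#   pwsh regenerate-solution.ps1 -DryRun    # preview the deduplicated project list
#   pwsh regenerate-solution.ps1            # back up, regenerate, and verify the solution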

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env pwsh
# remove-stale-refs.ps1 - Remove stale project references that don't exist
param([string]$SlnPath = "src/StellaOps.sln")
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Stale project paths (relative from solution location)
$staleProjects = @(
"__Tests\AirGap\StellaOps.AirGap.Controller.Tests",
"__Tests\AirGap\StellaOps.AirGap.Importer.Tests",
"__Tests\AirGap\StellaOps.AirGap.Time.Tests",
"__Tests\StellaOps.Gateway.WebService.Tests",
"__Tests\Graph\StellaOps.Graph.Indexer.Tests",
"Scanner\StellaOps.Scanner.Analyzers.Native",
"__Libraries\__Tests\StellaOps.Signals.Tests",
"__Tests\StellaOps.Audit.ReplayToken.Tests",
"__Tests\StellaOps.Router.Gateway.Tests",
"__Libraries\StellaOps.Cryptography"
)
$staleGuids = @()
$newLines = @()
$skipNext = $false
for ($i = 0; $i -lt $lines.Count; $i++) {
$line = $lines[$i]
if ($skipNext) {
$skipNext = $false
continue
}
$isStale = $false
foreach ($stalePath in $staleProjects) {
if ($line -like "*$stalePath*") {
# Extract GUID
if ($line -match '\{([A-F0-9-]+)\}"?$') {
$staleGuids += $Matches[1]
}
Write-Host "Removing stale: $stalePath"
$isStale = $true
$skipNext = $true
break
}
}
if (-not $isStale) {
$newLines += $line
}
}
# Remove GlobalSection references to stale GUIDs
$finalLines = @()
foreach ($line in $newLines) {
$skip = $false
foreach ($guid in $staleGuids) {
if ($line -match $guid) {
$skip = $true
break
}
}
if (-not $skip) {
$finalLines += $line
}
}
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host "Removed $($staleGuids.Count) stale project references"

View File

@@ -0,0 +1,61 @@
# Restore deleted test files from commit parent
# Maps old locations to new locations
$ErrorActionPreference = "Stop"
$parentCommit = "74c7aa250c401ee9ac332686832b256159efa604^"
# Mapping: old path -> new path
$mappings = @{
"src/__Tests/AirGap/StellaOps.AirGap.Importer.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Importer.Tests"
"src/__Tests/AirGap/StellaOps.AirGap.Controller.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Controller.Tests"
"src/__Tests/AirGap/StellaOps.AirGap.Time.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Time.Tests"
"src/__Tests/StellaOps.Gateway.WebService.Tests" = "src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests"
"src/__Tests/Replay/StellaOps.Replay.Core.Tests" = "src/Replay/__Tests/StellaOps.Replay.Core.Tests"
"src/__Tests/Provenance/StellaOps.Provenance.Attestation.Tests" = "src/Provenance/__Tests/StellaOps.Provenance.Attestation.Tests"
"src/__Tests/Policy/StellaOps.Policy.Scoring.Tests" = "src/Policy/__Tests/StellaOps.Policy.Scoring.Tests"
}
Set-Location "E:\dev\git.stella-ops.org"
foreach ($mapping in $mappings.GetEnumerator()) {
$oldPath = $mapping.Key
$newPath = $mapping.Value
Write-Host "`nProcessing: $oldPath -> $newPath" -ForegroundColor Cyan
# Get list of files from old location in git
$files = git ls-tree -r --name-only "$parentCommit" -- $oldPath 2>$null
if (-not $files) {
Write-Host " No files found at old path" -ForegroundColor Yellow
continue
}
foreach ($file in $files) {
# Calculate relative path and new file path
$relativePath = $file.Substring($oldPath.Length + 1)
$newFilePath = Join-Path $newPath $relativePath
# Create directory if needed
$newDir = Split-Path $newFilePath -Parent
if (-not (Test-Path $newDir)) {
New-Item -ItemType Directory -Path $newDir -Force | Out-Null
}
# Check if file exists
if (Test-Path $newFilePath) {
Write-Host " Exists: $relativePath" -ForegroundColor DarkGray
continue
}
# Restore file (capture and write explicitly as UTF-8; bare > redirection emits
# UTF-16 under Windows PowerShell 5.x and can corrupt restored sources)
$content = git show "${parentCommit}:${file}" 2>$null
if ($LASTEXITCODE -eq 0) {
$content | Set-Content -Path $newFilePath -Encoding UTF8
Write-Host " Restored: $relativePath" -ForegroundColor Green
} else {
Write-Host " Failed: $relativePath" -ForegroundColor Red
}
}
}
Write-Host "`nDone!" -ForegroundColor Cyan