save progress

This commit is contained in:
StellaOps Bot
2025-12-26 22:03:32 +02:00
parent 9a4cd2e0f7
commit e6c47c8f50
3634 changed files with 253222 additions and 56632 deletions

View File

@@ -0,0 +1,350 @@
#!/usr/bin/env python3
"""
bump-service-version.py - Bump service version in centralized version storage
Sprint: CI/CD Enhancement - Per-Service Auto-Versioning
This script manages service versions stored in src/Directory.Versions.props
and devops/releases/service-versions.json.
Usage:
python bump-service-version.py <service> <bump-type> [options]
python bump-service-version.py authority patch
python bump-service-version.py scanner minor --dry-run
python bump-service-version.py cli major --commit
Arguments:
service Service name (authority, attestor, concelier, scanner, etc.)
bump-type Version bump type: major, minor, patch, or explicit version (e.g., 2.0.0)
Options:
--dry-run Show what would be changed without modifying files
--commit Commit changes to git after updating
--no-manifest Skip updating service-versions.json manifest
--git-sha SHA Git SHA to record in manifest (defaults to HEAD)
--docker-tag TAG Docker tag to record in manifest
"""
import argparse
import json
import os
import re
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Tuple
# Repository paths — this script lives three directory levels below the
# repository root (e.g. devops/scripts/versioning/).
SCRIPT_DIR = Path(__file__).parent
REPO_ROOT = SCRIPT_DIR.parent.parent.parent
VERSIONS_FILE = REPO_ROOT / "src" / "Directory.Versions.props"
MANIFEST_FILE = REPO_ROOT / "devops" / "releases" / "service-versions.json"
# Service name mapping (lowercase key -> property suffix).
# The suffix is interpolated into the MSBuild property name
# StellaOps{Suffix}Version when reading/writing Directory.Versions.props.
SERVICE_MAP = {
    "authority": "Authority",
    "attestor": "Attestor",
    "concelier": "Concelier",
    "scanner": "Scanner",
    "policy": "Policy",
    "signer": "Signer",
    "excititor": "Excititor",
    "gateway": "Gateway",
    "scheduler": "Scheduler",
    "cli": "Cli",
    "orchestrator": "Orchestrator",
    "notify": "Notify",
    "sbomservice": "SbomService",
    "vexhub": "VexHub",
    "evidencelocker": "EvidenceLocker",
}
def parse_version(version_str: str) -> Tuple[int, int, int]:
    """Split a strict ``X.Y.Z`` semantic version into integer components.

    Raises:
        ValueError: if *version_str* is not three dot-separated numbers.
    """
    parsed = re.match(r"^(\d+)\.(\d+)\.(\d+)$", version_str)
    if parsed is None:
        raise ValueError(f"Invalid version format: {version_str}")
    major, minor, patch = (int(part) for part in parsed.groups())
    return major, minor, patch
def format_version(major: int, minor: int, patch: int) -> str:
    """Join the three version components into ``major.minor.patch``."""
    return ".".join(str(component) for component in (major, minor, patch))
def bump_version(current: str, bump_type: str) -> str:
    """Return the next version for *current* according to *bump_type*.

    *bump_type* is ``major``/``minor``/``patch``, or an explicit ``X.Y.Z``
    version which is returned verbatim.

    Raises:
        ValueError: on a malformed *current* version or unknown bump type.
    """
    # An explicit semantic version wins over the symbolic bump types.
    if re.match(r"^\d+\.\d+\.\d+$", bump_type):
        return bump_type
    parsed = re.match(r"^(\d+)\.(\d+)\.(\d+)$", current)
    if not parsed:
        raise ValueError(f"Invalid version format: {current}")
    major, minor, patch = (int(group) for group in parsed.groups())
    bumped = {
        "major": (major + 1, 0, 0),
        "minor": (major, minor + 1, 0),
        "patch": (major, minor, patch + 1),
    }.get(bump_type)
    if bumped is None:
        raise ValueError(f"Invalid bump type: {bump_type}")
    return "{}.{}.{}".format(*bumped)
def read_version_from_props(service_key: str) -> Optional[str]:
    """Return the ``X.Y.Z`` version recorded for *service_key*.

    Looks up the ``StellaOps<Suffix>Version`` MSBuild property in
    Directory.Versions.props; returns None when the props file or the
    property is missing.
    """
    if not VERSIONS_FILE.exists():
        return None
    prop = f"StellaOps{SERVICE_MAP[service_key]}Version"
    found = re.search(
        rf"<{prop}>(\d+\.\d+\.\d+)</{prop}>",
        VERSIONS_FILE.read_text(encoding="utf-8"),
    )
    if found is None:
        return None
    return found.group(1)
def update_version_in_props(service_key: str, new_version: str, dry_run: bool = False) -> bool:
    """Rewrite the service's version property in Directory.Versions.props.

    Returns True on success (or a successful dry run); False when the
    props file or the expected property cannot be found.
    """
    if not VERSIONS_FILE.exists():
        print(f"Error: {VERSIONS_FILE} not found", file=sys.stderr)
        return False
    property_name = f"StellaOps{SERVICE_MAP[service_key]}Version"
    # Replace only the numeric payload between the opening/closing tags.
    updated, hits = re.subn(
        rf"(<{property_name}>)\d+\.\d+\.\d+(</{property_name}>)",
        rf"\g<1>{new_version}\g<2>",
        VERSIONS_FILE.read_text(encoding="utf-8"),
    )
    if hits == 0:
        print(f"Error: Property {property_name} not found in {VERSIONS_FILE}", file=sys.stderr)
        return False
    if dry_run:
        print(f"[DRY-RUN] Would update {VERSIONS_FILE}")
        print(f"[DRY-RUN] {property_name}: {new_version}")
        return True
    VERSIONS_FILE.write_text(updated, encoding="utf-8")
    print(f"Updated {VERSIONS_FILE}")
    print(f" {property_name}: {new_version}")
    return True
def update_manifest(
    service_key: str,
    new_version: str,
    git_sha: Optional[str] = None,
    docker_tag: Optional[str] = None,
    dry_run: bool = False,
) -> bool:
    """Update service-versions.json manifest.

    Records *new_version* (plus optional git SHA and docker tag) for
    *service_key* and refreshes the manifest-wide lastUpdated stamp.
    Returns False only on a JSON parse error; a missing manifest file or
    an unknown service is treated as a non-fatal warning (returns True).
    """
    if not MANIFEST_FILE.exists():
        print(f"Warning: {MANIFEST_FILE} not found, skipping manifest update", file=sys.stderr)
        return True
    try:
        manifest = json.loads(MANIFEST_FILE.read_text(encoding="utf-8"))
    except json.JSONDecodeError as e:
        print(f"Error parsing {MANIFEST_FILE}: {e}", file=sys.stderr)
        return False
    if service_key not in manifest.get("services", {}):
        print(f"Warning: Service '{service_key}' not found in manifest", file=sys.stderr)
        return True
    # Update service entry (shared timestamp for entry and manifest)
    now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    service = manifest["services"][service_key]
    service["version"] = new_version
    service["releasedAt"] = now
    if git_sha:
        service["gitSha"] = git_sha
    if docker_tag:
        service["dockerTag"] = docker_tag
    # Update manifest timestamp
    manifest["lastUpdated"] = now
    if dry_run:
        print(f"[DRY-RUN] Would update {MANIFEST_FILE}")
        print(f"[DRY-RUN] {service_key}.version: {new_version}")
        if docker_tag:
            print(f"[DRY-RUN] {service_key}.dockerTag: {docker_tag}")
    else:
        # Trailing newline keeps the JSON file POSIX-friendly/diff-stable.
        MANIFEST_FILE.write_text(
            json.dumps(manifest, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )
        print(f"Updated {MANIFEST_FILE}")
    return True
def get_git_sha() -> Optional[str]:
    """Return the 12-character short SHA of HEAD, or None if git fails."""
    try:
        completed = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            capture_output=True,
            text=True,
            cwd=REPO_ROOT,
            check=True,
        )
    except subprocess.CalledProcessError:
        return None
    # Truncate the full hash to a short (12-hex-digit) form.
    return completed.stdout.strip()[:12]
def commit_changes(service_key: str, old_version: str, new_version: str) -> bool:
    """Stage the two version files and create a version-bump commit.

    Returns True when the commit succeeds, False on any git failure.
    """
    commit_msg = f"""chore({service_key}): bump version {old_version} -> {new_version}
Automated version bump via bump-service-version.py
Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"""
    try:
        # Stage only the files this script is allowed to touch.
        subprocess.run(
            ["git", "add", str(VERSIONS_FILE), str(MANIFEST_FILE)],
            cwd=REPO_ROOT,
            check=True,
        )
        subprocess.run(
            ["git", "commit", "-m", commit_msg],
            cwd=REPO_ROOT,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Error committing changes: {e}", file=sys.stderr)
        return False
    print(f"Committed version bump: {old_version} -> {new_version}")
    return True
def generate_docker_tag(version: str) -> str:
    """Build the release tag ``{version}+{YYYYMMDDHHmmss}`` (UTC stamp).

    NOTE(review): ``+`` is not a legal character in Docker image tags
    (only ``[A-Za-z0-9_.-]``) — confirm downstream tooling normalizes
    this before pushing to a registry.
    """
    stamp = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
    return version + "+" + stamp
def main():
    """CLI entry point: parse arguments, compute and apply the bump.

    Flow: read current version from the props file -> compute the new
    version -> (optionally) print a single value for CI and exit ->
    otherwise update the props file, the manifest, and optionally commit.
    """
    parser = argparse.ArgumentParser(
        description="Bump service version in centralized version storage",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
%(prog)s authority patch # Bump authority from 1.0.0 to 1.0.1
%(prog)s scanner minor --dry-run # Preview bumping scanner minor version
%(prog)s cli 2.0.0 --commit # Set CLI to 2.0.0 and commit
%(prog)s gateway patch --docker-tag # Bump and generate docker tag
""",
    )
    parser.add_argument(
        "service",
        choices=list(SERVICE_MAP.keys()),
        help="Service name to bump",
    )
    parser.add_argument(
        "bump_type",
        help="Bump type: major, minor, patch, or explicit version (e.g., 2.0.0)",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be changed without modifying files",
    )
    parser.add_argument(
        "--commit",
        action="store_true",
        help="Commit changes to git after updating",
    )
    parser.add_argument(
        "--no-manifest",
        action="store_true",
        help="Skip updating service-versions.json manifest",
    )
    parser.add_argument(
        "--git-sha",
        help="Git SHA to record in manifest (defaults to HEAD)",
    )
    parser.add_argument(
        "--docker-tag",
        nargs="?",
        const="auto",  # a bare --docker-tag means "generate one for me"
        help="Docker tag to record in manifest (use 'auto' to generate)",
    )
    parser.add_argument(
        "--output-version",
        action="store_true",
        help="Output only the new version (for CI scripts)",
    )
    parser.add_argument(
        "--output-docker-tag",
        action="store_true",
        help="Output only the docker tag (for CI scripts)",
    )
    args = parser.parse_args()
    # Read current version
    current_version = read_version_from_props(args.service)
    if not current_version:
        print(f"Error: Could not read current version for {args.service}", file=sys.stderr)
        sys.exit(1)
    # Calculate new version
    try:
        new_version = bump_version(current_version, args.bump_type)
    except ValueError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    # Generate docker tag if requested
    docker_tag = None
    if args.docker_tag:
        docker_tag = generate_docker_tag(new_version) if args.docker_tag == "auto" else args.docker_tag
    # Output mode for CI scripts: print exactly one value and exit
    # WITHOUT modifying any files.
    if args.output_version:
        print(new_version)
        sys.exit(0)
    if args.output_docker_tag:
        print(docker_tag or generate_docker_tag(new_version))
        sys.exit(0)
    # Print summary
    print(f"Service: {args.service}")
    print(f"Current version: {current_version}")
    print(f"New version: {new_version}")
    if docker_tag:
        print(f"Docker tag: {docker_tag}")
    print()
    # Update version in props file
    if not update_version_in_props(args.service, new_version, args.dry_run):
        sys.exit(1)
    # Update manifest if not skipped
    if not args.no_manifest:
        git_sha = args.git_sha or get_git_sha()
        if not update_manifest(args.service, new_version, git_sha, docker_tag, args.dry_run):
            sys.exit(1)
    # Commit if requested (never during a dry run)
    if args.commit and not args.dry_run:
        if not commit_changes(args.service, current_version, new_version):
            sys.exit(1)
    print()
    print(f"Successfully bumped {args.service}: {current_version} -> {new_version}")
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,259 @@
#!/usr/bin/env python3
"""
collect_versions.py - Collect service versions for suite release
Sprint: CI/CD Enhancement - Suite Release Pipeline
Gathers all service versions from Directory.Versions.props and service-versions.json.
Usage:
python collect_versions.py [options]
python collect_versions.py --format json
python collect_versions.py --format yaml --output versions.yaml
Options:
--format FMT Output format: json, yaml, markdown, env (default: json)
--output FILE Output file (defaults to stdout)
--include-unreleased Include services with no Docker tag
--registry URL Container registry URL
"""
import argparse
import json
import os
import re
import sys
from dataclasses import dataclass, asdict
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional
# Repository paths — this script lives three directory levels below the
# repository root.
SCRIPT_DIR = Path(__file__).parent
REPO_ROOT = SCRIPT_DIR.parent.parent.parent
VERSIONS_FILE = REPO_ROOT / "src" / "Directory.Versions.props"
MANIFEST_FILE = REPO_ROOT / "devops" / "releases" / "service-versions.json"
# Default registry
DEFAULT_REGISTRY = "git.stella-ops.org/stella-ops.org"
@dataclass
class ServiceVersion:
    """Resolved version record for one service (props + manifest merge)."""

    name: str                          # display name, e.g. "Authority"
    version: str                       # semver from Directory.Versions.props
    docker_tag: Optional[str] = None   # tag recorded at last release, if any
    released_at: Optional[str] = None  # release timestamp from the manifest
    git_sha: Optional[str] = None      # short SHA recorded at last release
    image: Optional[str] = None        # fully-qualified image reference
def read_versions_from_props() -> Dict[str, str]:
    """Scan Directory.Versions.props for StellaOps*Version properties.

    Returns a mapping of lowercase service key -> version string;
    empty when the props file is missing.
    """
    if not VERSIONS_FILE.exists():
        print(f"Warning: {VERSIONS_FILE} not found", file=sys.stderr)
        return {}
    text = VERSIONS_FILE.read_text(encoding="utf-8")
    # Pattern: <StellaOps{Service}Version>X.Y.Z</StellaOps{Service}Version>
    # The backreference \1 forces opening and closing tags to agree.
    prop_re = re.compile(r"<StellaOps(\w+)Version>(\d+\.\d+\.\d+)</StellaOps\1Version>")
    return {m.group(1).lower(): m.group(2) for m in prop_re.finditer(text)}
def read_manifest() -> Dict[str, dict]:
    """Load the per-service metadata map from service-versions.json.

    Missing or unparsable files are reported as warnings and yield {}.
    """
    if not MANIFEST_FILE.exists():
        print(f"Warning: {MANIFEST_FILE} not found", file=sys.stderr)
        return {}
    try:
        data = json.loads(MANIFEST_FILE.read_text(encoding="utf-8"))
    except json.JSONDecodeError as e:
        print(f"Warning: Failed to parse {MANIFEST_FILE}: {e}", file=sys.stderr)
        return {}
    return data.get("services", {})
def collect_all_versions(
    registry: str = DEFAULT_REGISTRY,
    include_unreleased: bool = False,
) -> List[ServiceVersion]:
    """Collect all service versions.

    Merges the authoritative versions from Directory.Versions.props with
    release metadata from service-versions.json; either source may list
    services the other lacks. Results are sorted by service key.
    """
    props_versions = read_versions_from_props()
    manifest_services = read_manifest()
    services = []
    # Merge data from both sources
    all_service_keys = set(props_versions.keys()) | set(manifest_services.keys())
    for key in sorted(all_service_keys):
        # Services known only to the manifest get a placeholder version.
        version = props_versions.get(key, "0.0.0")
        manifest = manifest_services.get(key, {})
        docker_tag = manifest.get("dockerTag")
        released_at = manifest.get("releasedAt")
        git_sha = manifest.get("gitSha")
        # Skip unreleased if not requested (no docker tag == never released)
        if not include_unreleased and not docker_tag:
            continue
        # Build image reference — prefer the recorded release tag over the
        # bare semantic version.
        if docker_tag:
            image = f"{registry}/{key}:{docker_tag}"
        else:
            image = f"{registry}/{key}:{version}"
        service = ServiceVersion(
            name=manifest.get("name", key.title()),
            version=version,
            docker_tag=docker_tag,
            released_at=released_at,
            git_sha=git_sha,
            image=image,
        )
        services.append(service)
    return services
def format_json(services: "List[ServiceVersion]") -> str:
    """Render the collected services as a pretty-printed JSON document."""
    payload = {
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "services": [asdict(entry) for entry in services],
    }
    return json.dumps(payload, indent=2, ensure_ascii=False)
def format_yaml(services: "List[ServiceVersion]") -> str:
    """Render services as hand-built YAML (avoids a yaml dependency)."""
    out = [
        "# Service Versions",
        f"# Generated: {datetime.now(timezone.utc).isoformat()}",
        "",
        "services:",
    ]
    for svc in services:
        out.append(f"  {svc.name.lower()}:")
        out.append(f"    name: {svc.name}")
        out.append(f"    version: \"{svc.version}\"")
        # Optional fields are emitted only when present, in a fixed order.
        for yaml_key, value in (
            ("dockerTag", svc.docker_tag),
            ("image", svc.image),
            ("releasedAt", svc.released_at),
            ("gitSha", svc.git_sha),
        ):
            if value:
                out.append(f"    {yaml_key}: \"{value}\"")
    return "\n".join(out)
def format_markdown(services: "List[ServiceVersion]") -> str:
    """Render services as a Markdown table with a generation timestamp."""
    stamp = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
    rows = [
        "# Service Versions",
        "",
        f"Generated: {stamp}",
        "",
        "| Service | Version | Docker Tag | Released |",
        "|---------|---------|------------|----------|",
    ]
    for svc in services:
        # Show only the date part of the ISO timestamp; "-" for missing data.
        date_cell = svc.released_at[:10] if svc.released_at else "-"
        tag_cell = f"`{svc.docker_tag}`" if svc.docker_tag else "-"
        rows.append(f"| {svc.name} | {svc.version} | {tag_cell} | {date_cell} |")
    return "\n".join(rows)
def format_env(services: "List[ServiceVersion]") -> str:
    """Render services as STELLAOPS_* environment variable assignments."""
    out = [
        "# Service Versions as Environment Variables",
        f"# Generated: {datetime.now(timezone.utc).isoformat()}",
        "",
    ]
    for svc in services:
        # Env var names must not contain spaces (e.g. "Evidence Locker").
        env_name = svc.name.upper().replace(" ", "_")
        out.append(f"STELLAOPS_{env_name}_VERSION={svc.version}")
        if svc.docker_tag:
            out.append(f"STELLAOPS_{env_name}_DOCKER_TAG={svc.docker_tag}")
        if svc.image:
            out.append(f"STELLAOPS_{env_name}_IMAGE={svc.image}")
    return "\n".join(out)
def main():
    """CLI entry point: collect versions and emit in the requested format."""
    parser = argparse.ArgumentParser(
        description="Collect service versions for suite release",
    )
    parser.add_argument(
        "--format",
        choices=["json", "yaml", "markdown", "env"],
        default="json",
        help="Output format",
    )
    parser.add_argument("--output", "-o", help="Output file")
    parser.add_argument(
        "--include-unreleased",
        action="store_true",
        help="Include services without Docker tags",
    )
    parser.add_argument(
        "--registry",
        default=DEFAULT_REGISTRY,
        help="Container registry URL",
    )
    args = parser.parse_args()
    # Collect versions
    services = collect_all_versions(
        registry=args.registry,
        include_unreleased=args.include_unreleased,
    )
    if not services:
        # Empty is not an error (exit 0) — usually just means nothing
        # has been released yet.
        print("No services found", file=sys.stderr)
        if not args.include_unreleased:
            print("Hint: Use --include-unreleased to show all services", file=sys.stderr)
        sys.exit(0)
    # Format output via a dispatch table keyed by the --format choice.
    formatters = {
        "json": format_json,
        "yaml": format_yaml,
        "markdown": format_markdown,
        "env": format_env,
    }
    output = formatters[args.format](services)
    # Write output (status messages go to stderr so stdout stays pipeable)
    if args.output:
        Path(args.output).write_text(output, encoding="utf-8")
        print(f"Versions written to: {args.output}", file=sys.stderr)
    else:
        print(output)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,130 @@
#!/bin/bash
# generate-docker-tag.sh - Generate Docker tag with datetime suffix
#
# Sprint: CI/CD Enhancement - Per-Service Auto-Versioning
# Generates Docker tags in format: {semver}+{YYYYMMDDHHmmss}
#
# Usage:
# ./generate-docker-tag.sh <service>
# ./generate-docker-tag.sh --version <version>
# ./generate-docker-tag.sh authority
# ./generate-docker-tag.sh --version 1.2.3
#
# Output:
# Prints the Docker tag to stdout (e.g., "1.2.3+20250128143022")
# Exit code 0 on success, 1 on error
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Print usage/help text to stdout. The heredoc body is user-facing output
# and the unquoted EOF terminator must stay at column 0.
usage() {
cat << EOF
Usage: $(basename "$0") <service|--version VERSION>
Generate Docker tag with datetime suffix.
Format: {semver}+{YYYYMMDDHHmmss}
Example: 1.2.3+20250128143022
Arguments:
service Service name to read version from
--version VERSION Use explicit version instead of reading from file
Options:
--timestamp TS Use explicit timestamp (YYYYMMDDHHmmss format)
--output-parts Output version and timestamp separately (JSON)
--help, -h Show this help message
Examples:
$(basename "$0") authority # 1.0.0+20250128143022
$(basename "$0") --version 2.0.0 # 2.0.0+20250128143022
$(basename "$0") scanner --timestamp 20250101120000
$(basename "$0") --version 1.0.0 --output-parts
EOF
}
# Emit the current UTC time as a 14-digit YYYYMMDDHHmmss stamp.
generate_timestamp() {
    date -u '+%Y%m%d%H%M%S'
}
# Parse CLI arguments, resolve the version (explicit or from the sibling
# read-service-version.sh), validate both parts, and print the final tag.
main() {
    local version=""
    local timestamp=""
    local output_parts=false
    local service=""
    while [[ $# -gt 0 ]]; do
        case "$1" in
            --help|-h)
                usage
                exit 0
                ;;
            --version)
                # NOTE(review): "$2" is read without checking it exists; with
                # `set -u` a trailing --version aborts with an unbound-variable
                # error instead of a friendly message — confirm acceptable.
                version="$2"
                shift 2
                ;;
            --timestamp)
                timestamp="$2"
                shift 2
                ;;
            --output-parts)
                output_parts=true
                shift
                ;;
            -*)
                echo "Error: Unknown option: $1" >&2
                usage
                exit 1
                ;;
            *)
                # Any bare word is taken as the service name (last one wins).
                service="$1"
                shift
                ;;
        esac
    done
    # Get version from service if not explicitly provided
    if [[ -z "$version" ]]; then
        if [[ -z "$service" ]]; then
            echo "Error: Either service name or --version must be provided" >&2
            usage
            exit 1
        fi
        # Read version using read-service-version.sh (sibling script)
        if [[ ! -x "${SCRIPT_DIR}/read-service-version.sh" ]]; then
            echo "Error: read-service-version.sh not found or not executable" >&2
            exit 1
        fi
        version=$("${SCRIPT_DIR}/read-service-version.sh" "$service")
    fi
    # Validate version format (strict X.Y.Z)
    if ! [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo "Error: Invalid version format: $version (expected: X.Y.Z)" >&2
        exit 1
    fi
    # Generate timestamp if not provided
    if [[ -z "$timestamp" ]]; then
        timestamp=$(generate_timestamp)
    fi
    # Validate timestamp format (exactly 14 digits)
    if ! [[ "$timestamp" =~ ^[0-9]{14}$ ]]; then
        echo "Error: Invalid timestamp format: $timestamp (expected: YYYYMMDDHHmmss)" >&2
        exit 1
    fi
    # Output: JSON parts on request, otherwise the plain tag
    if [[ "$output_parts" == "true" ]]; then
        echo "{\"version\":\"$version\",\"timestamp\":\"$timestamp\",\"tag\":\"${version}+${timestamp}\"}"
    else
        echo "${version}+${timestamp}"
    fi
}
main "$@"

View File

@@ -0,0 +1,448 @@
#!/usr/bin/env python3
"""
generate_changelog.py - AI-assisted changelog generation for suite releases
Sprint: CI/CD Enhancement - Suite Release Pipeline
Generates changelogs from git commit history with optional AI enhancement.
Usage:
python generate_changelog.py <version> [options]
python generate_changelog.py 2026.04 --codename Nova
python generate_changelog.py 2026.04 --from-tag suite-2025.10 --ai
Arguments:
version Suite version (YYYY.MM format)
Options:
--codename NAME Release codename
--from-tag TAG Previous release tag (defaults to latest suite-* tag)
--to-ref REF End reference (defaults to HEAD)
--ai Use AI to enhance changelog descriptions
--output FILE Output file (defaults to stdout)
--format FMT Output format: markdown, json (default: markdown)
"""
import argparse
import json
import os
import re
import subprocess
import sys
from dataclasses import dataclass, field
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from collections import defaultdict
# Repository paths — this script lives three directory levels below the
# repository root.
SCRIPT_DIR = Path(__file__).parent
REPO_ROOT = SCRIPT_DIR.parent.parent.parent
# Module patterns for categorization: a commit is attributed to the module
# whose path regex matches the most changed files.
MODULE_PATTERNS = {
    "Authority": r"src/Authority/",
    "Attestor": r"src/Attestor/",
    "Concelier": r"src/Concelier/",
    "Scanner": r"src/Scanner/",
    "Policy": r"src/Policy/",
    "Signer": r"src/Signer/",
    "Excititor": r"src/Excititor/",
    "Gateway": r"src/Gateway/",
    "Scheduler": r"src/Scheduler/",
    "CLI": r"src/Cli/",
    "Orchestrator": r"src/Orchestrator/",
    "Notify": r"src/Notify/",
    "Infrastructure": r"(devops/|\.gitea/|docs/)",
    "Core": r"src/__Libraries/",
}
# Commit type patterns (conventional commits). Insertion order matters:
# categorize_commit_type returns the FIRST matching key, so the more
# severe categories (breaking, security) are listed first.
COMMIT_TYPE_PATTERNS = {
    "breaking": r"^(feat|fix|refactor)(\(.+\))?!:|BREAKING CHANGE:",
    "security": r"^(security|fix)(\(.+\))?:|CVE-|vulnerability|exploit",
    "feature": r"^feat(\(.+\))?:",
    "fix": r"^fix(\(.+\))?:",
    "performance": r"^perf(\(.+\))?:|performance|optimize",
    "refactor": r"^refactor(\(.+\))?:",
    "docs": r"^docs(\(.+\))?:",
    "test": r"^test(\(.+\))?:",
    "chore": r"^chore(\(.+\))?:|^ci(\(.+\))?:|^build(\(.+\))?:",
}
@dataclass
class Commit:
    """One parsed git commit plus derived categorization fields."""

    sha: str        # full commit hash (%H)
    short_sha: str  # abbreviated hash (%h)
    message: str    # subject line (%s)
    body: str       # commit body (%b)
    author: str     # author name (%an)
    date: str       # author date, ISO 8601 (%aI)
    files: List[str] = field(default_factory=list)  # paths touched by the commit
    type: str = "other"    # category key from COMMIT_TYPE_PATTERNS
    module: str = "Other"  # owning module inferred from touched files
    scope: str = ""        # scope extracted from "type(scope): ..." subjects
@dataclass
class ChangelogEntry:
    """A grouped changelog line item.

    NOTE(review): not referenced anywhere in this script's visible code —
    possibly reserved for the AI-enhancement path; confirm before removing.
    """

    description: str       # human-readable summary for the entry
    commits: List[Commit]  # commits folded into this entry
    module: str            # owning module (MODULE_PATTERNS key)
    type: str              # category key (COMMIT_TYPE_PATTERNS key)
def run_git(args: List[str], cwd: Path = REPO_ROOT) -> str:
    """Run a git subcommand and return its stripped stdout.

    Raises:
        RuntimeError: when git exits non-zero (stderr is included).
    """
    completed = subprocess.run(
        ["git", *args],
        capture_output=True,
        text=True,
        cwd=cwd,
    )
    if completed.returncode != 0:
        raise RuntimeError(f"Git command failed: {completed.stderr}")
    return completed.stdout.strip()
def get_latest_suite_tag() -> Optional[str]:
    """Return the newest ``suite-*`` tag by creation date, or None."""
    try:
        listing = run_git(["tag", "-l", "suite-*", "--sort=-creatordate"])
    except RuntimeError:
        return None
    # First line of the sorted listing is the most recent tag; an empty
    # listing yields "" which maps to None.
    newest = listing.split("\n")[0]
    return newest if newest else None
def get_commits_between(from_ref: str, to_ref: str = "HEAD") -> List[Commit]:
    """Get commits between two refs.

    Runs ``git log from_ref..to_ref`` with a pipe-delimited format plus
    ``--name-only`` so each entry carries its changed-file list; falls
    back to the last 100 commits when *from_ref* does not resolve.

    NOTE(review): the parser splits on ``|`` and assumes the body (%b) is
    single-line; a subject containing ``|`` or a multi-line body will be
    mis-parsed (stray body lines end up in the file list) — confirm this
    is acceptable for the repo's commit history.
    """
    # Format: sha|short_sha|subject|body|author|date
    format_str = "%H|%h|%s|%b|%an|%aI"
    separator = "---COMMIT_SEPARATOR---"
    try:
        output = run_git([
            "log",
            f"{from_ref}..{to_ref}",
            f"--format={format_str}{separator}",
            "--name-only",
        ])
    except RuntimeError:
        # If from_ref doesn't exist, get all commits up to to_ref
        output = run_git([
            "log",
            to_ref,
            "-100",  # Limit to last 100 commits
            f"--format={format_str}{separator}",
            "--name-only",
        ])
    commits = []
    entries = output.split(separator)
    for entry in entries:
        entry = entry.strip()
        if not entry:
            continue
        lines = entry.split("\n")
        if not lines:
            continue
        # Parse commit info (first line of the entry)
        parts = lines[0].split("|")
        if len(parts) < 6:
            # Malformed entry — skip rather than crash.
            continue
        # Get changed files (remaining lines after commit info)
        files = [f.strip() for f in lines[1:] if f.strip()]
        commit = Commit(
            sha=parts[0],
            short_sha=parts[1],
            message=parts[2],
            body=parts[3] if len(parts) > 3 else "",
            author=parts[4] if len(parts) > 4 else "",
            date=parts[5] if len(parts) > 5 else "",
            files=files,
        )
        # Categorize commit (type, owning module, conventional scope)
        commit.type = categorize_commit_type(commit.message)
        commit.module = categorize_commit_module(commit.files, commit.message)
        commit.scope = extract_scope(commit.message)
        commits.append(commit)
    return commits
def categorize_commit_type(message: str) -> str:
    """Map a commit subject to a changelog category key.

    Patterns are tried in COMMIT_TYPE_PATTERNS insertion order, so the
    more severe categories (breaking, security) win over generic ones.
    Returns "other" when nothing matches.
    """
    # Fix: dropped the unused `message_lower` local — re.IGNORECASE
    # already handles case-folding.
    for commit_type, pattern in COMMIT_TYPE_PATTERNS.items():
        if re.search(pattern, message, re.IGNORECASE):
            return commit_type
    return "other"
def categorize_commit_module(files: List[str], message: str) -> str:
    """Infer the owning module for a commit.

    Primary signal: which MODULE_PATTERNS path regex matches the most
    changed files. Fallback: the conventional-commit scope in the
    subject. Returns "Other" when neither yields a module.
    """
    tally: Dict[str, int] = defaultdict(int)
    for path in files:
        for module_name, path_pattern in MODULE_PATTERNS.items():
            if re.search(path_pattern, path):
                tally[module_name] += 1
                break  # first matching module claims the file
    if tally:
        return max(tally, key=tally.get)
    # No file hits — fall back to the "type(scope):" subject prefix.
    scoped = re.match(r"^\w+\((\w+)\):", message)
    if scoped:
        wanted = scoped.group(1).lower()
        for module_name in MODULE_PATTERNS:
            if module_name.lower() == wanted:
                return module_name
    return "Other"
def extract_scope(message: str) -> str:
    """Return the "(scope)" of a conventional commit subject, or ""."""
    found = re.match(r"^\w+\(([^)]+)\):", message)
    if found is None:
        return ""
    return found.group(1)
def group_commits_by_type_and_module(
    commits: "List[Commit]",
) -> "Dict[str, Dict[str, List[Commit]]]":
    """Bucket commits first by category key, then by owning module."""
    buckets: "Dict[str, Dict[str, List[Commit]]]" = defaultdict(lambda: defaultdict(list))
    for entry in commits:
        buckets[entry.type][entry.module].append(entry)
    return buckets
def generate_markdown_changelog(
    version: str,
    codename: str,
    commits: List[Commit],
    ai_enhanced: bool = False,
) -> str:
    """Generate markdown changelog.

    Sections are emitted in severity order (breaking first), each grouped
    by module, followed by a statistics footer.
    """
    grouped = group_commits_by_type_and_module(commits)
    lines = [
        f"# Changelog - StellaOps {version} \"{codename}\"",
        "",
        f"Release Date: {datetime.now(timezone.utc).strftime('%Y-%m-%d')}",
        "",
    ]
    # Order of sections. NOTE: "test" and "chore" are categorized but
    # deliberately absent here, so those commits never appear.
    section_order = [
        ("breaking", "Breaking Changes"),
        ("security", "Security"),
        ("feature", "Features"),
        ("fix", "Bug Fixes"),
        ("performance", "Performance"),
        ("refactor", "Refactoring"),
        ("docs", "Documentation"),
        ("other", "Other Changes"),
    ]
    for type_key, section_title in section_order:
        if type_key not in grouped:
            continue
        modules = grouped[type_key]
        if not modules:
            continue
        lines.append(f"## {section_title}")
        lines.append("")
        # Sort modules alphabetically
        for module in sorted(modules.keys()):
            commits_in_module = modules[module]
            if not commits_in_module:
                continue
            lines.append(f"### {module}")
            lines.append("")
            for commit in commits_in_module:
                # Clean up message
                msg = commit.message
                # Remove conventional commit prefix ("type(scope)!:") for display
                msg = re.sub(r"^\w+(\([^)]+\))?[!]?:\s*", "", msg)
                if ai_enhanced:
                    # Placeholder for AI-enhanced description
                    lines.append(f"- {msg} ([{commit.short_sha}])")
                else:
                    lines.append(f"- {msg} (`{commit.short_sha}`)")
            lines.append("")
    # Add statistics
    lines.extend([
        "---",
        "",
        "## Statistics",
        "",
        f"- **Total Commits:** {len(commits)}",
        f"- **Contributors:** {len(set(c.author for c in commits))}",
        f"- **Files Changed:** {len(set(f for c in commits for f in c.files))}",
        "",
    ])
    return "\n".join(lines)
def generate_json_changelog(
    version: str,
    codename: str,
    commits: "List[Commit]",
) -> str:
    """Serialize the grouped changelog as pretty-printed JSON."""
    grouped = group_commits_by_type_and_module(commits)
    sections: Dict[str, dict] = {}
    for type_key, modules in grouped.items():
        if not modules:
            continue
        sections[type_key] = {
            module: [
                {
                    "sha": c.short_sha,
                    "message": c.message,
                    "author": c.author,
                    "date": c.date,
                }
                for c in module_commits
            ]
            for module, module_commits in modules.items()
        }
    changelog = {
        "version": version,
        "codename": codename,
        "date": datetime.now(timezone.utc).isoformat(),
        "statistics": {
            "totalCommits": len(commits),
            "contributors": len({c.author for c in commits}),
            "filesChanged": len({f for c in commits for f in c.files}),
        },
        "sections": sections,
    }
    return json.dumps(changelog, indent=2, ensure_ascii=False)
def enhance_with_ai(changelog: str, api_key: Optional[str] = None) -> str:
    """Optionally post-process the changelog with an LLM.

    Currently a stub: always returns *changelog* unchanged. Falls back
    to the AI_API_KEY environment variable when no key is passed in.
    """
    key = api_key or os.environ.get("AI_API_KEY")
    if not key:
        print("Warning: No AI API key provided, skipping AI enhancement", file=sys.stderr)
        return changelog
    # This is a placeholder for AI integration
    # In production, this would call Claude API or similar
    prompt = f"""
You are a technical writer creating release notes for a security platform.
Improve the following changelog by:
1. Making descriptions more user-friendly
2. Highlighting important changes
3. Adding context where helpful
4. Keeping it concise
Original changelog:
{changelog}
Generate improved changelog in the same markdown format.
"""
    # For now, return the original changelog
    # TODO: Implement actual AI API call
    print("Note: AI enhancement is a placeholder, returning original changelog", file=sys.stderr)
    return changelog
def main():
    """CLI entry point: collect commits and emit the changelog.

    Progress messages go to stderr so stdout stays pipeable.
    """
    parser = argparse.ArgumentParser(
        description="Generate changelog from git history",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument("version", help="Suite version (YYYY.MM format)")
    parser.add_argument("--codename", default="", help="Release codename")
    parser.add_argument("--from-tag", help="Previous release tag")
    parser.add_argument("--to-ref", default="HEAD", help="End reference")
    parser.add_argument("--ai", action="store_true", help="Use AI enhancement")
    parser.add_argument("--output", "-o", help="Output file")
    parser.add_argument(
        "--format",
        choices=["markdown", "json"],
        default="markdown",
        help="Output format",
    )
    args = parser.parse_args()
    # Validate version format — suite releases use YYYY.04 / YYYY.10;
    # anything else is allowed but warned about.
    if not re.match(r"^\d{4}\.(04|10)$", args.version):
        print(f"Warning: Non-standard version format: {args.version}", file=sys.stderr)
    # Determine from tag: explicit flag, else newest suite-* tag, else
    # fall back to the last 100 commits.
    from_tag = args.from_tag
    if not from_tag:
        from_tag = get_latest_suite_tag()
        if from_tag:
            print(f"Using previous tag: {from_tag}", file=sys.stderr)
        else:
            print("No previous suite tag found, using last 100 commits", file=sys.stderr)
            from_tag = "HEAD~100"
    # Get commits
    print(f"Collecting commits from {from_tag} to {args.to_ref}...", file=sys.stderr)
    commits = get_commits_between(from_tag, args.to_ref)
    print(f"Found {len(commits)} commits", file=sys.stderr)
    if not commits:
        print("No commits found in range", file=sys.stderr)
        sys.exit(0)
    # Generate changelog
    codename = args.codename or "TBD"
    if args.format == "json":
        output = generate_json_changelog(args.version, codename, commits)
    else:
        output = generate_markdown_changelog(
            args.version, codename, commits, ai_enhanced=args.ai
        )
        # Presumably AI enhancement applies only to the markdown pipeline
        # (it rewrites markdown prose) — confirm intended nesting.
        if args.ai:
            output = enhance_with_ai(output)
    # Output
    if args.output:
        Path(args.output).write_text(output, encoding="utf-8")
        print(f"Changelog written to: {args.output}", file=sys.stderr)
    else:
        print(output)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,373 @@
#!/usr/bin/env python3
"""
generate_compose.py - Generate pinned Docker Compose files for suite releases
Sprint: CI/CD Enhancement - Suite Release Pipeline
Creates docker-compose.yml files with pinned image versions for releases.
Usage:
python generate_compose.py <version> <codename> [options]
python generate_compose.py 2026.04 Nova --output docker-compose.yml
python generate_compose.py 2026.04 Nova --airgap --output docker-compose.airgap.yml
Arguments:
version Suite version (YYYY.MM format)
codename Release codename
Options:
--output FILE Output file (default: stdout)
--airgap Generate air-gap variant
--registry URL Container registry URL
--include-deps Include infrastructure dependencies (postgres, valkey)
"""
import argparse
import json
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional
# Repository paths: this script lives at devops/scripts/release/, so the
# repository root is three directory levels up.
SCRIPT_DIR = Path(__file__).parent
REPO_ROOT = SCRIPT_DIR.parent.parent.parent
MANIFEST_FILE = REPO_ROOT / "devops" / "releases" / "service-versions.json"
# Default registry
DEFAULT_REGISTRY = "git.stella-ops.org/stella-ops.org"
# Service definitions with port mappings and dependencies.
# Keys mirror Docker Compose service keys (ports, depends_on, environment,
# volumes, healthcheck); each entry is merged verbatim into the generated
# service block by generate_compose(). Host ports 8000/8080-8087 all map to
# container port 8080.
SERVICE_DEFINITIONS = {
    "authority": {
        "ports": ["8080:8080"],
        "depends_on": ["postgres"],
        "environment": {
            "AUTHORITY_DB_CONNECTION": "Host=postgres;Database=authority;Username=stellaops;Password=${POSTGRES_PASSWORD}",
        },
        "healthcheck": {
            "test": ["CMD", "curl", "-f", "http://localhost:8080/health"],
            "interval": "30s",
            "timeout": "10s",
            "retries": 3,
        },
    },
    "attestor": {
        "ports": ["8081:8080"],
        "depends_on": ["postgres", "authority"],
        "environment": {
            "ATTESTOR_DB_CONNECTION": "Host=postgres;Database=attestor;Username=stellaops;Password=${POSTGRES_PASSWORD}",
            "ATTESTOR_AUTHORITY_URL": "http://authority:8080",
        },
    },
    "concelier": {
        "ports": ["8082:8080"],
        "depends_on": ["postgres", "valkey"],
        "environment": {
            "CONCELIER_DB_CONNECTION": "Host=postgres;Database=concelier;Username=stellaops;Password=${POSTGRES_PASSWORD}",
            "CONCELIER_CACHE_URL": "valkey:6379",
        },
    },
    "scanner": {
        "ports": ["8083:8080"],
        "depends_on": ["postgres", "concelier"],
        "environment": {
            "SCANNER_DB_CONNECTION": "Host=postgres;Database=scanner;Username=stellaops;Password=${POSTGRES_PASSWORD}",
            "SCANNER_CONCELIER_URL": "http://concelier:8080",
        },
        # Scanner gets read-only access to the host Docker socket.
        "volumes": ["/var/run/docker.sock:/var/run/docker.sock:ro"],
    },
    "policy": {
        "ports": ["8084:8080"],
        "depends_on": ["postgres"],
        "environment": {
            "POLICY_DB_CONNECTION": "Host=postgres;Database=policy;Username=stellaops;Password=${POSTGRES_PASSWORD}",
        },
    },
    "signer": {
        "ports": ["8085:8080"],
        "depends_on": ["authority"],
        "environment": {
            "SIGNER_AUTHORITY_URL": "http://authority:8080",
        },
    },
    "excititor": {
        "ports": ["8086:8080"],
        "depends_on": ["postgres", "concelier"],
        "environment": {
            "EXCITITOR_DB_CONNECTION": "Host=postgres;Database=excititor;Username=stellaops;Password=${POSTGRES_PASSWORD}",
        },
    },
    "gateway": {
        "ports": ["8000:8080"],
        "depends_on": ["authority"],
        "environment": {
            "GATEWAY_AUTHORITY_URL": "http://authority:8080",
        },
    },
    "scheduler": {
        "ports": ["8087:8080"],
        "depends_on": ["postgres", "valkey"],
        "environment": {
            "SCHEDULER_DB_CONNECTION": "Host=postgres;Database=scheduler;Username=stellaops;Password=${POSTGRES_PASSWORD}",
            "SCHEDULER_QUEUE_URL": "valkey:6379",
        },
    },
}
# Infrastructure services (database and cache) that StellaOps services
# depend on; only emitted when --include-deps is in effect.
INFRASTRUCTURE_SERVICES = {
    "postgres": {
        "image": "postgres:16-alpine",
        "environment": {
            "POSTGRES_USER": "stellaops",
            "POSTGRES_PASSWORD": "${POSTGRES_PASSWORD:-stellaops}",
            "POSTGRES_DB": "stellaops",
        },
        "volumes": ["postgres_data:/var/lib/postgresql/data"],
        "healthcheck": {
            "test": ["CMD-SHELL", "pg_isready -U stellaops"],
            "interval": "10s",
            "timeout": "5s",
            "retries": 5,
        },
    },
    "valkey": {
        "image": "valkey/valkey:8-alpine",
        "volumes": ["valkey_data:/data"],
        "healthcheck": {
            "test": ["CMD", "valkey-cli", "ping"],
            "interval": "10s",
            "timeout": "5s",
            "retries": 5,
        },
    },
}
def read_service_versions() -> Dict[str, dict]:
    """Load the per-service version map from the release manifest.

    Returns an empty dict when the manifest file is absent or contains
    invalid JSON, so callers can fall back to defaults.
    """
    if MANIFEST_FILE.exists():
        try:
            parsed = json.loads(MANIFEST_FILE.read_text(encoding="utf-8"))
        except json.JSONDecodeError:
            return {}
        return parsed.get("services", {})
    return {}
def generate_compose(
    version: str,
    codename: str,
    registry: str,
    services: Dict[str, dict],
    airgap: bool = False,
    include_deps: bool = True,
) -> str:
    """Generate Docker Compose YAML for a pinned suite release.

    Args:
        version: Suite version string (YYYY.MM).
        codename: Release codename embedded in the header and labels.
        registry: Registry prefix for service images (ignored when airgap).
        services: Manifest map of service key -> version info; "dockerTag"
            and "version" keys are consulted for image pinning.
        airgap: Pin images to the localhost:5000 mirror registry.
        include_deps: Emit postgres/valkey infrastructure services and
            their named volumes.

    Returns:
        The complete compose file contents as a single string.
    """
    now = datetime.now(timezone.utc)
    lines = [
        "# Docker Compose for StellaOps Suite",
        f"# Version: {version} \"{codename}\"",
        f"# Generated: {now.isoformat()}",
        "#",
        "# Usage:",
        "# docker compose up -d",
        "# docker compose logs -f",
        "# docker compose down",
        "#",
        "# Environment variables:",
        "# POSTGRES_PASSWORD - PostgreSQL password (default: stellaops)",
        "#",
        "",
        "services:",
    ]
    # Infrastructure first, so StellaOps services can depends_on them.
    if include_deps:
        for name, config in INFRASTRUCTURE_SERVICES.items():
            lines.extend(generate_service_block(name, config, indent=2))
    # Add StellaOps services
    for svc_name, svc_def in SERVICE_DEFINITIONS.items():
        # Prefer the manifest's docker tag, then its service version,
        # then fall back to the suite version.
        manifest_info = services.get(svc_name, {})
        docker_tag = manifest_info.get("dockerTag") or manifest_info.get("version", version)
        # Air-gapped installs pull from a local mirror registry.
        if airgap:
            image = f"localhost:5000/{svc_name}:{docker_tag}"
        else:
            image = f"{registry}/{svc_name}:{docker_tag}"
        # Static definition merged on top of the common image/restart keys.
        config = {
            "image": image,
            "restart": "unless-stopped",
            **svc_def,
        }
        # Release-traceability labels.
        config["labels"] = {
            "com.stellaops.release.version": version,
            "com.stellaops.release.codename": codename,
            "com.stellaops.service.name": svc_name,
            "com.stellaops.service.version": manifest_info.get("version", "1.0.0"),
        }
        lines.extend(generate_service_block(svc_name, config, indent=2))
    # Named volumes are only used by the infrastructure services, so the
    # whole section is gated on include_deps. (Previously a bare "volumes:"
    # header was emitted even with include_deps=False, leaving an empty/null
    # top-level mapping that `docker compose` rejects.)
    if include_deps:
        lines.extend([
            "",
            "volumes:",
            "  postgres_data:",
            "    driver: local",
            "  valkey_data:",
            "    driver: local",
        ])
    # Add networks
    lines.extend([
        "",
        "networks:",
        "  default:",
        "    name: stellaops",
        "    driver: bridge",
    ])
    return "\n".join(lines)
def generate_service_block(name: str, config: dict, indent: int = 2) -> List[str]:
    """Render one Compose service entry as a list of YAML lines.

    Keys are emitted in a fixed order (image, container_name, restart,
    ports, volumes, environment, depends_on, healthcheck, labels) and only
    when present in *config*; container_name is always derived from *name*.
    """
    pad = " " * indent
    field = " " * (indent + 2)
    out: List[str] = ["", f"{pad}{name}:"]
    emit = out.append

    if "image" in config:
        emit(f"{field}image: {config['image']}")
    # Container name is always stellaops-<service>.
    emit(f"{field}container_name: stellaops-{name}")
    if "restart" in config:
        emit(f"{field}restart: {config['restart']}")
    if "ports" in config:
        emit(f"{field}ports:")
        for mapping in config["ports"]:
            emit(f"{field}  - \"{mapping}\"")
    if "volumes" in config:
        emit(f"{field}volumes:")
        for mount in config["volumes"]:
            emit(f"{field}  - {mount}")
    if "environment" in config:
        emit(f"{field}environment:")
        for env_name, env_value in config["environment"].items():
            emit(f"{field}  {env_name}: \"{env_value}\"")
    if "depends_on" in config:
        # Every dependency waits for the dependee's healthcheck.
        emit(f"{field}depends_on:")
        for dep in config["depends_on"]:
            emit(f"{field}  {dep}:")
            emit(f"{field}    condition: service_healthy")
    if "healthcheck" in config:
        probe = config["healthcheck"]
        emit(f"{field}healthcheck:")
        if "test" in probe:
            cmd = probe["test"]
            if isinstance(cmd, list):
                # Exec-form test is serialized as a JSON array.
                emit(f"{field}  test: {json.dumps(cmd)}")
            else:
                emit(f"{field}  test: \"{cmd}\"")
        for attr in ("interval", "timeout", "retries", "start_period"):
            if attr in probe:
                emit(f"{field}  {attr}: {probe[attr]}")
    if "labels" in config:
        emit(f"{field}labels:")
        for label_key, label_value in config["labels"].items():
            emit(f"{field}  {label_key}: \"{label_value}\"")
    return out
def main():
    """CLI entry point: parse arguments and emit the Compose file."""
    parser = argparse.ArgumentParser(
        description="Generate pinned Docker Compose files for suite releases",
    )
    parser.add_argument("version", help="Suite version (YYYY.MM format)")
    parser.add_argument("codename", help="Release codename")
    parser.add_argument("--output", "-o", help="Output file")
    parser.add_argument(
        "--airgap",
        action="store_true",
        help="Generate air-gap variant (localhost:5000 registry)",
    )
    parser.add_argument(
        "--registry",
        default=DEFAULT_REGISTRY,
        help="Container registry URL",
    )
    parser.add_argument(
        "--include-deps",
        action="store_true",
        default=True,
        help="Include infrastructure dependencies",
    )
    parser.add_argument(
        "--no-deps",
        action="store_true",
        help="Exclude infrastructure dependencies",
    )
    opts = parser.parse_args()

    version_map = read_service_versions()
    if not version_map:
        print("Warning: No service versions found in manifest", file=sys.stderr)

    # --no-deps wins over the (default-on) --include-deps flag.
    rendered = generate_compose(
        version=opts.version,
        codename=opts.codename,
        registry=opts.registry,
        services=version_map,
        airgap=opts.airgap,
        include_deps=opts.include_deps and not opts.no_deps,
    )

    if opts.output:
        Path(opts.output).write_text(rendered, encoding="utf-8")
        print(f"Docker Compose written to: {opts.output}", file=sys.stderr)
    else:
        print(rendered)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,477 @@
#!/usr/bin/env python3
"""
generate_suite_docs.py - Generate suite release documentation
Sprint: CI/CD Enhancement - Suite Release Pipeline
Creates the docs/releases/YYYY.MM/ documentation structure.
Usage:
python generate_suite_docs.py <version> <codename> [options]
python generate_suite_docs.py 2026.04 Nova --channel lts
python generate_suite_docs.py 2026.10 Orion --changelog CHANGELOG.md
Arguments:
version Suite version (YYYY.MM format)
codename Release codename
Options:
--channel CH Release channel: edge, stable, lts
--changelog FILE Pre-generated changelog file
--output-dir DIR Output directory (default: docs/releases/YYYY.MM)
--registry URL Container registry URL
--previous VERSION Previous version for upgrade guide
"""
import argparse
import json
import os
import re
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional
# Repository paths: this script lives at devops/scripts/release/, so the
# repository root is three directory levels up.
SCRIPT_DIR = Path(__file__).parent
REPO_ROOT = SCRIPT_DIR.parent.parent.parent
# NOTE(review): VERSIONS_FILE is defined but not referenced in this script's
# visible code — confirm whether it is still needed.
VERSIONS_FILE = REPO_ROOT / "src" / "Directory.Versions.props"
MANIFEST_FILE = REPO_ROOT / "devops" / "releases" / "service-versions.json"
# Default registry
DEFAULT_REGISTRY = "git.stella-ops.org/stella-ops.org"
# Human-readable support duration per release channel, shown in the README
# header; the concrete EOL month is computed separately by calculate_eol().
SUPPORT_TIMELINE = {
    "edge": "3 months",
    "stable": "9 months",
    "lts": "5 years",
}
def get_git_sha() -> str:
    """Return the short (12-character) SHA of the repository HEAD.

    Falls back to "unknown" when the git command fails or the git binary
    is not installed, so documentation generation never aborts on a
    metadata lookup.
    """
    try:
        result = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            capture_output=True,
            text=True,
            cwd=REPO_ROOT,
            check=True,
        )
    except (subprocess.CalledProcessError, OSError):
        # OSError covers FileNotFoundError when git itself is missing;
        # previously only CalledProcessError was caught and a missing git
        # binary crashed the script.
        return "unknown"
    return result.stdout.strip()[:12]
def read_service_versions() -> Dict[str, dict]:
    """Return the "services" mapping from the release manifest.

    A missing or malformed manifest yields {} rather than raising, so
    documentation generation degrades gracefully.
    """
    try:
        raw = MANIFEST_FILE.read_text(encoding="utf-8")
    except FileNotFoundError:
        return {}
    try:
        return json.loads(raw).get("services", {})
    except json.JSONDecodeError:
        return {}
def generate_readme(
    version: str,
    codename: str,
    channel: str,
    registry: str,
    services: Dict[str, dict],
) -> str:
    """Generate README.md for the release.

    Args:
        version: Suite version (YYYY.MM), used in the title and commands.
        channel: One of "edge"/"stable"/"lts"; drives the support blurb.
        codename: Release codename shown in the title.
        registry: Registry prefix used in the service image table.
        services: Manifest map of service key -> version info.

    Returns:
        README contents as a single newline-joined string.
    """
    now = datetime.now(timezone.utc)
    support_period = SUPPORT_TIMELINE.get(channel, "unknown")
    lines = [
        f"# StellaOps {version} \"{codename}\"",
        "",
        f"**Release Date:** {now.strftime('%B %d, %Y')}",
        f"**Channel:** {channel.upper()}",
        f"**Support Period:** {support_period}",
        "",
        "## Overview",
        "",
        # The trailing space lets this sentence flow into the next line
        # once joined by markdown rendering.
        f"StellaOps {version} \"{codename}\" is a {'Long-Term Support (LTS)' if channel == 'lts' else channel} release ",
        "of the StellaOps container security platform.",
        "",
        "## Quick Start",
        "",
        "### Docker Compose",
        "",
        "```bash",
        f"curl -O https://git.stella-ops.org/stella-ops.org/releases/{version}/docker-compose.yml",
        "docker compose up -d",
        "```",
        "",
        "### Helm",
        "",
        "```bash",
        # NOTE(review): this f-string has no placeholders; the f prefix is
        # redundant (harmless).
        f"helm repo add stellaops https://charts.stella-ops.org",
        f"helm install stellaops stellaops/stellaops --version {version}",
        "```",
        "",
        "## Included Services",
        "",
        "| Service | Version | Image |",
        "|---------|---------|-------|",
    ]
    # One markdown table row per service, sorted for stable output.
    for key, svc in sorted(services.items()):
        name = svc.get("name", key.title())
        ver = svc.get("version", "1.0.0")
        tag = svc.get("dockerTag", ver)
        image = f"`{registry}/{key}:{tag}`"
        lines.append(f"| {name} | {ver} | {image} |")
    lines.extend([
        "",
        "## Documentation",
        "",
        "- [CHANGELOG.md](./CHANGELOG.md) - Detailed list of changes",
        "- [services.md](./services.md) - Service version details",
        "- [upgrade-guide.md](./upgrade-guide.md) - Upgrade instructions",
        "- [docker-compose.yml](./docker-compose.yml) - Docker Compose configuration",
        "",
        "## Support",
        "",
        f"This release is supported until **{calculate_eol(now, channel)}**.",
        "",
        "For issues and feature requests, please visit:",
        "https://git.stella-ops.org/stella-ops.org/git.stella-ops.org/issues",
        "",
        "---",
        "",
        f"Generated: {now.isoformat()}",
        f"Git SHA: {get_git_sha()}",
    ])
    return "\n".join(lines)
def calculate_eol(release_date: datetime, channel: str) -> str:
    """Return the human-readable end-of-life month for *release_date*.

    Channels map to support periods: edge=3 months, stable=9 months,
    lts=5 years; unknown channels default to 9 months. Uses
    python-dateutil when available; when it is not installed a generic
    pointer to the support policy is returned instead.

    Note: the original placed the dateutil import outside the try block,
    so the `except ImportError` fallback was unreachable and a missing
    dependency crashed the caller. The import now lives inside the try.
    """
    try:
        from dateutil.relativedelta import relativedelta

        periods = {
            "edge": relativedelta(months=3),
            "stable": relativedelta(months=9),
            "lts": relativedelta(years=5),
        }
        eol = release_date + periods.get(channel, relativedelta(months=9))
        return eol.strftime("%B %Y")
    except ImportError:
        # Fallback without dateutil
        return f"See {channel} support policy"
def generate_services_doc(
    version: str,
    codename: str,
    registry: str,
    services: Dict[str, dict],
) -> str:
    """Generate services.md with detailed service information.

    Builds three sections from the manifest map: a version/tag matrix,
    copy-pasteable `docker pull` commands, and a short per-service
    description.

    Args:
        version: Suite version (YYYY.MM) shown in the title.
        codename: Release codename shown in the title.
        registry: Registry prefix used in image references.
        services: Manifest map of service key -> version info.
    """
    lines = [
        f"# Services - StellaOps {version} \"{codename}\"",
        "",
        "This document lists all services included in this release with their versions,",
        "Docker images, and configuration details.",
        "",
        "## Service Matrix",
        "",
        "| Service | Version | Docker Tag | Released | Git SHA |",
        "|---------|---------|------------|----------|---------|",
    ]
    # One table row per service, sorted by key for stable output.
    for key, svc in sorted(services.items()):
        name = svc.get("name", key.title())
        ver = svc.get("version", "1.0.0")
        tag = svc.get("dockerTag") or "-"
        released = svc.get("releasedAt", "-")
        # Trim ISO timestamps down to the date portion (YYYY-MM-DD).
        if released != "-":
            released = released[:10]
        sha = svc.get("gitSha") or "-"
        lines.append(f"| {name} | {ver} | `{tag}` | {released} | `{sha}` |")
    lines.extend([
        "",
        "## Container Images",
        "",
        "All images are available from the StellaOps registry:",
        "",
        "```",
        f"Registry: {registry}",
        "```",
        "",
        "### Pull Commands",
        "",
        "```bash",
    ])
    # One docker pull command per service.
    for key, svc in sorted(services.items()):
        tag = svc.get("dockerTag") or svc.get("version", "latest")
        lines.append(f"docker pull {registry}/{key}:{tag}")
    lines.extend([
        "```",
        "",
        "## Service Descriptions",
        "",
    ])
    # Static one-line blurbs; services absent from this map fall back to
    # the generic "StellaOps service" description below.
    service_descriptions = {
        "authority": "Authentication and authorization service with OAuth/OIDC support",
        "attestor": "in-toto/DSSE attestation generation and verification",
        "concelier": "Vulnerability advisory ingestion and merge engine",
        "scanner": "Container scanning with SBOM generation",
        "policy": "Policy engine with K4 lattice logic",
        "signer": "Cryptographic signing operations",
        "excititor": "VEX document ingestion and export",
        "gateway": "API gateway with routing and transport abstraction",
        "scheduler": "Job scheduling and queue management",
        "cli": "Command-line interface",
        "orchestrator": "Workflow orchestration and task coordination",
        "notify": "Notification delivery (Email, Slack, Teams, Webhooks)",
    }
    for key, svc in sorted(services.items()):
        name = svc.get("name", key.title())
        desc = service_descriptions.get(key, "StellaOps service")
        lines.extend([
            f"### {name}",
            "",
            desc,
            "",
            f"- **Version:** {svc.get('version', '1.0.0')}",
            f"- **Image:** `{registry}/{key}:{svc.get('dockerTag', 'latest')}`",
            "",
        ])
    return "\n".join(lines)
def generate_upgrade_guide(
    version: str,
    codename: str,
    previous_version: Optional[str],
) -> str:
    """Generate upgrade-guide.md.

    Args:
        version: Target suite version (YYYY.MM).
        codename: Release codename shown in the title.
        previous_version: When given, the intro names the specific
            source version; otherwise a generic intro is used.

    Returns:
        The upgrade guide contents as a single newline-joined string.
    """
    lines = [
        f"# Upgrade Guide - StellaOps {version} \"{codename}\"",
        "",
    ]
    # Intro sentence depends on whether a previous version was supplied.
    if previous_version:
        lines.extend([
            f"This guide covers upgrading from StellaOps {previous_version} to {version}.",
            "",
        ])
    else:
        lines.extend([
            "This guide covers upgrading to this release from a previous version.",
            "",
        ])
    # The remainder of the guide is static boilerplate apart from the
    # helm upgrade command, which pins the target version.
    lines.extend([
        "## Before You Begin",
        "",
        "1. **Backup your data** - Ensure all databases and configuration are backed up",
        "2. **Review changelog** - Check [CHANGELOG.md](./CHANGELOG.md) for breaking changes",
        "3. **Check compatibility** - Verify your environment meets the requirements",
        "",
        "## Upgrade Steps",
        "",
        "### Docker Compose",
        "",
        "```bash",
        "# Pull new images",
        "docker compose pull",
        "",
        "# Stop services",
        "docker compose down",
        "",
        "# Start with new version",
        "docker compose up -d",
        "",
        "# Verify health",
        "docker compose ps",
        "```",
        "",
        "### Helm",
        "",
        "```bash",
        "# Update repository",
        "helm repo update stellaops",
        "",
        "# Upgrade release",
        f"helm upgrade stellaops stellaops/stellaops --version {version}",
        "",
        "# Verify status",
        "helm status stellaops",
        "```",
        "",
        "## Database Migrations",
        "",
        "Database migrations are applied automatically on service startup.",
        "For manual migration control, set `AUTO_MIGRATE=false` and run:",
        "",
        "```bash",
        "stellaops-cli db migrate",
        "```",
        "",
        "## Configuration Changes",
        "",
        "Review the following configuration changes:",
        "",
        "| Setting | Previous | New | Notes |",
        "|---------|----------|-----|-------|",
        "| (No breaking changes) | - | - | - |",
        "",
        "## Rollback Procedure",
        "",
        "If issues occur, rollback to the previous version:",
        "",
        "### Docker Compose",
        "",
        "```bash",
        "# Edit docker-compose.yml to use previous image tags",
        "docker compose down",
        "docker compose up -d",
        "```",
        "",
        "### Helm",
        "",
        "```bash",
        "helm rollback stellaops",
        "```",
        "",
        "## Support",
        "",
        "For upgrade assistance, contact support or open an issue at:",
        "https://git.stella-ops.org/stella-ops.org/git.stella-ops.org/issues",
    ])
    return "\n".join(lines)
def generate_manifest_yaml(
    version: str,
    codename: str,
    channel: str,
    services: Dict[str, dict],
) -> str:
    """Render the machine-readable manifest.yaml for a suite release.

    Emits a SuiteRelease document: release metadata followed by one entry
    per service (sorted by key), with dockerTag/gitSha included only when
    present in the manifest data.
    """
    stamp = datetime.now(timezone.utc)
    out = [
        "apiVersion: stellaops.org/v1",
        "kind: SuiteRelease",
        "metadata:",
        f"  version: \"{version}\"",
        f"  codename: \"{codename}\"",
        f"  channel: \"{channel}\"",
        f"  date: \"{stamp.isoformat()}\"",
        f"  gitSha: \"{get_git_sha()}\"",
        "spec:",
        "  services:",
    ]
    for svc_key, info in sorted(services.items()):
        svc_version = info.get("version", "1.0.0")
        out.append(f"    {svc_key}:")
        out.append(f"      version: \"{svc_version}\"")
        docker_tag = info.get("dockerTag")
        if docker_tag:
            out.append(f"      dockerTag: \"{docker_tag}\"")
        commit = info.get("gitSha")
        if commit:
            out.append(f"      gitSha: \"{commit}\"")
    return "\n".join(out)
def main():
    """CLI entry point: generate the docs/releases/<version> artifact tree."""
    parser = argparse.ArgumentParser(
        description="Generate suite release documentation",
    )
    parser.add_argument("version", help="Suite version (YYYY.MM format)")
    parser.add_argument("codename", help="Release codename")
    parser.add_argument(
        "--channel",
        choices=["edge", "stable", "lts"],
        default="stable",
        help="Release channel",
    )
    parser.add_argument("--changelog", help="Pre-generated changelog file")
    parser.add_argument("--output-dir", help="Output directory")
    parser.add_argument(
        "--registry",
        default=DEFAULT_REGISTRY,
        help="Container registry URL",
    )
    parser.add_argument("--previous", help="Previous version for upgrade guide")
    opts = parser.parse_args()

    # Default target is docs/releases/<version> under the repo root.
    if opts.output_dir:
        target_dir = Path(opts.output_dir)
    else:
        target_dir = REPO_ROOT / "docs" / "releases" / opts.version
    target_dir.mkdir(parents=True, exist_ok=True)
    print(f"Output directory: {target_dir}", file=sys.stderr)

    versions = read_service_versions()
    if not versions:
        print("Warning: No service versions found in manifest", file=sys.stderr)

    def emit(filename: str, content: str) -> None:
        # Write one artifact under target_dir and report progress on stderr.
        (target_dir / filename).write_text(content, encoding="utf-8")
        print(f"Generated: {filename}", file=sys.stderr)

    emit(
        "README.md",
        generate_readme(opts.version, opts.codename, opts.channel, opts.registry, versions),
    )

    # Reuse a pre-generated changelog when supplied; otherwise emit a stub.
    if opts.changelog and Path(opts.changelog).exists():
        changelog_text = Path(opts.changelog).read_text(encoding="utf-8")
    else:
        changelog_text = (
            f"# Changelog - StellaOps {opts.version} \"{opts.codename}\"\n\n"
            "See git history for detailed changes.\n"
        )
    emit("CHANGELOG.md", changelog_text)

    emit(
        "services.md",
        generate_services_doc(opts.version, opts.codename, opts.registry, versions),
    )
    emit(
        "upgrade-guide.md",
        generate_upgrade_guide(opts.version, opts.codename, opts.previous),
    )
    emit(
        "manifest.yaml",
        generate_manifest_yaml(opts.version, opts.codename, opts.channel, versions),
    )

    print(f"\nSuite documentation generated in: {target_dir}", file=sys.stderr)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,131 @@
#!/bin/bash
# read-service-version.sh - Read service version from centralized storage
#
# Sprint: CI/CD Enhancement - Per-Service Auto-Versioning
# This script reads service versions from src/Directory.Versions.props
#
# Usage:
# ./read-service-version.sh <service>
# ./read-service-version.sh authority
# ./read-service-version.sh --all
#
# Output:
# Prints the version string to stdout (e.g., "1.2.3")
# Exit code 0 on success, 1 on error
set -euo pipefail
# Resolve repo root relative to this script (devops/scripts/release/).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
VERSIONS_FILE="${REPO_ROOT}/src/Directory.Versions.props"
# Service name to property suffix mapping: lowercase CLI name ->
# PascalCase suffix of the MSBuild property StellaOps<Suffix>Version.
declare -A SERVICE_MAP=(
    ["authority"]="Authority"
    ["attestor"]="Attestor"
    ["concelier"]="Concelier"
    ["scanner"]="Scanner"
    ["policy"]="Policy"
    ["signer"]="Signer"
    ["excititor"]="Excititor"
    ["gateway"]="Gateway"
    ["scheduler"]="Scheduler"
    ["cli"]="Cli"
    ["orchestrator"]="Orchestrator"
    ["notify"]="Notify"
    ["sbomservice"]="SbomService"
    ["vexhub"]="VexHub"
    ["evidencelocker"]="EvidenceLocker"
)
usage() {
    # Print CLI help on stdout. The heredoc expands ${!SERVICE_MAP[*]} so
    # the service list always matches the map above.
    cat << EOF
Usage: $(basename "$0") <service|--all>
Read service version from centralized version storage.
Arguments:
    service     Service name (authority, attestor, concelier, scanner, etc.)
    --all       Print all service versions in JSON format
Services:
    ${!SERVICE_MAP[*]}
Examples:
    $(basename "$0") authority          # Output: 1.0.0
    $(basename "$0") scanner            # Output: 1.2.3
    $(basename "$0") --all              # Output: {"authority":"1.0.0",...}
EOF
}
read_version() {
    # Look up one service's version property in Directory.Versions.props.
    # Prints "X.Y.Z" on stdout; returns 1 for an unknown service or when
    # the properties file / property is missing.
    local svc="$1"
    local suffix="${SERVICE_MAP[$svc]:-}"

    if [[ -z "$suffix" ]]; then
        echo "Error: Unknown service '$svc'" >&2
        echo "Valid services: ${!SERVICE_MAP[*]}" >&2
        return 1
    fi

    if [[ ! -f "$VERSIONS_FILE" ]]; then
        echo "Error: Versions file not found: $VERSIONS_FILE" >&2
        return 1
    fi

    local prop="StellaOps${suffix}Version"
    local ver
    # \K (PCRE) drops the opening tag from the match, leaving just X.Y.Z.
    ver=$(grep -oP "<${prop}>\K[0-9]+\.[0-9]+\.[0-9]+" "$VERSIONS_FILE" || true)

    if [[ -z "$ver" ]]; then
        echo "Error: Property '$prop' not found in $VERSIONS_FILE" >&2
        return 1
    fi

    echo "$ver"
}
read_all_versions() {
    # Emit all resolvable service versions as a single-line JSON object.
    # Services whose version cannot be resolved are silently omitted.
    # Keys are sorted so repeated runs produce byte-identical output
    # (bash associative arrays iterate in unspecified order, which made
    # the original output nondeterministic).
    if [[ ! -f "$VERSIONS_FILE" ]]; then
        echo "Error: Versions file not found: $VERSIONS_FILE" >&2
        return 1
    fi
    echo -n "{"
    local first=true
    local service version
    for service in $(printf '%s\n' "${!SERVICE_MAP[@]}" | sort); do
        version=$(read_version "$service" 2>/dev/null || echo "")
        if [[ -n "$version" ]]; then
            if [[ "$first" != "true" ]]; then
                echo -n ","
            fi
            echo -n "\"$service\":\"$version\""
            first=false
        fi
    done
    echo "}"
}
main() {
    # Dispatch on the first argument: --all dumps a JSON object, any other
    # token is treated as a single service name. No arguments is an error.
    if [[ $# -eq 0 ]]; then
        usage
        exit 1
    fi
    case "$1" in
        --help|-h)
            usage
            exit 0
            ;;
        --all)
            read_all_versions
            ;;
        *)
            read_version "$1"
            ;;
    esac
}
main "$@"

View File

@@ -0,0 +1,226 @@
#!/usr/bin/env bash
set -euo pipefail
# Rollback Script
# Sprint: CI/CD Enhancement - Deployment Safety
#
# Purpose: Execute rollback to a previous version
# Usage:
# ./rollback.sh --environment <env> --version <ver> --services <json> --reason <text>
#
# Exit codes:
# 0 - Rollback successful
# 1 - General error
# 2 - Invalid arguments
# 3 - Deployment failed
# 4 - Health check failed
# Resolve repo root relative to this script (devops/scripts/release/).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
# Colors for output (NC resets to the terminal default).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Leveled logging helpers; only log_error writes to stderr.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $*"
}
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $*"
}
log_error() {
    echo -e "${RED}[ERROR]${NC} $*" >&2
}
log_step() {
    echo -e "${BLUE}[STEP]${NC} $*"
}
usage() {
    # Print CLI help on stdout; the exit-code table mirrors the script
    # header comment.
    cat << EOF
Usage: $(basename "$0") [OPTIONS]
Execute rollback to a previous version.
Options:
    --environment <env>    Target environment (staging|production)
    --version <version>    Target version to rollback to
    --services <json>      JSON array of services to rollback
    --reason <text>        Reason for rollback
    --dry-run              Show what would be done without executing
    --help, -h             Show this help message
Examples:
    $(basename "$0") --environment staging --version 1.2.3 --services '["scanner"]' --reason "Bug fix"
    $(basename "$0") --environment production --version 1.2.0 --services '["authority","scanner"]' --reason "Hotfix rollback"
Exit codes:
    0    Rollback successful
    1    General error
    2    Invalid arguments
    3    Deployment failed
    4    Health check failed
EOF
}
# Default values
ENVIRONMENT=""
VERSION=""
SERVICES=""
REASON=""
DRY_RUN=false
# Parse arguments (all value-taking options consume two positions).
while [[ $# -gt 0 ]]; do
    case "$1" in
        --environment)
            ENVIRONMENT="$2"
            shift 2
            ;;
        --version)
            VERSION="$2"
            shift 2
            ;;
        --services)
            SERVICES="$2"
            shift 2
            ;;
        --reason)
            REASON="$2"
            shift 2
            ;;
        --dry-run)
            DRY_RUN=true
            shift
            ;;
        --help|-h)
            usage
            exit 0
            ;;
        *)
            log_error "Unknown option: $1"
            usage
            exit 2
            ;;
    esac
done
# Validate required arguments (--reason is optional).
if [[ -z "$ENVIRONMENT" ]] || [[ -z "$VERSION" ]] || [[ -z "$SERVICES" ]]; then
    log_error "Missing required arguments"
    usage
    exit 2
fi
# Validate environment
if [[ "$ENVIRONMENT" != "staging" ]] && [[ "$ENVIRONMENT" != "production" ]]; then
    log_error "Invalid environment: $ENVIRONMENT (must be staging or production)"
    exit 2
fi
# Validate that --services parses as JSON (jq exits non-zero otherwise).
if ! echo "$SERVICES" | jq empty 2>/dev/null; then
    log_error "Invalid services JSON: $SERVICES"
    exit 2
fi
log_info "Starting rollback process"
log_info "  Environment: $ENVIRONMENT"
log_info "  Version: $VERSION"
log_info "  Services: $SERVICES"
log_info "  Reason: $REASON"
log_info "  Dry run: $DRY_RUN"
# Record start time
START_TIME=$(date +%s)

# Rollback each service.
#
# The service list is fed into the loop via process substitution instead of a
# pipe: `echo ... | while read` runs the loop body in a subshell, so the
# SUCCESSFUL_SERVICES/FAILED_SERVICES array updates were discarded there —
# the summary below always reported 0/0 and the script exited 0 even when
# every rollback failed.
FAILED_SERVICES=()
SUCCESSFUL_SERVICES=()
while IFS= read -r service; do
    log_step "Rolling back $service to $VERSION..."
    if [[ "$DRY_RUN" == "true" ]]; then
        log_info "  [DRY RUN] Would rollback $service"
        continue
    fi
    # Determine deployment method
    HELM_RELEASE="stellaops-${service}"
    NAMESPACE="stellaops-${ENVIRONMENT}"
    # Check if Helm release exists
    if helm status "$HELM_RELEASE" -n "$NAMESPACE" >/dev/null 2>&1; then
        log_info "  Using Helm rollback for $service"
        # Most recent Helm revision whose app_version matches the target.
        REVISION=$(helm history "$HELM_RELEASE" -n "$NAMESPACE" --output json | \
            jq -r --arg ver "$VERSION" '.[] | select(.app_version == $ver) | .revision' | tail -1)
        if [[ -n "$REVISION" ]]; then
            if helm rollback "$HELM_RELEASE" "$REVISION" -n "$NAMESPACE" --wait --timeout 5m; then
                log_info "  Successfully rolled back $service to revision $REVISION"
                SUCCESSFUL_SERVICES+=("$service")
            else
                log_error "  Failed to rollback $service"
                FAILED_SERVICES+=("$service")
            fi
        else
            log_warn "  No Helm revision found for version $VERSION"
            log_info "  Attempting deployment with specific version..."
            # Fall back to an explicit upgrade pinned to the target tag.
            IMAGE_TAG="${VERSION}"
            VALUES_FILE="${REPO_ROOT}/devops/helm/values-${ENVIRONMENT}.yaml"
            if helm upgrade "$HELM_RELEASE" "${REPO_ROOT}/devops/helm/stellaops" \
                -n "$NAMESPACE" \
                --set "services.${service}.image.tag=${IMAGE_TAG}" \
                -f "$VALUES_FILE" \
                --wait --timeout 5m 2>/dev/null; then
                log_info "  Deployed $service with version $VERSION"
                SUCCESSFUL_SERVICES+=("$service")
            else
                log_error "  Failed to deploy $service with version $VERSION"
                FAILED_SERVICES+=("$service")
            fi
        fi
    else
        log_warn "  No Helm release found for $service"
        log_info "  Attempting kubectl rollout undo..."
        DEPLOYMENT="stellaops-${service}"
        if kubectl rollout undo deployment/"$DEPLOYMENT" -n "$NAMESPACE" 2>/dev/null; then
            log_info "  Rolled back deployment $DEPLOYMENT"
            SUCCESSFUL_SERVICES+=("$service")
        else
            log_error "  Failed to rollback deployment $DEPLOYMENT"
            FAILED_SERVICES+=("$service")
        fi
    fi
done < <(echo "$SERVICES" | jq -r '.[]')

# Calculate duration
END_TIME=$(date +%s)
DURATION=$((END_TIME - START_TIME))

# Summary
echo ""
log_info "Rollback completed in ${DURATION}s"
log_info "  Successful: ${#SUCCESSFUL_SERVICES[@]}"
log_info "  Failed: ${#FAILED_SERVICES[@]}"

if [[ ${#FAILED_SERVICES[@]} -gt 0 ]]; then
    log_error "Failed services: ${FAILED_SERVICES[*]}"
    exit 3
fi

log_info "Rollback successful"
exit 0

View File

@@ -0,0 +1,299 @@
#!/usr/bin/env bash
# Test Category Runner
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Run tests for a specific category across all test projects
# Usage: ./run-test-category.sh <category> [options]
#
# Options:
#   --fail-on-empty      Fail if no tests are found for the category
#   --collect-coverage   Collect code coverage data
#   --verbose            Show detailed output
#
# Exit Codes:
#   0 - Success (all tests passed or no tests found)
#   1 - One or more tests failed
#   2 - Invalid usage
set -euo pipefail
# Source shared libraries if available
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
if [[ -f "$REPO_ROOT/devops/scripts/lib/logging.sh" ]]; then
    source "$REPO_ROOT/devops/scripts/lib/logging.sh"
else
    # Minimal logging fallback
    log_info() { echo "[INFO] $*"; }
    log_error() { echo "[ERROR] $*" >&2; }
    # BUGFIX: the old one-liner `[[ -n "${DEBUG:-}" ]] && echo ...` made this
    # function return status 1 whenever DEBUG was unset, and since the script
    # runs under `set -e` the very first log_debug call aborted the whole run.
    # The if-form always returns 0.
    log_debug() { if [[ -n "${DEBUG:-}" ]]; then echo "[DEBUG] $*"; fi; }
    log_step() { echo "==> $*"; }
fi
if [[ -f "$REPO_ROOT/devops/scripts/lib/exit-codes.sh" ]]; then
    source "$REPO_ROOT/devops/scripts/lib/exit-codes.sh"
fi
# =============================================================================
# Constants
# =============================================================================
# These fragments are intentionally stored as single strings containing quoted
# arguments and escaped parens; find_test_projects expands them via `eval`.
readonly FIND_PATTERN='\( -name "*.Tests.csproj" -o -name "*UnitTests.csproj" -o -name "*SmokeTests.csproj" -o -name "*FixtureTests.csproj" -o -name "*IntegrationTests.csproj" \)'
readonly EXCLUDE_PATHS='! -path "*/node_modules/*" ! -path "*/.git/*" ! -path "*/bin/*" ! -path "*/obj/*"'
readonly EXCLUDE_FILES='! -name "StellaOps.TestKit.csproj" ! -name "*Testing.csproj"'
usage() {
    # Print CLI help to stdout. The heredoc body is user-facing text; keep it
    # in sync with the option parsing in main().
    cat <<EOF
Usage: $(basename "$0") <category> [options]
Run tests for a specific test category across all test projects.
Arguments:
  category              Test category (Unit, Architecture, Contract, Integration,
                        Security, Golden, Performance, Benchmark, AirGap, Chaos,
                        Determinism, Resilience, Observability)
Options:
  --fail-on-empty       Exit with error if no tests found for the category
  --collect-coverage    Collect XPlat Code Coverage data
  --verbose             Show detailed test output
  --results-dir DIR     Custom results directory (default: ./TestResults/<category>)
  --help                Show this help message
Environment Variables:
  DOTNET_VERSION        .NET SDK version (default: uses installed version)
  TZ                    Timezone (should be UTC for determinism)
Examples:
  $(basename "$0") Unit
  $(basename "$0") Integration --collect-coverage
  $(basename "$0") Performance --results-dir ./perf-results
EOF
}
find_test_projects() {
    # List every test-project .csproj under $1 (default: src), sorted for
    # deterministic ordering. FIND_PATTERN / EXCLUDE_PATHS / EXCLUDE_FILES are
    # single strings holding quoted find(1) arguments, so they must be
    # re-split by the shell via `eval` rather than expanded directly.
    local search_dir="${1:-src}"
    # Use eval to properly expand the find pattern
    eval "find '$search_dir' $FIND_PATTERN -type f $EXCLUDE_PATHS $EXCLUDE_FILES" | sort
}
sanitize_project_name() {
    # Flatten a project path into an identifier usable in artifact names:
    # every "/" becomes "_" and a trailing ".csproj" extension is dropped.
    # Pure parameter expansion -- no external sed processes needed.
    local flattened="${1//\//_}"
    echo "${flattened%.csproj}"
}
run_tests() {
    # Execute `dotnet test` for a single category across every discovered
    # test project, then print (and, on GitHub Actions, publish) a summary.
    #
    # Arguments:
    #   $1 category         - value for --filter "Category=..."
    #   $2 results_dir      - directory receiving TRX (and coverage) output
    #   $3 collect_coverage - "true" => add --collect:"XPlat Code Coverage"
    #   $4 verbose          - "true" => --verbosity normal, else minimal
    #   $5 fail_on_empty    - "true" => fail when no project produced a TRX
    #
    # Returns 0 on success; 1 if any project failed, or if fail_on_empty is
    # set and nothing ran. Absence of a TRX file is treated as "no tests for
    # this category" rather than a failure.
    local category="$1"
    local results_dir="$2"
    local collect_coverage="$3"
    local verbose="$4"
    local fail_on_empty="$5"
    local passed=0
    local failed=0
    local no_tests=0
    mkdir -p "$results_dir"
    local projects
    projects=$(find_test_projects "$REPO_ROOT/src")
    if [[ -z "$projects" ]]; then
        log_error "No test projects found"
        return 1
    fi
    local project_count
    # BUGFIX: `grep -c ... || echo "0"` produced TWO lines on no match,
    # because grep -c prints its "0" count before exiting non-zero; the extra
    # echo appended a second "0". `|| true` keeps the count grep printed.
    # The dot is escaped so only literal ".csproj" lines are counted.
    project_count=$(echo "$projects" | grep -c '\.csproj' || true)
    log_info "Found $project_count test projects"
    local category_lower
    category_lower=$(echo "$category" | tr '[:upper:]' '[:lower:]')
    while IFS= read -r proj; do
        [[ -z "$proj" ]] && continue
        local proj_name
        proj_name=$(sanitize_project_name "$proj")
        local trx_name="${proj_name}-${category_lower}.trx"
        # GitHub Actions grouping
        if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
            echo "::group::Testing $proj ($category)"
        else
            log_step "Testing $proj ($category)"
        fi
        # Build dotnet test command
        local cmd="dotnet test \"$proj\""
        cmd+=" --filter \"Category=$category\""
        cmd+=" --configuration Release"
        cmd+=" --logger \"trx;LogFileName=$trx_name\""
        cmd+=" --results-directory \"$results_dir\""
        if [[ "$collect_coverage" == "true" ]]; then
            cmd+=" --collect:\"XPlat Code Coverage\""
        fi
        if [[ "$verbose" == "true" ]]; then
            cmd+=" --verbosity normal"
        else
            cmd+=" --verbosity minimal"
        fi
        # Execute tests (|| captures the exit code without tripping set -e)
        local exit_code=0
        eval "$cmd" 2>&1 || exit_code=$?
        # BUGFIX: counters were bumped with ((var++)), which returns the
        # PRE-increment value; with the counter at 0 the arithmetic command
        # failed and `set -e` aborted the run on the first project. Plain
        # assignments always succeed.
        if [[ $exit_code -eq 0 ]]; then
            # Check if TRX was created (tests actually ran)
            if [[ -f "$results_dir/$trx_name" ]]; then
                passed=$((passed + 1))
                log_info "PASS: $proj"
            else
                no_tests=$((no_tests + 1))
                log_debug "SKIP: $proj (no $category tests)"
            fi
        else
            # Check if failure was due to no tests matching the filter
            if [[ -f "$results_dir/$trx_name" ]]; then
                failed=$((failed + 1))
                log_error "FAIL: $proj"
            else
                no_tests=$((no_tests + 1))
                log_debug "SKIP: $proj (no $category tests or build error)"
            fi
        fi
        # Close GitHub Actions group
        if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
            echo "::endgroup::"
        fi
    done <<< "$projects"
    # Generate summary
    log_info ""
    log_info "=========================================="
    log_info "$category Test Summary"
    log_info "=========================================="
    log_info "Passed:   $passed"
    log_info "Failed:   $failed"
    log_info "No Tests: $no_tests"
    log_info "Total:    $project_count"
    log_info "=========================================="
    # GitHub Actions summary
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        {
            echo "## $category Test Summary"
            echo ""
            echo "| Metric | Count |"
            echo "|--------|-------|"
            echo "| Passed | $passed |"
            echo "| Failed | $failed |"
            echo "| No Tests | $no_tests |"
            echo "| Total Projects | $project_count |"
        } >> "$GITHUB_STEP_SUMMARY"
    fi
    # Determine exit code
    if [[ $failed -gt 0 ]]; then
        return 1
    fi
    if [[ "$fail_on_empty" == "true" ]] && [[ $passed -eq 0 ]]; then
        log_error "No tests found for category: $category"
        return 1
    fi
    return 0
}
# =============================================================================
# Main
# =============================================================================
main() {
    # Entry point: parse CLI options, validate the requested category, then
    # hand off to run_tests. Exits 2 on any usage error.
    local selected=""
    local out_dir=""
    local with_coverage="false"
    local loud="false"
    local require_tests="false"
    # Consume arguments one flag at a time.
    while [[ $# -gt 0 ]]; do
        case "$1" in
            --help|-h)
                usage
                exit 0
                ;;
            --fail-on-empty)
                require_tests="true"
                shift
                ;;
            --collect-coverage)
                with_coverage="true"
                shift
                ;;
            --verbose|-v)
                loud="true"
                shift
                ;;
            --results-dir)
                out_dir="$2"
                shift 2
                ;;
            -*)
                log_error "Unknown option: $1"
                usage
                exit 2
                ;;
            *)
                # First bare argument is the category; a second one is an error.
                if [[ -z "$selected" ]]; then
                    selected="$1"
                else
                    log_error "Unexpected argument: $1"
                    usage
                    exit 2
                fi
                shift
                ;;
        esac
    done
    if [[ -z "$selected" ]]; then
        log_error "Category is required"
        usage
        exit 2
    fi
    # Reject categories outside the known set.
    local valid_categories="Unit Architecture Contract Integration Security Golden Performance Benchmark AirGap Chaos Determinism Resilience Observability"
    if ! echo "$valid_categories" | grep -qw "$selected"; then
        log_error "Invalid category: $selected"
        log_error "Valid categories: $valid_categories"
        exit 2
    fi
    # Default results directory is keyed by category.
    out_dir="${out_dir:-./TestResults/$selected}"
    log_info "Running $selected tests..."
    log_info "Results directory: $out_dir"
    run_tests "$selected" "$out_dir" "$with_coverage" "$loud" "$require_tests"
}
main "$@"

View File

@@ -0,0 +1,260 @@
#!/usr/bin/env bash
# Migration Validation Script
# Validates migration naming conventions, detects duplicates, and checks for issues.
#
# Usage:
#   ./validate-migrations.sh [--strict] [--fix-scanner]
#
# Options:
#   --strict        Exit with error on any warning
#   --fix-scanner   Generate rename commands for Scanner duplicates
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
STRICT_MODE=false
FIX_SCANNER=false
EXIT_CODE=0
# Parse arguments. `for arg in "$@"` iterates over a snapshot of the
# positional parameters, so the `shift` calls the old loop contained did not
# affect the iteration at all -- they were misleading no-ops and are removed.
# Unrecognized arguments are deliberately ignored, as before.
for arg in "$@"; do
    case $arg in
        --strict)
            STRICT_MODE=true
            ;;
        --fix-scanner)
            FIX_SCANNER=true
            ;;
    esac
done
echo "=== Migration Validation ==="
echo "Repository: $REPO_ROOT"
echo ""
# Colors for output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# Track issues (populated by the check_* functions below)
ERRORS=()
WARNINGS=()
# Flag numeric prefixes shared by more than one migration file in a directory.
check_duplicates() {
    # Appends one ERRORS entry per duplicated numeric prefix found among the
    # .sql files directly inside "$1"; "$2" is the module label for messages.
    local target_dir="$1"
    local label="$2"
    if [ ! -d "$target_dir" ]; then
        return
    fi
    # Reduce each file name to its leading digits, then keep repeated values.
    local dup_prefixes
    dup_prefixes=$(find "$target_dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null \
        | sed -E 's/^([0-9]+)_.*/\1/' \
        | sort \
        | uniq -d)
    local prefix file_list
    for prefix in $dup_prefixes; do
        # Comma-join the colliding file names for a single-line report.
        file_list=$(find "$target_dir" -maxdepth 1 -name "${prefix}_*.sql" -printf "%f\n" \
            | tr '\n' ', ' | sed 's/,$//')
        ERRORS+=("[$label] Duplicate prefix $prefix: $file_list")
    done
}
# Function to check naming convention
check_naming() {
    # Validate every .sql file directly inside "$1" against the accepted
    # migration naming patterns, appending a WARNINGS entry for each
    # non-conforming file. "$2" is the module label used in messages.
    local dir="$1"
    local module="$2"
    if [ ! -d "$dir" ]; then
        return
    fi
    # BUGFIX: this loop used to be fed by a pipeline (`find ... | while read`),
    # which runs the loop body in a subshell -- every WARNINGS+=() update was
    # silently discarded, so naming violations were never reported. Reading
    # from process substitution keeps the loop in the current shell.
    while read -r file; do
        # Check standard pattern: NNN_description.sql
        if [[ "$file" =~ ^[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
            continue # Valid standard
        fi
        # Check seed pattern: SNNN_description.sql
        if [[ "$file" =~ ^S[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
            continue # Valid seed
        fi
        # Check data migration pattern: DMNNN_description.sql
        if [[ "$file" =~ ^DM[0-9]{3}_[a-z0-9_]+\.sql$ ]]; then
            continue # Valid data migration
        fi
        # Check for Flyway-style
        if [[ "$file" =~ ^V[0-9]+.*\.sql$ ]]; then
            WARNINGS+=("[$module] Flyway-style naming: $file (consider NNN_description.sql)")
            continue
        fi
        # Check for EF Core timestamp style
        if [[ "$file" =~ ^[0-9]{14,}_.*\.sql$ ]]; then
            WARNINGS+=("[$module] EF Core timestamp naming: $file (consider NNN_description.sql)")
            continue
        fi
        # Check for 4-digit prefix
        if [[ "$file" =~ ^[0-9]{4}_.*\.sql$ ]]; then
            WARNINGS+=("[$module] 4-digit prefix: $file (standard is 3-digit NNN_description.sql)")
            continue
        fi
        # Non-standard
        WARNINGS+=("[$module] Non-standard naming: $file")
    done < <(find "$dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null)
}
# Function to check for dangerous operations in startup migrations
check_dangerous_ops() {
    # Scan startup migrations (numeric prefix 001-099) inside "$1" for
    # destructive SQL (unsafe DROP TABLE, DROP COLUMN, TRUNCATE) and append
    # ERRORS entries. "$2" is the module label used in messages.
    local dir="$1"
    local module="$2"
    if [ ! -d "$dir" ]; then
        return
    fi
    # BUGFIX: read from process substitution instead of a pipeline so the
    # ERRORS+=() updates made inside the loop are not lost in a subshell.
    while read -r file; do
        local filepath="$dir/$file"
        local prefix
        prefix=$(echo "$file" | sed -E 's/^([0-9]+)_.*/\1/')
        # Only check startup migrations (001-099); the ^0NN regex already
        # guarantees the value is below 100.
        if [[ "$prefix" =~ ^0[0-9]{2}$ ]]; then
            # Check for DROP TABLE without IF EXISTS.
            # BUGFIX: the previous pattern used a PCRE lookahead `(?!...)`,
            # which is invalid in grep -E; grep exited with status 2 (stderr
            # suppressed by 2>/dev/null) and this check never fired. ERE has
            # no lookahead, so approximate it per file: flag files containing
            # DROP TABLE unless a DROP TABLE IF EXISTS form is present (a file
            # mixing both forms is not flagged -- best-effort heuristic).
            if grep -qiE "DROP\s+TABLE" "$filepath" 2>/dev/null && \
               ! grep -qiE "DROP\s+TABLE\s+IF\s+EXISTS" "$filepath" 2>/dev/null; then
                ERRORS+=("[$module] $file: DROP TABLE without IF EXISTS in startup migration")
            fi
            # Check for DROP COLUMN (breaking change in startup)
            if grep -qiE "ALTER\s+TABLE.*DROP\s+COLUMN" "$filepath" 2>/dev/null; then
                ERRORS+=("[$module] $file: DROP COLUMN in startup migration (should be release migration 100+)")
            fi
            # Check for TRUNCATE
            if grep -qiE "^\s*TRUNCATE" "$filepath" 2>/dev/null; then
                ERRORS+=("[$module] $file: TRUNCATE in startup migration")
            fi
        fi
    done < <(find "$dir" -maxdepth 1 -name "*.sql" -printf "%f\n" 2>/dev/null)
}
# Scan all module migration directories
echo "Scanning migration directories..."
echo ""
# Define module migration paths
# Keys are the display labels used as [Module] prefixes in ERROR/WARNING
# messages; values are migration directories relative to the repository root.
# Modules whose directory does not exist are skipped silently by the loop.
declare -A MIGRATION_PATHS
MIGRATION_PATHS=(
    ["Authority"]="src/Authority/__Libraries/StellaOps.Authority.Storage.Postgres/Migrations"
    ["Concelier"]="src/Concelier/__Libraries/StellaOps.Concelier.Storage.Postgres/Migrations"
    ["Excititor"]="src/Excititor/__Libraries/StellaOps.Excititor.Storage.Postgres/Migrations"
    ["Policy"]="src/Policy/__Libraries/StellaOps.Policy.Storage.Postgres/Migrations"
    ["Scheduler"]="src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Postgres/Migrations"
    ["Notify"]="src/Notify/__Libraries/StellaOps.Notify.Storage.Postgres/Migrations"
    ["Scanner"]="src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations"
    ["Scanner.Triage"]="src/Scanner/__Libraries/StellaOps.Scanner.Triage/Migrations"
    ["Attestor"]="src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations"
    ["Signer"]="src/Signer/__Libraries/StellaOps.Signer.KeyManagement/Migrations"
    ["Signals"]="src/Signals/StellaOps.Signals.Storage.Postgres/Migrations"
    ["EvidenceLocker"]="src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Db/Migrations"
    ["ExportCenter"]="src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Db/Migrations"
    ["IssuerDirectory"]="src/IssuerDirectory/StellaOps.IssuerDirectory/StellaOps.IssuerDirectory.Storage.Postgres/Migrations"
    ["Orchestrator"]="src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/migrations"
    ["TimelineIndexer"]="src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Db/Migrations"
    ["BinaryIndex"]="src/BinaryIndex/__Libraries/StellaOps.BinaryIndex.Persistence/Migrations"
    ["Unknowns"]="src/Unknowns/__Libraries/StellaOps.Unknowns.Storage.Postgres/Migrations"
    ["VexHub"]="src/VexHub/__Libraries/StellaOps.VexHub.Storage.Postgres/Migrations"
)
# Run all three validators per existing directory. NOTE: bash associative
# arrays iterate in unspecified order, so the per-module output order may
# differ between runs.
for module in "${!MIGRATION_PATHS[@]}"; do
    path="$REPO_ROOT/${MIGRATION_PATHS[$module]}"
    if [ -d "$path" ]; then
        echo "Checking: $module"
        check_duplicates "$path" "$module"
        check_naming "$path" "$module"
        check_dangerous_ops "$path" "$module"
    fi
done
echo ""
# Report errors
if [ ${#ERRORS[@]} -gt 0 ]; then
echo -e "${RED}=== ERRORS (${#ERRORS[@]}) ===${NC}"
for error in "${ERRORS[@]}"; do
echo -e "${RED}$error${NC}"
done
EXIT_CODE=1
echo ""
fi
# Report warnings
if [ ${#WARNINGS[@]} -gt 0 ]; then
echo -e "${YELLOW}=== WARNINGS (${#WARNINGS[@]}) ===${NC}"
for warning in "${WARNINGS[@]}"; do
echo -e "${YELLOW}$warning${NC}"
done
if [ "$STRICT_MODE" = true ]; then
EXIT_CODE=1
fi
echo ""
fi
# Scanner fix suggestions
if [ "$FIX_SCANNER" = true ]; then
echo "=== Scanner Migration Rename Suggestions ==="
echo "# Run these commands to fix Scanner duplicate migrations:"
echo ""
SCANNER_DIR="$REPO_ROOT/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations"
if [ -d "$SCANNER_DIR" ]; then
# Map old names to new sequential numbers
cat << 'EOF'
# Before running: backup the schema_migrations table!
# After renaming: update schema_migrations.migration_name to match new names
cd src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations
# Fix duplicate 009 prefixes
git mv 009_call_graph_tables.sql 020_call_graph_tables.sql
git mv 009_smart_diff_tables_search_path.sql 021_smart_diff_tables_search_path.sql
# Fix duplicate 010 prefixes
git mv 010_reachability_drift_tables.sql 022_reachability_drift_tables.sql
git mv 010_scanner_api_ingestion.sql 023_scanner_api_ingestion.sql
git mv 010_smart_diff_priority_score_widen.sql 024_smart_diff_priority_score_widen.sql
# Fix duplicate 014 prefixes
git mv 014_epss_triage_columns.sql 025_epss_triage_columns.sql
git mv 014_vuln_surfaces.sql 026_vuln_surfaces.sql
# Renumber subsequent migrations
git mv 011_epss_raw_layer.sql 027_epss_raw_layer.sql
git mv 012_epss_signal_layer.sql 028_epss_signal_layer.sql
git mv 013_witness_storage.sql 029_witness_storage.sql
git mv 015_vuln_surface_triggers_update.sql 030_vuln_surface_triggers_update.sql
git mv 016_reach_cache.sql 031_reach_cache.sql
git mv 017_idempotency_keys.sql 032_idempotency_keys.sql
git mv 018_binary_evidence.sql 033_binary_evidence.sql
git mv 019_func_proof_tables.sql 034_func_proof_tables.sql
EOF
fi
echo ""
fi
# Summary
if [ $EXIT_CODE -eq 0 ]; then
echo -e "${GREEN}=== VALIDATION PASSED ===${NC}"
else
echo -e "${RED}=== VALIDATION FAILED ===${NC}"
fi
exit $EXIT_CODE