devops folders consolidate
130
deploy/tools/validation/check-channel-alignment.py
Normal file
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
"""
Ensure deployment bundles reference the images defined in a release manifest.

Usage:
  ./deploy/tools/validation/check-channel-alignment.py \
      --release deploy/releases/2025.10-edge.yaml \
      --target deploy/helm/stellaops/values-dev.yaml \
      --target deploy/compose/docker-compose.dev.yaml

For every target file, the script scans `image:` declarations and verifies that
any image belonging to a repository listed in the release manifest matches the
exact digest or tag recorded there. Images outside the manifest (for example,
supporting services such as `valkey`) are ignored.
"""

from __future__ import annotations

import argparse
import pathlib
import re
import sys
from typing import Dict, Iterable, List, Optional, Set

IMAGE_LINE = re.compile(r"^\s*image:\s*['\"]?(?P<image>[^'\"\s]+)['\"]?\s*$")
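# Matches single-line declarations such as `  image: "ghcr.io/acme/app:1.2.3"`
# (quotes optional) and captures the reference without the surrounding quotes;
# multi-line or templated image values are not picked up.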


def extract_images(path: pathlib.Path) -> List[str]:
    images: List[str] = []
    for line in path.read_text(encoding="utf-8").splitlines():
        match = IMAGE_LINE.match(line)
        if match:
            images.append(match.group("image"))
    return images


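# A sketch of the repository splits performed by image_repo() below (illustrative values):
#   registry.example.com/app@sha256:abcd...  -> registry.example.com/app
#   localhost:5000/app:1.2.3                 -> localhost:5000/app
#   localhost:5000/app                       -> localhost:5000/app  (colon marks a port, not a tag)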
def image_repo(image: str) -> str:
    if "@" in image:
        return image.split("@", 1)[0]
    # Split on the last colon to preserve registries with ports (e.g. localhost:5000)
    if ":" in image:
        prefix, tag = image.rsplit(":", 1)
        if "/" in tag:
            # the colon belongs to a registry port, not a tag
            return image
        return prefix
    return image


def load_release_map(release_path: pathlib.Path) -> Dict[str, str]:
    release_map: Dict[str, str] = {}
    for image in extract_images(release_path):
        repo = image_repo(image)
        release_map[repo] = image
    return release_map


def check_target(
    target_path: pathlib.Path,
    release_map: Dict[str, str],
    ignore_repos: Set[str],
) -> List[str]:
    errors: List[str] = []
    for image in extract_images(target_path):
        repo = image_repo(image)
        if repo in ignore_repos:
            continue
        if repo not in release_map:
            continue
        expected = release_map[repo]
        if image != expected:
            errors.append(
                f"{target_path}: {image} does not match release value {expected}"
            )
    return errors


def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release",
        required=True,
        type=pathlib.Path,
        help="Path to the release manifest (YAML)",
    )
    parser.add_argument(
        "--target",
        action="append",
        required=True,
        type=pathlib.Path,
        help="Deployment profile to validate against the release manifest",
    )
    parser.add_argument(
        "--ignore-repo",
        action="append",
        default=[],
        help="Repository prefix to ignore (may be repeated)",
    )
    return parser.parse_args(argv)


def main(argv: Optional[Iterable[str]] = None) -> int:
    args = parse_args(argv)

    release_map = load_release_map(args.release)
    ignore_repos = {repo.rstrip("/") for repo in args.ignore_repo}

    if not release_map:
        print(f"error: no images found in release manifest {args.release}", file=sys.stderr)
        return 2

    total_errors: List[str] = []
    for target in args.target:
        if not target.exists():
            total_errors.append(f"{target}: file not found")
            continue
        total_errors.extend(check_target(target, release_map, ignore_repos))

    if total_errors:
        print("✖ channel alignment check failed:", file=sys.stderr)
        for err in total_errors:
            print(f"  - {err}", file=sys.stderr)
        return 1

    print("✓ deployment profiles reference release images for the inspected repositories.")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
61
deploy/tools/validation/validate-profiles.sh
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env bash
set -euo pipefail

# This script lives in deploy/tools/validation/, so the deploy/ root is two levels up.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
COMPOSE_DIR="$ROOT_DIR/compose"
HELM_DIR="$ROOT_DIR/helm/stellaops"

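# Each entry is "<compose file>:<env example file>"; an empty env part means the
# profile is validated without an --env-file.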
compose_profiles=(
  "docker-compose.dev.yaml:env/dev.env.example"
  "docker-compose.stage.yaml:env/stage.env.example"
  "docker-compose.prod.yaml:env/prod.env.example"
  "docker-compose.airgap.yaml:env/airgap.env.example"
  "docker-compose.mirror.yaml:env/mirror.env.example"
  "docker-compose.telemetry.yaml:"
  "docker-compose.telemetry-storage.yaml:"
)

docker_ready=false
if command -v docker >/dev/null 2>&1; then
  if docker compose version >/dev/null 2>&1; then
    docker_ready=true
  else
    echo "⚠️ docker CLI present but Compose plugin unavailable; skipping compose validation" >&2
  fi
else
  echo "⚠️ docker CLI not found; skipping compose validation" >&2
fi

if [[ "$docker_ready" == "true" ]]; then
  for entry in "${compose_profiles[@]}"; do
    IFS=":" read -r compose_file env_file <<<"$entry"
    if [[ -n "$env_file" ]]; then
      printf '→ validating %s with %s\n' "$compose_file" "$env_file"
      docker compose \
        --env-file "$COMPOSE_DIR/$env_file" \
        -f "$COMPOSE_DIR/$compose_file" config >/dev/null
    else
      printf '→ validating %s (no env file)\n' "$compose_file"
      docker compose -f "$COMPOSE_DIR/$compose_file" config >/dev/null
    fi
  done
fi

helm_values=(
  "$HELM_DIR/values-dev.yaml"
  "$HELM_DIR/values-stage.yaml"
  "$HELM_DIR/values-prod.yaml"
  "$HELM_DIR/values-airgap.yaml"
  "$HELM_DIR/values-mirror.yaml"
)

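# `helm lint` checks the chart for structural issues; `helm template` additionally
# renders the manifests with each values overlay, surfacing templating errors that
# lint alone misses.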
if command -v helm >/dev/null 2>&1; then
  for values in "${helm_values[@]}"; do
    printf '→ linting Helm chart with %s\n' "$(basename "$values")"
    helm lint "$HELM_DIR" -f "$values"
    helm template test-release "$HELM_DIR" -f "$values" >/dev/null
  done
else
  echo "⚠️ helm CLI not found; skipping Helm lint/template" >&2
fi

printf 'Profiles validated (where tooling was available).\n'
183
deploy/tools/validation/validate_restore_sources.py
Normal file
@@ -0,0 +1,183 @@
#!/usr/bin/env python3

"""
Validate NuGet source ordering for StellaOps.

Ensures `local-nuget` is the highest-priority feed in both NuGet.config and the
Directory.Build.props restore configuration. Fails fast with actionable errors
so CI/offline-kit workflows can assert deterministic restore ordering.
"""

from __future__ import annotations

import argparse
import subprocess
import sys
import xml.etree.ElementTree as ET
from pathlib import Path


# This script sits in deploy/tools/validation/, so the repository root is three levels up.
REPO_ROOT = Path(__file__).resolve().parents[3]
NUGET_CONFIG = REPO_ROOT / "NuGet.config"
ROOT_PROPS = REPO_ROOT / "Directory.Build.props"
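# The first <add> keys in NuGet.config must appear in exactly this order; extra
# feeds may follow after them.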
EXPECTED_SOURCE_KEYS = ["local", "dotnet-public", "nuget.org"]


class ValidationError(Exception):
    """Raised when validation fails."""


def _fail(message: str) -> None:
    raise ValidationError(message)


def _parse_xml(path: Path) -> ET.ElementTree:
    try:
        return ET.parse(path)
    except FileNotFoundError:
        _fail(f"Missing required file: {path}")
    except ET.ParseError as exc:
        _fail(f"Could not parse XML for {path}: {exc}")


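# The shape validate_nuget_config() expects (illustrative; the feed URLs are
# placeholders, only the keys, their order, and the local path are checked):
#
#   <packageSources>
#     <clear />
#     <add key="local" value="./local-nuget" />
#     <add key="dotnet-public" value="https://example.invalid/dotnet-public/index.json" />
#     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
#   </packageSources>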
def validate_nuget_config() -> None:
    tree = _parse_xml(NUGET_CONFIG)
    root = tree.getroot()

    package_sources = root.find("packageSources")
    if package_sources is None:
        _fail("NuGet.config must declare a <packageSources> section.")

    children = list(package_sources)
    if not children or children[0].tag != "clear":
        _fail("NuGet.config packageSources must begin with a <clear /> element.")

    adds = [child for child in children if child.tag == "add"]
    if not adds:
        _fail("NuGet.config packageSources must define at least one <add> entry.")

    keys = [add.attrib.get("key") for add in adds]
    if keys[: len(EXPECTED_SOURCE_KEYS)] != EXPECTED_SOURCE_KEYS:
        formatted = ", ".join(keys) or "<empty>"
        _fail(
            "NuGet.config packageSources must list feeds in the order "
            f"{EXPECTED_SOURCE_KEYS}. Found: {formatted}"
        )

    local_value = adds[0].attrib.get("value", "")
    if Path(local_value).name != "local-nuget":
        _fail(
            "NuGet.config local feed should point at the repo-local mirror "
            f"'local-nuget', found value '{local_value}'."
        )

    clear = package_sources.find("clear")
    if clear is None:
        _fail("NuGet.config packageSources must start with <clear /> to avoid inherited feeds.")


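# A Directory.Build.props fragment that would satisfy validate_directory_build_props()
# (illustrative; property values other than the ordering are not inspected):
#
#   <PropertyGroup>
#     <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
#     <RestoreSources>$(_StellaOpsDefaultRestoreSources);$(RestoreSources)</RestoreSources>
#   </PropertyGroup>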
def validate_directory_build_props() -> None:
    tree = _parse_xml(ROOT_PROPS)
    root = tree.getroot()
    defaults = None
    for element in root.findall(".//_StellaOpsDefaultRestoreSources"):
        defaults = [fragment.strip() for fragment in element.text.split(";") if fragment.strip()]
        break

    if defaults is None:
        _fail("Directory.Build.props must define _StellaOpsDefaultRestoreSources.")

    expected_props = [
        "$(StellaOpsLocalNuGetSource)",
        "$(StellaOpsDotNetPublicSource)",
        "$(StellaOpsNuGetOrgSource)",
    ]
    if defaults != expected_props:
        _fail(
            "Directory.Build.props _StellaOpsDefaultRestoreSources must list feeds "
            f"in the order {expected_props}. Found: {defaults}"
        )

    restore_nodes = root.findall(".//RestoreSources")
    if not restore_nodes:
        _fail("Directory.Build.props must override RestoreSources to force deterministic ordering.")

    uses_default_first = any(
        node.text
        and node.text.strip().startswith("$(_StellaOpsDefaultRestoreSources)")
        for node in restore_nodes
    )
    if not uses_default_first:
        _fail(
            "Directory.Build.props RestoreSources override must place "
            "$(_StellaOpsDefaultRestoreSources) at the beginning."
        )


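# ripgrep's --files mode lists files while honouring .gitignore, which keeps the
# scan fast and skips build output; the -g glob restricts it to NuGet.config variants.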
def assert_single_nuget_config() -> None:
    extra_configs: list[Path] = []
    configs: set[Path] = set()
    for glob in ("NuGet.config", "nuget.config"):
        try:
            result = subprocess.run(
                ["rg", "--files", f"-g{glob}"],
                check=False,
                capture_output=True,
                text=True,
                cwd=REPO_ROOT,
            )
        except FileNotFoundError:
            _fail("ripgrep (rg) is required for validation but was not found on PATH.")
        if result.returncode not in (0, 1):
            _fail(
                f"ripgrep failed while searching for {glob}: {result.stderr.strip() or result.returncode}"
            )
        for line in result.stdout.splitlines():
            configs.add((REPO_ROOT / line).resolve())

    configs.discard(NUGET_CONFIG.resolve())
    extra_configs.extend(sorted(configs))
    if extra_configs:
        formatted = "\n  ".join(str(path.relative_to(REPO_ROOT)) for path in extra_configs)
        _fail(
            "Unexpected additional NuGet.config files detected. "
            "Consolidate feed configuration in the repo root:\n  "
            f"{formatted}"
        )


def parse_args(argv: list[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description="Verify StellaOps NuGet feeds prioritise the local mirror."
    )
    parser.add_argument(
        "--skip-rg",
        action="store_true",
        help="Skip ripgrep discovery of extra NuGet.config files (useful for focused runs).",
    )
    return parser.parse_args(argv)


def main(argv: list[str]) -> int:
    args = parse_args(argv)
    validations = [
        ("NuGet.config ordering", validate_nuget_config),
        ("Directory.Build.props restore override", validate_directory_build_props),
    ]
    if not args.skip_rg:
        validations.append(("single NuGet.config", assert_single_nuget_config))

    for label, check in validations:
        try:
            check()
        except ValidationError as exc:
            sys.stderr.write(f"[FAIL] {label}: {exc}\n")
            return 1
        else:
            sys.stdout.write(f"[OK] {label}\n")

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))