446 lines
		
	
	
		
			16 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			446 lines
		
	
	
		
			16 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
#!/usr/bin/env python3
"""Package the StellaOps Offline Kit with deterministic artefacts and manifest."""

from __future__ import annotations

import argparse
import datetime as dt
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
from collections import OrderedDict
from pathlib import Path
from typing import Any, Iterable, Mapping, MutableMapping, Optional

# Repository root: this file lives two levels below it (e.g. ops/offline-kit/).
REPO_ROOT = Path(__file__).resolve().parents[2]
RELEASE_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "release"
TELEMETRY_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "telemetry"
# Where the telemetry packer (package_offline_bundle.py) writes its bundle.
TELEMETRY_BUNDLE_PATH = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz"

# Make the release tooling importable: verify_release and mirror_debug_store
# live in ops/devops/release and are not installed as a package.
if str(RELEASE_TOOLS_DIR) not in sys.path:
    sys.path.insert(0, str(RELEASE_TOOLS_DIR))

from verify_release import (  # type: ignore[import-not-found]
    load_manifest,
    resolve_path,
    verify_release,
)

import mirror_debug_store  # type: ignore[import-not-found]

DEFAULT_RELEASE_DIR = REPO_ROOT / "out" / "release"
DEFAULT_STAGING_DIR = REPO_ROOT / "out" / "offline-kit" / "staging"
DEFAULT_OUTPUT_DIR = REPO_ROOT / "out" / "offline-kit" / "dist"

# Maps a release-manifest artefact key to its destination subdirectory in the kit.
ARTIFACT_TARGETS = {
    "sbom": Path("sboms"),
    "provenance": Path("attest"),
    "signature": Path("signatures"),
    "metadata": Path("metadata/docker"),
}
 | |
| 
 | |
| 
 | |
class CommandError(RuntimeError):
    """Raised when an external command fails.

    The message carries the exit code plus the command's captured
    stdout/stderr (see run()) for post-mortem diagnosis.
    """
 | |
| 
 | |
| 
 | |
def run(cmd: Iterable[str], *, cwd: Optional[Path] = None, env: Optional[Mapping[str, str]] = None) -> str:
    """Execute *cmd* and return its stdout.

    Args:
        cmd: Command and arguments; any iterable of strings.
        cwd: Optional working directory for the child process.
        env: Optional extra environment variables layered over os.environ.

    Returns:
        The command's captured stdout (text mode).

    Raises:
        CommandError: if the command exits with a non-zero status; the
            message includes the exit code, command line, stdout and stderr.
    """
    # Materialise once: *cmd* may be a one-shot iterator, and we need it both
    # for subprocess.run and for the error message below.
    argv = list(cmd)
    process_env = dict(os.environ)
    if env:
        process_env.update(env)
    result = subprocess.run(
        argv,
        cwd=str(cwd) if cwd else None,
        env=process_env,
        check=False,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        # Join argv (not cmd): a generator passed as cmd is exhausted by now.
        raise CommandError(
            f"Command failed ({result.returncode}): {' '.join(argv)}\nSTDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}"
        )
    return result.stdout
 | |
| 
 | |
| 
 | |
def compute_sha256(path: Path) -> str:
    """Return the hexadecimal SHA-256 digest of the file at *path*."""
    digest = hashlib.sha256()
    with path.open("rb") as stream:
        # Stream in 1 MiB chunks so large artefacts never load fully into memory.
        while chunk := stream.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()
 | |
| 
 | |
| 
 | |
def utc_now_iso() -> str:
    """Return the current UTC time as ISO-8601 with a 'Z' suffix, second precision."""
    now = dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0)
    return now.isoformat().replace("+00:00", "Z")
 | |
| 
 | |
| 
 | |
def safe_component_name(name: str) -> str:
    """Normalise *name* to a filesystem-safe slug.

    Trims whitespace, lower-cases, and replaces every character outside
    [A-Za-z0-9_.-] with a hyphen.
    """
    lowered = name.strip().lower()
    return re.sub(r"[^A-Za-z0-9_.-]", "-", lowered)
 | |
| 
 | |
| 
 | |
def clean_directory(path: Path) -> None:
    """Reset *path* to an empty directory, creating parents as needed.

    Any contents from a previous run are removed first so the staging area
    always starts from a known-empty state.
    """
    if path.exists():
        shutil.rmtree(path)  # drop stale contents from an earlier build
    path.mkdir(parents=True, exist_ok=True)
 | |
| 
 | |
| 
 | |
def run_python_analyzer_smoke() -> None:
    """Execute the repository's Python analyzer smoke-test shell script."""
    smoke_script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh"
    run(["bash", str(smoke_script)], cwd=REPO_ROOT)
 | |
| 
 | |
| 
 | |
def copy_if_exists(source: Path, target: Path) -> None:
    """Copy *source* to *target*, handling files and directory trees alike.

    Missing sources are silently skipped, making this safe for optional
    repository assets.
    """
    if source.is_file():
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(source, target)
    elif source.is_dir():
        shutil.copytree(source, target, dirs_exist_ok=True)
 | |
| 
 | |
| 
 | |
def copy_release_manifests(release_dir: Path, staging_dir: Path) -> None:
    """Mirror the release manifests (and their checksum files) into staging/manifest.

    Only files that actually exist in *release_dir* are copied.
    """
    destination = staging_dir / "manifest"
    destination.mkdir(parents=True, exist_ok=True)
    wanted = ("release.yaml", "release.yaml.sha256", "release.json", "release.json.sha256")
    for candidate in (release_dir / name for name in wanted):
        if candidate.exists():
            shutil.copy2(candidate, destination / candidate.name)
 | |
| 
 | |
| 
 | |
def copy_component_artifacts(
    manifest: Mapping[str, Any],
    release_dir: Path,
    staging_dir: Path,
) -> None:
    """Stage each component's sbom/provenance/signature/metadata artefacts.

    Components are processed in name order for deterministic output; file
    names are prefixed with the sanitised component name to avoid clashes.

    Raises:
        FileNotFoundError: if the manifest references an artefact that does
            not exist under *release_dir*.
    """
    components = manifest.get("components") or []
    ordered = sorted(components, key=lambda entry: str(entry.get("name", "")))
    for component in ordered:
        if not isinstance(component, Mapping):
            continue
        component_name = safe_component_name(str(component.get("name", "component")))
        for artefact_key, destination_root in ARTIFACT_TARGETS.items():
            entry = component.get(artefact_key)
            if not entry or not isinstance(entry, Mapping):
                continue
            raw_path = entry.get("path")
            if not raw_path:
                continue
            source = resolve_path(str(raw_path), release_dir)
            if not source.exists():
                raise FileNotFoundError(f"Component '{component_name}' {artefact_key} artefact not found: {source}")
            destination = staging_dir / destination_root
            destination.mkdir(parents=True, exist_ok=True)
            file_name = f"{component_name}-{source.name}" if source.name else component_name
            shutil.copy2(source, destination / file_name)
 | |
| 
 | |
| 
 | |
def copy_collections(
    manifest: Mapping[str, Any],
    release_dir: Path,
    staging_dir: Path,
) -> None:
    """Stage the Helm charts and compose files listed in the release manifest.

    Raises:
        FileNotFoundError: if a listed artefact is missing from *release_dir*.
    """
    plans = (("charts", Path("charts")), ("compose", Path("compose")))
    for collection, subdir in plans:
        for entry in manifest.get(collection) or []:
            if not isinstance(entry, Mapping):
                continue
            raw_path = entry.get("path")
            if not raw_path:
                continue
            source = resolve_path(str(raw_path), release_dir)
            if not source.exists():
                raise FileNotFoundError(f"{collection} artefact not found: {source}")
            destination = staging_dir / subdir
            destination.mkdir(parents=True, exist_ok=True)
            shutil.copy2(source, destination / source.name)
 | |
| 
 | |
| 
 | |
def copy_debug_store(release_dir: Path, staging_dir: Path) -> None:
    """Mirror the release debug store into the kit via the mirror_debug_store tool."""
    cli_args = [
        "--release-dir", str(release_dir),
        "--offline-kit-dir", str(staging_dir),
    ]
    mirror_debug_store.main(cli_args)
 | |
| 
 | |
| 
 | |
def copy_plugins_and_assets(staging_dir: Path) -> None:
    """Stage scanner plugins, certificates, seed data, and offline-kit docs."""
    tree_sources = (
        (REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner"),
        (REPO_ROOT / "certificates", staging_dir / "certificates"),
        (REPO_ROOT / "seed-data", staging_dir / "seed-data"),
    )
    for source, target in tree_sources:
        copy_if_exists(source, target)
    docs_dir = staging_dir / "docs"
    docs_dir.mkdir(parents=True, exist_ok=True)
    doc_sources = (
        (REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", "24_OFFLINE_KIT.md"),
        (REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", "telemetry-collector.md"),
        (REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", "telemetry-storage.md"),
    )
    for source, doc_name in doc_sources:
        copy_if_exists(source, docs_dir / doc_name)
 | |
| 
 | |
| 
 | |
def package_telemetry_bundle(staging_dir: Path) -> None:
    """Build the telemetry offline bundle and stage it under <staging>/telemetry.

    No-op when the telemetry packaging script is absent from the checkout.
    The bundle's .sha256 companion file is staged alongside it when present.
    """
    script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py"
    if not script.exists():
        return
    TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True)
    # Use the running interpreter rather than whatever "python" happens to be
    # on PATH (may be missing or a different major version on minimal hosts).
    run([sys.executable, str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT)
    telemetry_dir = staging_dir / "telemetry"
    telemetry_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy2(TELEMETRY_BUNDLE_PATH, telemetry_dir / TELEMETRY_BUNDLE_PATH.name)
    sha_path = TELEMETRY_BUNDLE_PATH.with_suffix(TELEMETRY_BUNDLE_PATH.suffix + ".sha256")
    if sha_path.exists():
        shutil.copy2(sha_path, telemetry_dir / sha_path.name)
 | |
| 
 | |
| 
 | |
def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]:
    """Inventory every file under *staging_dir* with its SHA-256 and size.

    Paths are relative to *staging_dir* in POSIX form and returned in sorted
    order so the manifest is deterministic. Names listed in *exclude* are
    skipped.
    """
    skip = exclude or set()
    inventory: list[OrderedDict[str, Any]] = []
    for candidate in sorted(staging_dir.rglob("*")):
        if not candidate.is_file():
            continue
        rel_name = candidate.relative_to(staging_dir).as_posix()
        if rel_name in skip:
            continue
        record = OrderedDict(
            (
                ("name", rel_name),
                ("sha256", compute_sha256(candidate)),
                ("size", candidate.stat().st_size),
            )
        )
        inventory.append(record)
    return inventory
 | |
| 
 | |
| 
 | |
def write_offline_manifest(
    staging_dir: Path,
    version: str,
    channel: str,
    release_manifest_sha: Optional[str],
) -> tuple[Path, str]:
    """Write manifest/offline-manifest.json plus its .sha256 companion.

    Returns:
        A (manifest_path, manifest_sha256) tuple.
    """
    manifest_dir = staging_dir / "manifest"
    manifest_dir.mkdir(parents=True, exist_ok=True)
    offline_manifest_path = manifest_dir / "offline-manifest.json"
    # The manifest must not hash itself, so exclude it (and its checksum file)
    # from the inventory scan.
    excluded = {"manifest/offline-manifest.json", "manifest/offline-manifest.json.sha256"}
    files = scan_files(staging_dir, exclude=excluded)
    # Plain dict literals preserve insertion order (Python 3.7+), so the JSON
    # serialisation is identical to the OrderedDict form.
    manifest_data = {
        "bundle": {
            "version": version,
            "channel": channel,
            "capturedAt": utc_now_iso(),
            "releaseManifestSha256": release_manifest_sha,
        },
        "artifacts": files,
    }
    with offline_manifest_path.open("w", encoding="utf-8") as handle:
        json.dump(manifest_data, handle, indent=2)
        handle.write("\n")
    manifest_sha = compute_sha256(offline_manifest_path)
    sha_file = offline_manifest_path.with_suffix(".json.sha256")
    sha_file.write_text(
        f"{manifest_sha}  {offline_manifest_path.name}\n",
        encoding="utf-8",
    )
    return offline_manifest_path, manifest_sha
 | |
| 
 | |
| 
 | |
def tarinfo_filter(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
    """Strip owner and timestamp metadata so tar member entries are reproducible."""
    tarinfo.uid = 0
    tarinfo.gid = 0
    tarinfo.uname = ""
    tarinfo.gname = ""
    tarinfo.mtime = 0
    return tarinfo


def create_tarball(staging_dir: Path, output_dir: Path, bundle_name: str) -> Path:
    """Create <output_dir>/<bundle_name>.tar.gz from *staging_dir*, deterministically.

    Files are added in sorted order with normalised tar metadata, and the gzip
    header timestamp is pinned to 0 so repeated runs produce identical bytes.

    Returns:
        The path of the created tarball.
    """
    import gzip  # local import: only used here; keeps module-level deps unchanged

    output_dir.mkdir(parents=True, exist_ok=True)
    bundle_path = output_dir / f"{bundle_name}.tar.gz"
    if bundle_path.exists():
        bundle_path.unlink()
    with bundle_path.open("wb") as raw:
        # tarfile's "w:gz" mode embeds the current time in the gzip header,
        # which breaks byte-for-byte determinism; wrap an explicit GzipFile
        # with mtime=0 (and no original-filename field) instead.
        with gzip.GzipFile(filename="", fileobj=raw, mode="wb", compresslevel=9, mtime=0) as gz:
            with tarfile.open(fileobj=gz, mode="w") as tar:
                for path in sorted(staging_dir.rglob("*")):
                    if path.is_file():
                        arcname = path.relative_to(staging_dir).as_posix()
                        tar.add(path, arcname=arcname, filter=tarinfo_filter)
    return bundle_path
 | |
| 
 | |
| 
 | |
def sign_blob(
    path: Path,
    *,
    key_ref: Optional[str],
    identity_token: Optional[str],
    password: Optional[str],
    tlog_upload: bool,
) -> Optional[Path]:
    """Sign *path* with cosign and write the signature next to it as <name>.sig.

    Returns the signature path, or None when neither a key reference nor an
    identity token is configured (signing disabled).
    """
    if not (key_ref or identity_token):
        return None
    cmd = ["cosign", "sign-blob", "--yes", str(path)]
    if key_ref:
        cmd += ["--key", key_ref]
    if identity_token:
        cmd += ["--identity-token", identity_token]
    if not tlog_upload:
        cmd.append("--tlog-upload=false")
    # cosign reads the key password from COSIGN_PASSWORD; default to empty.
    signature_text = run(cmd, env={"COSIGN_PASSWORD": password or ""})
    sig_path = path.with_suffix(path.suffix + ".sig")
    sig_path.write_text(signature_text, encoding="utf-8")
    return sig_path
 | |
| 
 | |
| 
 | |
def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
    """Assemble, package, and optionally sign the offline kit.

    Pipeline: verify the release tree, run the analyzer smoke test (unless
    skipped), reset the staging directory, copy debug store / manifests /
    component artefacts / charts & compose files / plugins & docs / telemetry
    bundle, write the offline manifest, build the tarball with its .sha256,
    sign bundle and manifest with cosign when credentials are supplied, and
    write a metadata JSON next to the bundle.

    Args:
        args: Parsed CLI namespace (see parse_args).

    Returns:
        Mapping of output paths and digests used by main() for reporting.
    """
    release_dir = args.release_dir.resolve()
    staging_dir = args.staging_dir.resolve()
    output_dir = args.output_dir.resolve()

    # Fail fast on an inconsistent release before copying anything.
    verify_release(release_dir)
    if not args.skip_smoke:
        run_python_analyzer_smoke()
    clean_directory(staging_dir)
    copy_debug_store(release_dir, staging_dir)

    manifest_data = load_manifest(release_dir)
    release_manifest_sha = None
    checksums = manifest_data.get("checksums")
    if isinstance(checksums, Mapping):
        release_manifest_sha = checksums.get("sha256")

    copy_release_manifests(release_dir, staging_dir)
    copy_component_artifacts(manifest_data, release_dir, staging_dir)
    copy_collections(manifest_data, release_dir, staging_dir)
    copy_plugins_and_assets(staging_dir)
    package_telemetry_bundle(staging_dir)

    # The offline manifest must be written after all staging is complete so it
    # inventories every staged file.
    offline_manifest_path, offline_manifest_sha = write_offline_manifest(
        staging_dir,
        args.version,
        args.channel,
        release_manifest_sha,
    )
    bundle_name = f"stella-ops-offline-kit-{args.version}-{args.channel}"
    bundle_path = create_tarball(staging_dir, output_dir, bundle_name)
    bundle_sha = compute_sha256(bundle_path)
    bundle_sha_prefixed = f"sha256:{bundle_sha}"
    (bundle_path.with_suffix(".tar.gz.sha256")).write_text(
        f"{bundle_sha}  {bundle_path.name}\n",
        encoding="utf-8",
    )

    # Signing is best-effort configuration: sign_blob returns None when no
    # cosign key/token was provided.
    signature_paths: dict[str, str] = {}
    sig = sign_blob(
        bundle_path,
        key_ref=args.cosign_key,
        identity_token=args.cosign_identity_token,
        password=args.cosign_password,
        tlog_upload=not args.no_transparency,
    )
    if sig:
        signature_paths["bundleSignature"] = str(sig)
    manifest_sig = sign_blob(
        offline_manifest_path,
        key_ref=args.cosign_key,
        identity_token=args.cosign_identity_token,
        password=args.cosign_password,
        tlog_upload=not args.no_transparency,
    )
    if manifest_sig:
        signature_paths["manifestSignature"] = str(manifest_sig)

    # OrderedDict keeps a stable key order in the emitted metadata JSON.
    metadata = OrderedDict(
        (
            ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"),
            ("bundleName", bundle_path.name),
            ("bundleSha256", bundle_sha_prefixed),
            ("bundleSize", bundle_path.stat().st_size),
            ("manifestName", offline_manifest_path.name),
            ("manifestSha256", f"sha256:{offline_manifest_sha}"),
            ("manifestSize", offline_manifest_path.stat().st_size),
            ("channel", args.channel),
            ("version", args.version),
            ("capturedAt", utc_now_iso()),
        )
    )

    if sig:
        metadata["bundleSignatureName"] = Path(sig).name
    if manifest_sig:
        metadata["manifestSignatureName"] = Path(manifest_sig).name

    metadata_path = output_dir / f"{bundle_name}.metadata.json"
    with metadata_path.open("w", encoding="utf-8") as handle:
        json.dump(metadata, handle, indent=2)
        handle.write("\n")

    return OrderedDict(
        (
            ("bundlePath", str(bundle_path)),
            ("bundleSha256", bundle_sha),
            ("manifestPath", str(offline_manifest_path)),
            ("metadataPath", str(metadata_path)),
            ("signatures", signature_paths),
        )
    )
 | |
| 
 | |
| 
 | |
def parse_args(argv: Optional[list[str]] = None) -> argparse.Namespace:
    """Build and evaluate the offline-kit packer's command-line interface."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--version", required=True, help="Bundle version (e.g. 2025.10.0)")
    parser.add_argument("--channel", default="edge", help="Release channel (default: %(default)s)")
    parser.add_argument("--bundle-id", help="Optional explicit bundle identifier")
    # Directory options share the same shape; declare them table-driven.
    for flag, default, description in (
        ("--release-dir", DEFAULT_RELEASE_DIR, "Release artefact directory (default: %(default)s)"),
        ("--staging-dir", DEFAULT_STAGING_DIR, "Temporary staging directory (default: %(default)s)"),
        ("--output-dir", DEFAULT_OUTPUT_DIR, "Destination directory for packaged bundles (default: %(default)s)"),
    ):
        parser.add_argument(flag, type=Path, default=default, help=description)
    parser.add_argument("--cosign-key", dest="cosign_key", help="Cosign key reference for signing")
    parser.add_argument("--cosign-password", dest="cosign_password", help="Cosign key password (if applicable)")
    parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Cosign identity token")
    parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log uploads")
    parser.add_argument("--skip-smoke", action="store_true", help="Skip analyzer smoke execution (testing only)")
    return parser.parse_args(argv)
 | |
| 
 | |
| 
 | |
def main(argv: Optional[list[str]] = None) -> int:
    """CLI entry point: build the kit and report outputs; return exit code."""
    args = parse_args(argv)
    try:
        result = build_offline_kit(args)
    except Exception as exc:  # pylint: disable=broad-except
        print(f"offline-kit packaging failed: {exc}", file=sys.stderr)
        return 1
    print("✅ Offline kit packaged")
    for key, value in result.items():
        if not isinstance(value, dict):
            print(f"  - {key}: {value}")
            continue
        # Nested mappings (e.g. signatures) are reported one sub-entry per line.
        for sub_key, sub_val in value.items():
            print(f"  - {key}.{sub_key}: {sub_val}")
    return 0
 | |
| 
 | |
| 
 | |
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    raise SystemExit(main())
 |