Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools
- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
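A hypothetical smoke invocation of the three tools (the project paths and exact flag spellings below are assumptions drawn from the summary above, not from this commit):

# Hypothetical invocation of the new policy tools; project paths and
# flag names are illustrative assumptions, not taken from this commit.
import subprocess

for project, args in [
    ("tools/PolicyDslValidator", ["--strict", "--json", "policies/sample.dsl"]),
    ("tools/PolicySchemaExporter", ["--output", "out/schemas"]),
    ("tools/PolicySimulationSmoke", ["--expected", "simulations/expected.json"]),
]:
    subprocess.run(["dotnet", "run", "--project", project, "--", *args], check=True)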
BIN ops/devops/release/__pycache__/build_release.cpython-312.pyc (new file; binary not shown)
BIN ops/devops/release/__pycache__/verify_release.cpython-312.pyc (new file; binary not shown)
@@ -14,6 +14,7 @@ The workflow expects external tooling to be available on PATH:
 from __future__ import annotations
 
 import argparse
+import contextlib
 import datetime as dt
 import hashlib
 import json
@@ -21,11 +22,14 @@ import os
 import pathlib
 import re
 import shlex
+import shutil
 import subprocess
 import sys
+import tarfile
 import tempfile
+import uuid
 from collections import OrderedDict
-from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence
+from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple
 
 REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
 DEFAULT_CONFIG = REPO_ROOT / "ops/devops/release/components.json"
@@ -184,6 +188,8 @@ class ReleaseBuilder:
         self.provenance_dir = ensure_directory(self.artifacts_dir / "provenance")
         self.signature_dir = ensure_directory(self.artifacts_dir / "signatures")
         self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata")
+        self.debug_dir = ensure_directory(self.output_dir / "debug")
+        self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id")
         self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-"))
         self.skip_signing = skip_signing
         self.tlog_upload = tlog_upload
@@ -196,6 +202,9 @@ class ReleaseBuilder:
             "COSIGN_ALLOW_HTTP_REGISTRY": os.environ.get("COSIGN_ALLOW_HTTP_REGISTRY", "1"),
             "COSIGN_DOCKER_MEDIA_TYPES": os.environ.get("COSIGN_DOCKER_MEDIA_TYPES", "1"),
         }
+        # Cache resolved objcopy binaries keyed by machine identifier to avoid repeated lookups.
+        self._objcopy_cache: Dict[str, Optional[str]] = {}
+        self._missing_symbol_platforms: Dict[str, int] = {}
 
     # ----------------
     # Build steps
@@ -210,7 +219,8 @@ class ReleaseBuilder:
             components_result.append(result)
         helm_meta = self._package_helm()
         compose_meta = self._digest_compose_files()
-        manifest = self._compose_manifest(components_result, helm_meta, compose_meta)
+        debug_meta = self._collect_debug_store(components_result)
+        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta)
         return manifest
 
     def _prime_buildx_plugin(self) -> None:
@@ -339,7 +349,15 @@ class ReleaseBuilder:
         if bundle_info:
             component_entry["signature"] = bundle_info
         if metadata_file.exists():
-            component_entry["metadata"] = str(metadata_file.relative_to(self.output_dir.parent)) if metadata_file.is_relative_to(self.output_dir.parent) else str(metadata_file)
+            metadata_rel = (
+                str(metadata_file.relative_to(self.output_dir.parent))
+                if metadata_file.is_relative_to(self.output_dir.parent)
+                else str(metadata_file)
+            )
+            component_entry["metadata"] = OrderedDict((
+                ("path", metadata_rel),
+                ("sha256", compute_sha256(metadata_file)),
+            ))
         return component_entry
 
     def _sign_image(self, name: str, image_ref: str, tags: Sequence[str]) -> Optional[Mapping[str, Any]]:
@@ -370,6 +388,7 @@ class ReleaseBuilder:
             image_ref,
         ])
         signature_path.write_text(signature_data, encoding="utf-8")
+        signature_sha = compute_sha256(signature_path)
         signature_ref = run([
             "cosign",
             "triangulate",
@@ -380,6 +399,7 @@ class ReleaseBuilder:
             (
                 ("signature", OrderedDict((
                     ("path", str(signature_path.relative_to(self.output_dir.parent)) if signature_path.is_relative_to(self.output_dir.parent) else str(signature_path)),
+                    ("sha256", signature_sha),
                     ("ref", signature_ref),
                     ("tlogUploaded", self.tlog_upload),
                 ))),
@@ -479,6 +499,271 @@ class ReleaseBuilder:
         entry["ref"] = ref
         return entry
 
+    def _collect_debug_store(self, components: Sequence[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
+        if self.dry_run:
+            return None
+        debug_records: Dict[Tuple[str, str], OrderedDict[str, Any]] = {}
+        for component in components:
+            image_ref = component.get("image")
+            if not image_ref:
+                continue
+            name = component.get("name", "unknown")
+            entries = self._extract_debug_entries(name, image_ref)
+            for entry in entries:
+                key = (entry["platform"], entry["buildId"])
+                existing = debug_records.get(key)
+                if existing is None:
+                    record = OrderedDict((
+                        ("buildId", entry["buildId"]),
+                        ("platform", entry["platform"]),
+                        ("debugPath", entry["debugPath"]),
+                        ("sha256", entry["sha256"]),
+                        ("size", entry["size"]),
+                        ("components", [entry["component"]]),
+                        ("images", [entry["image"]]),
+                        ("sources", list(entry["sources"])),
+                    ))
+                    debug_records[key] = record
+                else:
+                    if entry["sha256"] != existing["sha256"]:
+                        raise RuntimeError(
+                            f"Build-id {entry['buildId']} for platform {entry['platform']} produced conflicting hashes"
+                        )
+                    if entry["component"] not in existing["components"]:
+                        existing["components"].append(entry["component"])
+                    if entry["image"] not in existing["images"]:
+                        existing["images"].append(entry["image"])
+                    for source in entry["sources"]:
+                        if source not in existing["sources"]:
+                            existing["sources"].append(source)
+        if not debug_records:
+            sys.stderr.write(
+                "[error] release build produced no debug artefacts; enable symbol extraction so out/release/debug is populated (DEVOPS-REL-17-004).\n"
+            )
+            # Remove empty directories before failing
+            with contextlib.suppress(FileNotFoundError, OSError):
+                if not any(self.debug_store_dir.iterdir()):
+                    self.debug_store_dir.rmdir()
+            with contextlib.suppress(FileNotFoundError, OSError):
+                if not any(self.debug_dir.iterdir()):
+                    self.debug_dir.rmdir()
+            raise RuntimeError(
+                "Debug store collection produced no build-id artefacts (DEVOPS-REL-17-004)."
+            )
+        entries = []
+        for record in debug_records.values():
+            entry = OrderedDict((
+                ("buildId", record["buildId"]),
+                ("platform", record["platform"]),
+                ("debugPath", record["debugPath"]),
+                ("sha256", record["sha256"]),
+                ("size", record["size"]),
+                ("components", sorted(record["components"])),
+                ("images", sorted(record["images"])),
+                ("sources", sorted(record["sources"])),
+            ))
+            entries.append(entry)
+        entries.sort(key=lambda item: (item["platform"], item["buildId"]))
+        manifest_path = self.debug_dir / "debug-manifest.json"
+        platform_counts: Dict[str, int] = {}
+        for entry in entries:
+            platform_counts[entry["platform"]] = platform_counts.get(entry["platform"], 0) + 1
+        missing_platforms = [
+            platform
+            for platform in self._missing_symbol_platforms
+            if platform_counts.get(platform, 0) == 0
+        ]
+        if missing_platforms:
+            raise RuntimeError(
+                "Debug extraction skipped all binaries for platforms without objcopy support: "
+                + ", ".join(sorted(missing_platforms))
+            )
+        manifest_data = OrderedDict((
+            ("generatedAt", self.release_date),
+            ("version", self.version),
+            ("channel", self.channel),
+            ("artifacts", entries),
+        ))
+        with manifest_path.open("w", encoding="utf-8") as handle:
+            json.dump(manifest_data, handle, indent=2)
+            handle.write("\n")
+        manifest_sha = compute_sha256(manifest_path)
+        sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
+        sha_path.write_text(f"{manifest_sha} {manifest_path.name}\n", encoding="utf-8")
+        manifest_rel = manifest_path.relative_to(self.output_dir).as_posix()
+        store_rel = self.debug_store_dir.relative_to(self.output_dir).as_posix()
+        platforms = sorted({entry["platform"] for entry in entries})
+        return OrderedDict((
+            ("manifest", manifest_rel),
+            ("sha256", manifest_sha),
+            ("entries", len(entries)),
+            ("platforms", platforms),
+            ("directory", store_rel),
+        ))
+
+    def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]:
+        if self.dry_run:
+            return []
+        entries: List[OrderedDict[str, Any]] = []
+        platforms = self.platforms if self.push else [None]
+        for platform in platforms:
+            platform_label = platform or (self.platforms[0] if self.platforms else "linux/amd64")
+            if self.push:
+                pull_cmd = ["docker", "pull"]
+                if platform:
+                    pull_cmd.extend(["--platform", platform])
+                pull_cmd.append(image_ref)
+                run(pull_cmd)
+            create_cmd = ["docker", "create"]
+            if platform:
+                create_cmd.extend(["--platform", platform])
+            create_cmd.append(image_ref)
+            container_id = run(create_cmd).strip()
+            export_path = self.temp_dir / f"{container_id}.tar"
+            try:
+                run(["docker", "export", container_id, "-o", str(export_path)], capture=False)
+            finally:
+                run(["docker", "rm", container_id], capture=False)
+            rootfs_dir = ensure_directory(self.temp_dir / f"{component_name}-{platform_label}-{uuid.uuid4().hex}")
+            try:
+                with tarfile.open(export_path, "r:*") as tar:
+                    self._safe_extract_tar(tar, rootfs_dir)
+            finally:
+                export_path.unlink(missing_ok=True)
+            try:
+                for file_path in rootfs_dir.rglob("*"):
+                    if not file_path.is_file() or file_path.is_symlink():
+                        continue
+                    if not self._is_elf(file_path):
+                        continue
+                    build_id, machine = self._read_build_id_and_machine(file_path)
+                    if not build_id:
+                        continue
+                    debug_file = self._debug_file_for_build_id(build_id)
+                    if not debug_file.exists():
+                        debug_file.parent.mkdir(parents=True, exist_ok=True)
+                        temp_debug = self.temp_dir / f"{build_id}.debug"
+                        with contextlib.suppress(FileNotFoundError):
+                            temp_debug.unlink()
+                        objcopy_tool = self._resolve_objcopy_tool(machine)
+                        if not objcopy_tool:
+                            self._emit_objcopy_warning(machine, platform_label, file_path)
+                            with contextlib.suppress(FileNotFoundError):
+                                temp_debug.unlink()
+                            continue
+                        try:
+                            run([objcopy_tool, "--only-keep-debug", str(file_path), str(temp_debug)], capture=False)
+                        except CommandError:
+                            with contextlib.suppress(FileNotFoundError):
+                                temp_debug.unlink()
+                            continue
+                        debug_file.parent.mkdir(parents=True, exist_ok=True)
+                        shutil.move(str(temp_debug), str(debug_file))
+                    sha = compute_sha256(debug_file)
+                    rel_debug = debug_file.relative_to(self.output_dir).as_posix()
+                    source_rel = file_path.relative_to(rootfs_dir).as_posix()
+                    entry = OrderedDict((
+                        ("component", component_name),
+                        ("image", image_ref),
+                        ("platform", platform_label),
+                        ("buildId", build_id),
+                        ("debugPath", rel_debug),
+                        ("sha256", sha),
+                        ("size", debug_file.stat().st_size),
+                        ("sources", [source_rel]),
+                    ))
+                    entries.append(entry)
+            finally:
+                shutil.rmtree(rootfs_dir, ignore_errors=True)
+        return entries
+
+    def _debug_file_for_build_id(self, build_id: str) -> pathlib.Path:
+        normalized = build_id.lower()
+        prefix = normalized[:2]
+        remainder = normalized[2:]
+        return self.debug_store_dir / prefix / f"{remainder}.debug"
+
+    @staticmethod
+    def _safe_extract_tar(tar: tarfile.TarFile, dest: pathlib.Path) -> None:
+        dest_root = dest.resolve()
+        members = tar.getmembers()
+        for member in members:
+            member_path = (dest / member.name).resolve()
+            if not str(member_path).startswith(str(dest_root)):
+                raise RuntimeError(f"Refusing to extract '{member.name}' outside of destination directory")
+        tar.extractall(dest)
+
+    @staticmethod
+    def _is_elf(path: pathlib.Path) -> bool:
+        try:
+            with path.open("rb") as handle:
+                return handle.read(4) == b"\x7fELF"
+        except OSError:
+            return False
+
+    def _read_build_id_and_machine(self, path: pathlib.Path) -> Tuple[Optional[str], Optional[str]]:
+        try:
+            header_output = run(["readelf", "-nh", str(path)])
+        except CommandError:
+            return None, None
+        build_id: Optional[str] = None
+        machine: Optional[str] = None
+        for line in header_output.splitlines():
+            stripped = line.strip()
+            if stripped.startswith("Build ID:"):
+                build_id = stripped.split("Build ID:", 1)[1].strip().lower()
+            elif stripped.startswith("Machine:"):
+                machine = stripped.split("Machine:", 1)[1].strip()
+        return build_id, machine
+
+    def _resolve_objcopy_tool(self, machine: Optional[str]) -> Optional[str]:
+        key = (machine or "generic").lower()
+        if key in self._objcopy_cache:
+            return self._objcopy_cache[key]
+
+        env_override = None
+        if machine and "aarch64" in machine.lower():
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_AARCH64")
+            candidates = [
+                env_override,
+                "aarch64-linux-gnu-objcopy",
+                "llvm-objcopy",
+                "objcopy",
+            ]
+        elif machine and any(token in machine.lower() for token in ("x86-64", "amd", "x86_64")):
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_AMD64")
+            candidates = [
+                env_override,
+                "objcopy",
+                "llvm-objcopy",
+            ]
+        else:
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_DEFAULT")
+            candidates = [
+                env_override,
+                "objcopy",
+                "llvm-objcopy",
+            ]
+
+        for candidate in candidates:
+            if not candidate:
+                continue
+            tool = shutil.which(candidate)
+            if tool:
+                self._objcopy_cache[key] = tool
+                return tool
+        self._objcopy_cache[key] = None
+        return None
+
+    def _emit_objcopy_warning(self, machine: Optional[str], platform: str, file_path: pathlib.Path) -> None:
+        machine_label = machine or "unknown-machine"
+        count = self._missing_symbol_platforms.get(platform, 0)
+        self._missing_symbol_platforms[platform] = count + 1
+        if count == 0:
+            sys.stderr.write(
+                f"[warn] no objcopy tool available for {machine_label}; skipping debug extraction for {file_path}.\n"
+            )
+
     # ----------------
     # Helm + compose
     # ----------------
@@ -546,6 +831,7 @@ class ReleaseBuilder:
         components: List[Mapping[str, Any]],
         helm_meta: Optional[Mapping[str, Any]],
         compose_meta: List[Mapping[str, Any]],
+        debug_meta: Optional[Mapping[str, Any]],
     ) -> Dict[str, Any]:
         manifest = OrderedDict()
         manifest["release"] = OrderedDict((
@@ -559,6 +845,8 @@ class ReleaseBuilder:
             manifest["charts"] = [helm_meta]
         if compose_meta:
             manifest["compose"] = compose_meta
+        if debug_meta:
+            manifest["debugStore"] = debug_meta
         return manifest
 
 
@@ -593,6 +881,18 @@ def write_manifest(manifest: Mapping[str, Any], output_dir: pathlib.Path) -> pathlib.Path:
     output_path = output_dir / "release.yaml"
     with output_path.open("w", encoding="utf-8") as handle:
         handle.write(final_yaml)
+    sha_path = output_path.with_name(output_path.name + ".sha256")
+    yaml_file_digest = compute_sha256(output_path)
+    sha_path.write_text(f"{yaml_file_digest} {output_path.name}\n", encoding="utf-8")
+
+    json_text = json.dumps(manifest_with_checksum, indent=2)
+    json_path = output_dir / "release.json"
+    with json_path.open("w", encoding="utf-8") as handle:
+        handle.write(json_text)
+        handle.write("\n")
+    json_digest = compute_sha256(json_path)
+    json_sha_path = json_path.with_name(json_path.name + ".sha256")
+    json_sha_path.write_text(f"{json_digest} {json_path.name}\n", encoding="utf-8")
     return output_path
 
 
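For orientation, `_debug_file_for_build_id` above lays the store out in the GDB-style `.build-id/<first two hex chars>/<rest>.debug` convention. A minimal standalone sketch of that mapping (the store root path here is an assumption for illustration):

# Sketch of the build-id -> path mapping used by _debug_file_for_build_id.
# The store root below is an illustrative assumption.
import pathlib

def debug_path(store_root: pathlib.Path, build_id: str) -> pathlib.Path:
    normalized = build_id.lower()
    return store_root / normalized[:2] / f"{normalized[2:]}.debug"

root = pathlib.Path("out/release/debug/.build-id")
print(debug_path(root, "ABCDEF1234"))  # out/release/debug/.build-id/ab/cdef1234.debug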
ops/devops/release/test_verify_release.py (new file, 232 lines)
@@ -0,0 +1,232 @@
from __future__ import annotations

import json
import tempfile
import unittest
from collections import OrderedDict
from pathlib import Path
import sys

sys.path.append(str(Path(__file__).resolve().parent))

from build_release import write_manifest  # type: ignore[import-not-found]
from verify_release import VerificationError, compute_sha256, verify_release


class VerifyReleaseTests(unittest.TestCase):
    def setUp(self) -> None:
        self._temp = tempfile.TemporaryDirectory()
        self.base_path = Path(self._temp.name)
        self.out_dir = self.base_path / "out"
        self.release_dir = self.out_dir / "release"
        self.release_dir.mkdir(parents=True, exist_ok=True)

    def tearDown(self) -> None:
        self._temp.cleanup()

    def _relative_to_out(self, path: Path) -> str:
        return path.relative_to(self.out_dir).as_posix()

    def _write_json(self, path: Path, payload: dict[str, object]) -> None:
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open("w", encoding="utf-8") as handle:
            json.dump(payload, handle, indent=2)
            handle.write("\n")

    def _create_sample_release(self) -> None:
        sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json"
        sbom_path.parent.mkdir(parents=True, exist_ok=True)
        sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8")
        sbom_sha = compute_sha256(sbom_path)

        provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json"
        self._write_json(
            provenance_path,
            {
                "buildDefinition": {"buildType": "https://example/build", "externalParameters": {}},
                "runDetails": {"builder": {"id": "https://example/ci"}},
            },
        )
        provenance_sha = compute_sha256(provenance_path)

        signature_path = self.release_dir / "artifacts/signatures/sample.signature"
        signature_path.parent.mkdir(parents=True, exist_ok=True)
        signature_path.write_text("signature-data\n", encoding="utf-8")
        signature_sha = compute_sha256(signature_path)

        metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json"
        self._write_json(metadata_path, {"digest": "sha256:1234"})
        metadata_sha = compute_sha256(metadata_path)

        chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz"
        chart_path.parent.mkdir(parents=True, exist_ok=True)
        chart_path.write_bytes(b"helm-chart-data")
        chart_sha = compute_sha256(chart_path)

        compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml"
        compose_path.parent.mkdir(parents=True, exist_ok=True)
        compose_path.write_text("services: {}\n", encoding="utf-8")
        compose_sha = compute_sha256(compose_path)

        debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug"
        debug_file.parent.mkdir(parents=True, exist_ok=True)
        debug_file.write_bytes(b"\x7fELFDEBUGDATA")
        debug_sha = compute_sha256(debug_file)

        debug_manifest_path = self.release_dir / "debug/debug-manifest.json"
        debug_manifest = OrderedDict(
            (
                ("generatedAt", "2025-10-26T00:00:00Z"),
                ("version", "1.0.0"),
                ("channel", "edge"),
                (
                    "artifacts",
                    [
                        OrderedDict(
                            (
                                ("buildId", "abcdef1234"),
                                ("platform", "linux/amd64"),
                                ("debugPath", "debug/.build-id/ab/cdef.debug"),
                                ("sha256", debug_sha),
                                ("size", debug_file.stat().st_size),
                                ("components", ["sample"]),
                                ("images", ["registry.example/sample@sha256:feedface"]),
                                ("sources", ["app/sample.dll"]),
                            )
                        )
                    ],
                ),
            )
        )
        self._write_json(debug_manifest_path, debug_manifest)
        debug_manifest_sha = compute_sha256(debug_manifest_path)
        (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text(
            f"{debug_manifest_sha} {debug_manifest_path.name}\n", encoding="utf-8"
        )

        manifest = OrderedDict(
            (
                (
                    "release",
                    OrderedDict(
                        (
                            ("version", "1.0.0"),
                            ("channel", "edge"),
                            ("date", "2025-10-26T00:00:00Z"),
                            ("calendar", "2025.10"),
                        )
                    ),
                ),
                (
                    "components",
                    [
                        OrderedDict(
                            (
                                ("name", "sample"),
                                ("image", "registry.example/sample@sha256:feedface"),
                                ("tags", ["registry.example/sample:1.0.0"]),
                                (
                                    "sbom",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(sbom_path)),
                                            ("sha256", sbom_sha),
                                        )
                                    ),
                                ),
                                (
                                    "provenance",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(provenance_path)),
                                            ("sha256", provenance_sha),
                                        )
                                    ),
                                ),
                                (
                                    "signature",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(signature_path)),
                                            ("sha256", signature_sha),
                                            ("ref", "sigstore://example"),
                                            ("tlogUploaded", True),
                                        )
                                    ),
                                ),
                                (
                                    "metadata",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(metadata_path)),
                                            ("sha256", metadata_sha),
                                        )
                                    ),
                                ),
                            )
                        )
                    ],
                ),
                (
                    "charts",
                    [
                        OrderedDict(
                            (
                                ("name", "stellaops"),
                                ("version", "1.0.0"),
                                ("path", self._relative_to_out(chart_path)),
                                ("sha256", chart_sha),
                            )
                        )
                    ],
                ),
                (
                    "compose",
                    [
                        OrderedDict(
                            (
                                ("name", "docker-compose.dev.yaml"),
                                ("path", compose_path.relative_to(self.out_dir).as_posix()),
                                ("sha256", compose_sha),
                            )
                        )
                    ],
                ),
                (
                    "debugStore",
                    OrderedDict(
                        (
                            ("manifest", "debug/debug-manifest.json"),
                            ("sha256", debug_manifest_sha),
                            ("entries", 1),
                            ("platforms", ["linux/amd64"]),
                            ("directory", "debug/.build-id"),
                        )
                    ),
                ),
            )
        )
        write_manifest(manifest, self.release_dir)

    def test_verify_release_success(self) -> None:
        self._create_sample_release()
        # Should not raise
        verify_release(self.release_dir)

    def test_verify_release_detects_sha_mismatch(self) -> None:
        self._create_sample_release()
        tampered = self.release_dir / "artifacts/sboms/sample.cyclonedx.json"
        tampered.write_text("tampered\n", encoding="utf-8")
        with self.assertRaises(VerificationError):
            verify_release(self.release_dir)

    def test_verify_release_detects_missing_debug_file(self) -> None:
        self._create_sample_release()
        debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug"
        debug_file.unlink()
        with self.assertRaises(VerificationError):
            verify_release(self.release_dir)


if __name__ == "__main__":
    unittest.main()
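Because the module appends its own directory to `sys.path` and ends in a `unittest.main()` guard, the suite can be run directly as a script; a minimal sketch:

# Run the new verification tests as a script (path taken from this commit).
import subprocess
import sys

subprocess.run(
    [sys.executable, "ops/devops/release/test_verify_release.py"],
    check=True,
)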
ops/devops/release/verify_release.py (new file, 279 lines)
@@ -0,0 +1,279 @@
#!/usr/bin/env python3
"""Verify release artefacts (SBOMs, provenance, signatures, manifest hashes)."""

from __future__ import annotations

import argparse
import hashlib
import json
import pathlib
import sys
from collections import OrderedDict
from typing import Any, Mapping, Optional

from build_release import dump_yaml  # type: ignore[import-not-found]


class VerificationError(Exception):
    """Raised when release artefacts fail verification."""


def compute_sha256(path: pathlib.Path) -> str:
    sha = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1024 * 1024), b""):
            sha.update(chunk)
    return sha.hexdigest()


def parse_sha_file(path: pathlib.Path) -> Optional[str]:
    if not path.exists():
        return None
    content = path.read_text(encoding="utf-8").strip()
    if not content:
        return None
    return content.split()[0]


def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path:
    candidate = pathlib.Path(path_str.replace("\\", "/"))
    if candidate.is_absolute():
        return candidate

    for base in (release_dir, release_dir.parent, release_dir.parent.parent):
        resolved = (base / candidate).resolve()
        if resolved.exists():
            return resolved
    # Fall back to release_dir joined path even if missing to surface in caller.
    return (release_dir / candidate).resolve()


def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]:
    manifest_path = release_dir / "release.json"
    if not manifest_path.exists():
        raise VerificationError(f"Release manifest JSON missing at {manifest_path}")
    try:
        with manifest_path.open("r", encoding="utf-8") as handle:
            return json.load(handle, object_pairs_hook=OrderedDict)
    except json.JSONDecodeError as exc:
        raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc


def verify_manifest_hashes(
    manifest: Mapping[str, Any],
    release_dir: pathlib.Path,
    errors: list[str],
) -> None:
    yaml_path = release_dir / "release.yaml"
    if not yaml_path.exists():
        errors.append(f"Missing release.yaml at {yaml_path}")
        return

    recorded_yaml_sha = parse_sha_file(yaml_path.with_name(yaml_path.name + ".sha256"))
    actual_yaml_sha = compute_sha256(yaml_path)
    if recorded_yaml_sha and recorded_yaml_sha != actual_yaml_sha:
        errors.append(
            f"release.yaml.sha256 recorded {recorded_yaml_sha} but file hashes to {actual_yaml_sha}"
        )

    json_path = release_dir / "release.json"
    recorded_json_sha = parse_sha_file(json_path.with_name(json_path.name + ".sha256"))
    actual_json_sha = compute_sha256(json_path)
    if recorded_json_sha and recorded_json_sha != actual_json_sha:
        errors.append(
            f"release.json.sha256 recorded {recorded_json_sha} but file hashes to {actual_json_sha}"
        )

    checksums = manifest.get("checksums")
    if isinstance(checksums, Mapping):
        recorded_digest = checksums.get("sha256")
        base_manifest = OrderedDict(manifest)
        base_manifest.pop("checksums", None)
        yaml_without_checksums = dump_yaml(base_manifest)
        computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest()
        if recorded_digest != computed_digest:
            errors.append(
                "Manifest checksum mismatch: "
                f"recorded {recorded_digest}, computed {computed_digest}"
            )


def verify_artifact_entry(
    entry: Mapping[str, Any],
    release_dir: pathlib.Path,
    label: str,
    component_name: str,
    errors: list[str],
) -> None:
    path_str = entry.get("path")
    if not path_str:
        errors.append(f"{component_name}: {label} missing 'path' field.")
        return
    resolved = resolve_path(str(path_str), release_dir)
    if not resolved.exists():
        errors.append(f"{component_name}: {label} path does not exist → {resolved}")
        return
    recorded_sha = entry.get("sha256")
    if recorded_sha:
        actual_sha = compute_sha256(resolved)
        if actual_sha != recorded_sha:
            errors.append(
                f"{component_name}: {label} SHA mismatch for {resolved} "
                f"(recorded {recorded_sha}, computed {actual_sha})"
            )


def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    for component in manifest.get("components", []):
        if not isinstance(component, Mapping):
            errors.append("Component entry is not a mapping.")
            continue
        name = str(component.get("name", "<unknown>"))
        for key, label in (
            ("sbom", "SBOM"),
            ("provenance", "provenance"),
            ("signature", "signature"),
            ("metadata", "metadata"),
        ):
            entry = component.get(key)
            if not entry:
                continue
            if not isinstance(entry, Mapping):
                errors.append(f"{name}: {label} entry must be a mapping.")
                continue
            verify_artifact_entry(entry, release_dir, label, name, errors)


def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    for collection, label in (
        ("charts", "chart"),
        ("compose", "compose file"),
    ):
        for item in manifest.get(collection, []):
            if not isinstance(item, Mapping):
                errors.append(f"{collection} entry is not a mapping.")
                continue
            path_value = item.get("path")
            if not path_value:
                errors.append(f"{collection} entry missing path.")
                continue
            resolved = resolve_path(str(path_value), release_dir)
            if not resolved.exists():
                errors.append(f"{label} missing file → {resolved}")
                continue
            recorded_sha = item.get("sha256")
            if recorded_sha:
                actual_sha = compute_sha256(resolved)
                if actual_sha != recorded_sha:
                    errors.append(
                        f"{label} SHA mismatch for {resolved} "
                        f"(recorded {recorded_sha}, computed {actual_sha})"
                    )


def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    debug = manifest.get("debugStore")
    if not isinstance(debug, Mapping):
        return
    manifest_path_str = debug.get("manifest")
    manifest_data: Optional[Mapping[str, Any]] = None
    if manifest_path_str:
        manifest_path = resolve_path(str(manifest_path_str), release_dir)
        if not manifest_path.exists():
            errors.append(f"Debug manifest missing → {manifest_path}")
        else:
            recorded_sha = debug.get("sha256")
            if recorded_sha:
                actual_sha = compute_sha256(manifest_path)
                if actual_sha != recorded_sha:
                    errors.append(
                        f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})"
                    )
            sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
            sidecar_sha = parse_sha_file(sha_sidecar)
            if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha:
                errors.append(
                    f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}"
                )
            try:
                with manifest_path.open("r", encoding="utf-8") as handle:
                    manifest_data = json.load(handle)
            except json.JSONDecodeError as exc:
                errors.append(f"Debug manifest JSON invalid: {exc}")
    directory = debug.get("directory")
    if directory:
        debug_dir = resolve_path(str(directory), release_dir)
        if not debug_dir.exists():
            errors.append(f"Debug directory missing → {debug_dir}")

    if manifest_data:
        artifacts = manifest_data.get("artifacts")
        if not isinstance(artifacts, list) or not artifacts:
            errors.append("Debug manifest contains no artefacts.")
            return

        declared_entries = debug.get("entries")
        if isinstance(declared_entries, int) and declared_entries != len(artifacts):
            errors.append(
                f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts."
            )

        for artefact in artifacts:
            if not isinstance(artefact, Mapping):
                errors.append("Debug manifest artefact entry is not a mapping.")
                continue
            debug_path = artefact.get("debugPath")
            artefact_sha = artefact.get("sha256")
            if not debug_path or not artefact_sha:
                errors.append("Debug manifest artefact missing debugPath or sha256.")
                continue
            resolved_debug = resolve_path(str(debug_path), release_dir)
            if not resolved_debug.exists():
                errors.append(f"Debug artefact missing → {resolved_debug}")
                continue
            actual_sha = compute_sha256(resolved_debug)
            if actual_sha != artefact_sha:
                errors.append(
                    f"Debug artefact SHA mismatch for {resolved_debug} "
                    f"(recorded {artefact_sha}, computed {actual_sha})"
                )


def verify_release(release_dir: pathlib.Path) -> None:
    if not release_dir.exists():
        raise VerificationError(f"Release directory not found: {release_dir}")
    manifest = load_manifest(release_dir)
    errors: list[str] = []
    verify_manifest_hashes(manifest, release_dir, errors)
    verify_components(manifest, release_dir, errors)
    verify_collections(manifest, release_dir, errors)
    verify_debug_store(manifest, release_dir, errors)
    if errors:
        bullet_list = "\n - ".join(errors)
        raise VerificationError(f"Release verification failed:\n - {bullet_list}")


def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release-dir",
        type=pathlib.Path,
        default=pathlib.Path("out/release"),
        help="Path to the release artefact directory (default: %(default)s)",
    )
    return parser.parse_args(argv)


def main(argv: list[str] | None = None) -> int:
    args = parse_args(argv)
    try:
        verify_release(args.release_dir.resolve())
    except VerificationError as exc:
        print(str(exc), file=sys.stderr)
        return 1
    print(f"✅ Release artefacts verified OK in {args.release_dir}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
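The verifier doubles as a library; a minimal sketch of programmatic use, mirroring `main()` above (the `sys.path` tweak assumes you are running from the repository root):

# Programmatic release verification using the entry points defined above.
import pathlib
import sys

sys.path.append("ops/devops/release")  # assumption: run from the repo root

from verify_release import VerificationError, verify_release

try:
    verify_release(pathlib.Path("out/release").resolve())
except VerificationError as exc:
    print(exc, file=sys.stderr)
else:
    print("release artefacts verified OK")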