Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools
- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
@@ -14,6 +14,7 @@ The workflow expects external tooling to be available on PATH:
 from __future__ import annotations
 
 import argparse
+import contextlib
 import datetime as dt
 import hashlib
 import json
@@ -21,11 +22,14 @@ import os
 import pathlib
 import re
 import shlex
+import shutil
 import subprocess
 import sys
+import tarfile
 import tempfile
+import uuid
 from collections import OrderedDict
-from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence
+from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple
 
 REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
 DEFAULT_CONFIG = REPO_ROOT / "ops/devops/release/components.json"
@@ -184,6 +188,8 @@ class ReleaseBuilder:
         self.provenance_dir = ensure_directory(self.artifacts_dir / "provenance")
         self.signature_dir = ensure_directory(self.artifacts_dir / "signatures")
         self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata")
+        self.debug_dir = ensure_directory(self.output_dir / "debug")
+        self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id")
         self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-"))
         self.skip_signing = skip_signing
         self.tlog_upload = tlog_upload
@@ -196,6 +202,9 @@ class ReleaseBuilder:
             "COSIGN_ALLOW_HTTP_REGISTRY": os.environ.get("COSIGN_ALLOW_HTTP_REGISTRY", "1"),
             "COSIGN_DOCKER_MEDIA_TYPES": os.environ.get("COSIGN_DOCKER_MEDIA_TYPES", "1"),
         }
+        # Cache resolved objcopy binaries keyed by machine identifier to avoid repeated lookups.
+        self._objcopy_cache: Dict[str, Optional[str]] = {}
+        self._missing_symbol_platforms: Dict[str, int] = {}
 
     # ----------------
     # Build steps
@@ -210,7 +219,8 @@ class ReleaseBuilder:
             components_result.append(result)
         helm_meta = self._package_helm()
         compose_meta = self._digest_compose_files()
-        manifest = self._compose_manifest(components_result, helm_meta, compose_meta)
+        debug_meta = self._collect_debug_store(components_result)
+        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta)
         return manifest
 
     def _prime_buildx_plugin(self) -> None:
@@ -339,7 +349,15 @@ class ReleaseBuilder:
         if bundle_info:
             component_entry["signature"] = bundle_info
         if metadata_file.exists():
-            component_entry["metadata"] = str(metadata_file.relative_to(self.output_dir.parent)) if metadata_file.is_relative_to(self.output_dir.parent) else str(metadata_file)
+            metadata_rel = (
+                str(metadata_file.relative_to(self.output_dir.parent))
+                if metadata_file.is_relative_to(self.output_dir.parent)
+                else str(metadata_file)
+            )
+            component_entry["metadata"] = OrderedDict((
+                ("path", metadata_rel),
+                ("sha256", compute_sha256(metadata_file)),
+            ))
         return component_entry
 
     def _sign_image(self, name: str, image_ref: str, tags: Sequence[str]) -> Optional[Mapping[str, Any]]:
@@ -370,6 +388,7 @@ class ReleaseBuilder:
             image_ref,
         ])
         signature_path.write_text(signature_data, encoding="utf-8")
+        signature_sha = compute_sha256(signature_path)
         signature_ref = run([
             "cosign",
             "triangulate",
@@ -380,6 +399,7 @@ class ReleaseBuilder:
         (
             ("signature", OrderedDict((
                 ("path", str(signature_path.relative_to(self.output_dir.parent)) if signature_path.is_relative_to(self.output_dir.parent) else str(signature_path)),
+                ("sha256", signature_sha),
                 ("ref", signature_ref),
                 ("tlogUploaded", self.tlog_upload),
             ))),
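
With the digest added above, a component's signature block in the release manifest now carries the path, hash, signature ref, and transparency-log flag together. An abridged, illustrative shape (component name, paths, and refs are placeholders, not taken from this diff):

```json
{
  "signature": {
    "path": "artifacts/signatures/scanner.sig",
    "sha256": "…",
    "ref": "registry.example.com/stellaops/scanner:sha256-….sig",
    "tlogUploaded": true
  }
}
```
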
@@ -479,6 +499,271 @@ class ReleaseBuilder:
         entry["ref"] = ref
         return entry
 
+    def _collect_debug_store(self, components: Sequence[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
+        if self.dry_run:
+            return None
+        debug_records: Dict[Tuple[str, str], OrderedDict[str, Any]] = {}
+        for component in components:
+            image_ref = component.get("image")
+            if not image_ref:
+                continue
+            name = component.get("name", "unknown")
+            entries = self._extract_debug_entries(name, image_ref)
+            for entry in entries:
+                key = (entry["platform"], entry["buildId"])
+                existing = debug_records.get(key)
+                if existing is None:
+                    record = OrderedDict((
+                        ("buildId", entry["buildId"]),
+                        ("platform", entry["platform"]),
+                        ("debugPath", entry["debugPath"]),
+                        ("sha256", entry["sha256"]),
+                        ("size", entry["size"]),
+                        ("components", [entry["component"]]),
+                        ("images", [entry["image"]]),
+                        ("sources", list(entry["sources"])),
+                    ))
+                    debug_records[key] = record
+                else:
+                    if entry["sha256"] != existing["sha256"]:
+                        raise RuntimeError(
+                            f"Build-id {entry['buildId']} for platform {entry['platform']} produced conflicting hashes"
+                        )
+                    if entry["component"] not in existing["components"]:
+                        existing["components"].append(entry["component"])
+                    if entry["image"] not in existing["images"]:
+                        existing["images"].append(entry["image"])
+                    for source in entry["sources"]:
+                        if source not in existing["sources"]:
+                            existing["sources"].append(source)
+        if not debug_records:
+            sys.stderr.write(
+                "[error] release build produced no debug artefacts; enable symbol extraction so out/release/debug is populated (DEVOPS-REL-17-004).\n"
+            )
+            # Remove empty directories before failing.
+            with contextlib.suppress(FileNotFoundError, OSError):
+                if not any(self.debug_store_dir.iterdir()):
+                    self.debug_store_dir.rmdir()
+            with contextlib.suppress(FileNotFoundError, OSError):
+                if not any(self.debug_dir.iterdir()):
+                    self.debug_dir.rmdir()
+            raise RuntimeError(
+                "Debug store collection produced no build-id artefacts (DEVOPS-REL-17-004)."
+            )
+        entries = []
+        for record in debug_records.values():
+            entry = OrderedDict((
+                ("buildId", record["buildId"]),
+                ("platform", record["platform"]),
+                ("debugPath", record["debugPath"]),
+                ("sha256", record["sha256"]),
+                ("size", record["size"]),
+                ("components", sorted(record["components"])),
+                ("images", sorted(record["images"])),
+                ("sources", sorted(record["sources"])),
+            ))
+            entries.append(entry)
+        entries.sort(key=lambda item: (item["platform"], item["buildId"]))
+        manifest_path = self.debug_dir / "debug-manifest.json"
+        platform_counts: Dict[str, int] = {}
+        for entry in entries:
+            platform_counts[entry["platform"]] = platform_counts.get(entry["platform"], 0) + 1
+        missing_platforms = [
+            platform
+            for platform in self._missing_symbol_platforms
+            if platform_counts.get(platform, 0) == 0
+        ]
+        if missing_platforms:
+            raise RuntimeError(
+                "Debug extraction skipped all binaries for platforms without objcopy support: "
+                + ", ".join(sorted(missing_platforms))
+            )
+        manifest_data = OrderedDict((
+            ("generatedAt", self.release_date),
+            ("version", self.version),
+            ("channel", self.channel),
+            ("artifacts", entries),
+        ))
+        with manifest_path.open("w", encoding="utf-8") as handle:
+            json.dump(manifest_data, handle, indent=2)
+            handle.write("\n")
+        manifest_sha = compute_sha256(manifest_path)
+        sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
+        sha_path.write_text(f"{manifest_sha} {manifest_path.name}\n", encoding="utf-8")
+        manifest_rel = manifest_path.relative_to(self.output_dir).as_posix()
+        store_rel = self.debug_store_dir.relative_to(self.output_dir).as_posix()
+        platforms = sorted({entry["platform"] for entry in entries})
+        return OrderedDict((
+            ("manifest", manifest_rel),
+            ("sha256", manifest_sha),
+            ("entries", len(entries)),
+            ("platforms", platforms),
+            ("directory", store_rel),
+        ))
+
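
For orientation, this is roughly the shape of the `debug-manifest.json` that `_collect_debug_store` writes. The field names come straight from the code above; every value below is illustrative:

```json
{
  "generatedAt": "2025-01-01T00:00:00Z",
  "version": "2025.1.0",
  "channel": "edge",
  "artifacts": [
    {
      "buildId": "8f16ad0a47d4886ce002b9f56e2844fea4756a9e",
      "platform": "linux/amd64",
      "debugPath": "debug/.build-id/8f/16ad0a47d4886ce002b9f56e2844fea4756a9e.debug",
      "sha256": "…",
      "size": 1048576,
      "components": ["scanner"],
      "images": ["registry.example.com/stellaops/scanner@sha256:…"],
      "sources": ["usr/local/bin/scanner"]
    }
  ]
}
```
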
+    def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]:
+        if self.dry_run:
+            return []
+        entries: List[OrderedDict[str, Any]] = []
+        platforms = self.platforms if self.push else [None]
+        for platform in platforms:
+            platform_label = platform or (self.platforms[0] if self.platforms else "linux/amd64")
+            if self.push:
+                pull_cmd = ["docker", "pull"]
+                if platform:
+                    pull_cmd.extend(["--platform", platform])
+                pull_cmd.append(image_ref)
+                run(pull_cmd)
+            create_cmd = ["docker", "create"]
+            if platform:
+                create_cmd.extend(["--platform", platform])
+            create_cmd.append(image_ref)
+            container_id = run(create_cmd).strip()
+            export_path = self.temp_dir / f"{container_id}.tar"
+            try:
+                run(["docker", "export", container_id, "-o", str(export_path)], capture=False)
+            finally:
+                run(["docker", "rm", container_id], capture=False)
+            rootfs_dir = ensure_directory(self.temp_dir / f"{component_name}-{platform_label}-{uuid.uuid4().hex}")
+            try:
+                with tarfile.open(export_path, "r:*") as tar:
+                    self._safe_extract_tar(tar, rootfs_dir)
+            finally:
+                export_path.unlink(missing_ok=True)
+            try:
+                for file_path in rootfs_dir.rglob("*"):
+                    if not file_path.is_file() or file_path.is_symlink():
+                        continue
+                    if not self._is_elf(file_path):
+                        continue
+                    build_id, machine = self._read_build_id_and_machine(file_path)
+                    if not build_id:
+                        continue
+                    debug_file = self._debug_file_for_build_id(build_id)
+                    if not debug_file.exists():
+                        debug_file.parent.mkdir(parents=True, exist_ok=True)
+                        temp_debug = self.temp_dir / f"{build_id}.debug"
+                        with contextlib.suppress(FileNotFoundError):
+                            temp_debug.unlink()
+                        objcopy_tool = self._resolve_objcopy_tool(machine)
+                        if not objcopy_tool:
+                            self._emit_objcopy_warning(machine, platform_label, file_path)
+                            with contextlib.suppress(FileNotFoundError):
+                                temp_debug.unlink()
+                            continue
+                        try:
+                            run([objcopy_tool, "--only-keep-debug", str(file_path), str(temp_debug)], capture=False)
+                        except CommandError:
+                            with contextlib.suppress(FileNotFoundError):
+                                temp_debug.unlink()
+                            continue
+                        debug_file.parent.mkdir(parents=True, exist_ok=True)
+                        shutil.move(str(temp_debug), str(debug_file))
+                    sha = compute_sha256(debug_file)
+                    rel_debug = debug_file.relative_to(self.output_dir).as_posix()
+                    source_rel = file_path.relative_to(rootfs_dir).as_posix()
+                    entry = OrderedDict((
+                        ("component", component_name),
+                        ("image", image_ref),
+                        ("platform", platform_label),
+                        ("buildId", build_id),
+                        ("debugPath", rel_debug),
+                        ("sha256", sha),
+                        ("size", debug_file.stat().st_size),
+                        ("sources", [source_rel]),
+                    ))
+                    entries.append(entry)
+            finally:
+                shutil.rmtree(rootfs_dir, ignore_errors=True)
+        return entries
+
+    def _debug_file_for_build_id(self, build_id: str) -> pathlib.Path:
+        normalized = build_id.lower()
+        prefix = normalized[:2]
+        remainder = normalized[2:]
+        return self.debug_store_dir / prefix / f"{remainder}.debug"
+
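
The two-level fan-out above (first two hex characters of the build id, then the remainder plus `.debug`) follows the standard `.build-id` debuginfo layout, so debuggers can consume the store directly. A minimal sketch with a made-up build id:

```python
# Sketch only: map a GNU build-id to its store path (build id is illustrative).
import pathlib

store = pathlib.Path("out/release/debug/.build-id")
build_id = "8f16ad0a47d4886ce002b9f56e2844fea4756a9e"

debug_file = store / build_id[:2] / f"{build_id[2:]}.debug"
print(debug_file.as_posix())
# out/release/debug/.build-id/8f/16ad0a47d4886ce002b9f56e2844fea4756a9e.debug

# GDB resolves this layout when pointed at the parent directory:
#   (gdb) set debug-file-directory out/release/debug
```
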
+    @staticmethod
+    def _safe_extract_tar(tar: tarfile.TarFile, dest: pathlib.Path) -> None:
+        dest_root = dest.resolve()
+        members = tar.getmembers()
+        for member in members:
+            member_path = (dest / member.name).resolve()
+            # Plain startswith() would accept sibling paths such as "/dest-evil";
+            # is_relative_to() (used elsewhere in this file) does a real ancestry check.
+            if not member_path.is_relative_to(dest_root):
+                raise RuntimeError(f"Refusing to extract '{member.name}' outside of destination directory")
+        tar.extractall(dest)
+
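
The member check above guards against "tar-slip" archives whose entries climb out of the destination directory. A self-contained sanity check of the same idea (stdlib only; a standalone re-implementation, not the method itself):

```python
# Demonstrate that a crafted "../" tar entry is rejected before extraction.
import io
import pathlib
import tarfile

def safe_extract(tar: tarfile.TarFile, dest: pathlib.Path) -> None:
    dest_root = dest.resolve()
    for member in tar.getmembers():
        if not (dest / member.name).resolve().is_relative_to(dest_root):
            raise RuntimeError(f"Refusing to extract '{member.name}'")
    tar.extractall(dest)

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    info = tarfile.TarInfo("../escape.txt")  # traversal entry
    data = b"oops"
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))
buf.seek(0)

with tarfile.open(fileobj=buf, mode="r:") as tar:
    try:
        safe_extract(tar, pathlib.Path("/tmp/extract-test"))
    except RuntimeError as exc:
        print(exc)  # Refusing to extract '../escape.txt'
```
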
+    @staticmethod
+    def _is_elf(path: pathlib.Path) -> bool:
+        try:
+            with path.open("rb") as handle:
+                return handle.read(4) == b"\x7fELF"
+        except OSError:
+            return False
+
+    def _read_build_id_and_machine(self, path: pathlib.Path) -> Tuple[Optional[str], Optional[str]]:
+        try:
+            header_output = run(["readelf", "-nh", str(path)])
+        except CommandError:
+            return None, None
+        build_id: Optional[str] = None
+        machine: Optional[str] = None
+        for line in header_output.splitlines():
+            stripped = line.strip()
+            if stripped.startswith("Build ID:"):
+                build_id = stripped.split("Build ID:", 1)[1].strip().lower()
+            elif stripped.startswith("Machine:"):
+                machine = stripped.split("Machine:", 1)[1].strip()
+        return build_id, machine
+
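
`readelf -nh` prints the ELF header (including the `Machine:` field) followed by the note sections (including the GNU build-id note), and the parser above simply scans for those two prefixes. An abridged, illustrative transcript with the same line-oriented parse:

```python
# Abridged readelf -nh output (illustrative) and the prefix scan used above.
sample = """\
ELF Header:
  Machine:                           Advanced Micro Devices X86-64
Displaying notes found in: .note.gnu.build-id
    Build ID: 8f16ad0a47d4886ce002b9f56e2844fea4756a9e
"""

build_id = machine = None
for line in sample.splitlines():
    stripped = line.strip()
    if stripped.startswith("Build ID:"):
        build_id = stripped.split("Build ID:", 1)[1].strip().lower()
    elif stripped.startswith("Machine:"):
        machine = stripped.split("Machine:", 1)[1].strip()

print(machine)   # Advanced Micro Devices X86-64
print(build_id)  # 8f16ad0a47d4886ce002b9f56e2844fea4756a9e
```
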
+    def _resolve_objcopy_tool(self, machine: Optional[str]) -> Optional[str]:
+        key = (machine or "generic").lower()
+        if key in self._objcopy_cache:
+            return self._objcopy_cache[key]
+
+        env_override = None
+        if machine and "aarch64" in machine.lower():
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_AARCH64")
+            candidates = [
+                env_override,
+                "aarch64-linux-gnu-objcopy",
+                "llvm-objcopy",
+                "objcopy",
+            ]
+        elif machine and any(token in machine.lower() for token in ("x86-64", "amd", "x86_64")):
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_AMD64")
+            candidates = [
+                env_override,
+                "objcopy",
+                "llvm-objcopy",
+            ]
+        else:
+            env_override = os.environ.get("STELLAOPS_OBJCOPY_DEFAULT")
+            candidates = [
+                env_override,
+                "objcopy",
+                "llvm-objcopy",
+            ]
+
+        for candidate in candidates:
+            if not candidate:
+                continue
+            tool = shutil.which(candidate)
+            if tool:
+                self._objcopy_cache[key] = tool
+                return tool
+        self._objcopy_cache[key] = None
+        return None
+
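
Resolution order is: explicit environment override, then the arch-specific binutils name (for aarch64), then generic `objcopy`/`llvm-objcopy` from PATH, with both hits and misses memoized per machine string. A hedged usage sketch of the override hook (the override path is hypothetical):

```python
# Sketch: steer aarch64 extraction to a specific cross objcopy binary.
import os

os.environ["STELLAOPS_OBJCOPY_AARCH64"] = "/opt/cross/bin/aarch64-linux-gnu-objcopy"
# _resolve_objcopy_tool("AArch64") now tries the override first, then
# aarch64-linux-gnu-objcopy, llvm-objcopy, objcopy on PATH; the outcome
# (including None for "nothing found") is cached under the machine key.
```
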
+    def _emit_objcopy_warning(self, machine: Optional[str], platform: str, file_path: pathlib.Path) -> None:
+        machine_label = machine or "unknown-machine"
+        count = self._missing_symbol_platforms.get(platform, 0)
+        self._missing_symbol_platforms[platform] = count + 1
+        if count == 0:
+            sys.stderr.write(
+                f"[warn] no objcopy tool available for {machine_label}; skipping debug extraction for {file_path}.\n"
+            )
+
     # ----------------
     # Helm + compose
     # ----------------
@@ -546,6 +831,7 @@ class ReleaseBuilder:
         components: List[Mapping[str, Any]],
         helm_meta: Optional[Mapping[str, Any]],
         compose_meta: List[Mapping[str, Any]],
+        debug_meta: Optional[Mapping[str, Any]],
     ) -> Dict[str, Any]:
         manifest = OrderedDict()
         manifest["release"] = OrderedDict((
@@ -559,6 +845,8 @@ class ReleaseBuilder:
         manifest["charts"] = [helm_meta]
         if compose_meta:
             manifest["compose"] = compose_meta
+        if debug_meta:
+            manifest["debugStore"] = debug_meta
         return manifest
 
 
@@ -593,6 +881,18 @@ def write_manifest(manifest: Mapping[str, Any], output_dir: pathlib.Path) -> pat
     output_path = output_dir / "release.yaml"
     with output_path.open("w", encoding="utf-8") as handle:
         handle.write(final_yaml)
+    sha_path = output_path.with_name(output_path.name + ".sha256")
+    yaml_file_digest = compute_sha256(output_path)
+    sha_path.write_text(f"{yaml_file_digest} {output_path.name}\n", encoding="utf-8")
+
+    json_text = json.dumps(manifest_with_checksum, indent=2)
+    json_path = output_dir / "release.json"
+    with json_path.open("w", encoding="utf-8") as handle:
+        handle.write(json_text)
+        handle.write("\n")
+    json_digest = compute_sha256(json_path)
+    json_sha_path = json_path.with_name(json_path.name + ".sha256")
+    json_sha_path.write_text(f"{json_digest} {json_path.name}\n", encoding="utf-8")
     return output_path
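
Both `release.yaml` and `release.json` now ship with `.sha256` sidecars in `<digest> <filename>` form. A hedged verification sketch; the `out/release` location is an assumption, not taken from this diff:

```python
# Re-verify the sha256 sidecars written by write_manifest (stdlib only).
import hashlib
import pathlib

def verify_sidecar(artifact: pathlib.Path) -> bool:
    sidecar = artifact.with_name(artifact.name + ".sha256")
    recorded_digest, recorded_name = sidecar.read_text(encoding="utf-8").split()
    actual = hashlib.sha256(artifact.read_bytes()).hexdigest()
    return recorded_name == artifact.name and recorded_digest == actual

out_dir = pathlib.Path("out/release")  # assumed output directory
for name in ("release.yaml", "release.json"):
    print(name, verify_sidecar(out_dir / name))
```
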