Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools

- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
commit 96d52884e8 (parent 651b8e0fa3), 2025-10-27 08:00:11 +02:00
712 changed files with 49449 additions and 6124 deletions


@@ -0,0 +1,221 @@
#!/usr/bin/env python3
"""Mirror release debug-store artefacts into the Offline Kit staging tree.
This helper copies the release `debug/` directory (including `.build-id/`,
`debug-manifest.json`, and the `.sha256` companion) into the Offline Kit
output directory and verifies the manifest hashes after the copy. A summary
document is written under `metadata/debug-store.json` so packaging jobs can
surface the available build-ids and validation status.
"""

from __future__ import annotations

import argparse
import datetime as dt
import hashlib
import json
import pathlib
import shutil
import sys
from typing import Iterable, Tuple

REPO_ROOT = pathlib.Path(__file__).resolve().parents[2]


def compute_sha256(path: pathlib.Path) -> str:
    sha = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1024 * 1024), b""):
            sha.update(chunk)
    return sha.hexdigest()


def load_manifest(manifest_path: pathlib.Path) -> dict:
    with manifest_path.open("r", encoding="utf-8") as handle:
        return json.load(handle)


def parse_manifest_sha(sha_path: pathlib.Path) -> str | None:
    if not sha_path.exists():
        return None
    text = sha_path.read_text(encoding="utf-8").strip()
    if not text:
        return None
    # Allow either "<sha>" or "<sha> filename" formats.
    return text.split()[0]
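
# Sketch of the two `.sha256` layouts accepted above (digests truncated and
# illustrative): a bare digest, or `sha256sum`-style "digest  filename".
#
#   "9f2a0c5d..."                        -> "9f2a0c5d..."
#   "9f2a0c5d...  debug-manifest.json"   -> "9f2a0c5d..."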


def iter_debug_files(base_dir: pathlib.Path) -> Iterable[pathlib.Path]:
    for path in base_dir.rglob("*"):
        if path.is_file():
            yield path


def copy_debug_store(source_root: pathlib.Path, target_root: pathlib.Path, *, dry_run: bool) -> None:
    if dry_run:
        print(f"[dry-run] Would copy '{source_root}' -> '{target_root}'")
        return
    if target_root.exists():
        shutil.rmtree(target_root)
    shutil.copytree(source_root, target_root)
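
# Design note: the existing target tree is removed and recreated rather than
# merged, so build-ids dropped from the release cannot linger in the mirror
# from a previous run.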


def verify_debug_store(manifest: dict, offline_root: pathlib.Path) -> Tuple[int, int]:
    """Return (verified_count, total_entries)."""
    artifacts = manifest.get("artifacts", [])
    verified = 0
    for entry in artifacts:
        debug_path = entry.get("debugPath")
        expected_sha = entry.get("sha256")
        expected_size = entry.get("size")
        if not debug_path or not expected_sha:
            continue
        relative = pathlib.PurePosixPath(debug_path)
        resolved = (offline_root.parent / relative).resolve()
        if not resolved.exists():
            raise FileNotFoundError(f"Debug artefact missing after mirror: {relative}")
        actual_sha = compute_sha256(resolved)
        if actual_sha != expected_sha:
            raise ValueError(
                f"Digest mismatch for {relative}: expected {expected_sha}, found {actual_sha}"
            )
        if expected_size is not None:
            actual_size = resolved.stat().st_size
            if actual_size != expected_size:
                raise ValueError(
                    f"Size mismatch for {relative}: expected {expected_size}, found {actual_size}"
                )
        verified += 1
    return verified, len(artifacts)
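
# For reference, the manifest entry shape the verifier assumes, inferred from
# the lookups above (field values are illustrative, not taken from a real
# manifest):
#
#   {
#     "debugPath": "debug/.build-id/ab/cdef0123...debug",
#     "sha256": "9f2a0c5d...",
#     "size": 123456,
#     "buildId": "abcdef0123...",
#     "platform": "linux-x64"
#   }
#
# `debugPath` is resolved against the parent of the mirrored `debug/` tree,
# which is why entries are expected to carry the `debug/` prefix.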


def summarize_store(manifest: dict, manifest_sha: str | None, offline_root: pathlib.Path, summary_path: pathlib.Path) -> None:
    debug_files = [
        path
        for path in iter_debug_files(offline_root)
        if path.suffix == ".debug"
    ]
    total_size = sum(path.stat().st_size for path in debug_files)
    build_ids = sorted(
        {entry.get("buildId") for entry in manifest.get("artifacts", []) if entry.get("buildId")}
    )
    summary = {
        "generatedAt": dt.datetime.now(tz=dt.timezone.utc)
        .replace(microsecond=0)
        .isoformat()
        .replace("+00:00", "Z"),
        "manifestGeneratedAt": manifest.get("generatedAt"),
        "manifestSha256": manifest_sha,
        "platforms": manifest.get("platforms")
        or sorted({entry.get("platform") for entry in manifest.get("artifacts", []) if entry.get("platform")}),
        "artifactCount": len(manifest.get("artifacts", [])),
        "buildIds": {
            "total": len(build_ids),
            "samples": build_ids[:10],
        },
        "debugFiles": {
            "count": len(debug_files),
            "totalSizeBytes": total_size,
        },
    }
    summary_path.parent.mkdir(parents=True, exist_ok=True)
    with summary_path.open("w", encoding="utf-8") as handle:
        json.dump(summary, handle, indent=2)
        handle.write("\n")
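
# The resulting `metadata/debug-store.json` then looks roughly like this
# (values illustrative):
#
#   {
#     "generatedAt": "2025-10-27T06:00:11Z",
#     "manifestGeneratedAt": "2025-10-27T05:58:40Z",
#     "manifestSha256": "9f2a0c5d...",
#     "platforms": ["linux-x64"],
#     "artifactCount": 42,
#     "buildIds": {"total": 42, "samples": ["abcdef0123..."]},
#     "debugFiles": {"count": 42, "totalSizeBytes": 104857600}
#   }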


def resolve_release_debug_dir(base: pathlib.Path) -> pathlib.Path:
    debug_dir = base / "debug"
    if debug_dir.exists():
        return debug_dir
    # Allow passing the debug directory itself (e.g. out/release/stable/debug).
    if base.name == "debug":
        return base
    raise FileNotFoundError(f"Debug directory not found under '{base}'")
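
# Resolution examples under the default layout (paths illustrative):
#
#   out/release/stable        -> out/release/stable/debug
#   out/release/stable/debug  -> out/release/stable/debug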


def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release-dir",
        type=pathlib.Path,
        default=REPO_ROOT / "out" / "release",
        help="Release output directory containing the debug store (default: %(default)s)",
    )
    parser.add_argument(
        "--offline-kit-dir",
        type=pathlib.Path,
        default=REPO_ROOT / "out" / "offline-kit",
        help="Offline Kit staging directory (default: %(default)s)",
    )
    parser.add_argument(
        "--verify-only",
        action="store_true",
        help="Skip copying and only verify the existing offline kit debug store",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print actions without copying files",
    )
    return parser.parse_args(argv)
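
# Typical invocations (the script name is illustrative; the diff does not
# show the file's path — the flags come from parse_args above):
#
#   python mirror_debug_store.py                                   # mirror + verify
#   python mirror_debug_store.py --release-dir out/release/stable  # channel dir
#   python mirror_debug_store.py --verify-only                     # re-check only
#   python mirror_debug_store.py --dry-run                         # plan, no copy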


def main(argv: list[str] | None = None) -> int:
    args = parse_args(argv)
    try:
        source_debug = resolve_release_debug_dir(args.release_dir.resolve())
    except FileNotFoundError as exc:
        print(f"error: {exc}", file=sys.stderr)
        return 2
    target_root = (args.offline_kit_dir / "debug").resolve()
    if not args.verify_only:
        copy_debug_store(source_debug, target_root, dry_run=args.dry_run)
        if args.dry_run:
            return 0
    manifest_path = target_root / "debug-manifest.json"
    if not manifest_path.exists():
        print(f"error: offline kit manifest missing at {manifest_path}", file=sys.stderr)
        return 3
    manifest = load_manifest(manifest_path)
    manifest_sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
    recorded_sha = parse_manifest_sha(manifest_sha_path)
    recomputed_sha = compute_sha256(manifest_path)
    if recorded_sha and recorded_sha != recomputed_sha:
        print(
            f"warning: manifest SHA mismatch (recorded {recorded_sha}, recomputed {recomputed_sha}); updating checksum",
            file=sys.stderr,
        )
        manifest_sha_path.write_text(f"{recomputed_sha} {manifest_path.name}\n", encoding="utf-8")
    verified, total = verify_debug_store(manifest, target_root)
    print(f"✔ verified {verified}/{total} debug artefacts (manifest SHA {recomputed_sha})")
    summary_path = args.offline_kit_dir / "metadata" / "debug-store.json"
    summarize_store(manifest, recomputed_sha, target_root, summary_path)
    print(f" summary written to {summary_path}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())