git.stella-ops.org/scripts/update-binary-manifests.py
master e91da22836
feat: Add new provenance and crypto registry documentation
- Introduced attestation inventory and subject-rekor mapping files for tracking Docker packages.
- Added a comprehensive crypto registry decision document outlining defaults and required follow-ups.
- Created an offline feeds manifest for bundling air-gap resources.
- Implemented a script to generate and update binary manifests for curated binaries.
- Added a verification script to ensure binary artefacts are located in approved directories.
- Defined new schemas for AdvisoryEvidenceBundle, OrchestratorEnvelope, ScannerReportReadyPayload, and ScannerScanCompletedPayload.
- Established project files for StellaOps.Orchestrator.Schemas and StellaOps.PolicyAuthoritySignals.Contracts.
- Updated vendor manifest to track pinned binaries for integrity.
2025-11-18 23:47:13 +02:00


#!/usr/bin/env python3
"""Generate manifests for curated binaries.
- local-nugets/manifest.json : NuGet packages (id, version, sha256)
- vendor/manifest.json : Plugin/tool/deploy/ops binaries with sha256
- offline/feeds/manifest.json : Offline bundles (tar/tgz/zip) with sha256
Intended to be idempotent and run in CI to ensure manifests stay current.
"""
from __future__ import annotations
import hashlib
import json
import re
from datetime import datetime, timezone
from pathlib import Path
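
# This script lives in scripts/, so two .parent hops resolve to the repository root;
# every path below is built from ROOT.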
ROOT = Path(__file__).resolve().parent.parent


def iso_timestamp() -> str:
    return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")


def sha256(path: Path) -> str:
    with path.open("rb") as fh:
        return hashlib.sha256(fh.read()).hexdigest()


VERSION_RE = re.compile(r"^\d+\.")


def split_id_version(package_path: Path) -> tuple[str, str]:
    stem = package_path.stem
    parts = stem.split(".")
    # Scan left to right so the version keeps every trailing numeric segment
    # (e.g. "1.2.3" rather than only "2.3").
    for i in range(1, len(parts)):
        version = ".".join(parts[i:])
        if VERSION_RE.match(version):
            pkg_id = ".".join(parts[:i])
            return pkg_id, version
    return stem, "unknown"
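# Example: "StellaOps.Orchestrator.Schemas.1.2.3.nupkg" (version illustrative)
# splits into ("StellaOps.Orchestrator.Schemas", "1.2.3"); stems without a dotted
# numeric suffix fall back to ("<stem>", "unknown").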


def write_json(path: Path, payload: dict) -> None:
    path.write_text(json.dumps(payload, indent=2))
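

# generate_local_nugets_manifest() below writes local-nugets/manifest.json shaped
# roughly like this (values illustrative):
# {
#   "generated_utc": "2025-11-18T21:47:13.000000Z",
#   "source": "StellaOps binary prereq consolidation",
#   "base_dir": "local-nugets",
#   "count": 1,
#   "packages": [
#     {
#       "id": "StellaOps.Orchestrator.Schemas",
#       "version": "1.2.3",
#       "filename": "StellaOps.Orchestrator.Schemas.1.2.3.nupkg",
#       "sha256": "<hex digest>"
#     }
#   ]
# }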
def generate_local_nugets_manifest() -> None:
    nuget_dir = ROOT / "local-nugets"
    nuget_dir.mkdir(exist_ok=True)
    packages = []
    for pkg in sorted(nuget_dir.glob("*.nupkg"), key=lambda p: p.name.lower()):
        pkg_id, version = split_id_version(pkg)
        packages.append(
            {
                "id": pkg_id,
                "version": version,
                "filename": pkg.name,
                "sha256": sha256(pkg),
            }
        )
    manifest = {
        "generated_utc": iso_timestamp(),
        "source": "StellaOps binary prereq consolidation",
        "base_dir": "local-nugets",
        "count": len(packages),
        "packages": packages,
    }
    write_json(nuget_dir / "manifest.json", manifest)


BINARY_EXTS = {".dll", ".exe", ".so", ".dylib", ".bin"}
VENDOR_ROOTS = ["plugins", "tools", "deploy", "ops", "vendor"]
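

# Walk each vendor root for files with a pinned-binary extension and record the
# repo-relative path, sha256, and owning top-level directory; roots that do not
# exist yet are skipped.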
def generate_vendor_manifest() -> None:
    entries = []
    for root_name in VENDOR_ROOTS:
        root_dir = ROOT / root_name
        if not root_dir.exists():
            continue
        for path in root_dir.rglob("*"):
            if path.is_file() and path.suffix.lower() in BINARY_EXTS:
                entries.append(
                    {
                        "path": path.relative_to(ROOT).as_posix(),
                        "sha256": sha256(path),
                        "type": "binary",
                        "owner": root_name,
                    }
                )
    entries.sort(key=lambda x: x["path"])
    manifest = {
        "generated_utc": iso_timestamp(),
        "summary": "Pinned binaries (non-NuGet) tracked for integrity; relocate new artefacts here or under offline/feeds.",
        "entries": entries,
    }
    vendor_dir = ROOT / "vendor"
    vendor_dir.mkdir(exist_ok=True)
    write_json(vendor_dir / "manifest.json", manifest)


FEED_SUFFIXES = (".tar.gz", ".tgz", ".tar", ".zip", ".gz")
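

# Rebuild offline/feeds/manifest.json while carrying over the existing "summary"
# and any hand-written per-feed "description" entries, so regeneration does not
# erase curator notes.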
def generate_offline_manifest() -> None:
    feeds_dir = ROOT / "offline" / "feeds"
    feeds_dir.mkdir(parents=True, exist_ok=True)
    existing = {}
    manifest_path = feeds_dir / "manifest.json"
    if manifest_path.exists():
        try:
            existing = json.loads(manifest_path.read_text())
        except json.JSONDecodeError:
            existing = {}
    prior = {f.get("name"): f for f in existing.get("feeds", []) if isinstance(f, dict)}
    feeds = []
    for path in sorted(feeds_dir.rglob("*"), key=lambda p: p.as_posix()):
        if path.is_file() and any(path.name.endswith(suf) for suf in FEED_SUFFIXES):
            name = path.name
            # strip first matching suffix for readability
            for suf in FEED_SUFFIXES:
                if name.endswith(suf):
                    name = name[: -len(suf)]
                    break
            previous = prior.get(name, {})
            feeds.append(
                {
                    "name": name,
                    "path": path.relative_to(ROOT).as_posix(),
                    "sha256": sha256(path),
                    "description": previous.get("description", ""),
                }
            )
    manifest = {
        "generated_utc": iso_timestamp(),
        "summary": existing.get(
            "summary",
            "Offline feed bundles registered here. Add entries when baking air-gap bundles.",
        ),
        "feeds": feeds,
    }
    write_json(manifest_path, manifest)


def main() -> None:
    generate_local_nugets_manifest()
    generate_vendor_manifest()
    generate_offline_manifest()


if __name__ == "__main__":
    main()