feat: Add new provenance and crypto registry documentation

- Introduced attestation inventory and subject-rekor mapping files for tracking Docker packages.
- Added a comprehensive crypto registry decision document outlining defaults and required follow-ups.
- Created an offline feeds manifest for bundling air-gap resources.
- Implemented a script to generate and update binary manifests for curated binaries.
- Added a verification script to ensure binary artefacts are located in approved directories.
- Defined new schemas for AdvisoryEvidenceBundle, OrchestratorEnvelope, ScannerReportReadyPayload, and ScannerScanCompletedPayload.
- Established project files for StellaOps.Orchestrator.Schemas and StellaOps.PolicyAuthoritySignals.Contracts.
- Updated vendor manifest to track pinned binaries for integrity.
Commit e91da22836 (parent d3ecd7f8e6), authored 2025-11-18 23:47:13 +02:00, contained in: master
44 changed files with 6793 additions and 99 deletions

@@ -0,0 +1,160 @@
#!/usr/bin/env python3
"""Generate manifests for curated binaries.
- local-nugets/manifest.json : NuGet packages (id, version, sha256)
- vendor/manifest.json : Plugin/tool/deploy/ops binaries with sha256
- offline/feeds/manifest.json : Offline bundles (tar/tgz/zip) with sha256
Intended to be idempotent and run in CI to ensure manifests stay current.
"""
from __future__ import annotations
import hashlib
import json
import re
from datetime import datetime, timezone
from pathlib import Path
ROOT = Path(__file__).resolve().parent.parent
def iso_timestamp() -> str:
return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
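# Example output: "2025-11-18T21:47:13.123456+00:00" becomes "2025-11-18T21:47:13.123456Z".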
def sha256(path: Path) -> str:
    digest = hashlib.sha256()
    with path.open("rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # stream large bundles instead of slurping them into memory
            digest.update(chunk)
    return digest.hexdigest()
VERSION_RE = re.compile(r"^\d+\.")
def split_id_version(package_path: Path) -> tuple[str, str]:
    stem = package_path.stem
    parts = stem.split(".")
    # Scan from the left so the longest version tail wins; scanning from the
    # right stops early on a shorter numeric tail (e.g. "0.3" inside "13.0.3").
    for i in range(1, len(parts)):
        version = ".".join(parts[i:])
        if VERSION_RE.match(version):
            return ".".join(parts[:i]), version
    return stem, "unknown"
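# Worked examples (package names hypothetical):
#   "Newtonsoft.Json.13.0.3.nupkg"  -> ("Newtonsoft.Json", "13.0.3")
#   "Tool.nupkg" (no numeric tail)  -> ("Tool", "unknown")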
def write_json(path: Path, payload: dict) -> None:
    # Trailing newline keeps the generated manifests POSIX-friendly and diff-clean.
    path.write_text(json.dumps(payload, indent=2) + "\n")
def generate_local_nugets_manifest() -> None:
nuget_dir = ROOT / "local-nugets"
nuget_dir.mkdir(exist_ok=True)
packages = []
for pkg in sorted(nuget_dir.glob("*.nupkg"), key=lambda p: p.name.lower()):
pkg_id, version = split_id_version(pkg)
packages.append(
{
"id": pkg_id,
"version": version,
"filename": pkg.name,
"sha256": sha256(pkg),
}
)
manifest = {
"generated_utc": iso_timestamp(),
"source": "StellaOps binary prereq consolidation",
"base_dir": "local-nugets",
"count": len(packages),
"packages": packages,
}
write_json(nuget_dir / "manifest.json", manifest)
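# A resulting package entry might look like (values hypothetical):
#   {"id": "Serilog", "version": "3.1.1",
#    "filename": "Serilog.3.1.1.nupkg", "sha256": "<64 hex chars>"}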
BINARY_EXTS = {".dll", ".exe", ".so", ".dylib", ".bin"}
VENDOR_ROOTS = ["plugins", "tools", "deploy", "ops", "vendor"]
def generate_vendor_manifest() -> None:
entries = []
for root_name in VENDOR_ROOTS:
root_dir = ROOT / root_name
if not root_dir.exists():
continue
for path in root_dir.rglob("*"):
if path.is_file() and path.suffix.lower() in BINARY_EXTS:
entries.append(
{
"path": path.relative_to(ROOT).as_posix(),
"sha256": sha256(path),
"type": "binary",
"owner": root_name,
}
)
entries.sort(key=lambda x: x["path"])
manifest = {
"generated_utc": iso_timestamp(),
"summary": "Pinned binaries (non-NuGet) tracked for integrity; relocate new artefacts here or under offline/feeds.",
"entries": entries,
}
vendor_dir = ROOT / "vendor"
vendor_dir.mkdir(exist_ok=True)
write_json(vendor_dir / "manifest.json", manifest)
# Order matters: ".tar.gz" must precede ".gz" so the strip loop below removes the longest suffix first.
FEED_SUFFIXES = (".tar.gz", ".tgz", ".tar", ".zip", ".gz")
def generate_offline_manifest() -> None:
feeds_dir = ROOT / "offline" / "feeds"
feeds_dir.mkdir(parents=True, exist_ok=True)
existing = {}
manifest_path = feeds_dir / "manifest.json"
if manifest_path.exists():
try:
existing = json.loads(manifest_path.read_text())
except json.JSONDecodeError:
existing = {}
prior = {f.get("name"): f for f in existing.get("feeds", []) if isinstance(f, dict)}
feeds = []
for path in sorted(feeds_dir.rglob("*"), key=lambda p: p.as_posix()):
if path.is_file() and any(path.name.endswith(suf) for suf in FEED_SUFFIXES):
name = path.name
# strip first matching suffix for readability
for suf in FEED_SUFFIXES:
if name.endswith(suf):
name = name[: -len(suf)]
break
previous = prior.get(name, {})
feeds.append(
{
"name": name,
"path": path.relative_to(ROOT).as_posix(),
"sha256": sha256(path),
"description": previous.get("description", ""),
}
)
manifest = {
"generated_utc": iso_timestamp(),
"summary": existing.get(
"summary",
"Offline feed bundles registered here. Add entries when baking air-gap bundles.",
),
"feeds": feeds,
}
write_json(manifest_path, manifest)
def main() -> None:
generate_local_nugets_manifest()
generate_vendor_manifest()
generate_offline_manifest()
if __name__ == "__main__":
main()
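
Because the generator is idempotent, CI can treat any diff it leaves behind as manifest drift. A minimal gate, assuming the script is saved as scripts/generate_binary_manifests.py (path hypothetical):

    python3 scripts/generate_binary_manifests.py
    git diff --exit-code -- local-nugets/manifest.json vendor/manifest.json offline/feeds/manifest.json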

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -euo pipefail
# Verifies binary artefacts live only in approved locations.
# Allowed roots: local-nugets (curated feed), .nuget/packages (cache), vendor (pinned binaries),
# offline (air-gap bundles/templates), plugins/tools/deploy/ops (module-owned binaries).
repo_root="$(git rev-parse --show-toplevel)"
cd "$repo_root"
# Extensions considered binary artefacts (dot in "tar\.gz" escaped so it matches literally).
binary_ext="(nupkg|dll|exe|so|dylib|a|lib|tar|tar\.gz|tgz|zip|jar|deb|rpm|bin)"
# Locations allowed to contain binaries.
allowed_prefix="^(local-nugets|local-nugets/packages|vendor|offline|plugins|tools|deploy|ops|third_party|docs/artifacts|samples|src/.*/Fixtures|src/.*/fixtures)/"
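# Examples (hypothetical paths): "docs/report.pdf" is ignored (extension not listed),
# "src/Foo/native.so" would be flagged, "vendor/lib/native.so" is allowed.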
# Only consider files that currently exist in the working tree (skip deleted placeholders).
violations=$(git ls-files | while read -r f; do [[ -f "$f" ]] && echo "$f"; done | grep -E "\\.${binary_ext}$" | grep -Ev "$allowed_prefix" || true)
if [[ -n "$violations" ]]; then
echo "Binary artefacts found outside approved directories:" >&2
echo "$violations" >&2
exit 1
fi
printf "Binary layout OK (allowed roots: %s)\n" "$allowed_prefix"
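
Run against a tree containing, say, a stray src/Tools/bin/helper.exe (hypothetical path: the .exe extension matches and no approved root covers it), the check prints the offenders to stderr and exits non-zero:

    Binary artefacts found outside approved directories:
    src/Tools/bin/helper.exe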