CI/CD consolidation

This commit is contained in:
StellaOps Bot
2025-12-26 17:32:23 +02:00
parent a866eb6277
commit c786faae84
638 changed files with 3821 additions and 181 deletions

View File

@@ -0,0 +1,14 @@
# Mirror signing helpers
- `make-thin-v1.sh`: builds thin bundle v1, computes checksums, emits bundle meta (offline/rekor/mirror gaps), optional DSSE+TUF signing when `SIGN_KEY` is set, and runs verifier.
- `sign_thin_bundle.py`: signs manifest (DSSE), bundle meta (DSSE), and root/targets/snapshot/timestamp JSON using an Ed25519 PEM key.
- `verify_thin_bundle.py`: checks SHA256 sidecars, manifest schema, tar determinism, required layers, optional bundle meta and DSSE signatures; accepts `--bundle-meta`, `--pubkey`, `--tenant`, `--environment`.
- `ci-sign.sh`: CI wrapper. Set `MIRROR_SIGN_KEY_B64` (base64-encoded Ed25519 PEM) and run; it builds, signs, and verifies in one step, emitting `milestone.json` with manifest/tar/bundle hashes.
- `verify_oci_layout.py`: validates OCI layout/index/manifest and blob digests when `OCI=1` is used.
- `mirror-create.sh`: convenience wrapper to build + verify thin bundles (optional SIGN_KEY, time anchor, OCI flag).
- `mirror-verify.sh`: wrapper around `verify_thin_bundle.py` for quick hash/DSSE checks.
- `schedule-export-center-run.sh`: schedules an Export Center run for mirror bundles via HTTP POST; set `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_TOKEN` (Bearer), optional `EXPORT_CENTER_PROJECT`; logs to `AUDIT_LOG_PATH` (default `logs/export-center-schedule.log`). Set `EXPORT_CENTER_ARTIFACTS_JSON` to inject bundle metadata into the request payload.
- `export-center-wire.sh`: builds `export-center-handoff.json` from `out/mirror/thin/milestone.json`, emits recommended Export Center targets, and (when `EXPORT_CENTER_AUTO_SCHEDULE=1`) calls `schedule-export-center-run.sh` to push the run. Outputs live under `out/mirror/thin/export-center/`.
- CI: `.gitea/workflows/mirror-sign.yml` runs this script after signing; scheduling remains opt-in via secrets `EXPORT_CENTER_BASE_URL`, `EXPORT_CENTER_TOKEN`, `EXPORT_CENTER_TENANT`, `EXPORT_CENTER_PROJECT`, `EXPORT_CENTER_AUTO_SCHEDULE`.
Artifacts live under `out/mirror/thin/`.

View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Verifies signing prerequisites without requiring the actual key contents.
set -euo pipefail

if [[ -z "${MIRROR_SIGN_KEY_B64:-}" ]]; then
  if [[ "${REQUIRE_PROD_SIGNING:-0}" == "1" ]]; then
    echo "[error] MIRROR_SIGN_KEY_B64 is required for production signing; set the secret before running." >&2
    exit 2
  fi
  echo "[warn] MIRROR_SIGN_KEY_B64 is not set; ci-sign.sh will fall back to embedded test key (non-production)." >&2
else
  # basic base64 sanity check — only when a key was actually provided.
  # (Previously this ran unconditionally; with `set -u` the unset variable
  # aborted the script even on the intended non-production fallback path.)
  if ! printf "%s" "$MIRROR_SIGN_KEY_B64" | base64 -d >/dev/null 2>&1; then
    echo "MIRROR_SIGN_KEY_B64 is not valid base64" >&2
    exit 3
  fi
fi

# ensure scripts exist
for f in scripts/mirror/ci-sign.sh scripts/mirror/sign_thin_bundle.py scripts/mirror/verify_thin_bundle.py; do
  [[ -x "$f" || -f "$f" ]] || { echo "$f missing" >&2; exit 4; }
done
echo "Signing prerequisites present (key env set, scripts available)."

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env bash
set -euo pipefail
# Allow CI to fall back to a deterministic test key when MIRROR_SIGN_KEY_B64 is unset,
# but forbid this on release/tag builds when REQUIRE_PROD_SIGNING=1.
# Throwaway dev key (Ed25519) generated 2025-11-23; matches the value documented in
# docs/modules/mirror/signing-runbook.md. Safe for non-production smoke only.
DEFAULT_TEST_KEY_B64="LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1DNENBUUF3QlFZREsyVndCQ0lFSURqb3pDRVdKVVFUdW1xZ2gyRmZXcVBaemlQbkdaSzRvOFZRTThGYkZCSEcKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo="
# Select the signing key: require the real secret on production builds,
# otherwise continue with the embedded throwaway key and a loud warning.
if [[ -z "${MIRROR_SIGN_KEY_B64:-}" ]]; then
  if [[ "${REQUIRE_PROD_SIGNING:-0}" == "1" ]]; then
    echo "[error] MIRROR_SIGN_KEY_B64 is required for production signing; refusing to use test key." >&2
    exit 1
  fi
  echo "[warn] MIRROR_SIGN_KEY_B64 not set; using embedded test key (non-production) for CI signing" >&2
  MIRROR_SIGN_KEY_B64="$DEFAULT_TEST_KEY_B64"
fi
# Resolve repo root relative to this script (scripts/mirror/ -> two levels up).
ROOT=$(cd "$(dirname "$0")/../.." && pwd)
KEYDIR="$ROOT/out/mirror/thin/tuf/keys"
mkdir -p "$KEYDIR"
KEYFILE="$KEYDIR/ci-ed25519.pem"
# Materialize the private key with owner-only permissions.
printf "%s" "$MIRROR_SIGN_KEY_B64" | base64 -d > "$KEYFILE"
chmod 600 "$KEYFILE"
# Export public key for TUF keyid calculation
openssl pkey -in "$KEYFILE" -pubout -out "$KEYDIR/ci-ed25519.pub" >/dev/null 2>&1
# Build inputs — every value is overridable from the caller's environment.
STAGE=${STAGE:-$ROOT/out/mirror/thin/stage-v1}
CREATED=${CREATED:-$(date -u +%Y-%m-%dT%H:%M:%SZ)}
TENANT_SCOPE=${TENANT_SCOPE:-tenant-demo}
ENV_SCOPE=${ENV_SCOPE:-lab}
CHUNK_SIZE=${CHUNK_SIZE:-5242880}
CHECKPOINT_FRESHNESS=${CHECKPOINT_FRESHNESS:-86400}
OCI=${OCI:-1}
# Run the bundle builder with the signing key and scope settings injected via env.
SIGN_KEY="$KEYFILE" STAGE="$STAGE" CREATED="$CREATED" TENANT_SCOPE="$TENANT_SCOPE" ENV_SCOPE="$ENV_SCOPE" CHUNK_SIZE="$CHUNK_SIZE" CHECKPOINT_FRESHNESS="$CHECKPOINT_FRESHNESS" OCI="$OCI" "$ROOT/src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh"
# Default to staged time-anchor unless caller overrides
TIME_ANCHOR_FILE=${TIME_ANCHOR_FILE:-$ROOT/out/mirror/thin/stage-v1/layers/time-anchor.json}
# Emit milestone summary with hashes for downstream consumers
MANIFEST_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.manifest.json"
TAR_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.tar.gz"
DSSE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.manifest.dsse.json"
BUNDLE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.bundle.json"
BUNDLE_DSSE_PATH="$ROOT/out/mirror/thin/mirror-thin-v1.bundle.dsse.json"
TIME_ANCHOR_DSSE_PATH="$TIME_ANCHOR_FILE.dsse.json"
TRANSPORT_PATH="$ROOT/out/mirror/thin/stage-v1/layers/transport-plan.json"
REKOR_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/rekor-policy.json"
MIRROR_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/mirror-policy.json"
OFFLINE_POLICY_PATH="$ROOT/out/mirror/thin/stage-v1/layers/offline-kit-policy.json"
SUMMARY_PATH="$ROOT/out/mirror/thin/milestone.json"
# sha256 FILE: print the hex SHA-256 digest of FILE (first field of sha256sum output).
sha256() {
  sha256sum "$1" | cut -d' ' -f1
}
# Sign manifest, bundle meta, and time-anchor (if present) with the CI key.
python "$ROOT/scripts/mirror/sign_thin_bundle.py" \
  --key "$KEYFILE" \
  --manifest "$MANIFEST_PATH" \
  --tar "$TAR_PATH" \
  --tuf-dir "$ROOT/out/mirror/thin/tuf" \
  --bundle "$BUNDLE_PATH" \
  --time-anchor "$TIME_ANCHOR_FILE"
# Normalize time-anchor DSSE location for bundle meta/summary.
# sign_thin_bundle.py derives the DSSE name with pathlib with_suffix, which
# replaces the trailing ".json" (time-anchor.json -> time-anchor.dsse.json);
# the previous check only looked for "<file>.json.dsse.json" and never matched.
ANCHOR_DSSE_SRC="${TIME_ANCHOR_FILE%.json}.dsse.json"
if [[ -f "$ANCHOR_DSSE_SRC" && "$ANCHOR_DSSE_SRC" != "$TIME_ANCHOR_DSSE_PATH" ]]; then
  cp "$ANCHOR_DSSE_SRC" "$TIME_ANCHOR_DSSE_PATH"
elif [[ -f "$TIME_ANCHOR_FILE.dsse.json" ]]; then
  : # already at the expected location
fi
# Refresh bundle meta hashes now that DSSE files exist.
# The heredoc delimiter is quoted so Python receives literal code; shell values
# are handed over via the environment.  (Previously the quoted heredoc still
# contained "$BUNDLE_PATH"-style placeholders, which were NOT expanded — Python
# tried to open a file literally named "$BUNDLE_PATH" and always failed.)
BUNDLE_PATH="$BUNDLE_PATH" \
DSSE_PATH="$DSSE_PATH" \
BUNDLE_DSSE_PATH="$BUNDLE_DSSE_PATH" \
TIME_ANCHOR_DSSE_PATH="$TIME_ANCHOR_DSSE_PATH" \
python - <<'PY'
import hashlib
import json
import os
import pathlib

bundle_path = pathlib.Path(os.environ["BUNDLE_PATH"])
manifest_dsse = pathlib.Path(os.environ["DSSE_PATH"])
bundle_dsse = pathlib.Path(os.environ["BUNDLE_DSSE_PATH"])
time_anchor_dsse = pathlib.Path(os.environ["TIME_ANCHOR_DSSE_PATH"])

def sha(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

data = json.loads(bundle_path.read_text())
art = data.setdefault('artifacts', {})
if manifest_dsse.exists():
    art.setdefault('manifest_dsse', {})['sha256'] = sha(manifest_dsse)
if bundle_dsse.exists():
    art.setdefault('bundle_dsse', {})['sha256'] = sha(bundle_dsse)
if time_anchor_dsse.exists():
    art.setdefault('time_anchor_dsse', {})['sha256'] = sha(time_anchor_dsse)
bundle_path.write_text(json.dumps(data, indent=2, sort_keys=True) + "\n")
sha_path = bundle_path.with_suffix(bundle_path.suffix + '.sha256')
sha_path.write_text(f"{sha(bundle_path)} {bundle_path.name}\n")
PY
# Assemble the milestone summary consumed by export-center-wire.sh.
# Optional artefacts (DSSE envelopes, time anchor) degrade to JSON null when absent.
cat > "$SUMMARY_PATH" <<JSON
{
"created": "$CREATED",
"manifest": {"path": "$(basename "$MANIFEST_PATH")", "sha256": "$(sha256 "$MANIFEST_PATH")"},
"tarball": {"path": "$(basename "$TAR_PATH")", "sha256": "$(sha256 "$TAR_PATH")"},
"dsse": $( [[ -f "$DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$DSSE_PATH")\", \"sha256\": \"$(sha256 "$DSSE_PATH")\"}" || echo "null" ),
"bundle": $( [[ -f "$BUNDLE_PATH" ]] && echo "{\"path\": \"$(basename "$BUNDLE_PATH")\", \"sha256\": \"$(sha256 "$BUNDLE_PATH")\"}" || echo "null" ),
"bundle_dsse": $( [[ -f "$BUNDLE_DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$BUNDLE_DSSE_PATH")\", \"sha256\": \"$(sha256 "$BUNDLE_DSSE_PATH")\"}" || echo "null" ),
"time_anchor": $( [[ -n "${TIME_ANCHOR_FILE:-}" && -f "$TIME_ANCHOR_FILE" ]] && echo "{\"path\": \"$(basename "$TIME_ANCHOR_FILE")\", \"sha256\": \"$(sha256 "$TIME_ANCHOR_FILE")\"}" || echo "null" ),
"time_anchor_dsse": $( [[ -f "$TIME_ANCHOR_DSSE_PATH" ]] && echo "{\"path\": \"$(basename "$TIME_ANCHOR_DSSE_PATH")\", \"sha256\": \"$(sha256 "$TIME_ANCHOR_DSSE_PATH")\"}" || echo "null" )
,"policies": {
"transport": {"path": "$(basename "$TRANSPORT_PATH")", "sha256": "$(sha256 "$TRANSPORT_PATH")"},
"rekor": {"path": "$(basename "$REKOR_POLICY_PATH")", "sha256": "$(sha256 "$REKOR_POLICY_PATH")"},
"mirror": {"path": "$(basename "$MIRROR_POLICY_PATH")", "sha256": "$(sha256 "$MIRROR_POLICY_PATH")"},
"offline": {"path": "$(basename "$OFFLINE_POLICY_PATH")", "sha256": "$(sha256 "$OFFLINE_POLICY_PATH")"}
}
}
JSON
echo "Milestone summary written to $SUMMARY_PATH"

View File

@@ -0,0 +1,122 @@
#!/usr/bin/env bash
set -euo pipefail
# Prepare Export Center handoff metadata for mirror thin bundles and optionally schedule a run.
# Usage (handoff only):
#   scripts/mirror/export-center-wire.sh
# Usage (handoff + schedule when secrets exist):
#   EXPORT_CENTER_BASE_URL=https://export.example.com \
#   EXPORT_CENTER_TOKEN=token123 \
#   EXPORT_CENTER_TENANT=tenant-a \
#   EXPORT_CENTER_AUTO_SCHEDULE=1 \
#   scripts/mirror/export-center-wire.sh
# Inputs:
#   - MILESTONE_PATH: path to milestone.json (default: out/mirror/thin/milestone.json)
#   - EXPORT_CENTER_OUT_DIR: output directory for handoff files (default: out/mirror/thin/export-center)
#   - EXPORT_CENTER_PROFILE_ID: profile identifier for the Export Center run (default: mirror:thin)
#   - EXPORT_CENTER_TARGETS_JSON: override targets array sent to Export Center (JSON array string)
#   - EXPORT_CENTER_FORMATS_JSON: override formats array (JSON array string; default: ["tar.gz","json","dsse"])
#   - EXPORT_CENTER_AUTO_SCHEDULE: when "1", schedule a run using schedule-export-center-run.sh
#   - EXPORT_CENTER_BASE_URL / EXPORT_CENTER_TENANT / EXPORT_CENTER_PROJECT / EXPORT_CENTER_TOKEN: forwarded to scheduler
#   - EXPORT_CENTER_AUDIT_LOG: optional override for scheduler audit log path
MILESTONE_PATH="${MILESTONE_PATH:-out/mirror/thin/milestone.json}"
OUT_DIR="${EXPORT_CENTER_OUT_DIR:-out/mirror/thin/export-center}"
PROFILE_ID="${EXPORT_CENTER_PROFILE_ID:-mirror:thin}"
FORMATS_JSON="${EXPORT_CENTER_FORMATS_JSON:-[\"tar.gz\",\"json\",\"dsse\"]}"
AUTO_SCHEDULE="${EXPORT_CENTER_AUTO_SCHEDULE:-0}"
HANDOFF_PATH="${OUT_DIR}/export-center-handoff.json"
TARGETS_PATH="${OUT_DIR}/export-center-targets.json"
RESPONSE_PATH="${OUT_DIR}/schedule-response.json"
# Exported so the embedded Python helpers below can read them from the environment.
export HANDOFF_PATH TARGETS_PATH RESPONSE_PATH PROFILE_ID MILESTONE_PATH
mkdir -p "${OUT_DIR}"
# Build the handoff document from milestone.json.  The heredoc delimiter is
# quoted, so all inputs travel through the environment (no shell interpolation).
PROFILE_ID="${PROFILE_ID}" MILESTONE_PATH="${MILESTONE_PATH}" HANDOFF_PATH="${HANDOFF_PATH}" TARGETS_PATH="${TARGETS_PATH}" python3 - <<'PY'
import datetime
import json
import os
import sys
from typing import Dict, Any

milestone_path = os.environ["MILESTONE_PATH"]
handoff_path = os.environ["HANDOFF_PATH"]
targets_path = os.environ["TARGETS_PATH"]
profile = os.environ.get("PROFILE_ID", "mirror:thin")
try:
    with open(milestone_path, encoding="utf-8") as f:
        milestone = json.load(f)
except FileNotFoundError:
    print(f"milestone file not found: {milestone_path}", file=sys.stderr)
    sys.exit(1)

artifacts = []

def add_artifact(name: str, entry: Dict[str, Any] | None) -> None:
    # Skip entries that are JSON null or missing path/sha256 (milestone.json
    # writes null for optional artefacts that were not produced).
    if not isinstance(entry, dict):
        return
    path = entry.get("path")
    sha = entry.get("sha256")
    if path and sha:
        artifacts.append({"name": name, "path": path, "sha256": sha})

add_artifact("manifest", milestone.get("manifest"))
add_artifact("manifest_dsse", milestone.get("dsse"))
add_artifact("bundle", milestone.get("tarball"))
add_artifact("bundle_meta", milestone.get("bundle"))
add_artifact("bundle_meta_dsse", milestone.get("bundle_dsse"))
add_artifact("time_anchor", milestone.get("time_anchor"))
# Policies are sorted by name for deterministic handoff output.
for name, entry in sorted((milestone.get("policies") or {}).items()):
    add_artifact(f"policy_{name}", entry)

handoff = {
    "profileId": profile,
    "generatedAt": datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z"),
    "sourceMilestone": os.path.abspath(milestone_path),
    "artifacts": artifacts,
}
with open(handoff_path, "w", encoding="utf-8") as f:
    json.dump(handoff, f, indent=2)
with open(targets_path, "w", encoding="utf-8") as f:
    json.dump([a["name"] for a in artifacts], f)
PY
# Extract the artifacts array back out of the handoff for the scheduler payload.
ARTIFACTS_JSON=$(python3 - <<'PY'
import json
import os

with open(os.environ["HANDOFF_PATH"], encoding="utf-8") as f:
    data = json.load(f)
print(json.dumps(data.get("artifacts", [])))
PY
)
# Strip stray newlines so the value fits in a single environment variable.
ARTIFACTS_JSON="${ARTIFACTS_JSON//$'\n'/}"
TARGETS_JSON_DEFAULT=$(tr -d '\r\n' < "${TARGETS_PATH}")
TARGETS_JSON="${EXPORT_CENTER_TARGETS_JSON:-$TARGETS_JSON_DEFAULT}"
echo "[info] Export Center handoff written to ${HANDOFF_PATH}"
echo "[info] Recommended targets: ${TARGETS_JSON}"
# Schedule only when explicitly enabled AND the base URL secret is present;
# otherwise remember why scheduling was skipped for the stub response below.
schedule_note="AUTO_SCHEDULE=0"
if [[ "${AUTO_SCHEDULE}" == "1" ]]; then
  schedule_note="missing EXPORT_CENTER_BASE_URL"
  if [[ -n "${EXPORT_CENTER_BASE_URL:-}" ]]; then
    export EXPORT_CENTER_ARTIFACTS_JSON="${ARTIFACTS_JSON}"
    schedule_note="scheduled"
    bash src/Mirror/StellaOps.Mirror.Creator/schedule-export-center-run.sh "${PROFILE_ID}" "${TARGETS_JSON}" "${FORMATS_JSON}" | tee "${RESPONSE_PATH}"
  fi
fi
# Fallback stub response when no schedule call happened.
if [[ ! -f "${RESPONSE_PATH}" ]]; then
  cat > "${RESPONSE_PATH}" <<JSON
{"scheduled": false, "reason": "${schedule_note}"}
JSON
fi
echo "[info] Scheduler response captured at ${RESPONSE_PATH}"

View File

@@ -0,0 +1,45 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic wrapper for building mirror-thin-v1 bundles.
# Usage: mirror-create.sh [--out out/mirror/thin] [--sign-key path.pem] [--oci] [--time-anchor path.json]

OUT="out/mirror/thin"
SIGN_KEY=""
TIME_ANCHOR=""
OCI=0

usage() {
  echo "Usage: $0 [--out <dir>] [--sign-key key.pem] [--oci] [--time-anchor path.json]" >&2
  exit 2
}

# require_value OPTION VALUE: abort with usage when an option's value is
# missing.  (Previously a trailing "--out" silently set OUT to the empty
# string, producing empty paths downstream.)
require_value() {
  if [[ -z "${2:-}" ]]; then
    echo "option $1 requires a value" >&2
    usage
  fi
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --out) require_value "$1" "${2:-}"; OUT=$2; shift ;;
    --sign-key) require_value "$1" "${2:-}"; SIGN_KEY=$2; shift ;;
    --time-anchor) require_value "$1" "${2:-}"; TIME_ANCHOR=$2; shift ;;
    --oci) OCI=1 ;;
    *) usage ;;
  esac
  shift
done

# Resolve one level up from this script, then run from the repo root so the
# relative src/ and scripts/ paths below resolve.
ROOT=$(cd "$(dirname "$0")/.." && pwd)
pushd "$ROOT/.." >/dev/null
# The builder reads its configuration from the environment.
export SIGN_KEY
export TIME_ANCHOR_FILE=${TIME_ANCHOR:-}
export OCI
export OUT
src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh
echo "Bundle built under $OUT"
python scripts/mirror/verify_thin_bundle.py \
  "$OUT/mirror-thin-v1.manifest.json" \
  "$OUT/mirror-thin-v1.tar.gz" \
  --bundle-meta "$OUT/mirror-thin-v1.bundle.json"
popd >/dev/null
echo "Create/verify completed"

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -euo pipefail
# Verify a mirror-thin-v1 bundle and optional DSSE signatures.
# Usage: mirror-verify.sh manifest.json bundle.tar.gz [--bundle-meta bundle.json] [--pubkey key.pub] [--tenant t] [--environment env]

manifest=${1:-}
bundle=${2:-}
# Validate positionals before shifting: with only one argument, `shift 2` does
# not shift at all, so the leftover positional used to be misreported by the
# option loop as "Unknown arg" instead of this clearer message.
if [[ -z "$manifest" || -z "$bundle" ]]; then
  echo "manifest and bundle required" >&2
  exit 2
fi
shift 2

bundle_meta=""
pubkey=""
tenant=""
environment=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    # ${2:?...} aborts with a message when an option's value is missing.
    --bundle-meta) bundle_meta=${2:?--bundle-meta requires a value}; shift ;;
    --pubkey) pubkey=${2:?--pubkey requires a value}; shift ;;
    --tenant) tenant=${2:?--tenant requires a value}; shift ;;
    --environment) environment=${2:?--environment requires a value}; shift ;;
    *) echo "Unknown arg $1" >&2; exit 2 ;;
  esac
  shift
done

# Forward only the options that were provided.
args=("$manifest" "$bundle")
[[ -n "$bundle_meta" ]] && args+=("--bundle-meta" "$bundle_meta")
[[ -n "$pubkey" ]] && args+=("--pubkey" "$pubkey")
[[ -n "$tenant" ]] && args+=("--tenant" "$tenant")
[[ -n "$environment" ]] && args+=("--environment" "$environment")
python scripts/mirror/verify_thin_bundle.py "${args[@]}"
echo "Mirror bundle verification passed."

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
"""
Sign mirror-thin-v1 artefacts using an Ed25519 key and emit DSSE + TUF signatures.
Usage:
python scripts/mirror/sign_thin_bundle.py \
--key out/mirror/thin/tuf/keys/mirror-ed25519-test-1.pem \
--manifest out/mirror/thin/mirror-thin-v1.manifest.json \
--tar out/mirror/thin/mirror-thin-v1.tar.gz \
--tuf-dir out/mirror/thin/tuf \
--time-anchor out/mirror/thin/stage-v1/layers/time-anchor.json
Writes:
- mirror-thin-v1.manifest.dsse.json
- mirror-thin-v1.bundle.dsse.json (optional, when --bundle is provided)
- updates signatures in root.json, targets.json, snapshot.json, timestamp.json
"""
import argparse, base64, json, pathlib, hashlib
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
def b64url(data: bytes) -> str:
    """Base64url-encode *data* without '=' padding."""
    encoded = base64.urlsafe_b64encode(data).decode("ascii")
    return encoded.rstrip("=")
def load_key(path: pathlib.Path) -> Ed25519PrivateKey:
    """Load an unencrypted private key from a PEM file.

    NOTE(review): load_pem_private_key returns whatever key type the PEM
    contains; a non-Ed25519 key would only surface when .sign() is called —
    confirm inputs are always Ed25519 PEMs.
    """
    return serialization.load_pem_private_key(path.read_bytes(), password=None)
def keyid_from_pub(pub_path: pathlib.Path) -> str:
    """Derive a key id as the SHA-256 hex digest of the public key file bytes."""
    return hashlib.sha256(pub_path.read_bytes()).hexdigest()
def sign_bytes(key: Ed25519PrivateKey, data: bytes) -> bytes:
    """Return a raw Ed25519 signature over *data*."""
    return key.sign(data)
def write_json(path: pathlib.Path, obj):
    """Write *obj* to *path* as key-sorted, 2-space-indented JSON with a trailing newline."""
    serialized = json.dumps(obj, indent=2, sort_keys=True)
    path.write_text(serialized + "\n")
def sign_tuf(path: pathlib.Path, keyid: str, key: Ed25519PrivateKey):
    """Sign a TUF metadata file in place, replacing any existing signatures.

    The signature is computed over the file's current on-disk bytes (including
    any previous "signatures" field) and then embedded into the rewritten file.
    NOTE(review): canonical TUF signs only the "signed" portion in canonical
    JSON; verifiers must mirror this file-bytes scheme — confirm.
    """
    data = path.read_bytes()
    sig = sign_bytes(key, data)
    obj = json.loads(data)
    obj["signatures"] = [{"keyid": keyid, "sig": b64url(sig)}]
    write_json(path, obj)
def main():
    """CLI entry point: emit DSSE envelopes and re-sign TUF metadata."""
    ap = argparse.ArgumentParser()
    ap.add_argument("--key", required=True, type=pathlib.Path)
    ap.add_argument("--manifest", required=True, type=pathlib.Path)
    # --tar is required but never read below; presumably kept for interface
    # parity with callers — confirm before removing.
    ap.add_argument("--tar", required=True, type=pathlib.Path)
    ap.add_argument("--tuf-dir", required=True, type=pathlib.Path)
    ap.add_argument("--bundle", required=False, type=pathlib.Path)
    ap.add_argument("--time-anchor", required=False, type=pathlib.Path)
    args = ap.parse_args()
    key = load_key(args.key)
    # Key id = SHA-256 of the sibling ".pub" file (exported by ci-sign.sh).
    pub_path = args.key.with_suffix(".pub")
    keyid = keyid_from_pub(pub_path)
    manifest_bytes = args.manifest.read_bytes()
    sig = sign_bytes(key, manifest_bytes)
    dsse = {
        "payloadType": "application/vnd.stellaops.mirror.manifest+json",
        "payload": b64url(manifest_bytes),
        "signatures": [{"keyid": keyid, "sig": b64url(sig)}],
    }
    # NOTE(review): with_suffix replaces only the final ".json", so
    # "time-anchor.json" yields "time-anchor.dsse.json" (not
    # "time-anchor.json.dsse.json") — confirm consumers expect this name.
    dsse_path = args.manifest.with_suffix(".dsse.json")
    write_json(dsse_path, dsse)
    if args.bundle:
        bundle_bytes = args.bundle.read_bytes()
        bundle_sig = sign_bytes(key, bundle_bytes)
        bundle_dsse = {
            "payloadType": "application/vnd.stellaops.mirror.bundle+json",
            "payload": b64url(bundle_bytes),
            "signatures": [{"keyid": keyid, "sig": b64url(bundle_sig)}],
        }
        bundle_dsse_path = args.bundle.with_suffix(".dsse.json")
        write_json(bundle_dsse_path, bundle_dsse)
    anchor_dsse_path = None
    if args.time_anchor:
        anchor_bytes = args.time_anchor.read_bytes()
        anchor_sig = sign_bytes(key, anchor_bytes)
        anchor_dsse = {
            "payloadType": "application/vnd.stellaops.time-anchor+json",
            "payload": b64url(anchor_bytes),
            "signatures": [{"keyid": keyid, "sig": b64url(anchor_sig)}],
        }
        anchor_dsse_path = args.time_anchor.with_suffix(".dsse.json")
        write_json(anchor_dsse_path, anchor_dsse)
    # update TUF metadata
    for name in ["root.json", "targets.json", "snapshot.json", "timestamp.json"]:
        sign_tuf(args.tuf_dir / name, keyid, key)
    parts = [f"manifest DSSE -> {dsse_path}"]
    if args.bundle:
        parts.append(f"bundle DSSE -> {bundle_dsse_path}")
    if anchor_dsse_path:
        parts.append(f"time anchor DSSE -> {anchor_dsse_path}")
    parts.append("TUF metadata updated")
    print(f"Signed DSSE + TUF using keyid {keyid}; " + ", ".join(parts))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
"""
Verify OCI layout emitted by make-thin-v1.sh when OCI=1.
Checks:
1) oci-layout exists and version is 1.0.0
2) index.json manifest digest/size match manifest.json hash/size
3) manifest.json references config/layers present in blobs with matching sha256 and size
Usage:
python scripts/mirror/verify_oci_layout.py out/mirror/thin/oci
Exit 0 on success, non-zero on failure with message.
"""
import hashlib, json, pathlib, sys
def sha256(path: pathlib.Path) -> str:
    """Return the hex SHA-256 digest of the file at *path*, streamed in 8 KiB chunks."""
    digest = hashlib.sha256()
    with path.open('rb') as fh:
        while True:
            block = fh.read(8192)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
def main():
    """Validate the OCI layout directory given as argv[1]; exit non-zero on failure."""
    if len(sys.argv) != 2:
        print(__doc__)
        sys.exit(2)
    root = pathlib.Path(sys.argv[1])
    layout = root / "oci-layout"
    index = root / "index.json"
    manifest = root / "manifest.json"
    if not layout.exists() or not index.exists() or not manifest.exists():
        raise SystemExit("missing oci-layout/index.json/manifest.json")
    layout_obj = json.loads(layout.read_text())
    if layout_obj.get("imageLayoutVersion") != "1.0.0":
        raise SystemExit("oci-layout version not 1.0.0")
    idx_obj = json.loads(index.read_text())
    if not idx_obj.get("manifests"):
        raise SystemExit("index.json manifests empty")
    # NOTE(review): only manifests[0] is checked; additional index entries (if
    # any) are ignored — confirm single-manifest layouts are guaranteed.
    man_digest = idx_obj["manifests"][0]["digest"]
    man_size = idx_obj["manifests"][0]["size"]
    actual_man_sha = sha256(manifest)
    if man_digest != f"sha256:{actual_man_sha}":
        raise SystemExit(f"manifest digest mismatch: {man_digest} vs sha256:{actual_man_sha}")
    if man_size != manifest.stat().st_size:
        raise SystemExit("manifest size mismatch")
    man_obj = json.loads(manifest.read_text())
    blobs = root / "blobs" / "sha256"
    # config
    cfg_digest = man_obj["config"]["digest"].split(":",1)[1]
    cfg_size = man_obj["config"]["size"]
    cfg_path = blobs / cfg_digest
    if not cfg_path.exists():
        raise SystemExit(f"config blob missing: {cfg_path}")
    if cfg_path.stat().st_size != cfg_size:
        raise SystemExit("config size mismatch")
    if sha256(cfg_path) != cfg_digest:
        raise SystemExit("config digest mismatch")
    # every referenced layer blob must exist with matching size and digest
    for layer in man_obj.get("layers", []):
        ldigest = layer["digest"].split(":",1)[1]
        lsize = layer["size"]
        lpath = blobs / ldigest
        if not lpath.exists():
            raise SystemExit(f"layer blob missing: {lpath}")
        if lpath.stat().st_size != lsize:
            raise SystemExit("layer size mismatch")
        if sha256(lpath) != ldigest:
            raise SystemExit("layer digest mismatch")
    print("OK: OCI layout verified")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,293 @@
#!/usr/bin/env python3
"""
Verifier for mirror-thin-v1 artefacts and bundle meta.
Checks:
1) SHA256 of manifest/tarball (and optional bundle meta) matches sidecars.
2) Manifest schema contains required fields and required layer files exist.
3) Tarball headers deterministic (sorted paths, uid/gid=0, mtime=0).
4) Tar contents match manifest digests.
5) Optional: verify DSSE signatures for manifest/bundle when a public key is provided.
6) Optional: validate bundle meta (tenant/env scope, policy hashes, gap coverage counts).
Usage:
python scripts/mirror/verify_thin_bundle.py \
out/mirror/thin/mirror-thin-v1.manifest.json \
out/mirror/thin/mirror-thin-v1.tar.gz \
--bundle-meta out/mirror/thin/mirror-thin-v1.bundle.json \
--pubkey out/mirror/thin/tuf/keys/ci-ed25519.pub \
--tenant tenant-demo --environment lab
Exit code 0 on success; non-zero on any check failure.
"""
import argparse
import base64
import hashlib
import json
import pathlib
import sys
import tarfile
from typing import Optional
try:
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PublicKey
CRYPTO_AVAILABLE = True
except ImportError: # pragma: no cover - surfaced as runtime guidance
CRYPTO_AVAILABLE = False
# Top-level keys every mirror-thin-v1 manifest must carry.
REQUIRED_FIELDS = ["version", "created", "layers", "indexes"]
# Entry names (after "./" normalization) that must exist inside the tarball.
REQUIRED_LAYER_FILES = {
    "layers/observations.ndjson",
    "layers/time-anchor.json",
    "layers/transport-plan.json",
    "layers/rekor-policy.json",
    "layers/mirror-policy.json",
    "layers/offline-kit-policy.json",
    "layers/artifact-hashes.json",
    "indexes/observations.index",
}
def _b64url_decode(data: str) -> bytes:
padding = "=" * (-len(data) % 4)
return base64.urlsafe_b64decode(data + padding)
def sha256_file(path: pathlib.Path) -> str:
    """Hex SHA-256 digest of a file, streamed in 8 KiB chunks."""
    hasher = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(8192):
            hasher.update(chunk)
    return hasher.hexdigest()
def load_sha256_sidecar(path: pathlib.Path) -> str:
    """Read the expected hex digest from *path*'s ".sha256" sidecar.

    The sidecar uses `sha256sum` format ("<hex>  <filename>"); only the first
    token is used.  Raises SystemExit when the sidecar is missing or contains
    no digest (previously an empty sidecar crashed with an IndexError).
    """
    sidecar = path.with_suffix(path.suffix + ".sha256")
    if not sidecar.exists():
        raise SystemExit(f"missing sidecar {sidecar}")
    tokens = sidecar.read_text().strip().split()
    if not tokens:
        raise SystemExit(f"malformed sidecar {sidecar}: no digest found")
    return tokens[0]
def check_schema(manifest: dict):
    """Exit with a message when any REQUIRED_FIELDS key is absent from *manifest*."""
    absent = [field for field in REQUIRED_FIELDS if field not in manifest]
    if absent:
        raise SystemExit(f"manifest missing fields: {absent}")
def normalize(name: str) -> str:
    """Strip a leading "./" from a tar entry name, if present."""
    if name.startswith("./"):
        return name[2:]
    return name
def check_tar_determinism(tar_path: pathlib.Path):
    """Assert the tarball is deterministic: sorted entry names, uid/gid 0, mtime 0.

    Raises SystemExit with a descriptive message on the first violation.
    """
    with tarfile.open(tar_path, "r:gz") as archive:
        # Compare order after stripping any leading "./" (same rule as normalize()).
        stripped = [n[2:] if n.startswith("./") else n for n in archive.getnames()]
        if stripped != sorted(stripped):
            raise SystemExit("tar entries not sorted")
        for member in archive.getmembers():
            if member.uid != 0 or member.gid != 0:
                raise SystemExit(f"tar header uid/gid not zero for {member.name}")
            if member.mtime != 0:
                raise SystemExit(f"tar header mtime not zero for {member.name}")
def check_required_layers(tar_path: pathlib.Path):
    """Exit with a message if any REQUIRED_LAYER_FILES entry is absent from the tarball."""
    with tarfile.open(tar_path, "r:gz") as archive:
        present = {normalize(entry) for entry in archive.getnames()}
    missing = REQUIRED_LAYER_FILES - present
    for required in sorted(missing):
        raise SystemExit(f"required file missing from bundle: {required}")
def check_content_hashes(manifest: dict, tar_path: pathlib.Path):
    """Verify every layer/index digest recorded in *manifest* against the tar contents.

    Entries may appear in the archive with or without a leading "./".  Raises
    SystemExit on a missing entry (previously an uncaught KeyError produced a
    raw traceback) or on any digest mismatch.
    """
    with tarfile.open(tar_path, "r:gz") as tf:
        def get(name: str):
            # Accept both "name" and "./name" spellings; fail cleanly otherwise.
            try:
                return tf.getmember(name)
            except KeyError:
                try:
                    return tf.getmember(f"./{name}")
                except KeyError:
                    raise SystemExit(f"entry missing from bundle: {name}")
        for layer in manifest.get("layers", []):
            name = layer["path"]
            info = get(name)
            data = tf.extractfile(info).read()
            digest = hashlib.sha256(data).hexdigest()
            if layer["digest"] != f"sha256:{digest}":
                raise SystemExit(f"layer digest mismatch {name}: {digest}")
        for idx in manifest.get("indexes", []):
            name = idx['name']
            # Index names are recorded bare; qualify with the indexes/ prefix.
            if not name.startswith("indexes/"):
                name = f"indexes/{name}"
            info = get(name)
            data = tf.extractfile(info).read()
            digest = hashlib.sha256(data).hexdigest()
            if idx["digest"] != f"sha256:{digest}":
                raise SystemExit(f"index digest mismatch {name}: {digest}")
def read_tar_entry(tar_path: pathlib.Path, name: str) -> bytes:
    """Return the raw bytes of entry *name*, falling back to the "./" + name spelling."""
    with tarfile.open(tar_path, "r:gz") as archive:
        try:
            member = archive.getmember(name)
        except KeyError:
            member = archive.getmember(f"./{name}")
        return archive.extractfile(member).read()
def load_pubkey(path: pathlib.Path) -> Ed25519PublicKey:
    """Load a PEM public key; requires the optional `cryptography` dependency."""
    if not CRYPTO_AVAILABLE:
        raise SystemExit("cryptography is required for DSSE verification; install before using --pubkey")
    return serialization.load_pem_public_key(path.read_bytes())
def verify_dsse(dsse_path: pathlib.Path, pubkey_path: pathlib.Path, expected_payload: pathlib.Path, expected_type: str):
    """Verify a DSSE envelope: payloadType, exact payload bytes, and the first signature.

    NOTE(review): the signature is checked over the raw payload bytes rather
    than the DSSE PAE pre-authentication encoding; signers must match this
    scheme (sign_thin_bundle.py does).  Only signatures[0] is verified.
    """
    dsse_obj = json.loads(dsse_path.read_text())
    if dsse_obj.get("payloadType") != expected_type:
        raise SystemExit(f"DSSE payloadType mismatch for {dsse_path}")
    payload = _b64url_decode(dsse_obj.get("payload", ""))
    # The embedded payload must be byte-identical to the file on disk.
    if payload != expected_payload.read_bytes():
        raise SystemExit(f"DSSE payload mismatch for {dsse_path}")
    sigs = dsse_obj.get("signatures") or []
    if not sigs:
        raise SystemExit(f"DSSE missing signatures: {dsse_path}")
    pub = load_pubkey(pubkey_path)
    try:
        pub.verify(_b64url_decode(sigs[0]["sig"]), payload)
    except Exception as exc:  # pragma: no cover - cryptography raises InvalidSignature
        raise SystemExit(f"DSSE signature verification failed for {dsse_path}: {exc}")
def check_bundle_meta(meta_path: pathlib.Path, manifest_path: pathlib.Path, tar_path: pathlib.Path, tenant: Optional[str], environment: Optional[str]):
    """Validate bundle meta: required fields, scope, artefact digests, gap counts, tool hashes.

    Raises SystemExit with a descriptive message on the first failed check.
    """
    meta = json.loads(meta_path.read_text())
    for field in ["bundle", "version", "artifacts", "gaps", "tooling"]:
        if field not in meta:
            raise SystemExit(f"bundle meta missing field {field}")
    # Scope checks only apply when the caller pins a tenant/environment.
    if tenant and meta.get("tenant") != tenant:
        raise SystemExit(f"bundle tenant mismatch: {meta.get('tenant')} != {tenant}")
    if environment and meta.get("environment") != environment:
        raise SystemExit(f"bundle environment mismatch: {meta.get('environment')} != {environment}")
    artifacts = meta["artifacts"]
    def expect(name: str, path: pathlib.Path):
        # The entry must exist; its sha256 (when recorded) must match the file on disk.
        recorded = artifacts.get(name)
        if not recorded:
            raise SystemExit(f"bundle meta missing artifact entry: {name}")
        expected = recorded.get("sha256")
        if expected and expected != sha256_file(path):
            raise SystemExit(f"bundle meta digest mismatch for {name}")
    expect("manifest", manifest_path)
    expect("tarball", tar_path)
    # DSSE sidecars are optional but if present, validate hashes
    dsse_manifest = artifacts.get("manifest_dsse")
    if dsse_manifest and dsse_manifest.get("path"):
        expect("manifest_dsse", meta_path.parent / dsse_manifest["path"])
    dsse_bundle = artifacts.get("bundle_dsse")
    if dsse_bundle and dsse_bundle.get("path"):
        expect("bundle_dsse", meta_path.parent / dsse_bundle["path"])
    dsse_anchor = artifacts.get("time_anchor_dsse")
    if dsse_anchor and dsse_anchor.get("path"):
        expect("time_anchor_dsse", meta_path.parent / dsse_anchor["path"])
    # These artefact entries are mandatory and must each carry a path.
    for extra in ["time_anchor", "transport_plan", "rekor_policy", "mirror_policy", "offline_policy", "artifact_hashes"]:
        rec = artifacts.get(extra)
        if not rec:
            raise SystemExit(f"bundle meta missing artifact entry: {extra}")
        if not rec.get("path"):
            raise SystemExit(f"bundle meta missing path for {extra}")
    time_anchor_dsse = artifacts.get("time_anchor_dsse")
    if time_anchor_dsse:
        if not time_anchor_dsse.get("path"):
            raise SystemExit("bundle meta missing path for time_anchor_dsse")
        if not (meta_path.parent / time_anchor_dsse["path"]).exists():
            raise SystemExit("time_anchor_dsse referenced but file missing")
    # Each gap group must list exactly 10 entries — presumably matching the
    # generator's fixed gap matrix; confirm if the generator's counts change.
    for group, expected_count in [("ok", 10), ("rk", 10), ("ms", 10)]:
        if len(meta.get("gaps", {}).get(group, [])) != expected_count:
            raise SystemExit(f"bundle meta gaps.{group} expected {expected_count} entries")
    # Guess the repo root from the manifest location (assumes the default
    # out/mirror/thin/ nesting — TODO confirm for non-default output dirs).
    root_guess = manifest_path.parents[3] if len(manifest_path.parents) > 3 else manifest_path.parents[-1]
    tool_expectations = {
        'make_thin_v1_sh': root_guess / 'src' / 'Mirror' / 'StellaOps.Mirror.Creator' / 'make-thin-v1.sh',
        'sign_script': root_guess / 'scripts' / 'mirror' / 'sign_thin_bundle.py',
        'verify_script': root_guess / 'scripts' / 'mirror' / 'verify_thin_bundle.py',
        'verify_oci': root_guess / 'scripts' / 'mirror' / 'verify_oci_layout.py'
    }
    # Recorded tool hashes must match the current scripts on disk.
    for key, path in tool_expectations.items():
        recorded = meta['tooling'].get(key)
        if not recorded:
            raise SystemExit(f"tool hash missing for {key}")
        actual = sha256_file(path)
        if recorded != actual:
            raise SystemExit(f"tool hash mismatch for {key}")
    if meta.get("checkpoint_freshness_seconds", 0) <= 0:
        raise SystemExit("checkpoint_freshness_seconds must be positive")
def main():
    """Run all bundle checks; any failure raises SystemExit with a message."""
    parser = argparse.ArgumentParser()
    parser.add_argument("manifest", type=pathlib.Path)
    parser.add_argument("tar", type=pathlib.Path)
    parser.add_argument("--bundle-meta", type=pathlib.Path)
    parser.add_argument("--pubkey", type=pathlib.Path)
    parser.add_argument("--tenant", type=str)
    parser.add_argument("--environment", type=str)
    args = parser.parse_args()
    manifest_path = args.manifest
    tar_path = args.tar
    bundle_meta = args.bundle_meta
    # DSSE sidecars are derived by swapping the final ".json" for ".dsse.json".
    bundle_dsse = bundle_meta.with_suffix(".dsse.json") if bundle_meta else None
    manifest_dsse = manifest_path.with_suffix(".dsse.json")
    time_anchor_dsse = None
    # assumes the stage-v1 layout sits next to the tarball — TODO confirm
    time_anchor_path = tar_path.parent / "stage-v1" / "layers" / "time-anchor.json"
    # 1) sidecar hashes for manifest and tarball
    man_expected = load_sha256_sidecar(manifest_path)
    tar_expected = load_sha256_sidecar(tar_path)
    if sha256_file(manifest_path) != man_expected:
        raise SystemExit("manifest sha256 mismatch")
    if sha256_file(tar_path) != tar_expected:
        raise SystemExit("tarball sha256 mismatch")
    # 2-4) schema, tar determinism, required layers, content digests
    manifest = json.loads(manifest_path.read_text())
    check_schema(manifest)
    check_tar_determinism(tar_path)
    check_required_layers(tar_path)
    check_content_hashes(manifest, tar_path)
    # 6) optional bundle meta validation
    if bundle_meta:
        if not bundle_meta.exists():
            raise SystemExit(f"bundle meta missing: {bundle_meta}")
        meta_expected = load_sha256_sidecar(bundle_meta)
        if sha256_file(bundle_meta) != meta_expected:
            raise SystemExit("bundle meta sha256 mismatch")
        check_bundle_meta(bundle_meta, manifest_path, tar_path, args.tenant, args.environment)
        meta = json.loads(bundle_meta.read_text())
        ta_entry = meta.get("artifacts", {}).get("time_anchor_dsse")
        if ta_entry and ta_entry.get("path"):
            ta_path = bundle_meta.parent / ta_entry["path"]
            if sha256_file(ta_path) != ta_entry.get("sha256"):
                raise SystemExit("time_anchor_dsse sha256 mismatch")
            time_anchor_dsse = ta_path
    # 5) optional DSSE signature verification
    if args.pubkey:
        pubkey = args.pubkey
        if manifest_dsse.exists():
            verify_dsse(manifest_dsse, pubkey, manifest_path, "application/vnd.stellaops.mirror.manifest+json")
        if bundle_dsse and bundle_dsse.exists():
            verify_dsse(bundle_dsse, pubkey, bundle_meta, "application/vnd.stellaops.mirror.bundle+json")
        if time_anchor_dsse and time_anchor_dsse.exists() and time_anchor_path.exists():
            # The DSSE payload must match the anchor bytes inside the tarball,
            # so extract them to a temp file for the comparison.
            anchor_bytes = read_tar_entry(tar_path, "layers/time-anchor.json")
            tmp_anchor = tar_path.parent / "time-anchor.verify.json"
            tmp_anchor.write_bytes(anchor_bytes)
            verify_dsse(time_anchor_dsse, pubkey, tmp_anchor, "application/vnd.stellaops.time-anchor+json")
            tmp_anchor.unlink(missing_ok=True)
    print("OK: mirror-thin bundle verified")


if __name__ == "__main__":
    main()