CI/CD consolidation
22
devops/offline/airgap/README.md
Normal file
@@ -0,0 +1,22 @@
# Air-gap Egress Guard Rails

Artifacts supporting `DEVOPS-AIRGAP-56-001`:

- `k8s-deny-egress.yaml` — NetworkPolicy template that denies all egress for pods labeled `sealed=true`, with an optional companion policy that allows in-cluster DNS.
- `compose-egress-guard.sh` — Idempotent iptables guard for Docker/Compose using the `DOCKER-USER` chain to drop outbound traffic for a compose project network while allowing loopback and RFC1918 intra-cluster ranges.
- `verify-egress-block.sh` — Verification harness that runs curl probes from Docker or Kubernetes and reports JSON results; exits non-zero if any target is reachable.
- `bundle_stage_import.py` — Deterministic bundle staging helper: validates a sha256 manifest, copies bundles into the staging directory as `<sha256>-<basename>`, and emits `staging-report.json` for evidence.
- `stage-bundle.sh` — Thin wrapper around `bundle_stage_import.py` with positional args.
- `build_bootstrap_pack.py` — Builds a Bootstrap Pack from the images/charts/extras listed in a JSON config, writing `bootstrap-manifest.json` + `checksums.sha256` deterministically.
- `build_bootstrap_pack.sh` — Wrapper for the bootstrap pack builder.
- `build_mirror_bundle.py` — Generates the mirror bundle manifest + checksums with dual-control approvals and optional cosign signing. Outputs `mirror-bundle-manifest.json`, `checksums.sha256`, and an optional signature/certificate.
- `compose-syslog-smtp.yaml` + `syslog-ng.conf` — Local SMTP relay + syslog-ng stack for sealed-mode notifications; run via `scripts/devops/run-smtp-syslog.sh`.
- `health_syslog_smtp.sh` — Brings up the syslog/SMTP stack via docker compose and probes the SMTP banner and the syslog TCP/UDP listeners.
- `compose-observability.yaml` — Sealed-mode observability stack (Prometheus, Grafana, Tempo, Loki) with offline configs and healthchecks.
- `health_observability.sh` — Starts the observability stack and probes Prometheus/Grafana/Tempo/Loki readiness.
- `observability-offline-compose.yml` + `otel-offline.yaml` + `promtail-config.yaml` — Sealed-mode observability stack (Loki, Promtail, OTEL collector with file exporters) to satisfy DEVOPS-AIRGAP-58-002.

See also `ops/devops/sealed-mode-ci/` for the full sealed-mode compose harness and `egress_probe.py`, which the verification script wraps.
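For reference, a minimal sketch of how these pieces might be chained on an import host (paths, project names, and approver identities are illustrative, not mandated by the tooling):

```bash
#!/usr/bin/env bash
set -euo pipefail

# 1) Stage incoming bundles against their manifest and capture the evidence report.
./stage-bundle.sh bundles.json /media/usb/incoming out/staging mirror/

# 2) Build a deterministic bootstrap pack from a config listing images/charts/extras.
./build_bootstrap_pack.sh bootstrap.json out/bootstrap-pack

# 3) Produce the mirror bundle manifest with dual-control approvals (cosign key optional).
python3 build_mirror_bundle.py --root out/bootstrap-pack --output out/mirror \
  --approver alice@example.com --approver bob@example.com

# 4) Confirm the sealed compose network cannot reach the outside world.
./verify-egress-block.sh docker stella_default out/airgap-probe.json
```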
174
devops/offline/airgap/build_bootstrap_pack.py
Normal file
@@ -0,0 +1,174 @@
#!/usr/bin/env python3
"""Build a deterministic Bootstrap Pack bundle for sealed/offline transfer.

- Reads a JSON config listing artefacts to include (images, Helm charts, extras).
- Copies artefacts into an output directory with preserved basenames.
- Generates `bootstrap-manifest.json` and `checksums.sha256` with sha256 hashes
  and sizes for evidence/verification.
- Intended to satisfy DEVOPS-AIRGAP-56-003.

Config schema (JSON):
    {
      "name": "bootstrap-pack",
      "images": ["release/containers/taskrunner.tar", "release/containers/orchestrator.tar"],
      "charts": ["deploy/helm/stella.tgz"],
      "extras": ["docs/24_OFFLINE_KIT.md"]
    }

Usage:
    build_bootstrap_pack.py --config bootstrap.json --output out/bootstrap-pack
    build_bootstrap_pack.py --self-test
"""
from __future__ import annotations

import argparse
import hashlib
import json
import os
import shutil
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Tuple

DEFAULT_NAME = "bootstrap-pack"


def sha256_file(path: Path) -> Tuple[str, int]:
    h = hashlib.sha256()
    size = 0
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest(), size


def load_config(path: Path) -> Dict:
    with path.open("r", encoding="utf-8") as handle:
        cfg = json.load(handle)
    if not isinstance(cfg, dict):
        raise ValueError("config must be a JSON object")
    return cfg


def ensure_list(cfg: Dict, key: str) -> List[str]:
    value = cfg.get(key, [])
    if value is None:
        return []
    if not isinstance(value, list):
        raise ValueError(f"config.{key} must be a list")
    return [str(x) for x in value]


def copy_item(src: Path, dest_root: Path, rel_dir: str) -> Tuple[str, str, int]:
    dest_dir = dest_root / rel_dir
    dest_dir.mkdir(parents=True, exist_ok=True)
    dest_path = dest_dir / src.name
    shutil.copy2(src, dest_path)
    digest, size = sha256_file(dest_path)
    rel_path = dest_path.relative_to(dest_root).as_posix()
    return rel_path, digest, size


def build_pack(config_path: Path, output_dir: Path) -> Dict:
    cfg = load_config(config_path)
    name = cfg.get("name", DEFAULT_NAME)
    images = ensure_list(cfg, "images")
    charts = ensure_list(cfg, "charts")
    extras = ensure_list(cfg, "extras")

    output_dir.mkdir(parents=True, exist_ok=True)
    items = []

    def process_list(paths: List[str], kind: str, rel_dir: str):
        for raw in sorted(paths):
            src = Path(raw).expanduser().resolve()
            if not src.exists():
                items.append({
                    "type": kind,
                    "source": raw,
                    "status": "missing",
                })
                continue
            rel_path, digest, size = copy_item(src, output_dir, rel_dir)
            items.append({
                "type": kind,
                "source": raw,
                "path": rel_path,
                "sha256": digest,
                "size": size,
                "status": "ok",
            })

    process_list(images, "image", "images")
    process_list(charts, "chart", "charts")
    process_list(extras, "extra", "extras")

    manifest = {
        "name": name,
        "created": datetime.now(timezone.utc).isoformat(),
        "items": items,
    }

    # checksums file (only for ok items)
    checksum_lines = [f"{item['sha256']} {item['path']}" for item in items if item.get("status") == "ok"]
    (output_dir / "checksums.sha256").write_text("\n".join(checksum_lines) + ("\n" if checksum_lines else ""), encoding="utf-8")
    (output_dir / "bootstrap-manifest.json").write_text(json.dumps(manifest, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    return manifest


def parse_args(argv: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--config", type=Path, help="Path to bootstrap pack config JSON")
    parser.add_argument("--output", type=Path, help="Output directory for the pack")
    parser.add_argument("--self-test", action="store_true", help="Run internal self-test and exit")
    return parser.parse_args(argv)


def self_test() -> int:
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        files = []
        for name, content in [("img1.tar", b"image-one"), ("chart1.tgz", b"chart-one"), ("readme.txt", b"hello")]:
            p = tmpdir / name
            p.write_bytes(content)
            files.append(p)
        cfg = {
            "images": [str(files[0])],
            "charts": [str(files[1])],
            "extras": [str(files[2])],
        }
        cfg_path = tmpdir / "bootstrap.json"
        cfg_path.write_text(json.dumps(cfg), encoding="utf-8")
        outdir = tmpdir / "out"
        manifest = build_pack(cfg_path, outdir)
        assert all(item.get("status") == "ok" for item in manifest["items"]), manifest
        for rel in ["images/img1.tar", "charts/chart1.tgz", "extras/readme.txt", "checksums.sha256", "bootstrap-manifest.json"]:
            assert (outdir / rel).exists(), f"missing {rel}"
    print("self-test passed")
    return 0


def main(argv: List[str]) -> int:
    args = parse_args(argv)
    if args.self_test:
        return self_test()
    if not (args.config and args.output):
        print("--config and --output are required unless --self-test", file=sys.stderr)
        return 2
    manifest = build_pack(args.config, args.output)
    missing = [i for i in manifest["items"] if i.get("status") == "missing"]
    if missing:
        print("Pack built with missing items:")
        for item in missing:
            print(f"  - {item['source']}")
        return 1
    print(f"Bootstrap pack written to {args.output}")
    return 0


if __name__ == "__main__":  # pragma: no cover
    sys.exit(main(sys.argv[1:]))
10
devops/offline/airgap/build_bootstrap_pack.sh
Normal file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Thin wrapper for build_bootstrap_pack.py
# Usage: ./build_bootstrap_pack.sh config.json out/bootstrap-pack
set -euo pipefail
if [[ $# -lt 2 ]]; then
  echo "Usage: $0 <config.json> <output-dir>" >&2
  exit 2
fi
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
python3 "$SCRIPT_DIR/build_bootstrap_pack.py" --config "$1" --output "$2"
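A hedged example of driving the wrapper; the artefact paths in the config are illustrative and the schema follows the docstring in `build_bootstrap_pack.py`:

```bash
# Assemble a config listing the artefacts to pack (paths below are examples).
cat > bootstrap.json <<'JSON'
{
  "name": "bootstrap-pack",
  "images": ["release/containers/taskrunner.tar"],
  "charts": ["deploy/helm/stella.tgz"],
  "extras": ["docs/24_OFFLINE_KIT.md"]
}
JSON

./build_bootstrap_pack.sh bootstrap.json out/bootstrap-pack

# The pack is deterministic: bootstrap-manifest.json lists each item with sha256/size,
# and checksums.sha256 can be re-checked on the receiving side.
cat out/bootstrap-pack/bootstrap-manifest.json
```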
154
devops/offline/airgap/build_mirror_bundle.py
Normal file
@@ -0,0 +1,154 @@
#!/usr/bin/env python3
"""Automate mirror bundle manifest + checksums with dual-control approvals.

Implements DEVOPS-AIRGAP-57-001.

Features:
- Deterministic manifest (`mirror-bundle-manifest.json`) with sha256/size per file.
- `checksums.sha256` for quick verification.
- Dual-control approvals recorded via `--approver` (min 2 required to mark approved).
- Optional cosign signing of the manifest via `--cosign-key` (sign-blob); writes
  `mirror-bundle-manifest.sig` and `mirror-bundle-manifest.pem` when available.
- Offline-friendly: purely local file reads; no network access.

Usage:
    build_mirror_bundle.py --root /path/to/bundles --output out/mirror \
        --approver alice@example.com --approver bob@example.com

    build_mirror_bundle.py --self-test
"""
from __future__ import annotations

import argparse
import hashlib
import json
import os
import shutil
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional


def sha256_file(path: Path) -> Dict[str, int | str]:
    h = hashlib.sha256()
    size = 0
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(chunk)
            size += len(chunk)
    return {"sha256": h.hexdigest(), "size": size}


def find_files(root: Path) -> List[Path]:
    files: List[Path] = []
    for p in sorted(root.rglob("*")):
        if p.is_file():
            files.append(p)
    return files


def write_checksums(items: List[Dict], output_dir: Path) -> None:
    lines = [f"{item['sha256']} {item['path']}" for item in items]
    (output_dir / "checksums.sha256").write_text("\n".join(lines) + ("\n" if lines else ""), encoding="utf-8")


def maybe_sign(manifest_path: Path, key: Optional[str]) -> Dict[str, str]:
    if not key:
        return {"status": "skipped", "reason": "no key provided"}
    if shutil.which("cosign") is None:
        return {"status": "skipped", "reason": "cosign not found"}
    sig = manifest_path.with_suffix(manifest_path.suffix + ".sig")
    pem = manifest_path.with_suffix(manifest_path.suffix + ".pem")
    try:
        subprocess.run(
            ["cosign", "sign-blob", "--key", key, "--output-signature", str(sig), "--output-certificate", str(pem), str(manifest_path)],
            check=True,
            capture_output=True,
            text=True,
        )
        return {
            "status": "signed",
            "signature": sig.name,
            "certificate": pem.name,
        }
    except subprocess.CalledProcessError as exc:  # pragma: no cover
        return {"status": "failed", "reason": exc.stderr or str(exc)}


def build_manifest(root: Path, output_dir: Path, approvers: List[str], cosign_key: Optional[str]) -> Dict:
    files = find_files(root)
    items: List[Dict] = []
    for p in files:
        rel = p.relative_to(root).as_posix()
        info = sha256_file(p)
        items.append({"path": rel, **info})
    manifest = {
        "created": datetime.now(timezone.utc).isoformat(),
        "root": str(root),
        "total": len(items),
        "items": items,
        "approvals": sorted(set(approvers)),
        "approvalStatus": "approved" if len(set(approvers)) >= 2 else "pending",
    }
    output_dir.mkdir(parents=True, exist_ok=True)
    manifest_path = output_dir / "mirror-bundle-manifest.json"
    manifest_path.write_text(json.dumps(manifest, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    write_checksums(items, output_dir)
    signing = maybe_sign(manifest_path, cosign_key)
    manifest["signing"] = signing
    # Persist signing status in manifest for traceability
    manifest_path.write_text(json.dumps(manifest, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    return manifest


def parse_args(argv: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--root", type=Path, help="Root directory containing bundle files")
    parser.add_argument("--output", type=Path, help="Output directory for manifest + checksums")
    parser.add_argument("--approver", action="append", default=[], help="Approver identity (email or handle); provide twice for dual-control")
    parser.add_argument("--cosign-key", help="Path or KMS URI for cosign signing key (optional)")
    parser.add_argument("--self-test", action="store_true", help="Run internal self-test and exit")
    return parser.parse_args(argv)


def self_test() -> int:
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        root = tmpdir / "bundles"
        root.mkdir()
        (root / "a.txt").write_text("hello", encoding="utf-8")
        (root / "b.bin").write_bytes(b"world")
        out = tmpdir / "out"
        manifest = build_manifest(root, out, ["alice", "bob"], cosign_key=None)
        assert manifest["approvalStatus"] == "approved"
        assert (out / "mirror-bundle-manifest.json").exists()
        assert (out / "checksums.sha256").exists()
    print("self-test passed")
    return 0


def main(argv: List[str]) -> int:
    args = parse_args(argv)
    if args.self_test:
        return self_test()
    if not (args.root and args.output):
        print("--root and --output are required unless --self-test", file=sys.stderr)
        return 2
    manifest = build_manifest(args.root.resolve(), args.output.resolve(), args.approver, args.cosign_key)
    if manifest["approvalStatus"] != "approved":
        print("Manifest generated but approvalStatus=pending (need >=2 distinct approvers).", file=sys.stderr)
        return 1
    missing = [i for i in manifest["items"] if not (args.root / i["path"]).exists()]
    if missing:
        print(f"Missing files in manifest: {missing}", file=sys.stderr)
        return 1
    print(f"Mirror bundle manifest written to {args.output}")
    return 0


if __name__ == "__main__":  # pragma: no cover
    sys.exit(main(sys.argv[1:]))
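On the receiving side, a verification pass might look like the sketch below; the bundle root, output directory, and public key path are placeholders, and the cosign step only applies when a signing key was configured:

```bash
# Re-check every hash recorded in checksums.sha256 (paths are relative to the bundle root).
while read -r expected rel; do
  actual=$(sha256sum "/path/to/bundles/$rel" | awk '{print $1}')
  [ "$actual" = "$expected" ] || { echo "MISMATCH: $rel" >&2; exit 1; }
done < out/mirror/checksums.sha256

# If the manifest was signed with cosign sign-blob, verify the detached signature.
cosign verify-blob --key cosign.pub \
  --signature out/mirror/mirror-bundle-manifest.json.sig \
  out/mirror/mirror-bundle-manifest.json

# Dual-control gate: the manifest must record at least two distinct approvers.
python3 -c 'import json; m = json.load(open("out/mirror/mirror-bundle-manifest.json")); print(m["approvalStatus"], m["approvals"])'
```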
169
devops/offline/airgap/bundle_stage_import.py
Normal file
@@ -0,0 +1,169 @@
#!/usr/bin/env python3
"""Bundle staging helper for sealed-mode imports.

Validates bundle files against a manifest and stages them into a target directory
with deterministic names (`<sha256>-<basename>`). Emits a JSON report detailing
success/failure per file for evidence capture.

Manifest format (JSON):
    [
      {"file": "bundle1.tar.gz", "sha256": "..."},
      {"file": "bundle2.ndjson", "sha256": "..."}
    ]

Usage:
    bundle_stage_import.py --manifest bundles.json --root /path/to/files --out staging
    bundle_stage_import.py --manifest bundles.json --root . --out staging --prefix mirror/
    bundle_stage_import.py --self-test
"""
from __future__ import annotations

import argparse
import hashlib
import json
import os
import shutil
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List


def sha256_file(path: Path) -> str:
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(chunk)
    return h.hexdigest()


def load_manifest(path: Path) -> List[Dict[str, str]]:
    with path.open("r", encoding="utf-8") as handle:
        data = json.load(handle)
    if not isinstance(data, list):
        raise ValueError("Manifest must be a list of objects")
    normalized = []
    for idx, entry in enumerate(data):
        if not isinstance(entry, dict):
            raise ValueError(f"Manifest entry {idx} is not an object")
        file = entry.get("file")
        digest = entry.get("sha256")
        if not file or not digest:
            raise ValueError(f"Manifest entry {idx} missing file or sha256")
        normalized.append({"file": str(file), "sha256": str(digest).lower()})
    return normalized


def stage_file(src: Path, digest: str, out_dir: Path, prefix: str) -> Path:
    dest_name = f"{digest}-{src.name}"
    dest_rel = Path(prefix) / dest_name if prefix else Path(dest_name)
    dest_path = out_dir / dest_rel
    dest_path.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dest_path)
    return dest_rel


def process(manifest: Path, root: Path, out_dir: Path, prefix: str) -> Dict:
    items = load_manifest(manifest)
    results = []
    success = True
    for entry in items:
        rel = Path(entry["file"])
        src = (root / rel).resolve()
        expected = entry["sha256"].lower()
        status = "ok"
        actual = None
        staged = None
        message = ""
        if not src.exists():
            status = "missing"
            message = "file not found"
            success = False
        else:
            actual = sha256_file(src)
            if actual != expected:
                status = "checksum-mismatch"
                message = "sha256 mismatch"
                success = False
            else:
                staged = str(stage_file(src, expected, out_dir, prefix))
        results.append(
            {
                "file": str(rel),
                "expectedSha256": expected,
                "actualSha256": actual,
                "status": status,
                "stagedPath": staged,
                "message": message,
            }
        )
    report = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "root": str(root),
        "output": str(out_dir),
        "prefix": prefix,
        "summary": {
            "total": len(results),
            "success": success,
            "ok": sum(1 for r in results if r["status"] == "ok"),
            "missing": sum(1 for r in results if r["status"] == "missing"),
            "checksumMismatch": sum(1 for r in results if r["status"] == "checksum-mismatch"),
        },
        "items": results,
    }
    return report


def parse_args(argv: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--manifest", type=Path, help="Path to bundle manifest JSON")
    parser.add_argument("--root", type=Path, help="Root directory containing bundle files")
    parser.add_argument("--out", type=Path, help="Output directory for staged bundles and report")
    parser.add_argument("--prefix", default="", help="Optional prefix within output dir (e.g., mirror/)")
    parser.add_argument("--report", type=Path, help="Override report path (defaults to <out>/staging-report.json)")
    parser.add_argument("--self-test", action="store_true", help="Run internal self-test and exit")
    return parser.parse_args(argv)


def write_report(report: Dict, report_path: Path) -> None:
    report_path.parent.mkdir(parents=True, exist_ok=True)
    with report_path.open("w", encoding="utf-8") as handle:
        json.dump(report, handle, ensure_ascii=False, indent=2)
        handle.write("\n")


def self_test() -> int:
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        sample = tmpdir / "sample.bin"
        sample.write_bytes(b"offline-bundle")
        digest = sha256_file(sample)
        manifest = tmpdir / "manifest.json"
        manifest.write_text(json.dumps([{"file": "sample.bin", "sha256": digest}]), encoding="utf-8")
        out = tmpdir / "out"
        report = process(manifest, tmpdir, out, prefix="mirror/")
        assert report["summary"]["success"] is True, report
        staged = out / report["items"][0]["stagedPath"]
        assert staged.exists(), f"staged file missing: {staged}"
    print("self-test passed")
    return 0


def main(argv: List[str]) -> int:
    args = parse_args(argv)
    if args.self_test:
        return self_test()
    if not (args.manifest and args.root and args.out):
        print("--manifest, --root, and --out are required unless --self-test", file=sys.stderr)
        return 2
    report = process(args.manifest, args.root, args.out, args.prefix)
    report_path = args.report or args.out / "staging-report.json"
    write_report(report, report_path)
    print(f"Staged bundles → {args.out} (report {report_path})")
    return 0 if report["summary"]["success"] else 1


if __name__ == "__main__":  # pragma: no cover
    sys.exit(main(sys.argv[1:]))
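A manifest for the staging helper can be generated mechanically; a minimal sketch using `sha256sum` and `jq` (the incoming directory path is illustrative):

```bash
# Generate bundles.json for everything in an incoming directory.
incoming=/media/usb/incoming
for f in "$incoming"/*; do
  jq -n --arg file "$(basename "$f")" --arg sha256 "$(sha256sum "$f" | awk '{print $1}')" \
    '{file: $file, sha256: $sha256}'
done | jq -s . > bundles.json

# Stage against the generated manifest and inspect the evidence report.
./stage-bundle.sh bundles.json "$incoming" out/staging mirror/
jq '.summary' out/staging/staging-report.json
```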
54
devops/offline/airgap/compose-egress-guard.sh
Normal file
@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# Enforce deny-all egress for a Docker/Compose project using the DOCKER-USER chain.
# Usage: COMPOSE_PROJECT=stella ./compose-egress-guard.sh
# Optional env: ALLOW_RFC1918=true to allow east-west traffic inside 10/172/192 ranges.
set -euo pipefail

PROJECT=${COMPOSE_PROJECT:-stella}
ALLOW_RFC1918=${ALLOW_RFC1918:-true}
NETWORK=${COMPOSE_NETWORK:-${PROJECT}_default}

chain=STELLAOPS_SEALED_${PROJECT^^}
ipset_name=${PROJECT}_cidrs

insert_accept() {
  local dest=$1
  iptables -C DOCKER-USER -d "$dest" -j ACCEPT 2>/dev/null || iptables -I DOCKER-USER -d "$dest" -j ACCEPT
}

# 1) Ensure DOCKER-USER exists
iptables -nL DOCKER-USER >/dev/null 2>&1 || iptables -N DOCKER-USER

# 2) Create dedicated chain per project for clarity
iptables -nL "$chain" >/dev/null 2>&1 || iptables -N "$chain"

# 2b) Populate ipset with compose network CIDRs (if available)
if command -v ipset >/dev/null; then
  ipset list "$ipset_name" >/dev/null 2>&1 || ipset create "$ipset_name" hash:net -exist
  cidrs=$(docker network inspect "$NETWORK" -f '{{range .IPAM.Config}}{{.Subnet}} {{end}}')
  for cidr in $cidrs; do
    ipset add "$ipset_name" "$cidr" 2>/dev/null || true
  done
fi

# 3) Allow loopback and optional RFC1918 intra-cluster ranges, then drop everything else
insert_accept 127.0.0.0/8
if [[ "$ALLOW_RFC1918" == "true" ]]; then
  insert_accept 10.0.0.0/8
  insert_accept 172.16.0.0/12
  insert_accept 192.168.0.0/16
fi
iptables -C "$chain" -j DROP 2>/dev/null || iptables -A "$chain" -j DROP

# 4) Hook chain into DOCKER-USER for containers in this project network
iptables -C DOCKER-USER -m addrtype --src-type LOCAL -j RETURN 2>/dev/null || true
if command -v ipset >/dev/null && ipset list "$ipset_name" >/dev/null 2>&1; then
  iptables -C DOCKER-USER -m set --match-set "$ipset_name" dst -j "$chain" 2>/dev/null || iptables -I DOCKER-USER -m set --match-set "$ipset_name" dst -j "$chain"
else
  # Fallback: match by destination subnet from docker inspect (first subnet only)
  first_cidr=$(docker network inspect "$NETWORK" -f '{{(index .IPAM.Config 0).Subnet}}')
  iptables -C DOCKER-USER -d "$first_cidr" -j "$chain" 2>/dev/null || iptables -I DOCKER-USER -d "$first_cidr" -j "$chain"
fi

echo "Applied compose egress guard via DOCKER-USER -> $chain" >&2
iptables -vnL "$chain"
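A hedged smoke test for the guard; the `stella` project name is illustrative, and since the rules key off the compose network's CIDRs, the chain counters are the authoritative check on what is actually being matched:

```bash
# Apply the guard for the 'stella' compose project.
sudo COMPOSE_PROJECT=stella ./compose-egress-guard.sh

# Probe from a container attached to the project network, then read the chain counters;
# packets handled by the guard are accounted against STELLAOPS_SEALED_STELLA.
docker run --rm --network stella_default curlimages/curl:8.6.0 \
  -sS --max-time 5 https://example.com || echo "probe did not reach the target"
sudo iptables -vnL STELLAOPS_SEALED_STELLA
```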
77
devops/offline/airgap/compose-observability.yaml
Normal file
@@ -0,0 +1,77 @@
version: "3.9"

services:
  prometheus:
    image: prom/prometheus:v2.53.0
    container_name: prometheus
    command:
      - --config.file=/etc/prometheus/prometheus.yml
    volumes:
      - ./observability/prometheus.yml:/etc/prometheus/prometheus.yml:ro
    ports:
      - "9090:9090"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:9090/-/ready"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 10s
    restart: unless-stopped

  loki:
    image: grafana/loki:3.0.0
    container_name: loki
    command: ["-config.file=/etc/loki/config.yaml"]
    volumes:
      - ./observability/loki-config.yaml:/etc/loki/config.yaml:ro
      - ./observability/data/loki:/loki
    ports:
      - "3100:3100"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3100/ready"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 15s
    restart: unless-stopped

  tempo:
    image: grafana/tempo:2.4.1
    container_name: tempo
    command: ["-config.file=/etc/tempo/tempo.yaml"]
    volumes:
      - ./observability/tempo-config.yaml:/etc/tempo/tempo.yaml:ro
      - ./observability/data/tempo:/var/tempo
    ports:
      - "3200:3200"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3200/ready"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 15s
    restart: unless-stopped

  grafana:
    image: grafana/grafana:10.4.2
    container_name: grafana
    environment:
      - GF_AUTH_ANONYMOUS_ENABLED=true
      - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
      - GF_SECURITY_ADMIN_PASSWORD=admin
      - GF_SECURITY_ADMIN_USER=admin
    volumes:
      - ./observability/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
    ports:
      - "3000:3000"
    depends_on:
      - prometheus
      - loki
      - tempo
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3000/api/health"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 20s
    restart: unless-stopped
23
devops/offline/airgap/compose-syslog-smtp.yaml
Normal file
@@ -0,0 +1,23 @@
version: '3.8'
services:
  smtp:
    image: bytemark/smtp
    restart: unless-stopped
    environment:
      - MAILNAME=sealed.local
    networks: [sealed]
    ports:
      - "2525:25"
  syslog:
    image: balabit/syslog-ng:4.7.1
    restart: unless-stopped
    command: ["syslog-ng", "-F", "--no-caps"]
    networks: [sealed]
    ports:
      - "5514:514/udp"
      - "5515:601/tcp"
    volumes:
      - ./syslog-ng.conf:/etc/syslog-ng/syslog-ng.conf:ro
networks:
  sealed:
    driver: bridge
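A quick manual check once the stack is up; this assumes util-linux `logger` and `nc` on the host, and the host ports follow the compose mapping above:

```bash
docker compose -f compose-syslog-smtp.yaml up -d

# Syslog: one datagram via UDP 5514 and one line via TCP 5515.
logger --server 127.0.0.1 --port 5514 --udp "sealed-mode syslog smoke"
printf '<14>sealed-mode syslog smoke (tcp)\n' | nc -w2 127.0.0.1 5515

# SMTP: read the relay banner on the mapped port, then quit.
printf 'QUIT\r\n' | nc -w2 127.0.0.1 2525

# Messages land in the syslog container at /var/log/syslog-ng/sealed.log per syslog-ng.conf.
docker compose -f compose-syslog-smtp.yaml exec syslog tail -n 2 /var/log/syslog-ng/sealed.log
```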
28
devops/offline/airgap/health_observability.sh
Normal file
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -euo pipefail

# Health check for compose-observability.yaml (DEVOPS-AIRGAP-58-002)

COMPOSE_FILE="$(cd "$(dirname "$0")" && pwd)/compose-observability.yaml"

echo "Starting observability stack (Prometheus/Grafana/Tempo/Loki)..."
docker compose -f "$COMPOSE_FILE" up -d

echo "Waiting for containers to report healthy..."
docker compose -f "$COMPOSE_FILE" wait >/dev/null 2>&1 || true

docker compose -f "$COMPOSE_FILE" ps

echo "Probing Prometheus /-/ready"
curl -sf http://127.0.0.1:9090/-/ready

echo "Probing Grafana /api/health"
curl -sf http://127.0.0.1:3000/api/health

echo "Probing Loki /ready"
curl -sf http://127.0.0.1:3100/ready

echo "Probing Tempo /ready"
curl -sf http://127.0.0.1:3200/ready

echo "All probes succeeded."
33
devops/offline/airgap/health_syslog_smtp.sh
Normal file
@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -euo pipefail
# Health check for compose-syslog-smtp.yaml (DEVOPS-AIRGAP-58-001)
ROOT=${ROOT:-$(git rev-parse --show-toplevel)}
COMPOSE_FILE="${COMPOSE_FILE:-$ROOT/ops/devops/airgap/compose-syslog-smtp.yaml}"
SMTP_PORT=${SMTP_PORT:-2525}
SYSLOG_TCP=${SYSLOG_TCP:-5515}
SYSLOG_UDP=${SYSLOG_UDP:-5514}

export COMPOSE_FILE
# ensure stack is up
if ! docker compose ps >/dev/null 2>&1; then
  docker compose up -d
fi
sleep 2

# probe smtp banner
if ! timeout 5 bash -lc "echo QUIT | nc -w2 127.0.0.1 ${SMTP_PORT}" >/dev/null 2>&1; then
  echo "smtp service not responding on ${SMTP_PORT}" >&2
  exit 1
fi
# probe syslog tcp
if ! echo "test" | nc -w2 127.0.0.1 "${SYSLOG_TCP}" >/dev/null 2>&1; then
  echo "syslog tcp not responding on ${SYSLOG_TCP}" >&2
  exit 1
fi
# probe syslog udp
if ! echo "test" | nc -w2 -u 127.0.0.1 "${SYSLOG_UDP}" >/dev/null 2>&1; then
  echo "syslog udp not responding on ${SYSLOG_UDP}" >&2
  exit 1
fi

echo "smtp/syslog stack healthy"
130
devops/offline/airgap/import-bundle.sh
Normal file
@@ -0,0 +1,130 @@
#!/usr/bin/env bash
# Import air-gap bundle into isolated environment
# Usage: ./import-bundle.sh <bundle-dir> [registry]
# Example: ./import-bundle.sh /media/usb/stellaops-bundle localhost:5000

set -euo pipefail

BUNDLE_DIR="${1:?Bundle directory required}"
REGISTRY="${2:-localhost:5000}"

echo "==> Importing air-gap bundle from ${BUNDLE_DIR}"

# Verify bundle structure
if [[ ! -f "${BUNDLE_DIR}/manifest.json" ]]; then
  echo "ERROR: manifest.json not found in bundle" >&2
  exit 1
fi

# Verify checksums first
echo "==> Verifying checksums..."
cd "${BUNDLE_DIR}"
for sha_file in *.sha256; do
  if [[ -f "${sha_file}" ]]; then
    echo "  Checking ${sha_file}..."
    sha256sum -c "${sha_file}" || { echo "CHECKSUM FAILED: ${sha_file}" >&2; exit 1; }
  fi
done

# Load container images (the -f guard skips unmatched globs)
echo "==> Loading container images..."
for tarball in images/*.tar images/*.tar.gz; do
  if [[ -f "${tarball}" ]]; then
    echo "  Loading ${tarball}..."
    docker load -i "${tarball}"
  fi
done

# Re-tag and push to local registry
echo "==> Pushing images to ${REGISTRY}..."
IMAGES=$(jq -r '.images[]?.name // empty' manifest.json 2>/dev/null || true)
for IMAGE in ${IMAGES}; do
  LOCAL_TAG="${REGISTRY}/${IMAGE##*/}"
  echo "  ${IMAGE} -> ${LOCAL_TAG}"
  docker tag "${IMAGE}" "${LOCAL_TAG}" 2>/dev/null || true
  docker push "${LOCAL_TAG}" 2>/dev/null || echo "  (push skipped - registry may be unavailable)"
done

# Import Helm charts
echo "==> Importing Helm charts..."
if [[ -d "${BUNDLE_DIR}/charts" ]]; then
  for chart in "${BUNDLE_DIR}"/charts/*.tgz; do
    if [[ -f "${chart}" ]]; then
      echo "  Installing ${chart}..."
      helm push "${chart}" "oci://${REGISTRY}/charts" 2>/dev/null || \
        echo "  (OCI push skipped - copying to local)"
    fi
  done
fi

# Import NuGet packages
echo "==> Importing NuGet packages..."
if [[ -d "${BUNDLE_DIR}/nugets" ]]; then
  NUGET_CACHE="${HOME}/.nuget/packages"
  mkdir -p "${NUGET_CACHE}"
  for nupkg in "${BUNDLE_DIR}"/nugets/*.nupkg; do
    if [[ -f "${nupkg}" ]]; then
      PKG_NAME=$(basename "${nupkg}" .nupkg)
      echo "  Caching ${PKG_NAME}..."
      # Extract to NuGet cache structure
      unzip -q -o "${nupkg}" -d "${NUGET_CACHE}/${PKG_NAME,,}" 2>/dev/null || true
    fi
  done
fi

# Import npm packages
echo "==> Importing npm packages..."
if [[ -d "${BUNDLE_DIR}/npm" ]]; then
  NPM_CACHE="${HOME}/.npm/_cacache"
  mkdir -p "${NPM_CACHE}"
  if [[ -f "${BUNDLE_DIR}/npm/cache.tar.gz" ]]; then
    tar -xzf "${BUNDLE_DIR}/npm/cache.tar.gz" -C "${HOME}/.npm" 2>/dev/null || true
  fi
fi

# Import advisory feeds
echo "==> Importing advisory feeds..."
if [[ -d "${BUNDLE_DIR}/feeds" ]]; then
  FEEDS_DIR="/var/lib/stellaops/feeds"
  sudo mkdir -p "${FEEDS_DIR}" 2>/dev/null || mkdir -p "${FEEDS_DIR}"
  for feed in "${BUNDLE_DIR}"/feeds/*.ndjson.gz; do
    if [[ -f "${feed}" ]]; then
      FEED_NAME=$(basename "${feed}")
      echo "  Installing ${FEED_NAME}..."
      cp "${feed}" "${FEEDS_DIR}/" 2>/dev/null || sudo cp "${feed}" "${FEEDS_DIR}/"
    fi
  done
fi

# Import symbol bundles
echo "==> Importing symbol bundles..."
if [[ -d "${BUNDLE_DIR}/symbols" ]]; then
  SYMBOLS_DIR="/var/lib/stellaops/symbols"
  sudo mkdir -p "${SYMBOLS_DIR}" 2>/dev/null || mkdir -p "${SYMBOLS_DIR}"
  for bundle in "${BUNDLE_DIR}"/symbols/*.zip; do
    if [[ -f "${bundle}" ]]; then
      echo "  Extracting ${bundle}..."
      unzip -q -o "${bundle}" -d "${SYMBOLS_DIR}" 2>/dev/null || true
    fi
  done
fi

# Generate import report
echo "==> Generating import report..."
cat > "${BUNDLE_DIR}/import-report.json" <<EOF
{
  "importedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "registry": "${REGISTRY}",
  "bundleDir": "${BUNDLE_DIR}",
  "status": "success"
}
EOF

echo "==> Import complete"
echo "    Registry: ${REGISTRY}"
echo "    Report:   ${BUNDLE_DIR}/import-report.json"
echo ""
echo "Next steps:"
echo "  1. Update Helm values with registry: ${REGISTRY}"
echo "  2. Deploy: helm install stellaops deploy/helm/stellaops -f values-airgap.yaml"
echo "  3. Verify: kubectl get pods -n stellaops"
42
devops/offline/airgap/k8s-deny-egress.yaml
Normal file
@@ -0,0 +1,42 @@
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: sealed-deny-all-egress
  namespace: default
  labels:
    stellaops.dev/owner: devops
    stellaops.dev/purpose: sealed-mode
spec:
  podSelector:
    matchLabels:
      sealed: "true"
  policyTypes:
    - Egress
  egress: []
---
# Optional patch to allow in-cluster DNS while still blocking external egress.
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: sealed-allow-dns
  namespace: default
  labels:
    stellaops.dev/owner: devops
    stellaops.dev/purpose: sealed-mode
spec:
  podSelector:
    matchLabels:
      sealed: "true"
  policyTypes:
    - Egress
  egress:
    - to:
        - namespaceSelector:
            matchLabels:
              kubernetes.io/metadata.name: kube-system
          podSelector:
            matchLabels:
              k8s-app: kube-dns
      ports:
        - protocol: UDP
          port: 53
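A hedged way to exercise the policies; the workload name is illustrative, the policies above are namespaced to `default`, and the probe assumes the pod image ships `nslookup`/`wget`:

```bash
kubectl apply -f k8s-deny-egress.yaml

# Opt a pod into sealed mode; the NetworkPolicies select on this label.
kubectl label pod my-workload sealed=true --overwrite

# In-cluster DNS should still resolve, while external egress times out.
kubectl exec my-workload -- nslookup kubernetes.default.svc.cluster.local
kubectl exec my-workload -- wget -qO- --timeout=5 https://example.com || echo "external egress blocked"
```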
32
devops/offline/airgap/observability-offline-compose.yml
Normal file
@@ -0,0 +1,32 @@
version: '3.8'
services:
  loki:
    image: grafana/loki:3.0.1
    command: ["-config.file=/etc/loki/local-config.yaml"]
    volumes:
      - loki-data:/loki
    networks: [sealed]
  promtail:
    image: grafana/promtail:3.0.1
    command: ["-config.file=/etc/promtail/config.yml"]
    volumes:
      - promtail-data:/var/log
      - ./promtail-config.yaml:/etc/promtail/config.yml:ro
    networks: [sealed]
  otel:
    image: otel/opentelemetry-collector-contrib:0.97.0
    command: ["--config=/etc/otel/otel-offline.yaml"]
    volumes:
      - ./otel-offline.yaml:/etc/otel/otel-offline.yaml:ro
      - otel-data:/var/otel
    ports:
      - "4317:4317"
      - "4318:4318"
    networks: [sealed]
networks:
  sealed:
    driver: bridge
volumes:
  loki-data:
  promtail-data:
  otel-data:
@@ -0,0 +1,16 @@
apiVersion: 1

datasources:
  - name: Prometheus
    type: prometheus
    access: proxy
    url: http://prometheus:9090
    isDefault: true
  - name: Loki
    type: loki
    access: proxy
    url: http://loki:3100
  - name: Tempo
    type: tempo
    access: proxy
    url: http://tempo:3200
35
devops/offline/airgap/observability/loki-config.yaml
Normal file
@@ -0,0 +1,35 @@
server:
  http_listen_port: 3100
  log_level: warn

common:
  ring:
    instance_addr: loki
    kvstore:
      store: inmemory
  replication_factor: 1

table_manager:
  retention_deletes_enabled: true
  retention_period: 168h

schema_config:
  configs:
    - from: 2024-01-01
      store: boltdb-shipper
      object_store: filesystem
      schema: v13
      index:
        prefix: index_
        period: 24h

storage_config:
  filesystem:
    directory: /loki/chunks
  boltdb_shipper:
    active_index_directory: /loki/index
    cache_location: /loki/cache
    shared_store: filesystem

limits_config:
  retention_period: 168h
14
devops/offline/airgap/observability/prometheus.yml
Normal file
@@ -0,0 +1,14 @@
global:
  scrape_interval: 15s
  evaluation_interval: 15s

scrape_configs:
  - job_name: prometheus
    static_configs:
      - targets: ['prometheus:9090']
  - job_name: loki
    static_configs:
      - targets: ['loki:3100']
  - job_name: tempo
    static_configs:
      - targets: ['tempo:3200']
26
devops/offline/airgap/observability/tempo-config.yaml
Normal file
@@ -0,0 +1,26 @@
server:
  http_listen_port: 3200
  log_level: warn

distributor:
  receivers:
    jaeger:
      protocols:
        thrift_http:
    otlp:
      protocols:
        http:
        grpc:
    zipkin:

storage:
  trace:
    backend: local
    wal:
      path: /var/tempo/wal
    local:
      path: /var/tempo/traces

compactor:
  compaction:
    block_retention: 168h
40
devops/offline/airgap/otel-offline.yaml
Normal file
@@ -0,0 +1,40 @@
receivers:
  prometheus:
    config:
      scrape_configs:
        - job_name: 'self'
          static_configs:
            - targets: ['localhost:8888']
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318
processors:
  batch:
    timeout: 1s
    send_batch_size: 512
exporters:
  file/metrics:
    path: /var/otel/metrics.prom
  file/traces:
    path: /var/otel/traces.ndjson
  loki/offline:
    endpoint: http://loki:3100/loki/api/v1/push
    labels:
      job: sealed-observability
      tenant_id: "sealed"
service:
  telemetry:
    logs:
      level: info
  pipelines:
    metrics:
      receivers: [prometheus]
      processors: [batch]
      exporters: [file/metrics]
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [file/traces]
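Once the stack from `observability-offline-compose.yml` is up, the OTLP/HTTP receiver can be exercised with a hand-rolled span; the trace/span IDs and timestamps are placeholders, and the volume name assumes the default compose project prefix:

```bash
# Post one minimal span to the collector's OTLP/HTTP endpoint.
curl -sS -X POST http://127.0.0.1:4318/v1/traces \
  -H 'Content-Type: application/json' \
  -d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"sealed-smoke"}}]},"scopeSpans":[{"spans":[{"traceId":"5b8aa5a2d2c872e8321cf37308d69df2","spanId":"051581bf3cb55c13","name":"smoke","kind":1,"startTimeUnixNano":"1700000000000000000","endTimeUnixNano":"1700000001000000000"}]}]}]}'

# The file exporter appends spans to /var/otel/traces.ndjson in the otel-data volume;
# inspect it from a throwaway container.
docker run --rm -v airgap_otel-data:/var/otel busybox tail -n 1 /var/otel/traces.ndjson
```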
14
devops/offline/airgap/promtail-config.yaml
Normal file
@@ -0,0 +1,14 @@
server:
  http_listen_port: 9080
  grpc_listen_port: 0
positions:
  filename: /tmp/positions.yaml
clients:
  - url: http://loki:3100/loki/api/v1/push
scrape_configs:
  - job_name: promtail
    static_configs:
      - targets: [localhost]
        labels:
          job: promtail
          __path__: /var/log/*.log
42
devops/offline/airgap/sealed-ci-smoke.sh
Normal file
@@ -0,0 +1,42 @@
#!/usr/bin/env bash
set -euo pipefail
# Simple sealed-mode CI smoke: block egress, resolve mock DNS, assert services start.
ROOT=${ROOT:-$(cd "$(dirname "$0")/../.." && pwd)}
LOGDIR=${LOGDIR:-$ROOT/out/airgap-smoke}
mkdir -p "$LOGDIR"

# 1) Start mock DNS (returns 0.0.0.0 for everything)
DNS_PORT=${DNS_PORT:-53535}
python - <<PY &
import socketserver
from dnslib import DNSRecord, RR, A

class Handler(socketserver.BaseRequestHandler):
    def handle(self):
        data, sock = self.request
        request = DNSRecord.parse(data)
        reply = request.reply()
        reply.add_answer(RR(request.q.qname, rdata=A('0.0.0.0')))
        sock.sendto(reply.pack(), self.client_address)

def run():
    with socketserver.UDPServer(('0.0.0.0', ${DNS_PORT}), Handler) as server:
        server.serve_forever()

# The interpreter is already backgrounded by the shell's '&', so serve in the foreground;
# a daemon thread would be killed as soon as the main thread exits.
run()
PY

# 2) Block egress except loopback
iptables -I OUTPUT -d 127.0.0.1/8 -j ACCEPT
iptables -I OUTPUT -d 0.0.0.0/8 -j ACCEPT
iptables -A OUTPUT -j DROP

# 3) Placeholder: capture environment info (replace with service start once wired)
pushd "$ROOT" >/dev/null
DOTNET_SYSTEM_NET_HTTP_SOCKETSHTTPHANDLER_HTTP2SUPPORT=false \
DOTNET_CLI_TELEMETRY_OPTOUT=1 \
DNS_SERVER=127.0.0.1:${DNS_PORT} \
dotnet --info > "$LOGDIR/dotnet-info.txt"
popd >/dev/null

echo "sealed CI smoke complete; logs at $LOGDIR"
14
devops/offline/airgap/stage-bundle.sh
Normal file
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Wrapper for bundle_stage_import.py with sane defaults.
# Usage: ./stage-bundle.sh manifest.json /path/to/files out/staging [prefix]
set -euo pipefail
if [[ $# -lt 3 ]]; then
  echo "Usage: $0 <manifest.json> <root> <out-dir> [prefix]" >&2
  exit 2
fi
manifest=$1
root=$2
out=$3
prefix=${4:-}
SCRIPT_DIR=$(cd "$(dirname "$0")" && pwd)
python3 "$SCRIPT_DIR/bundle_stage_import.py" --manifest "$manifest" --root "$root" --out "$out" --prefix "$prefix"
19
devops/offline/airgap/syslog-ng.conf
Normal file
@@ -0,0 +1,19 @@
@version: 4.7
@include "scl.conf"

options {
    time-reopen(10);
    log-msg-size(8192);
    ts-format(iso);
};

source s_net {
    tcp(port(601));
    udp(port(514));
};

destination d_file {
    file("/var/log/syslog-ng/sealed.log" create-dirs(yes) perm(0644));
};

log { source(s_net); destination(d_file); };
88
devops/offline/airgap/verify-egress-block.sh
Normal file
@@ -0,0 +1,88 @@
#!/usr/bin/env bash
# Verification harness for sealed-mode egress: Docker/Compose or Kubernetes.
# Examples:
#   ./verify-egress-block.sh docker stella_default out/airgap-probe.json
#   ./verify-egress-block.sh k8s default out/k8s-probe.json
set -euo pipefail

mode=${1:-}
context=${2:-}
out=${3:-}

if [[ -z "$mode" || -z "$context" || -z "$out" ]]; then
  echo "Usage: $0 <docker|k8s> <network|namespace> <output.json> [target ...]" >&2
  exit 2
fi
shift 3
TARGETS=("$@")

ROOT=$(cd "$(dirname "$0")/../.." && pwd)
PROBE_PY="$ROOT/ops/devops/sealed-mode-ci/egress_probe.py"

case "$mode" in
  docker)
    network="$context"
    python3 "$PROBE_PY" --network "$network" --output "$out" "${TARGETS[@]}"
    ;;
  k8s|kubernetes)
    ns="$context"
    targets=("${TARGETS[@]}")
    if [[ ${#targets[@]} -eq 0 ]]; then
      targets=("https://example.com" "https://www.cloudflare.com" "https://releases.stella-ops.org/healthz")
    fi
    image="curlimages/curl:8.6.0"
    tmpfile=$(mktemp)
    # Inner shell variables are escaped (\$) so they survive heredoc expansion;
    # only ${ns}, ${image}, and ${targets[@]} are expanded by this script.
    cat > "$tmpfile" <<MANIFEST
apiVersion: v1
kind: Pod
metadata:
  name: sealed-egress-probe
  namespace: ${ns}
  labels:
    sealed: "true"
    stellaops.dev/purpose: sealed-mode
spec:
  restartPolicy: Never
  containers:
    - name: curl
      image: ${image}
      command: ["/bin/sh", "-c"]
      args:
        - >
          set -eu;
          rc=0;
          for url in ${targets[@]}; do
            echo "PROBE \$url";
            if curl -fsS --max-time 8 "\$url"; then
              echo "UNEXPECTED_SUCCESS \$url";
              rc=1;
            else
              echo "BLOCKED \$url";
            fi;
          done;
          exit \$rc;
      securityContext:
        runAsNonRoot: true
        readOnlyRootFilesystem: true
MANIFEST
    kubectl apply -f "$tmpfile" >/dev/null
    kubectl wait --for=condition=Ready pod/sealed-egress-probe -n "$ns" --timeout=30s >/dev/null 2>&1 || true
    set +e
    kubectl logs -n "$ns" sealed-egress-probe > "$out.log" 2>&1
    # Pods have no "Succeeded" condition, so wait on the phase instead.
    kubectl wait --for=jsonpath='{.status.phase}'=Succeeded pod/sealed-egress-probe -n "$ns" --timeout=60s
    pod_rc=$?
    kubectl get pod/sealed-egress-probe -n "$ns" -o json > "$out"
    kubectl delete pod/sealed-egress-probe -n "$ns" >/dev/null 2>&1 || true
    set -e
    if [[ $pod_rc -ne 0 ]]; then
      echo "Egress check failed; see $out and $out.log" >&2
      exit 1
    fi
    ;;
  *)
    echo "Unknown mode: $mode" >&2
    exit 2
    ;;
esac

echo "Egress verification complete → $out"
15
devops/offline/kit/AGENTS.md
Normal file
@@ -0,0 +1,15 @@
# Offline Kit — Agent Charter

## Mission
Package the Offline Update Kit per `docs/modules/devops/ARCHITECTURE.md` and `docs/24_OFFLINE_KIT.md` with deterministic digests and import tooling.

## Required Reading
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/airgap/airgap-mode.md`

## Working Agreement
1. Update task status to `DOING`/`DONE` inside the corresponding `docs/implplan/SPRINT_*.md` entry when you start or finish work.
2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
3. Keep changes deterministic (stable ordering, timestamps, hashes) and align with offline/air-gap expectations.
4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.
8
devops/offline/kit/TASKS.completed.md
Normal file
@@ -0,0 +1,8 @@
# Completed Tasks

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| DEVOPS-OFFLINE-14-002 | DONE (2025-10-26) | Offline Kit Guild | DEVOPS-REL-14-001 | Build offline kit packaging workflow (artifact bundling, manifest generation, signature verification). | Offline tarball generated with manifest + checksums + signatures; `ops/offline-kit/run-python-analyzer-smoke.sh` invoked as part of packaging; `debug/.build-id` tree mirrored from release output; import script verifies integrity; docs updated. |
| DEVOPS-OFFLINE-18-004 | DONE (2025-10-22) | Offline Kit Guild, Scanner Guild | DEVOPS-OFFLINE-18-003, SCANNER-ANALYZERS-LANG-10-309G | Rebuild Offline Kit bundle with Go analyzer plug-in and updated manifest/signature set. | Kit tarball includes Go analyzer artifacts; manifest/signature refreshed; verification steps executed and logged; docs updated with new bundle version. |
| DEVOPS-OFFLINE-18-005 | DONE (2025-10-26) | Offline Kit Guild, Scanner Guild | DEVOPS-REL-14-004, SCANNER-ANALYZERS-LANG-10-309P | Repackage Offline Kit with Python analyzer plug-in artefacts and refreshed manifest/signature set. | Kit tarball includes Python analyzer DLL/PDB/manifest; signature + manifest updated; Offline Kit guide references Python coverage; smoke import validated. |
| DEVOPS-OFFLINE-17-003 | DONE (2025-10-26) | Offline Kit Guild, DevOps Guild | DEVOPS-REL-17-002 | Mirror release debug-store artefacts (`.build-id/` tree and `debug-manifest.json`) into Offline Kit packaging and document import validation. | Offline kit archives `debug/.build-id/` with manifest/sha256, docs cover symbol lookup workflow, smoke job confirms build-id lookup succeeds on air-gapped install. |
BIN
devops/offline/kit/__pycache__/build_offline_kit.cpython-312.pyc
Normal file
Binary file not shown.
580
devops/offline/kit/build_offline_kit.py
Normal file
@@ -0,0 +1,580 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Package the StellaOps Offline Kit with deterministic artefacts and manifest."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import datetime as dt
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterable, Mapping, MutableMapping, Optional
|
||||
|
||||
REPO_ROOT = Path(__file__).resolve().parents[2]
|
||||
RELEASE_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "release"
|
||||
TELEMETRY_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "telemetry"
|
||||
TELEMETRY_BUNDLE_PATH = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz"
|
||||
|
||||
if str(RELEASE_TOOLS_DIR) not in sys.path:
|
||||
sys.path.insert(0, str(RELEASE_TOOLS_DIR))
|
||||
|
||||
from verify_release import ( # type: ignore import-not-found
|
||||
load_manifest,
|
||||
resolve_path,
|
||||
verify_release,
|
||||
)
|
||||
|
||||
import mirror_debug_store # type: ignore import-not-found
|
||||
|
||||
DEFAULT_RELEASE_DIR = REPO_ROOT / "out" / "release"
|
||||
DEFAULT_STAGING_DIR = REPO_ROOT / "out" / "offline-kit" / "staging"
|
||||
DEFAULT_OUTPUT_DIR = REPO_ROOT / "out" / "offline-kit" / "dist"
|
||||
|
||||
ARTIFACT_TARGETS = {
|
||||
"sbom": Path("sboms"),
|
||||
"provenance": Path("attest"),
|
||||
"signature": Path("signatures"),
|
||||
"metadata": Path("metadata/docker"),
|
||||
}
|
||||
|
||||
|
||||
class CommandError(RuntimeError):
|
||||
"""Raised when an external command fails."""
|
||||
|
||||
|
||||
def run(cmd: Iterable[str], *, cwd: Optional[Path] = None, env: Optional[Mapping[str, str]] = None) -> str:
|
||||
process_env = dict(os.environ)
|
||||
if env:
|
||||
process_env.update(env)
|
||||
result = subprocess.run(
|
||||
list(cmd),
|
||||
cwd=str(cwd) if cwd else None,
|
||||
env=process_env,
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
raise CommandError(
|
||||
f"Command failed ({result.returncode}): {' '.join(cmd)}\nSTDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}"
|
||||
)
|
||||
return result.stdout
|
||||
|
||||
|
||||
def compute_sha256(path: Path) -> str:
|
||||
sha = hashlib.sha256()
|
||||
with path.open("rb") as handle:
|
||||
for chunk in iter(lambda: handle.read(1024 * 1024), b""):
|
||||
sha.update(chunk)
|
||||
return sha.hexdigest()
|
||||
|
||||
|
||||
def utc_now_iso() -> str:
|
||||
return dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||
|
||||
|
||||
def safe_component_name(name: str) -> str:
|
||||
return re.sub(r"[^A-Za-z0-9_.-]", "-", name.strip().lower())
|
||||
|
||||
|
||||
def clean_directory(path: Path) -> None:
|
||||
if path.exists():
|
||||
shutil.rmtree(path)
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def run_python_analyzer_smoke() -> None:
|
||||
script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh"
|
||||
run(["bash", str(script)], cwd=REPO_ROOT)
|
||||
|
||||
|
||||
def run_rust_analyzer_smoke() -> None:
|
||||
script = REPO_ROOT / "ops" / "offline-kit" / "run-rust-analyzer-smoke.sh"
|
||||
run(["bash", str(script)], cwd=REPO_ROOT)
|
||||
|
||||
|
||||
def copy_if_exists(source: Path, target: Path) -> None:
|
||||
if source.is_dir():
|
||||
shutil.copytree(source, target, dirs_exist_ok=True)
|
||||
elif source.is_file():
|
||||
target.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(source, target)
|
||||
|
||||
|
||||
def copy_release_manifests(release_dir: Path, staging_dir: Path) -> None:
|
||||
manifest_dir = staging_dir / "manifest"
|
||||
manifest_dir.mkdir(parents=True, exist_ok=True)
|
||||
for name in ("release.yaml", "release.yaml.sha256", "release.json", "release.json.sha256"):
|
||||
source = release_dir / name
|
||||
if source.exists():
|
||||
shutil.copy2(source, manifest_dir / source.name)
|
||||
|
||||
|
||||
def copy_component_artifacts(
|
||||
manifest: Mapping[str, Any],
|
||||
release_dir: Path,
|
||||
staging_dir: Path,
|
||||
) -> None:
|
||||
components = manifest.get("components") or []
|
||||
for component in sorted(components, key=lambda entry: str(entry.get("name", ""))):
|
||||
if not isinstance(component, Mapping):
|
||||
continue
|
||||
component_name = safe_component_name(str(component.get("name", "component")))
|
||||
for key, target_root in ARTIFACT_TARGETS.items():
|
||||
entry = component.get(key)
|
||||
if not entry or not isinstance(entry, Mapping):
|
||||
continue
|
||||
path_str = entry.get("path")
|
||||
if not path_str:
|
||||
continue
|
||||
resolved = resolve_path(str(path_str), release_dir)
|
||||
if not resolved.exists():
|
||||
raise FileNotFoundError(f"Component '{component_name}' {key} artefact not found: {resolved}")
|
||||
target_dir = staging_dir / target_root
|
||||
target_dir.mkdir(parents=True, exist_ok=True)
|
||||
target_name = f"{component_name}-{resolved.name}" if resolved.name else component_name
|
||||
shutil.copy2(resolved, target_dir / target_name)
|
||||
|
||||
|
||||
def copy_collections(
|
||||
manifest: Mapping[str, Any],
|
||||
release_dir: Path,
|
||||
staging_dir: Path,
|
||||
) -> None:
|
||||
for collection, subdir in (("charts", Path("charts")), ("compose", Path("compose"))):
|
||||
entries = manifest.get(collection) or []
|
||||
for entry in entries:
|
||||
if not isinstance(entry, Mapping):
|
||||
continue
|
||||
path_str = entry.get("path")
|
||||
if not path_str:
|
||||
continue
|
||||
resolved = resolve_path(str(path_str), release_dir)
|
||||
if not resolved.exists():
|
||||
raise FileNotFoundError(f"{collection} artefact not found: {resolved}")
|
||||
target_dir = staging_dir / subdir
|
||||
target_dir.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(resolved, target_dir / resolved.name)
|
||||
|
||||
|
||||
def copy_debug_store(release_dir: Path, staging_dir: Path) -> None:
|
||||
mirror_debug_store.main(
|
||||
[
|
||||
"--release-dir",
|
||||
str(release_dir),
|
||||
"--offline-kit-dir",
|
||||
str(staging_dir),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def copy_plugins_and_assets(staging_dir: Path) -> None:
|
||||
copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner")
|
||||
copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates")
|
||||
copy_if_exists(REPO_ROOT / "seed-data", staging_dir / "seed-data")
|
||||
docs_dir = staging_dir / "docs"
|
||||
docs_dir.mkdir(parents=True, exist_ok=True)
|
||||
copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md")
|
||||
copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md")
|
||||
copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md")
|
||||
copy_if_exists(REPO_ROOT / "docs" / "airgap" / "mirror-bundles.md", docs_dir / "mirror-bundles.md")
|
||||
|
||||
|
||||
def copy_cli_and_taskrunner_assets(release_dir: Path, staging_dir: Path) -> None:
    """Bundle CLI binaries, task pack docs, and Task Runner samples when available."""
    cli_src = release_dir / "cli"
    if cli_src.exists():
        copy_if_exists(cli_src, staging_dir / "cli")

    taskrunner_bootstrap = staging_dir / "bootstrap" / "task-runner"
    taskrunner_bootstrap.mkdir(parents=True, exist_ok=True)
    copy_if_exists(REPO_ROOT / "etc" / "task-runner.yaml.sample", taskrunner_bootstrap / "task-runner.yaml.sample")

    docs_dir = staging_dir / "docs"
    copy_if_exists(REPO_ROOT / "docs" / "task-packs", docs_dir / "task-packs")
    copy_if_exists(REPO_ROOT / "docs" / "modules" / "taskrunner", docs_dir / "modules" / "taskrunner")


def copy_orchestrator_assets(release_dir: Path, staging_dir: Path) -> None:
    """Copy orchestrator service, worker SDK, postgres snapshot, and dashboards when present."""
    mapping = {
        release_dir / "orchestrator" / "service": staging_dir / "orchestrator" / "service",
        release_dir / "orchestrator" / "worker-sdk": staging_dir / "orchestrator" / "worker-sdk",
        release_dir / "orchestrator" / "postgres": staging_dir / "orchestrator" / "postgres",
        release_dir / "orchestrator" / "dashboards": staging_dir / "orchestrator" / "dashboards",
    }
    for src, dest in mapping.items():
        copy_if_exists(src, dest)


def copy_export_and_notifier_assets(release_dir: Path, staging_dir: Path) -> None:
    """Copy Export Center and Notifier offline bundles and tooling when present."""
    copy_if_exists(release_dir / "export-center", staging_dir / "export-center")
    copy_if_exists(release_dir / "notifier", staging_dir / "notifier")


def copy_surface_secrets(release_dir: Path, staging_dir: Path) -> None:
    """Include Surface.Secrets bundles and manifests if present."""
    copy_if_exists(release_dir / "surface-secrets", staging_dir / "surface-secrets")


def copy_bootstrap_configs(staging_dir: Path) -> None:
    notify_config = REPO_ROOT / "etc" / "notify.airgap.yaml"
    notify_secret = REPO_ROOT / "etc" / "secrets" / "notify-web-airgap.secret.example"
    notify_doc = REPO_ROOT / "docs" / "modules" / "notify" / "bootstrap-pack.md"

    if not notify_config.exists():
        raise FileNotFoundError(f"Missing notifier air-gap config: {notify_config}")
    if not notify_secret.exists():
        raise FileNotFoundError(f"Missing notifier air-gap secret template: {notify_secret}")

    notify_bootstrap_dir = staging_dir / "bootstrap" / "notify"
    notify_bootstrap_dir.mkdir(parents=True, exist_ok=True)
    copy_if_exists(REPO_ROOT / "etc" / "bootstrap" / "notify", notify_bootstrap_dir)

    copy_if_exists(notify_config, notify_bootstrap_dir / "notify.yaml")
    copy_if_exists(notify_secret, notify_bootstrap_dir / "notify-web.secret.example")
    copy_if_exists(notify_doc, notify_bootstrap_dir / "README.md")


def verify_required_seed_data(repo_root: Path) -> None:
    ruby_git_sources = repo_root / "seed-data" / "analyzers" / "ruby" / "git-sources"
    if not ruby_git_sources.is_dir():
        raise FileNotFoundError(f"Missing Ruby git-sources seed directory: {ruby_git_sources}")

    required_files = [
        ruby_git_sources / "Gemfile.lock",
        ruby_git_sources / "expected.json",
    ]
    for path in required_files:
        if not path.exists():
            raise FileNotFoundError(f"Offline kit seed artefact missing: {path}")


def copy_third_party_licenses(staging_dir: Path) -> None:
    licenses_src = REPO_ROOT / "third-party-licenses"
    if not licenses_src.is_dir():
        return

    target_dir = staging_dir / "third-party-licenses"
    target_dir.mkdir(parents=True, exist_ok=True)

    entries = sorted(licenses_src.iterdir(), key=lambda entry: entry.name.lower())
    for entry in entries:
        if entry.is_dir():
            shutil.copytree(entry, target_dir / entry.name, dirs_exist_ok=True)
        elif entry.is_file():
            shutil.copy2(entry, target_dir / entry.name)


def package_telemetry_bundle(staging_dir: Path) -> None:
    script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py"
    if not script.exists():
        return
    TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True)
    run(["python", str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT)
    telemetry_dir = staging_dir / "telemetry"
    telemetry_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy2(TELEMETRY_BUNDLE_PATH, telemetry_dir / TELEMETRY_BUNDLE_PATH.name)
    sha_path = TELEMETRY_BUNDLE_PATH.with_suffix(TELEMETRY_BUNDLE_PATH.suffix + ".sha256")
    if sha_path.exists():
        shutil.copy2(sha_path, telemetry_dir / sha_path.name)


def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]:
    entries: list[OrderedDict[str, Any]] = []
    exclude = exclude or set()
    for path in sorted(staging_dir.rglob("*")):
        if not path.is_file():
            continue
        rel = path.relative_to(staging_dir).as_posix()
        if rel in exclude:
            continue
        entries.append(
            OrderedDict(
                (
                    ("name", rel),
                    ("sha256", compute_sha256(path)),
                    ("size", path.stat().st_size),
                )
            )
        )
    return entries


def summarize_counts(staging_dir: Path) -> Mapping[str, int]:
    def count_files(rel: str) -> int:
        root = staging_dir / rel
        if not root.exists():
            return 0
        return sum(1 for path in root.rglob("*") if path.is_file())

    return {
        "cli": count_files("cli"),
        "taskPacksDocs": count_files("docs/task-packs"),
        "containers": count_files("containers"),
        "orchestrator": count_files("orchestrator"),
        "exportCenter": count_files("export-center"),
        "notifier": count_files("notifier"),
        "surfaceSecrets": count_files("surface-secrets"),
    }


def copy_container_bundles(release_dir: Path, staging_dir: Path) -> None:
    """Copy container air-gap bundles if present in the release directory."""
    candidates = [release_dir / "containers", release_dir / "images"]
    target_dir = staging_dir / "containers"
    for root in candidates:
        if not root.exists():
            continue
        for bundle in sorted(root.glob("**/*")):
            if bundle.is_file() and bundle.suffix in {".gz", ".tar", ".tgz"}:
                target_path = target_dir / bundle.relative_to(root)
                target_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(bundle, target_path)


def write_offline_manifest(
    staging_dir: Path,
    version: str,
    channel: str,
    release_manifest_sha: Optional[str],
) -> tuple[Path, str]:
    manifest_dir = staging_dir / "manifest"
    manifest_dir.mkdir(parents=True, exist_ok=True)
    offline_manifest_path = manifest_dir / "offline-manifest.json"
    files = scan_files(staging_dir, exclude={"manifest/offline-manifest.json", "manifest/offline-manifest.json.sha256"})
    manifest_data = OrderedDict(
        (
            (
                "bundle",
                OrderedDict(
                    (
                        ("version", version),
                        ("channel", channel),
                        ("capturedAt", utc_now_iso()),
                        ("releaseManifestSha256", release_manifest_sha),
                    )
                ),
            ),
            ("artifacts", files),
        )
    )
    with offline_manifest_path.open("w", encoding="utf-8") as handle:
        json.dump(manifest_data, handle, indent=2)
        handle.write("\n")
    manifest_sha = compute_sha256(offline_manifest_path)
    (offline_manifest_path.with_suffix(".json.sha256")).write_text(
        f"{manifest_sha} {offline_manifest_path.name}\n",
        encoding="utf-8",
    )
    return offline_manifest_path, manifest_sha


def tarinfo_filter(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
    tarinfo.uid = 0
    tarinfo.gid = 0
    tarinfo.uname = ""
    tarinfo.gname = ""
    tarinfo.mtime = 0
    return tarinfo


def create_tarball(staging_dir: Path, output_dir: Path, bundle_name: str) -> Path:
    output_dir.mkdir(parents=True, exist_ok=True)
    bundle_path = output_dir / f"{bundle_name}.tar.gz"
    if bundle_path.exists():
        bundle_path.unlink()
    with tarfile.open(bundle_path, "w:gz", compresslevel=9) as tar:
        for path in sorted(staging_dir.rglob("*")):
            if path.is_file():
                arcname = path.relative_to(staging_dir).as_posix()
                tar.add(path, arcname=arcname, filter=tarinfo_filter)
    return bundle_path


def sign_blob(
    path: Path,
    *,
    key_ref: Optional[str],
    identity_token: Optional[str],
    password: Optional[str],
    tlog_upload: bool,
) -> Optional[Path]:
    if not key_ref and not identity_token:
        return None
    cmd = ["cosign", "sign-blob", "--yes", str(path)]
    if key_ref:
        cmd.extend(["--key", key_ref])
    if identity_token:
        cmd.extend(["--identity-token", identity_token])
    if not tlog_upload:
        cmd.append("--tlog-upload=false")
    env = {"COSIGN_PASSWORD": password or ""}
    signature = run(cmd, env=env)
    sig_path = path.with_suffix(path.suffix + ".sig")
    sig_path.write_text(signature, encoding="utf-8")
    return sig_path


def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
    release_dir = args.release_dir.resolve()
    staging_dir = args.staging_dir.resolve()
    output_dir = args.output_dir.resolve()

    verify_release(release_dir)
    verify_required_seed_data(REPO_ROOT)
    if not args.skip_smoke:
        run_rust_analyzer_smoke()
        run_python_analyzer_smoke()
    clean_directory(staging_dir)
    copy_debug_store(release_dir, staging_dir)

    manifest_data = load_manifest(release_dir)
    release_manifest_sha = None
    checksums = manifest_data.get("checksums")
    if isinstance(checksums, Mapping):
        release_manifest_sha = checksums.get("sha256")

    copy_release_manifests(release_dir, staging_dir)
    copy_component_artifacts(manifest_data, release_dir, staging_dir)
    copy_collections(manifest_data, release_dir, staging_dir)
    copy_plugins_and_assets(staging_dir)
    copy_bootstrap_configs(staging_dir)
    copy_cli_and_taskrunner_assets(release_dir, staging_dir)
    copy_container_bundles(release_dir, staging_dir)
    copy_orchestrator_assets(release_dir, staging_dir)
    copy_export_and_notifier_assets(release_dir, staging_dir)
    copy_surface_secrets(release_dir, staging_dir)
    copy_third_party_licenses(staging_dir)
    package_telemetry_bundle(staging_dir)

    offline_manifest_path, offline_manifest_sha = write_offline_manifest(
        staging_dir,
        args.version,
        args.channel,
        release_manifest_sha,
    )
    bundle_name = f"stella-ops-offline-kit-{args.version}-{args.channel}"
    bundle_path = create_tarball(staging_dir, output_dir, bundle_name)
    bundle_sha = compute_sha256(bundle_path)
    bundle_sha_prefixed = f"sha256:{bundle_sha}"
    (bundle_path.with_suffix(".tar.gz.sha256")).write_text(
        f"{bundle_sha} {bundle_path.name}\n",
        encoding="utf-8",
    )

    signature_paths: dict[str, str] = {}
    sig = sign_blob(
        bundle_path,
        key_ref=args.cosign_key,
        identity_token=args.cosign_identity_token,
        password=args.cosign_password,
        tlog_upload=not args.no_transparency,
    )
    if sig:
        signature_paths["bundleSignature"] = str(sig)
    manifest_sig = sign_blob(
        offline_manifest_path,
        key_ref=args.cosign_key,
        identity_token=args.cosign_identity_token,
        password=args.cosign_password,
        tlog_upload=not args.no_transparency,
    )
    if manifest_sig:
        signature_paths["manifestSignature"] = str(manifest_sig)

    metadata = OrderedDict(
        (
            ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"),
            ("bundleName", bundle_path.name),
            ("bundleSha256", bundle_sha_prefixed),
            ("bundleSize", bundle_path.stat().st_size),
            ("manifestName", offline_manifest_path.name),
            ("manifestSha256", f"sha256:{offline_manifest_sha}"),
            ("manifestSize", offline_manifest_path.stat().st_size),
            ("channel", args.channel),
            ("version", args.version),
            ("capturedAt", utc_now_iso()),
            ("counts", summarize_counts(staging_dir)),
        )
    )

    if sig:
        metadata["bundleSignatureName"] = Path(sig).name
    if manifest_sig:
        metadata["manifestSignatureName"] = Path(manifest_sig).name

    metadata_path = output_dir / f"{bundle_name}.metadata.json"
    with metadata_path.open("w", encoding="utf-8") as handle:
        json.dump(metadata, handle, indent=2)
        handle.write("\n")

    return OrderedDict(
        (
            ("bundlePath", str(bundle_path)),
            ("bundleSha256", bundle_sha),
            ("manifestPath", str(offline_manifest_path)),
            ("metadataPath", str(metadata_path)),
            ("signatures", signature_paths),
        )
    )


def parse_args(argv: Optional[list[str]] = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--version", required=True, help="Bundle version (e.g. 2025.10.0)")
    parser.add_argument("--channel", default="edge", help="Release channel (default: %(default)s)")
    parser.add_argument("--bundle-id", help="Optional explicit bundle identifier")
    parser.add_argument(
        "--release-dir",
        type=Path,
        default=DEFAULT_RELEASE_DIR,
        help="Release artefact directory (default: %(default)s)",
    )
    parser.add_argument(
        "--staging-dir",
        type=Path,
        default=DEFAULT_STAGING_DIR,
        help="Temporary staging directory (default: %(default)s)",
    )
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=DEFAULT_OUTPUT_DIR,
        help="Destination directory for packaged bundles (default: %(default)s)",
    )
    parser.add_argument("--cosign-key", dest="cosign_key", help="Cosign key reference for signing")
    parser.add_argument("--cosign-password", dest="cosign_password", help="Cosign key password (if applicable)")
    parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Cosign identity token")
    parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log uploads")
    parser.add_argument("--skip-smoke", action="store_true", help="Skip analyzer smoke execution (testing only)")
    return parser.parse_args(argv)


def main(argv: Optional[list[str]] = None) -> int:
    args = parse_args(argv)
    try:
        result = build_offline_kit(args)
    except Exception as exc: # pylint: disable=broad-except
        print(f"offline-kit packaging failed: {exc}", file=sys.stderr)
        return 1
    print("✅ Offline kit packaged")
    for key, value in result.items():
        if isinstance(value, dict):
            for sub_key, sub_val in value.items():
                print(f" - {key}.{sub_key}: {sub_val}")
        else:
            print(f" - {key}: {value}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
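A minimal end-to-end invocation, sketched here for operators (the builder's path is inferred from the accompanying test module `test_build_offline_kit.py`; the version, directories, and key reference are placeholders):

    python devops/offline/kit/build_offline_kit.py \
      --version 2025.10.0 --channel edge \
      --release-dir out/release \
      --staging-dir out/offline-kit/staging \
      --output-dir out/offline-kit/dist \
      --cosign-key cosign.key --no-transparency

    # Optional pre-transfer check of the detached signature (assumes the matching public key;
    # depending on the cosign version, --insecure-ignore-tlog may be required when the
    # signature was produced with --tlog-upload=false).
    cosign verify-blob --key cosign.pub \
      --signature out/offline-kit/dist/stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig \
      out/offline-kit/dist/stella-ops-offline-kit-2025.10.0-edge.tar.gz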
221
devops/offline/kit/mirror_debug_store.py
Normal file
@@ -0,0 +1,221 @@
#!/usr/bin/env python3
"""Mirror release debug-store artefacts into the Offline Kit staging tree.

This helper copies the release `debug/` directory (including `.build-id/`,
`debug-manifest.json`, and the `.sha256` companion) into the Offline Kit
output directory and verifies the manifest hashes after the copy. A summary
document is written under `metadata/debug-store.json` so packaging jobs can
surface the available build-ids and validation status.
"""

from __future__ import annotations

import argparse
import datetime as dt
import json
import pathlib
import shutil
import sys
from typing import Iterable, Tuple

REPO_ROOT = pathlib.Path(__file__).resolve().parents[2]


def compute_sha256(path: pathlib.Path) -> str:
    import hashlib

    sha = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1024 * 1024), b""):
            sha.update(chunk)
    return sha.hexdigest()


def load_manifest(manifest_path: pathlib.Path) -> dict:
    with manifest_path.open("r", encoding="utf-8") as handle:
        return json.load(handle)


def parse_manifest_sha(sha_path: pathlib.Path) -> str | None:
    if not sha_path.exists():
        return None
    text = sha_path.read_text(encoding="utf-8").strip()
    if not text:
        return None
    # Allow either "<sha>" or "<sha> filename" formats.
    return text.split()[0]


def iter_debug_files(base_dir: pathlib.Path) -> Iterable[pathlib.Path]:
    for path in base_dir.rglob("*"):
        if path.is_file():
            yield path


def copy_debug_store(source_root: pathlib.Path, target_root: pathlib.Path, *, dry_run: bool) -> None:
    if dry_run:
        print(f"[dry-run] Would copy '{source_root}' -> '{target_root}'")
        return

    if target_root.exists():
        shutil.rmtree(target_root)
    shutil.copytree(source_root, target_root)


def verify_debug_store(manifest: dict, offline_root: pathlib.Path) -> Tuple[int, int]:
    """Return (verified_count, total_entries)."""

    artifacts = manifest.get("artifacts", [])
    verified = 0
    for entry in artifacts:
        debug_path = entry.get("debugPath")
        expected_sha = entry.get("sha256")
        expected_size = entry.get("size")

        if not debug_path or not expected_sha:
            continue

        relative = pathlib.PurePosixPath(debug_path)
        resolved = (offline_root.parent / relative).resolve()

        if not resolved.exists():
            raise FileNotFoundError(f"Debug artefact missing after mirror: {relative}")

        actual_sha = compute_sha256(resolved)
        if actual_sha != expected_sha:
            raise ValueError(
                f"Digest mismatch for {relative}: expected {expected_sha}, found {actual_sha}"
            )

        if expected_size is not None:
            actual_size = resolved.stat().st_size
            if actual_size != expected_size:
                raise ValueError(
                    f"Size mismatch for {relative}: expected {expected_size}, found {actual_size}"
                )

        verified += 1

    return verified, len(artifacts)


def summarize_store(manifest: dict, manifest_sha: str | None, offline_root: pathlib.Path, summary_path: pathlib.Path) -> None:
    debug_files = [
        path
        for path in iter_debug_files(offline_root)
        if path.suffix == ".debug"
    ]

    total_size = sum(path.stat().st_size for path in debug_files)
    build_ids = sorted(
        {entry.get("buildId") for entry in manifest.get("artifacts", []) if entry.get("buildId")}
    )

    summary = {
        "generatedAt": dt.datetime.now(tz=dt.timezone.utc)
        .replace(microsecond=0)
        .isoformat()
        .replace("+00:00", "Z"),
        "manifestGeneratedAt": manifest.get("generatedAt"),
        "manifestSha256": manifest_sha,
        "platforms": manifest.get("platforms")
        or sorted({entry.get("platform") for entry in manifest.get("artifacts", []) if entry.get("platform")}),
        "artifactCount": len(manifest.get("artifacts", [])),
        "buildIds": {
            "total": len(build_ids),
            "samples": build_ids[:10],
        },
        "debugFiles": {
            "count": len(debug_files),
            "totalSizeBytes": total_size,
        },
    }

    summary_path.parent.mkdir(parents=True, exist_ok=True)
    with summary_path.open("w", encoding="utf-8") as handle:
        json.dump(summary, handle, indent=2)
        handle.write("\n")


def resolve_release_debug_dir(base: pathlib.Path) -> pathlib.Path:
    debug_dir = base / "debug"
    if debug_dir.exists():
        return debug_dir

    # Allow specifying the channel directory directly (e.g. out/release/stable)
    if base.name == "debug":
        return base

    raise FileNotFoundError(f"Debug directory not found under '{base}'")


def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--release-dir",
        type=pathlib.Path,
        default=REPO_ROOT / "out" / "release",
        help="Release output directory containing the debug store (default: %(default)s)",
    )
    parser.add_argument(
        "--offline-kit-dir",
        type=pathlib.Path,
        default=REPO_ROOT / "out" / "offline-kit",
        help="Offline Kit staging directory (default: %(default)s)",
    )
    parser.add_argument(
        "--verify-only",
        action="store_true",
        help="Skip copying and only verify the existing offline kit debug store",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print actions without copying files",
    )
    return parser.parse_args(argv)


def main(argv: list[str] | None = None) -> int:
    args = parse_args(argv)

    try:
        source_debug = resolve_release_debug_dir(args.release_dir.resolve())
    except FileNotFoundError as exc:
        print(f"error: {exc}", file=sys.stderr)
        return 2

    target_root = (args.offline_kit_dir / "debug").resolve()

    if not args.verify_only:
        copy_debug_store(source_debug, target_root, dry_run=args.dry_run)
        if args.dry_run:
            return 0

    manifest_path = target_root / "debug-manifest.json"
    if not manifest_path.exists():
        print(f"error: offline kit manifest missing at {manifest_path}", file=sys.stderr)
        return 3

    manifest = load_manifest(manifest_path)
    manifest_sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
    recorded_sha = parse_manifest_sha(manifest_sha_path)
    recomputed_sha = compute_sha256(manifest_path)
    if recorded_sha and recorded_sha != recomputed_sha:
        print(
            f"warning: manifest SHA mismatch (recorded {recorded_sha}, recomputed {recomputed_sha}); updating checksum",
            file=sys.stderr,
        )
        manifest_sha_path.write_text(f"{recomputed_sha} {manifest_path.name}\n", encoding="utf-8")

    verified, total = verify_debug_store(manifest, target_root)
    print(f"✔ verified {verified}/{total} debug artefacts (manifest SHA {recomputed_sha})")

    summary_path = args.offline_kit_dir / "metadata" / "debug-store.json"
    summarize_store(manifest, recomputed_sha, target_root, summary_path)
    print(f"ℹ summary written to {summary_path}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
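The mirror helper can also be run standalone against an existing staging tree, for example to re-check digests after media transfer (the directories shown are the script defaults):

    python devops/offline/kit/mirror_debug_store.py --release-dir out/release --offline-kit-dir out/offline-kit
    python devops/offline/kit/mirror_debug_store.py --offline-kit-dir out/offline-kit --verify-only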
36
devops/offline/kit/run-python-analyzer-smoke.sh
Normal file
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -euo pipefail

repo_root="$(git -C "${BASH_SOURCE%/*}/.." rev-parse --show-toplevel 2>/dev/null || pwd)"
project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj"
output_dir="${repo_root}/out/analyzers/python"
plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python"

to_win_path() {
  if command -v wslpath >/dev/null 2>&1; then
    wslpath -w "$1"
  else
    printf '%s\n' "$1"
  fi
}

rm -rf "${output_dir}"
project_path_win="$(to_win_path "$project_path")"
output_dir_win="$(to_win_path "$output_dir")"

dotnet publish "$project_path_win" \
  --configuration Release \
  --output "$output_dir_win" \
  --self-contained false

mkdir -p "${plugin_dir}"
cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.dll" "${plugin_dir}/"
if [[ -f "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" ]]; then
  cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Python.pdb" "${plugin_dir}/"
fi

repo_root_win="$(to_win_path "$repo_root")"
exec dotnet run \
  --project "${repo_root_win}/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj" \
  --configuration Release \
  -- --repo-root "${repo_root_win}"
37
devops/offline/kit/run-rust-analyzer-smoke.sh
Normal file
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -euo pipefail

repo_root="$(git -C "${BASH_SOURCE%/*}/.." rev-parse --show-toplevel 2>/dev/null || pwd)"
project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj"
output_dir="${repo_root}/out/analyzers/rust"
plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust"

to_win_path() {
  if command -v wslpath >/dev/null 2>&1; then
    wslpath -w "$1"
  else
    printf '%s\n' "$1"
  fi
}

rm -rf "${output_dir}"
project_path_win="$(to_win_path "$project_path")"
output_dir_win="$(to_win_path "$output_dir")"

dotnet publish "$project_path_win" \
  --configuration Release \
  --output "$output_dir_win" \
  --self-contained false

mkdir -p "${plugin_dir}"
cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.dll" "${plugin_dir}/"
if [[ -f "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.pdb" ]]; then
  cp "${output_dir}/StellaOps.Scanner.Analyzers.Lang.Rust.pdb" "${plugin_dir}/"
fi

repo_root_win="$(to_win_path "$repo_root")"
exec dotnet run \
  --project "${repo_root_win}/src/Tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj" \
  --configuration Release \
  -- --repo-root "${repo_root_win}" \
  --analyzer rust
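Both smoke wrappers can also be run on their own when debugging analyzer packaging; they publish the analyzer with `dotnet publish`, copy the resulting DLL into `plugins/scanner/analyzers/lang/`, and exec the LanguageAnalyzerSmoke tool. A .NET SDK on PATH is assumed (and `wslpath` only when running under WSL):

    bash devops/offline/kit/run-python-analyzer-smoke.sh
    bash devops/offline/kit/run-rust-analyzer-smoke.sh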
334
devops/offline/kit/test_build_offline_kit.py
Normal file
@@ -0,0 +1,334 @@
from __future__ import annotations

import json
import tarfile
import tempfile
import unittest
import argparse
import sys
from collections import OrderedDict
from pathlib import Path

current_dir = Path(__file__).resolve().parent
sys.path.append(str(current_dir))
sys.path.append(str(current_dir.parent / "devops" / "release"))

from build_release import write_manifest # type: ignore import-not-found

from build_offline_kit import build_offline_kit, compute_sha256 # type: ignore import-not-found


class OfflineKitBuilderTests(unittest.TestCase):
    def setUp(self) -> None:
        self._temp = tempfile.TemporaryDirectory()
        self.base_path = Path(self._temp.name)
        self.out_dir = self.base_path / "out"
        self.release_dir = self.out_dir / "release"
        self.staging_dir = self.base_path / "staging"
        self.output_dir = self.base_path / "dist"
        self._create_sample_release()

    def tearDown(self) -> None:
        self._temp.cleanup()

    def _relative_to_out(self, path: Path) -> str:
        return path.relative_to(self.out_dir).as_posix()

    def _write_json(self, path: Path, payload: dict[str, object]) -> None:
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open("w", encoding="utf-8") as handle:
            json.dump(payload, handle, indent=2)
            handle.write("\n")

    def _create_sample_release(self) -> None:
        self.release_dir.mkdir(parents=True, exist_ok=True)

        cli_archive = self.release_dir / "cli" / "stellaops-cli-linux-x64.tar.gz"
        cli_archive.parent.mkdir(parents=True, exist_ok=True)
        cli_archive.write_bytes(b"cli-bytes")
        compute_sha256(cli_archive)

        container_bundle = self.release_dir / "containers" / "stellaops-containers.tar.gz"
        container_bundle.parent.mkdir(parents=True, exist_ok=True)
        container_bundle.write_bytes(b"container-bundle")
        compute_sha256(container_bundle)

        orchestrator_service = self.release_dir / "orchestrator" / "service" / "orchestrator-service.tar.gz"
        orchestrator_service.parent.mkdir(parents=True, exist_ok=True)
        orchestrator_service.write_bytes(b"orch-service")
        compute_sha256(orchestrator_service)

        orchestrator_dash = self.release_dir / "orchestrator" / "dashboards" / "dash.json"
        orchestrator_dash.parent.mkdir(parents=True, exist_ok=True)
        orchestrator_dash.write_text("{}\n", encoding="utf-8")

        export_bundle = self.release_dir / "export-center" / "export-offline-bundle.tar.gz"
        export_bundle.parent.mkdir(parents=True, exist_ok=True)
        export_bundle.write_bytes(b"export")
        compute_sha256(export_bundle)

        notifier_pack = self.release_dir / "notifier" / "notifier-offline-pack.tar.gz"
        notifier_pack.parent.mkdir(parents=True, exist_ok=True)
        notifier_pack.write_bytes(b"notifier")
        compute_sha256(notifier_pack)

        secrets_bundle = self.release_dir / "surface-secrets" / "secrets-bundle.tar.gz"
        secrets_bundle.parent.mkdir(parents=True, exist_ok=True)
        secrets_bundle.write_bytes(b"secrets")
        compute_sha256(secrets_bundle)

        sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json"
        sbom_path.parent.mkdir(parents=True, exist_ok=True)
        sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8")
        sbom_sha = compute_sha256(sbom_path)

        provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json"
        self._write_json(
            provenance_path,
            {
                "buildDefinition": {"buildType": "https://example/build"},
                "runDetails": {"builder": {"id": "https://example/ci"}},
            },
        )
        provenance_sha = compute_sha256(provenance_path)

        signature_path = self.release_dir / "artifacts/signatures/sample.signature"
        signature_path.parent.mkdir(parents=True, exist_ok=True)
        signature_path.write_text("signature-data\n", encoding="utf-8")
        signature_sha = compute_sha256(signature_path)

        metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json"
        self._write_json(metadata_path, {"digest": "sha256:1234"})
        metadata_sha = compute_sha256(metadata_path)

        chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz"
        chart_path.parent.mkdir(parents=True, exist_ok=True)
        chart_path.write_bytes(b"helm-chart-data")
        chart_sha = compute_sha256(chart_path)

        compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml"
        compose_path.parent.mkdir(parents=True, exist_ok=True)
        compose_path.write_text("services: {}\n", encoding="utf-8")
        compose_sha = compute_sha256(compose_path)

        debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug"
        debug_file.parent.mkdir(parents=True, exist_ok=True)
        debug_file.write_bytes(b"\x7fELFDEBUGDATA")
        debug_sha = compute_sha256(debug_file)

        debug_manifest_path = self.release_dir / "debug/debug-manifest.json"
        debug_manifest = OrderedDict(
            (
                ("generatedAt", "2025-10-26T00:00:00Z"),
                ("version", "1.0.0"),
                ("channel", "edge"),
                (
                    "artifacts",
                    [
                        OrderedDict(
                            (
                                ("buildId", "abcdef1234"),
                                ("platform", "linux/amd64"),
                                ("debugPath", "debug/.build-id/ab/cdef.debug"),
                                ("sha256", debug_sha),
                                ("size", debug_file.stat().st_size),
                                ("components", ["sample"]),
                                ("images", ["registry.example/sample@sha256:feedface"]),
                                ("sources", ["app/sample.dll"]),
                            )
                        )
                    ],
                ),
            )
        )
        self._write_json(debug_manifest_path, debug_manifest)
        debug_manifest_sha = compute_sha256(debug_manifest_path)
        (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text(
            f"{debug_manifest_sha} {debug_manifest_path.name}\n",
            encoding="utf-8",
        )

        manifest = OrderedDict(
            (
                (
                    "release",
                    OrderedDict(
                        (
                            ("version", "1.0.0"),
                            ("channel", "edge"),
                            ("date", "2025-10-26T00:00:00Z"),
                            ("calendar", "2025.10"),
                        )
                    ),
                ),
                (
                    "components",
                    [
                        OrderedDict(
                            (
                                ("name", "sample"),
                                ("image", "registry.example/sample@sha256:feedface"),
                                ("tags", ["registry.example/sample:1.0.0"]),
                                (
                                    "sbom",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(sbom_path)),
                                            ("sha256", sbom_sha),
                                        )
                                    ),
                                ),
                                (
                                    "provenance",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(provenance_path)),
                                            ("sha256", provenance_sha),
                                        )
                                    ),
                                ),
                                (
                                    "signature",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(signature_path)),
                                            ("sha256", signature_sha),
                                            ("ref", "sigstore://example"),
                                            ("tlogUploaded", True),
                                        )
                                    ),
                                ),
                                (
                                    "metadata",
                                    OrderedDict(
                                        (
                                            ("path", self._relative_to_out(metadata_path)),
                                            ("sha256", metadata_sha),
                                        )
                                    ),
                                ),
                            )
                        )
                    ],
                ),
                (
                    "charts",
                    [
                        OrderedDict(
                            (
                                ("name", "stellaops"),
                                ("version", "1.0.0"),
                                ("path", self._relative_to_out(chart_path)),
                                ("sha256", chart_sha),
                            )
                        )
                    ],
                ),
                (
                    "compose",
                    [
                        OrderedDict(
                            (
                                ("name", "docker-compose.dev.yaml"),
                                ("path", compose_path.relative_to(self.out_dir).as_posix()),
                                ("sha256", compose_sha),
                            )
                        )
                    ],
                ),
                (
                    "debugStore",
                    OrderedDict(
                        (
                            ("manifest", "debug/debug-manifest.json"),
                            ("sha256", debug_manifest_sha),
                            ("entries", 1),
                            ("platforms", ["linux/amd64"]),
                            ("directory", "debug/.build-id"),
                        )
                    ),
                ),
            )
        )
        write_manifest(manifest, self.release_dir)

    def test_build_offline_kit(self) -> None:
        args = argparse.Namespace(
            version="2025.10.0",
            channel="edge",
            bundle_id="bundle-001",
            release_dir=self.release_dir,
            staging_dir=self.staging_dir,
            output_dir=self.output_dir,
            cosign_key=None,
            cosign_password=None,
            cosign_identity_token=None,
            no_transparency=False,
            skip_smoke=True,
        )
        result = build_offline_kit(args)
        bundle_path = Path(result["bundlePath"])
        self.assertTrue(bundle_path.exists())
        offline_manifest = self.output_dir.parent / "staging" / "manifest" / "offline-manifest.json"
        self.assertTrue(offline_manifest.exists())

        bootstrap_notify = self.staging_dir / "bootstrap" / "notify"
        self.assertTrue((bootstrap_notify / "notify.yaml").exists())
        self.assertTrue((bootstrap_notify / "notify-web.secret.example").exists())

        taskrunner_bootstrap = self.staging_dir / "bootstrap" / "task-runner"
        self.assertTrue((taskrunner_bootstrap / "task-runner.yaml.sample").exists())

        docs_taskpacks = self.staging_dir / "docs" / "task-packs"
        self.assertTrue(docs_taskpacks.exists())
        self.assertTrue((self.staging_dir / "docs" / "mirror-bundles.md").exists())

        containers_dir = self.staging_dir / "containers"
        self.assertTrue((containers_dir / "stellaops-containers.tar.gz").exists())

        orchestrator_dir = self.staging_dir / "orchestrator"
        self.assertTrue((orchestrator_dir / "service" / "orchestrator-service.tar.gz").exists())
        self.assertTrue((orchestrator_dir / "dashboards" / "dash.json").exists())

        export_dir = self.staging_dir / "export-center"
        self.assertTrue((export_dir / "export-offline-bundle.tar.gz").exists())

        notifier_dir = self.staging_dir / "notifier"
        self.assertTrue((notifier_dir / "notifier-offline-pack.tar.gz").exists())

        secrets_dir = self.staging_dir / "surface-secrets"
        self.assertTrue((secrets_dir / "secrets-bundle.tar.gz").exists())

        with offline_manifest.open("r", encoding="utf-8") as handle:
            manifest_data = json.load(handle)
        artifacts = manifest_data["artifacts"]
        self.assertTrue(any(item["name"].startswith("sboms/") for item in artifacts))
        self.assertTrue(any(item["name"].startswith("cli/") for item in artifacts))

        metadata_path = Path(result["metadataPath"])
        data = json.loads(metadata_path.read_text(encoding="utf-8"))
        self.assertTrue(data["bundleSha256"].startswith("sha256:"))
        self.assertTrue(data["manifestSha256"].startswith("sha256:"))
        counts = data["counts"]
        self.assertGreaterEqual(counts["cli"], 1)
        self.assertGreaterEqual(counts["containers"], 1)
        self.assertGreaterEqual(counts["orchestrator"], 2)
        self.assertGreaterEqual(counts["exportCenter"], 1)
        self.assertGreaterEqual(counts["notifier"], 1)
        self.assertGreaterEqual(counts["surfaceSecrets"], 1)

        with tarfile.open(bundle_path, "r:gz") as tar:
            members = tar.getnames()
        self.assertIn("manifest/release.yaml", members)
        self.assertTrue(any(name.startswith("sboms/sample-") for name in members))
        self.assertIn("bootstrap/notify/notify.yaml", members)
        self.assertIn("bootstrap/notify/notify-web.secret.example", members)
        self.assertIn("containers/stellaops-containers.tar.gz", members)
        self.assertIn("orchestrator/service/orchestrator-service.tar.gz", members)
        self.assertIn("export-center/export-offline-bundle.tar.gz", members)
        self.assertIn("notifier/notifier-offline-pack.tar.gz", members)
        self.assertIn("surface-secrets/secrets-bundle.tar.gz", members)


if __name__ == "__main__":
    unittest.main()
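The suite adjusts sys.path itself, so the simplest way to run it is directly from the repository root (assuming the build_release helper it imports is reachable from the path the module appends):

    python devops/offline/kit/test_build_offline_kit.py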