Refactor compare-view component to use observables for data loading, enhancing performance and responsiveness. Update compare service interfaces and methods for improved delta computation. Modify audit log component to handle optional event properties gracefully. Optimize Monaco editor worker loading to reduce bundle size. Introduce shared SCSS mixins for consistent styling across components. Add Gitea test instance setup and NuGet package publishing test scripts for CI/CD validation. Update documentation paths and ensure all references are accurate.

StellaOps Bot
2025-12-26 21:39:17 +02:00
parent b4fc66feb6
commit 75de089ee8
61 changed files with 1318 additions and 958 deletions
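The Angular-side changes listed in the message (compare-view observables, audit log, Monaco workers, SCSS mixins) are not part of the file excerpted below, which only covers the offline-kit build script. For orientation, the observable-based data loading described in the first item usually follows a pattern like the minimal sketch below; every name in it (CompareService, getDelta, selection$, CompareViewState) is a hypothetical stand-in, not the actual interface touched by this commit.

// Illustrative sketch only -- hypothetical names, not the actual compare-view code.
import { Observable, of } from 'rxjs';
import { catchError, distinctUntilChanged, map, shareReplay, startWith, switchMap } from 'rxjs/operators';

interface CompareSelection { baseDigest: string; targetDigest: string; }
interface CompareDelta { added: number; removed: number; changed: number; }
interface CompareViewState { loading: boolean; delta?: CompareDelta; error?: string; }

// Assumed service surface: delta computation exposed as an Observable rather than a Promise.
interface CompareService {
  getDelta(selection: CompareSelection): Observable<CompareDelta>;
}

// Recompute whenever the selection changes, cancel stale requests with switchMap,
// surface loading/error states, and cache the latest result for late subscribers.
function buildDeltaStream(
  selection$: Observable<CompareSelection>,
  compareService: CompareService,
): Observable<CompareViewState> {
  return selection$.pipe(
    distinctUntilChanged((a, b) => a.baseDigest === b.baseDigest && a.targetDigest === b.targetDigest),
    switchMap(selection =>
      compareService.getDelta(selection).pipe(
        map((delta): CompareViewState => ({ loading: false, delta })),
        catchError(err => of({ loading: false, error: String(err) })),
        startWith({ loading: true }),
      ),
    ),
    shareReplay({ bufferSize: 1, refCount: true }),
  );
}

Consuming such a stream through the async pipe keeps change detection push-based and avoids manual subscription management, which is presumably where the responsiveness gain mentioned in the message comes from.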


@@ -90,16 +90,16 @@ def clean_directory(path: Path) -> None:
     path.mkdir(parents=True, exist_ok=True)
-def run_python_analyzer_smoke() -> None:
-    script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh"
-    run(["bash", str(script)], cwd=REPO_ROOT)
-def run_rust_analyzer_smoke() -> None:
-    script = REPO_ROOT / "ops" / "offline-kit" / "run-rust-analyzer-smoke.sh"
-    run(["bash", str(script)], cwd=REPO_ROOT)
+def run_python_analyzer_smoke() -> None:
+    script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh"
+    run(["bash", str(script)], cwd=REPO_ROOT)
+def run_rust_analyzer_smoke() -> None:
+    script = REPO_ROOT / "ops" / "offline-kit" / "run-rust-analyzer-smoke.sh"
+    run(["bash", str(script)], cwd=REPO_ROOT)
 def copy_if_exists(source: Path, target: Path) -> None:
     if source.is_dir():
         shutil.copytree(source, target, dirs_exist_ok=True)
@@ -175,110 +175,110 @@ def copy_debug_store(release_dir: Path, staging_dir: Path) -> None:
     )
-def copy_plugins_and_assets(staging_dir: Path) -> None:
-    copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner")
-    copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates")
-    copy_if_exists(REPO_ROOT / "seed-data", staging_dir / "seed-data")
-    docs_dir = staging_dir / "docs"
-    docs_dir.mkdir(parents=True, exist_ok=True)
-    copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md")
-    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md")
-    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md")
-    copy_if_exists(REPO_ROOT / "docs" / "airgap" / "mirror-bundles.md", docs_dir / "mirror-bundles.md")
-def copy_cli_and_taskrunner_assets(release_dir: Path, staging_dir: Path) -> None:
-    """Bundle CLI binaries, task pack docs, and Task Runner samples when available."""
-    cli_src = release_dir / "cli"
-    if cli_src.exists():
-        copy_if_exists(cli_src, staging_dir / "cli")
-    taskrunner_bootstrap = staging_dir / "bootstrap" / "task-runner"
-    taskrunner_bootstrap.mkdir(parents=True, exist_ok=True)
-    copy_if_exists(REPO_ROOT / "etc" / "task-runner.yaml.sample", taskrunner_bootstrap / "task-runner.yaml.sample")
-    docs_dir = staging_dir / "docs"
-    copy_if_exists(REPO_ROOT / "docs" / "task-packs", docs_dir / "task-packs")
-    copy_if_exists(REPO_ROOT / "docs" / "modules" / "taskrunner", docs_dir / "modules" / "taskrunner")
-def copy_orchestrator_assets(release_dir: Path, staging_dir: Path) -> None:
-    """Copy orchestrator service, worker SDK, postgres snapshot, and dashboards when present."""
-    mapping = {
-        release_dir / "orchestrator" / "service": staging_dir / "orchestrator" / "service",
-        release_dir / "orchestrator" / "worker-sdk": staging_dir / "orchestrator" / "worker-sdk",
-        release_dir / "orchestrator" / "postgres": staging_dir / "orchestrator" / "postgres",
-        release_dir / "orchestrator" / "dashboards": staging_dir / "orchestrator" / "dashboards",
-    }
-    for src, dest in mapping.items():
-        copy_if_exists(src, dest)
-def copy_export_and_notifier_assets(release_dir: Path, staging_dir: Path) -> None:
-    """Copy Export Center and Notifier offline bundles and tooling when present."""
-    copy_if_exists(release_dir / "export-center", staging_dir / "export-center")
-    copy_if_exists(release_dir / "notifier", staging_dir / "notifier")
-def copy_surface_secrets(release_dir: Path, staging_dir: Path) -> None:
-    """Include Surface.Secrets bundles and manifests if present."""
-    copy_if_exists(release_dir / "surface-secrets", staging_dir / "surface-secrets")
-def copy_bootstrap_configs(staging_dir: Path) -> None:
-    notify_config = REPO_ROOT / "etc" / "notify.airgap.yaml"
-    notify_secret = REPO_ROOT / "etc" / "secrets" / "notify-web-airgap.secret.example"
-    notify_doc = REPO_ROOT / "docs" / "modules" / "notify" / "bootstrap-pack.md"
-    if not notify_config.exists():
-        raise FileNotFoundError(f"Missing notifier air-gap config: {notify_config}")
-    if not notify_secret.exists():
-        raise FileNotFoundError(f"Missing notifier air-gap secret template: {notify_secret}")
-    notify_bootstrap_dir = staging_dir / "bootstrap" / "notify"
-    notify_bootstrap_dir.mkdir(parents=True, exist_ok=True)
-    copy_if_exists(REPO_ROOT / "etc" / "bootstrap" / "notify", notify_bootstrap_dir)
-    copy_if_exists(notify_config, notify_bootstrap_dir / "notify.yaml")
-    copy_if_exists(notify_secret, notify_bootstrap_dir / "notify-web.secret.example")
-    copy_if_exists(notify_doc, notify_bootstrap_dir / "README.md")
-def verify_required_seed_data(repo_root: Path) -> None:
-    ruby_git_sources = repo_root / "seed-data" / "analyzers" / "ruby" / "git-sources"
-    if not ruby_git_sources.is_dir():
-        raise FileNotFoundError(f"Missing Ruby git-sources seed directory: {ruby_git_sources}")
-    required_files = [
-        ruby_git_sources / "Gemfile.lock",
-        ruby_git_sources / "expected.json",
-    ]
-    for path in required_files:
-        if not path.exists():
-            raise FileNotFoundError(f"Offline kit seed artefact missing: {path}")
-def copy_third_party_licenses(staging_dir: Path) -> None:
-    licenses_src = REPO_ROOT / "third-party-licenses"
-    if not licenses_src.is_dir():
-        return
-    target_dir = staging_dir / "third-party-licenses"
-    target_dir.mkdir(parents=True, exist_ok=True)
-    entries = sorted(licenses_src.iterdir(), key=lambda entry: entry.name.lower())
-    for entry in entries:
-        if entry.is_dir():
-            shutil.copytree(entry, target_dir / entry.name, dirs_exist_ok=True)
-        elif entry.is_file():
-            shutil.copy2(entry, target_dir / entry.name)
-def package_telemetry_bundle(staging_dir: Path) -> None:
-    script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py"
-    if not script.exists():
-        return
-    TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True)
+def copy_plugins_and_assets(staging_dir: Path) -> None:
+    copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner")
+    copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates")
+    copy_if_exists(REPO_ROOT / "src" / "__Tests" / "__Datasets" / "seed-data", staging_dir / "seed-data")
+    docs_dir = staging_dir / "docs"
+    docs_dir.mkdir(parents=True, exist_ok=True)
+    copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md")
+    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md")
+    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md")
+    copy_if_exists(REPO_ROOT / "docs" / "airgap" / "mirror-bundles.md", docs_dir / "mirror-bundles.md")
+def copy_cli_and_taskrunner_assets(release_dir: Path, staging_dir: Path) -> None:
+    """Bundle CLI binaries, task pack docs, and Task Runner samples when available."""
+    cli_src = release_dir / "cli"
+    if cli_src.exists():
+        copy_if_exists(cli_src, staging_dir / "cli")
+    taskrunner_bootstrap = staging_dir / "bootstrap" / "task-runner"
+    taskrunner_bootstrap.mkdir(parents=True, exist_ok=True)
+    copy_if_exists(REPO_ROOT / "etc" / "task-runner.yaml.sample", taskrunner_bootstrap / "task-runner.yaml.sample")
+    docs_dir = staging_dir / "docs"
+    copy_if_exists(REPO_ROOT / "docs" / "task-packs", docs_dir / "task-packs")
+    copy_if_exists(REPO_ROOT / "docs" / "modules" / "taskrunner", docs_dir / "modules" / "taskrunner")
+def copy_orchestrator_assets(release_dir: Path, staging_dir: Path) -> None:
+    """Copy orchestrator service, worker SDK, postgres snapshot, and dashboards when present."""
+    mapping = {
+        release_dir / "orchestrator" / "service": staging_dir / "orchestrator" / "service",
+        release_dir / "orchestrator" / "worker-sdk": staging_dir / "orchestrator" / "worker-sdk",
+        release_dir / "orchestrator" / "postgres": staging_dir / "orchestrator" / "postgres",
+        release_dir / "orchestrator" / "dashboards": staging_dir / "orchestrator" / "dashboards",
+    }
+    for src, dest in mapping.items():
+        copy_if_exists(src, dest)
+def copy_export_and_notifier_assets(release_dir: Path, staging_dir: Path) -> None:
+    """Copy Export Center and Notifier offline bundles and tooling when present."""
+    copy_if_exists(release_dir / "export-center", staging_dir / "export-center")
+    copy_if_exists(release_dir / "notifier", staging_dir / "notifier")
+def copy_surface_secrets(release_dir: Path, staging_dir: Path) -> None:
+    """Include Surface.Secrets bundles and manifests if present."""
+    copy_if_exists(release_dir / "surface-secrets", staging_dir / "surface-secrets")
+def copy_bootstrap_configs(staging_dir: Path) -> None:
+    notify_config = REPO_ROOT / "etc" / "notify.airgap.yaml"
+    notify_secret = REPO_ROOT / "etc" / "secrets" / "notify-web-airgap.secret.example"
+    notify_doc = REPO_ROOT / "docs" / "modules" / "notify" / "bootstrap-pack.md"
+    if not notify_config.exists():
+        raise FileNotFoundError(f"Missing notifier air-gap config: {notify_config}")
+    if not notify_secret.exists():
+        raise FileNotFoundError(f"Missing notifier air-gap secret template: {notify_secret}")
+    notify_bootstrap_dir = staging_dir / "bootstrap" / "notify"
+    notify_bootstrap_dir.mkdir(parents=True, exist_ok=True)
+    copy_if_exists(REPO_ROOT / "etc" / "bootstrap" / "notify", notify_bootstrap_dir)
+    copy_if_exists(notify_config, notify_bootstrap_dir / "notify.yaml")
+    copy_if_exists(notify_secret, notify_bootstrap_dir / "notify-web.secret.example")
+    copy_if_exists(notify_doc, notify_bootstrap_dir / "README.md")
+def verify_required_seed_data(repo_root: Path) -> None:
+    ruby_git_sources = repo_root / "src" / "__Tests" / "__Datasets" / "seed-data" / "analyzers" / "ruby" / "git-sources"
+    if not ruby_git_sources.is_dir():
+        raise FileNotFoundError(f"Missing Ruby git-sources seed directory: {ruby_git_sources}")
+    required_files = [
+        ruby_git_sources / "Gemfile.lock",
+        ruby_git_sources / "expected.json",
+    ]
+    for path in required_files:
+        if not path.exists():
+            raise FileNotFoundError(f"Offline kit seed artefact missing: {path}")
+def copy_third_party_licenses(staging_dir: Path) -> None:
+    licenses_src = REPO_ROOT / "third-party-licenses"
+    if not licenses_src.is_dir():
+        return
+    target_dir = staging_dir / "third-party-licenses"
+    target_dir.mkdir(parents=True, exist_ok=True)
+    entries = sorted(licenses_src.iterdir(), key=lambda entry: entry.name.lower())
+    for entry in entries:
+        if entry.is_dir():
+            shutil.copytree(entry, target_dir / entry.name, dirs_exist_ok=True)
+        elif entry.is_file():
+            shutil.copy2(entry, target_dir / entry.name)
+def package_telemetry_bundle(staging_dir: Path) -> None:
+    script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py"
+    if not script.exists():
+        return
+    TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True)
     run(["python", str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT)
     telemetry_dir = staging_dir / "telemetry"
     telemetry_dir.mkdir(parents=True, exist_ok=True)
@@ -288,8 +288,8 @@ def package_telemetry_bundle(staging_dir: Path) -> None:
         shutil.copy2(sha_path, telemetry_dir / sha_path.name)
-def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]:
-    entries: list[OrderedDict[str, Any]] = []
+def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]:
+    entries: list[OrderedDict[str, Any]] = []
     exclude = exclude or set()
     for path in sorted(staging_dir.rglob("*")):
         if not path.is_file():
@@ -306,39 +306,39 @@ def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[Or
                 )
             )
         )
-    return entries
-def summarize_counts(staging_dir: Path) -> Mapping[str, int]:
-    def count_files(rel: str) -> int:
-        root = staging_dir / rel
-        if not root.exists():
-            return 0
-        return sum(1 for path in root.rglob("*") if path.is_file())
-    return {
-        "cli": count_files("cli"),
-        "taskPacksDocs": count_files("docs/task-packs"),
-        "containers": count_files("containers"),
-        "orchestrator": count_files("orchestrator"),
-        "exportCenter": count_files("export-center"),
-        "notifier": count_files("notifier"),
-        "surfaceSecrets": count_files("surface-secrets"),
-    }
-def copy_container_bundles(release_dir: Path, staging_dir: Path) -> None:
-    """Copy container air-gap bundles if present in the release directory."""
-    candidates = [release_dir / "containers", release_dir / "images"]
-    target_dir = staging_dir / "containers"
-    for root in candidates:
-        if not root.exists():
-            continue
-        for bundle in sorted(root.glob("**/*")):
-            if bundle.is_file() and bundle.suffix in {".gz", ".tar", ".tgz"}:
-                target_path = target_dir / bundle.relative_to(root)
-                target_path.parent.mkdir(parents=True, exist_ok=True)
-                shutil.copy2(bundle, target_path)
+    return entries
+def summarize_counts(staging_dir: Path) -> Mapping[str, int]:
+    def count_files(rel: str) -> int:
+        root = staging_dir / rel
+        if not root.exists():
+            return 0
+        return sum(1 for path in root.rglob("*") if path.is_file())
+    return {
+        "cli": count_files("cli"),
+        "taskPacksDocs": count_files("docs/task-packs"),
+        "containers": count_files("containers"),
+        "orchestrator": count_files("orchestrator"),
+        "exportCenter": count_files("export-center"),
+        "notifier": count_files("notifier"),
+        "surfaceSecrets": count_files("surface-secrets"),
+    }
+def copy_container_bundles(release_dir: Path, staging_dir: Path) -> None:
+    """Copy container air-gap bundles if present in the release directory."""
+    candidates = [release_dir / "containers", release_dir / "images"]
+    target_dir = staging_dir / "containers"
+    for root in candidates:
+        if not root.exists():
+            continue
+        for bundle in sorted(root.glob("**/*")):
+            if bundle.is_file() and bundle.suffix in {".gz", ".tar", ".tgz"}:
+                target_path = target_dir / bundle.relative_to(root)
+                target_path.parent.mkdir(parents=True, exist_ok=True)
+                shutil.copy2(bundle, target_path)
 def write_offline_manifest(
@@ -424,17 +424,17 @@ def sign_blob(
     return sig_path
-def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
-    release_dir = args.release_dir.resolve()
-    staging_dir = args.staging_dir.resolve()
-    output_dir = args.output_dir.resolve()
-    verify_release(release_dir)
-    verify_required_seed_data(REPO_ROOT)
-    if not args.skip_smoke:
-        run_rust_analyzer_smoke()
-        run_python_analyzer_smoke()
-    clean_directory(staging_dir)
+def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
+    release_dir = args.release_dir.resolve()
+    staging_dir = args.staging_dir.resolve()
+    output_dir = args.output_dir.resolve()
+    verify_release(release_dir)
+    verify_required_seed_data(REPO_ROOT)
+    if not args.skip_smoke:
+        run_rust_analyzer_smoke()
+        run_python_analyzer_smoke()
+    clean_directory(staging_dir)
     copy_debug_store(release_dir, staging_dir)
     manifest_data = load_manifest(release_dir)
@@ -443,22 +443,22 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
     if isinstance(checksums, Mapping):
         release_manifest_sha = checksums.get("sha256")
-    copy_release_manifests(release_dir, staging_dir)
-    copy_component_artifacts(manifest_data, release_dir, staging_dir)
-    copy_collections(manifest_data, release_dir, staging_dir)
-    copy_plugins_and_assets(staging_dir)
-    copy_bootstrap_configs(staging_dir)
-    copy_cli_and_taskrunner_assets(release_dir, staging_dir)
-    copy_container_bundles(release_dir, staging_dir)
-    copy_orchestrator_assets(release_dir, staging_dir)
-    copy_export_and_notifier_assets(release_dir, staging_dir)
-    copy_surface_secrets(release_dir, staging_dir)
-    copy_third_party_licenses(staging_dir)
-    package_telemetry_bundle(staging_dir)
-    offline_manifest_path, offline_manifest_sha = write_offline_manifest(
-        staging_dir,
-        args.version,
+    copy_release_manifests(release_dir, staging_dir)
+    copy_component_artifacts(manifest_data, release_dir, staging_dir)
+    copy_collections(manifest_data, release_dir, staging_dir)
+    copy_plugins_and_assets(staging_dir)
+    copy_bootstrap_configs(staging_dir)
+    copy_cli_and_taskrunner_assets(release_dir, staging_dir)
+    copy_container_bundles(release_dir, staging_dir)
+    copy_orchestrator_assets(release_dir, staging_dir)
+    copy_export_and_notifier_assets(release_dir, staging_dir)
+    copy_surface_secrets(release_dir, staging_dir)
+    copy_third_party_licenses(staging_dir)
+    package_telemetry_bundle(staging_dir)
+    offline_manifest_path, offline_manifest_sha = write_offline_manifest(
+        staging_dir,
+        args.version,
         args.channel,
         release_manifest_sha,
     )
@@ -491,8 +491,8 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
     if manifest_sig:
         signature_paths["manifestSignature"] = str(manifest_sig)
-    metadata = OrderedDict(
-        (
+    metadata = OrderedDict(
+        (
             ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"),
             ("bundleName", bundle_path.name),
             ("bundleSha256", bundle_sha_prefixed),
@@ -501,11 +501,11 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
("manifestSha256", f"sha256:{offline_manifest_sha}"),
("manifestSize", offline_manifest_path.stat().st_size),
("channel", args.channel),
("version", args.version),
("capturedAt", utc_now_iso()),
("counts", summarize_counts(staging_dir)),
)
)
("version", args.version),
("capturedAt", utc_now_iso()),
("counts", summarize_counts(staging_dir)),
)
)
if sig:
metadata["bundleSignatureName"] = Path(sig).name