feat: Implement console session management with tenant and profile handling
- Add ConsoleSessionStore for managing console session state including tenants, profile, and token information.
- Create OperatorContextService to manage operator context for orchestrator actions.
- Implement OperatorMetadataInterceptor to enrich HTTP requests with operator context metadata.
- Develop ConsoleProfileComponent to display user profile and session details, including tenant information and access tokens.
- Add corresponding HTML and SCSS for ConsoleProfileComponent to enhance UI presentation.
- Write unit tests for ConsoleProfileComponent to ensure correct rendering and functionality.
@@ -40,6 +40,8 @@ For additional options, see `etc/authority.yaml.sample`.

> **Graph Explorer reminder:** When enabling Cartographer or Graph API components, update `etc/authority.yaml` so the `cartographer-service` client includes `properties.serviceIdentity: "cartographer"` and a tenant hint. Authority now rejects `graph:write` tokens that lack this marker, so existing deployments must apply the update before rolling out the new build.
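
A minimal sketch of the client stanza this refers to; only `properties.serviceIdentity` is confirmed above, so treat the surrounding keys and the tenant-hint field name as illustrative and defer to `etc/authority.yaml.sample`:

```yaml
# Illustrative layout only; follow etc/authority.yaml.sample for the real schema.
clients:
  - clientId: cartographer-service
    properties:
      serviceIdentity: "cartographer"   # marker required on graph:write tokens
      tenantHint: "primary"             # tenant hint; exact key per the sample file
```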

> **Console endpoint reminder:** The Console UI now calls `/console/tenants`, `/console/profile`, and `/console/token/introspect`. Reverse proxies must forward the `X-Stella-Tenant` header (derived from the access token) so Authority can enforce tenancy; audit events are logged under `authority.console.*`. Admin actions obey a five-minute fresh-auth window reported by `/console/profile`, so keep session timeout prompts aligned with that value.
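
To exercise this end to end, a minimal client sketch (the host, token placeholder, and the `freshAuthWindowSeconds` field are assumptions; only the endpoint paths and the `X-Stella-Tenant` header come from the note above):

```python
# Sketch only: response field names are assumed, not a documented contract.
import requests

AUTHORITY = "https://authority.example.internal"  # hypothetical host
HEADERS = {
    "Authorization": "Bearer <access-token>",
    "X-Stella-Tenant": "tenant-a",  # reverse proxies must forward this header
}

profile = requests.get(f"{AUTHORITY}/console/profile", headers=HEADERS, timeout=10).json()
# Hypothetical field: align session-timeout prompts with the reported window.
window = profile.get("freshAuthWindowSeconds", 300)
print(f"Admin actions require fresh auth within the last {window}s")
```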

## Key rotation automation (OPS3)

The `key-rotation.sh` helper wraps the `/internal/signing/rotate` endpoint delivered with CORE10. It can run in CI/CD once the new PEM key is staged on the Authority host volume.
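
As a rough picture of what the helper automates (the request body and auth header below are hypothetical; `key-rotation.sh` remains the supported interface):

```python
# Assumption-heavy sketch: payload shape and auth are illustrative only.
import requests

resp = requests.post(
    "https://authority.internal/internal/signing/rotate",  # endpoint from CORE10
    json={"keyPath": "/var/lib/authority/keys/signing-new.pem"},  # hypothetical field
    headers={"Authorization": "Bearer <ops-token>"},              # hypothetical auth
    timeout=30,
)
resp.raise_for_status()
```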
@@ -4,6 +4,7 @@
|----|--------|----------|------------|-------------|---------------|
| DEVOPS-OPS-14-003 | DONE (2025-10-26) | Deployment Guild | DEVOPS-REL-14-001 | Document and script upgrade/rollback flows, channel management, and compatibility matrices per architecture. | Helm/Compose guides updated with digest pinning, automated checks committed, rollback drill recorded. |
| DOWNLOADS-CONSOLE-23-001 | TODO | Deployment Guild, DevOps Guild | DEVOPS-CONSOLE-23-002 | Maintain signed downloads manifest pipeline (images, Helm, offline bundles), publish JSON under `deploy/downloads/manifest.json`, and document sync cadence for Console + docs parity. | Pipeline generates signed manifest with checksums, automated PR updates manifest, docs updated with sync workflow, parity check in CI passes. |
| DEPLOY-NOTIFY-38-001 | BLOCKED (2025-10-29) | Deployment Guild, DevOps Guild | NOTIFY-SVC-38-001..004 | Package notifier API/worker Helm overlays (email/chat/webhook), secrets templates, rollout guide. | Overlays committed; smoke deploy executed; rollback steps recorded; secrets templates provided. |
| DEPLOY-POLICY-27-001 | TODO | Deployment Guild, Policy Registry Guild | REGISTRY-API-27-001, DEVOPS-POLICY-27-003 | Produce Helm/Compose overlays for Policy Registry + simulation workers, including Mongo migrations, object storage buckets, signing key secrets, and tenancy defaults. | Overlays committed with deterministic digests; install docs updated; smoke deploy validated in staging. |
| DEPLOY-POLICY-27-002 | TODO | Deployment Guild, Policy Guild | DEPLOY-POLICY-27-001, WEB-POLICY-27-004 | Document rollout/rollback playbooks for policy publish/promote (canary strategy, emergency freeze toggle, evidence retrieval) under `/docs/runbooks/policy-incident.md`. | Runbook published with decision tree; checklist appended; rehearsal recorded. |
| DEPLOY-VULN-29-001 | TODO | Deployment Guild, Findings Ledger Guild | LEDGER-29-009 | Produce Helm/Compose overlays for Findings Ledger + projector, including DB migrations, Merkle anchor jobs, and scaling guidance. | Overlays committed; migrations documented; smoke deploy executed; rollback steps recorded. |
@@ -12,7 +13,7 @@
| DEPLOY-VEX-30-002 | TODO | Deployment Guild, Issuer Directory Guild | ISSUER-30-006 | Package Issuer Directory deployment manifests, backups, and security hardening guidance. | Deployment docs merged; backup tested; hardening checklist appended. |
| DEPLOY-AIAI-31-001 | TODO | Deployment Guild, Advisory AI Guild | AIAI-31-008 | Provide Helm/Compose manifests, GPU toggle, scaling/runbook, and offline kit instructions for Advisory AI service + inference container. | Deployment docs merged; smoke deploy executed; offline kit updated; runbooks published. |
| DEPLOY-ORCH-34-001 | TODO | Deployment Guild, Orchestrator Service Guild | ORCH-SVC-34-004 | Provide orchestrator Helm/Compose manifests, scaling defaults, secret templates, offline kit instructions, and GA rollout/rollback playbook. | Manifests committed with digests; scaling guidance documented; smoke deploy/rollback rehearsed; offline kit instructions updated. |
-| DEPLOY-EXPORT-35-001 | TODO | Deployment Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Package exporter service/worker Helm overlays (download-only), document rollout/rollback, and integrate signing KMS secrets. | Overlays committed; smoke deploy executed; rollback steps recorded; secrets templates provided. |
+| DEPLOY-EXPORT-35-001 | BLOCKED (2025-10-29) | Deployment Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Package exporter service/worker Helm overlays (download-only), document rollout/rollback, and integrate signing KMS secrets. | Overlays committed; smoke deploy executed; rollback steps recorded; secrets templates provided. |
| DEPLOY-EXPORT-36-001 | TODO | Deployment Guild, Exporter Service Guild | DEPLOY-EXPORT-35-001, EXPORT-SVC-36-003 | Document OCI/object storage distribution workflows, registry credential automation, and monitoring hooks for exports. | Documentation merged; automation scripts validated; monitoring instructions added. |

## CLI Parity & Task Packs
@@ -19,6 +19,7 @@
> Blocked: waiting on CLI verifier command and Concelier/Excititor guard endpoints to land (CLI-AOC-19-002, CONCELIER-WEB-AOC-19-004, EXCITITOR-WEB-AOC-19-004).
| DEVOPS-AOC-19-003 | BLOCKED (2025-10-26) | DevOps Guild, QA Guild | CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003 | Enforce unit test coverage thresholds for AOC guard suites and ensure coverage exported to dashboards. | Coverage report includes guard projects, threshold gate passes/fails as expected, dashboards refreshed with new metrics. |
> Blocked: guard coverage suites and exporter hooks pending in Concelier/Excititor (CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003).
| DEVOPS-AOC-19-101 | TODO (2025-10-28) | DevOps Guild, Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Draft supersedes backfill rollout (freeze window, dry-run steps, rollback) once advisory_raw idempotency index passes staging verification. | Runbook committed in `docs/deploy/containers.md` + Offline Kit notes, staging rehearsal scheduled with dependencies captured in SPRINTS. |
| DEVOPS-OBS-50-001 | DONE (2025-10-26) | DevOps Guild, Observability Guild | TELEMETRY-OBS-50-001 | Deliver default OpenTelemetry collector deployment (Compose/Helm manifests), OTLP ingestion endpoints, and secure pipeline (authN, mTLS, tenant partitioning). Provide smoke test verifying traces/logs/metrics ingestion. | Collector manifests committed; smoke test green; docs updated; imposed rule banner reminder noted. |
| DEVOPS-OBS-50-002 | DOING (2025-10-26) | DevOps Guild, Security Guild | DEVOPS-OBS-50-001, TELEMETRY-OBS-51-002 | Stand up multi-tenant storage backends (Prometheus, Tempo/Jaeger, Loki) with retention policies, tenant isolation, and redaction guard rails. Integrate with Authority scopes for read paths. | Storage stack deployed with auth; retention configured; integration tests verify tenant isolation; runbook drafted. |
> Coordination started with Observability Guild (2025-10-26) to schedule staging rollout and provision service accounts. Staging bootstrap commands and secret names documented in `docs/ops/telemetry-storage.md`.
@@ -114,8 +115,8 @@
## Export Center
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
-| DEVOPS-EXPORT-35-001 | TODO | DevOps Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Establish exporter CI pipeline (lint/test/perf smoke), configure object storage fixtures, seed Grafana dashboards, and document bootstrap steps. | CI pipeline running; smoke export job seeded; dashboards live; runbook updated. |
-| DEVOPS-EXPORT-36-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-35-001, EXPORT-SVC-36-001..004 | Integrate Trivy compatibility validation, OCI push smoke tests, and throughput/error dashboards. | CI executes Trivy validation; OCI push smoke passes; dashboards/alerts configured. |
+| DEVOPS-EXPORT-35-001 | BLOCKED (2025-10-29) | DevOps Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Establish exporter CI pipeline (lint/test/perf smoke), configure object storage fixtures, seed Grafana dashboards, and document bootstrap steps. | CI pipeline running; smoke export job seeded; dashboards live; runbook updated. |
+| DEVOPS-EXPORT-36-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-35-001, EXPORT-SVC-36-001..004 | Integrate Trivy compatibility validation, cosign signature checks, `trivy module db import` smoke tests, OCI distribution verification, and throughput/error dashboards. | CI executes cosign + Trivy import validation; OCI push smoke passes; dashboards/alerts configured. |
| DEVOPS-EXPORT-37-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-36-001, EXPORT-SVC-37-001..004 | Finalize exporter monitoring (failure alerts, verify metrics, retention jobs) and chaos/latency tests ahead of GA. | Alerts tuned; chaos tests documented; retention monitoring active; runbook updated. |

## CLI Parity & Task Packs
@@ -124,7 +125,10 @@
|----|--------|----------|------------|-------------|---------------|
| DEVOPS-CLI-41-001 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-CORE-41-001 | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums), parity matrix CI enforcement, and release artifact signing. | Build pipeline operational; SBOM/checksums published; parity gate failing on drift; docs updated. |
| DEVOPS-CLI-42-001 | TODO | DevOps Guild | DEVOPS-CLI-41-001, CLI-PARITY-41-001 | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. | Golden tests running; parity diff automation in CI; pack run harness executes sample packs; documentation updated. |
-| DEVOPS-CLI-43-001 | TODO | DevOps Guild | DEVOPS-CLI-42-001, TASKRUN-42-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. | Release automation verified; SBOM signed; parity gate enforced; chaos tests documented. |
+| DEVOPS-CLI-43-001 | DOING (2025-10-27) | DevOps Guild | DEVOPS-CLI-42-001, TASKRUN-42-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. | Release automation verified; SBOM signed; parity gate enforced; chaos tests documented. |
+> 2025-10-27: Release pipeline now packages CLI multi-platform artefacts with SBOM/signature coverage and enforces the CLI parity gate (`ops/devops/check_cli_parity.py`). Task Pack chaos smoke still pending CLI pack command delivery.
+| DEVOPS-CLI-43-002 | TODO | DevOps Guild, Task Runner Guild | CLI-PACKS-43-001, TASKRUN-43-001 | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. | Chaos smoke job runs nightly; failures alert Slack; evidence stored in `out/pack-chaos`; runbook updated. |
+| DEVOPS-CLI-43-003 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-PARITY-41-001, CLI-PACKS-42-001 | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. | `check_cli_parity.py` wired to compare parity matrix and CLI outputs; artifact uploaded; release fails on regressions. |
## Containerized Distribution (Epic 13)
ops/devops/check_cli_parity.py (new file, 53 lines)
@@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""Ensure CLI parity matrix contains no outstanding blockers before release."""
from __future__ import annotations

import pathlib
import re
import sys

REPO_ROOT = pathlib.Path(__file__).resolve().parents[2]
PARITY_DOC = REPO_ROOT / "docs/cli-vs-ui-parity.md"

BLOCKERS = {
    "🟥": "blocking gap",
    "❌": "missing feature",
    "🚫": "unsupported",
}
WARNINGS = {
    "🟡": "partial support",
    "⚠️": "warning",
}


def main() -> int:
    if not PARITY_DOC.exists():
        print(f"❌ Parity matrix not found at {PARITY_DOC}", file=sys.stderr)
        return 1
    text = PARITY_DOC.read_text(encoding="utf-8")
    blockers: list[str] = []
    warnings: list[str] = []
    for line in text.splitlines():
        for symbol, label in BLOCKERS.items():
            if symbol in line:
                blockers.append(f"{label}: {line.strip()}")
        for symbol, label in WARNINGS.items():
            if symbol in line:
                warnings.append(f"{label}: {line.strip()}")
    if blockers:
        print("❌ CLI parity gate failed — blocking items present:", file=sys.stderr)
        for item in blockers:
            print(f"  - {item}", file=sys.stderr)
        return 1
    if warnings:
        print("⚠️ CLI parity gate warnings detected:", file=sys.stderr)
        for item in warnings:
            print(f"  - {item}", file=sys.stderr)
        print("Treat warnings as failures until parity matrix is fully green.", file=sys.stderr)
        return 1
    print("✅ CLI parity matrix has no blocking or warning entries.")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
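
In CI the gate is a plain exit-code check: run `python ops/devops/check_cli_parity.py` and fail the job on non-zero. Note that warnings exit 1 just like blockers, so the parity matrix must be fully green, not merely blocker-free, before a release ships.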
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -23,11 +23,13 @@ import pathlib
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import uuid
import zipfile
from collections import OrderedDict
from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple

@@ -190,6 +192,8 @@ class ReleaseBuilder:
        self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata")
        self.debug_dir = ensure_directory(self.output_dir / "debug")
        self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id")
+        self.cli_config = config.get("cli")
+        self.cli_output_dir = ensure_directory(self.output_dir / "cli") if self.cli_config else None
        self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-"))
        self.skip_signing = skip_signing
        self.tlog_upload = tlog_upload

@@ -220,7 +224,8 @@ class ReleaseBuilder:
        helm_meta = self._package_helm()
        compose_meta = self._digest_compose_files()
        debug_meta = self._collect_debug_store(components_result)
-        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta)
+        cli_meta = self._build_cli_artifacts()
+        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta, cli_meta)
        return manifest

    def _prime_buildx_plugin(self) -> None:
@@ -262,6 +267,12 @@ class ReleaseBuilder:
    def _component_ref(self, repo: str, digest: str) -> str:
        return f"{self.registry}/{repo}@{digest}"

+    def _relative_path(self, path: pathlib.Path) -> str:
+        try:
+            return str(path.relative_to(self.output_dir.parent))
+        except ValueError:
+            return str(path)
+
    def _build_component(self, component: Mapping[str, Any]) -> Mapping[str, Any]:
        name = component["name"]
        repo = component.get("repository", name)

@@ -601,6 +612,165 @@ class ReleaseBuilder:
            ("directory", store_rel),
        ))

    # ----------------
    # CLI packaging
    # ----------------
    def _build_cli_artifacts(self) -> List[Mapping[str, Any]]:
        if not self.cli_config or self.dry_run:
            return []
        project_rel = self.cli_config.get("project")
        if not project_rel:
            return []
        project_path = (self.repo_root / project_rel).resolve()
        if not project_path.exists():
            raise FileNotFoundError(f"CLI project not found at {project_path}")
        runtimes: Sequence[str] = self.cli_config.get("runtimes", [])
        if not runtimes:
            runtimes = ("linux-x64",)
        package_prefix = self.cli_config.get("packagePrefix", "stella")
        ensure_directory(self.cli_output_dir or (self.output_dir / "cli"))

        cli_entries: List[Mapping[str, Any]] = []
        for runtime in runtimes:
            entry = self._build_cli_for_runtime(project_path, runtime, package_prefix)
            cli_entries.append(entry)
        return cli_entries

    def _build_cli_for_runtime(
        self,
        project_path: pathlib.Path,
        runtime: str,
        package_prefix: str,
    ) -> Mapping[str, Any]:
        publish_dir = ensure_directory(self.temp_dir / f"cli-publish-{runtime}")
        publish_cmd = [
            "dotnet",
            "publish",
            str(project_path),
            "--configuration",
            "Release",
            "--runtime",
            runtime,
            "--self-contained",
            "true",
            "/p:PublishSingleFile=true",
            "/p:IncludeNativeLibrariesForSelfExtract=true",
            "/p:EnableCompressionInSingleFile=true",
            "/p:InvariantGlobalization=true",
            "--output",
            str(publish_dir),
        ]
        run(publish_cmd, cwd=self.repo_root)

        original_name = "StellaOps.Cli"
        if runtime.startswith("win"):
            source = publish_dir / f"{original_name}.exe"
            target = publish_dir / "stella.exe"
        else:
            source = publish_dir / original_name
            target = publish_dir / "stella"
        if source.exists():
            if target.exists():
                target.unlink()
            source.rename(target)
            if not runtime.startswith("win"):
                target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

        package_dir = self.cli_output_dir or (self.output_dir / "cli")
        ensure_directory(package_dir)
        archive_name = f"{package_prefix}-{self.version}-{runtime}"
        if runtime.startswith("win"):
            package_path = package_dir / f"{archive_name}.zip"
            self._archive_zip(publish_dir, package_path)
        else:
            package_path = package_dir / f"{archive_name}.tar.gz"
            self._archive_tar(publish_dir, package_path)

        digest = compute_sha256(package_path)
        sha_path = package_path.with_suffix(package_path.suffix + ".sha256")
        sha_path.write_text(f"{digest} {package_path.name}\n", encoding="utf-8")

        archive_info = OrderedDict((
            ("path", self._relative_path(package_path)),
            ("sha256", digest),
        ))
        signature_info = self._sign_file(package_path)
        if signature_info:
            archive_info["signature"] = signature_info

        sbom_info = self._generate_cli_sbom(runtime, publish_dir)

        entry = OrderedDict((
            ("runtime", runtime),
            ("archive", archive_info),
        ))
        if sbom_info:
            entry["sbom"] = sbom_info
        return entry
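
For one runtime the method therefore leaves a predictable artefact set on disk; assuming version `1.2.3`, the config defaults shown further below, and an SBOM root following `self.sboms_dir` (which is not shown in this hunk), roughly:

```
out/release/cli/stella-1.2.3-linux-x64.tar.gz
out/release/cli/stella-1.2.3-linux-x64.tar.gz.sha256
out/release/cli/stella-1.2.3-linux-x64.tar.gz.sig      # omitted with --skip-signing
out/release/sboms/cli/cli-linux-x64.cyclonedx.json     # plus .sha256/.sig via _sign_file
```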

    def _archive_tar(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None:
        with tarfile.open(archive_path, "w:gz") as tar:
            for item in sorted(source_dir.rglob("*")):
                arcname = item.relative_to(source_dir)
                tar.add(item, arcname=arcname)

    def _archive_zip(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None:
        with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf:
            for item in sorted(source_dir.rglob("*")):
                if item.is_dir():
                    continue
                arcname = item.relative_to(source_dir).as_posix()
                zip_info = zipfile.ZipInfo(arcname)
                # Carry POSIX permission bits into the zip entry; the fixed default
                # ZipInfo timestamp keeps the archive byte-stable across rebuilds.
                zip_info.external_attr = (item.stat().st_mode & 0xFFFF) << 16
                with item.open("rb") as handle:
                    zipf.writestr(zip_info, handle.read())

    def _generate_cli_sbom(self, runtime: str, publish_dir: pathlib.Path) -> Optional[Mapping[str, Any]]:
        if self.dry_run:
            return None
        sbom_dir = ensure_directory(self.sboms_dir / "cli")
        sbom_path = sbom_dir / f"cli-{runtime}.cyclonedx.json"
        run([
            "syft",
            f"dir:{publish_dir}",
            "--output",
            f"cyclonedx-json={sbom_path}",
        ])
        entry = OrderedDict((
            ("path", self._relative_path(sbom_path)),
            ("sha256", compute_sha256(sbom_path)),
        ))
        signature_info = self._sign_file(sbom_path)
        if signature_info:
            entry["signature"] = signature_info
        return entry

    def _sign_file(self, path: pathlib.Path) -> Optional[Mapping[str, Any]]:
        if self.skip_signing:
            return None
        if not (self.cosign_key_ref or self.cosign_identity_token):
            raise ValueError(
                "Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass."
            )
        signature_path = path.with_suffix(path.suffix + ".sig")
        sha_path = path.with_suffix(path.suffix + ".sha256")
        digest = compute_sha256(path)
        sha_path.write_text(f"{digest} {path.name}\n", encoding="utf-8")
        cmd = ["cosign", "sign-blob", "--yes", str(path)]
        if self.cosign_key_ref:
            cmd.extend(["--key", self.cosign_key_ref])
        if self.cosign_identity_token:
            cmd.extend(["--identity-token", self.cosign_identity_token])
        if not self.tlog_upload:
            cmd.append("--tlog-upload=false")
        signature_data = run(cmd, env=self.cosign_env).strip()
        signature_path.write_text(signature_data + "\n", encoding="utf-8")
        return OrderedDict((
            ("path", self._relative_path(signature_path)),
            ("sha256", compute_sha256(signature_path)),
            ("tlogUploaded", self.tlog_upload),
        ))

    def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]:
        if self.dry_run:
            return []

@@ -832,6 +1002,7 @@ class ReleaseBuilder:
        helm_meta: Optional[Mapping[str, Any]],
        compose_meta: List[Mapping[str, Any]],
        debug_meta: Optional[Mapping[str, Any]],
+        cli_meta: Sequence[Mapping[str, Any]],
    ) -> Dict[str, Any]:
        manifest = OrderedDict()
        manifest["release"] = OrderedDict((

@@ -847,6 +1018,8 @@ class ReleaseBuilder:
        manifest["compose"] = compose_meta
        if debug_meta:
            manifest["debugStore"] = debug_meta
+        if cli_meta:
+            manifest["cli"] = list(cli_meta)
        return manifest
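
Taken together, the `cli` section of the release manifest comes out shaped roughly like this (paths and digests are placeholders; keys follow the OrderedDicts built above):

```json
{
  "cli": [
    {
      "runtime": "linux-x64",
      "archive": {
        "path": "release/cli/stella-1.2.3-linux-x64.tar.gz",
        "sha256": "<hex>",
        "signature": { "path": "...tar.gz.sig", "sha256": "<hex>", "tlogUploaded": true }
      },
      "sbom": { "path": "release/sboms/cli/cli-linux-x64.cyclonedx.json", "sha256": "<hex>" }
    }
  ]
}
```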
@@ -80,6 +80,18 @@
      "dockerfile": "ops/devops/release/docker/Dockerfile.angular-ui"
    }
  ],
+  "cli": {
+    "project": "src/StellaOps.Cli/StellaOps.Cli.csproj",
+    "runtimes": [
+      "linux-x64",
+      "linux-arm64",
+      "osx-x64",
+      "osx-arm64",
+      "win-x64"
+    ],
+    "packagePrefix": "stella",
+    "outputDir": "out/release/cli"
+  },
  "helm": {
    "chartPath": "deploy/helm/stellaops",
    "outputDir": "out/release/helm"
@@ -238,6 +238,60 @@ def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, e
            f"(recorded {artefact_sha}, computed {actual_sha})"
        )


def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
    sig_path_value = signature.get("path")
    if not sig_path_value:
        errors.append(f"{component_name}: {label} signature missing path.")
        return
    sig_path = resolve_path(str(sig_path_value), release_dir)
    if not sig_path.exists():
        errors.append(f"{component_name}: {label} signature missing → {sig_path}")
        return
    recorded_sha = signature.get("sha256")
    if recorded_sha:
        actual_sha = compute_sha256(sig_path)
        if actual_sha != recorded_sha:
            errors.append(
                f"{component_name}: {label} signature SHA mismatch for {sig_path} "
                f"(recorded {recorded_sha}, computed {actual_sha})"
            )


def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    cli_entries = manifest.get("cli")
    if not cli_entries:
        return
    if not isinstance(cli_entries, list):
        errors.append("CLI manifest section must be a list.")
        return
    for entry in cli_entries:
        if not isinstance(entry, Mapping):
            errors.append("CLI entry must be a mapping.")
            continue
        runtime = entry.get("runtime", "<unknown>")
        component_name = f"cli[{runtime}]"
        archive = entry.get("archive")
        if not isinstance(archive, Mapping):
            errors.append(f"{component_name}: archive metadata missing or invalid.")
        else:
            verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
            signature = archive.get("signature")
            if isinstance(signature, Mapping):
                verify_signature(signature, release_dir, "archive", component_name, errors)
            elif signature is not None:
                errors.append(f"{component_name}: archive signature must be an object.")
        sbom = entry.get("sbom")
        if sbom:
            if not isinstance(sbom, Mapping):
                errors.append(f"{component_name}: sbom entry must be a mapping.")
            else:
                verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
                signature = sbom.get("signature")
                if isinstance(signature, Mapping):
                    verify_signature(signature, release_dir, "sbom", component_name, errors)
                elif signature is not None:
                    errors.append(f"{component_name}: sbom signature must be an object.")


def verify_release(release_dir: pathlib.Path) -> None:
    if not release_dir.exists():

@@ -246,6 +300,7 @@ def verify_release(release_dir: pathlib.Path) -> None:
    errors: list[str] = []
    verify_manifest_hashes(manifest, release_dir, errors)
    verify_components(manifest, release_dir, errors)
+    verify_cli_entries(manifest, release_dir, errors)
    verify_collections(manifest, release_dir, errors)
    verify_debug_store(manifest, release_dir, errors)
    if errors:
ops/devops/scripts/check-advisory-raw-duplicates.js (new file, 77 lines)
@@ -0,0 +1,77 @@
/**
 * Aggregation helper that surfaces advisory_raw duplicate candidates prior to enabling the
 * idempotency unique index. Intended for staging/offline snapshots.
 *
 * Usage:
 *   mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js
 *
 * Shell variables (read as globals, not environment variables):
 *   LIMIT - optional cap on number of duplicate groups to print (default 50).
 */
(function () {
  function toInt(value, fallback) {
    var parsed = parseInt(value, 10);
    return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
  }

  var limit = typeof LIMIT !== "undefined" ? toInt(LIMIT, 50) : 50;
  var database = db.getName ? db.getSiblingDB(db.getName()) : db;
  if (!database) {
    throw new Error("Unable to resolve database handle");
  }

  print("");
  print("== advisory_raw duplicate audit ==");
  print("Database: " + database.getName());
  print("Limit   : " + limit);
  print("");

  var pipeline = [
    {
      $group: {
        _id: {
          vendor: "$source.vendor",
          upstreamId: "$upstream.upstream_id",
          contentHash: "$upstream.content_hash",
          tenant: "$tenant"
        },
        ids: { $addToSet: "$_id" },
        count: { $sum: 1 }
      }
    },
    { $match: { count: { $gt: 1 } } },
    {
      $project: {
        _id: 0,
        vendor: "$_id.vendor",
        upstreamId: "$_id.upstreamId",
        contentHash: "$_id.contentHash",
        tenant: "$_id.tenant",
        count: 1,
        ids: 1
      }
    },
    { $sort: { count: -1, vendor: 1, upstreamId: 1 } },
    { $limit: limit }
  ];

  var cursor = database.getCollection("advisory_raw").aggregate(pipeline, { allowDiskUse: true });
  var any = false;
  while (cursor.hasNext()) {
    var doc = cursor.next();
    any = true;
    print("---");
    print("vendor      : " + doc.vendor);
    print("upstream_id : " + doc.upstreamId);
    print("tenant      : " + doc.tenant);
    print("content_hash: " + doc.contentHash);
    print("count       : " + doc.count);
    print("ids         : " + doc.ids.join(", "));
  }

  if (!any) {
    print("No duplicate advisory_raw documents detected.");
  }

  print("");
})();
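
For reference, the unique index this audit prepares for would key on the same fields as the `$group` stage above; a hypothetical sketch (name and options are assumptions; the real definition ships with the Concelier migration):

```javascript
// Hypothetical sketch only; see the Concelier migration for the real index.
db.advisory_raw.createIndex(
  {
    "source.vendor": 1,
    "upstream.upstream_id": 1,
    "upstream.content_hash": 1,
    "tenant": 1
  },
  { unique: true, name: "advisory_raw_idempotency" }
);
```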