feat: Implement console session management with tenant and profile handling

- Add ConsoleSessionStore for managing console session state including tenants, profile, and token information.
- Create OperatorContextService to manage operator context for orchestrator actions.
- Implement OperatorMetadataInterceptor to enrich HTTP requests with operator context metadata.
- Develop ConsoleProfileComponent to display user profile and session details, including tenant information and access tokens.
- Add corresponding HTML and SCSS for ConsoleProfileComponent to enhance UI presentation.
- Write unit tests for ConsoleProfileComponent to ensure correct rendering and functionality.
2025-10-28 09:58:55 +02:00
parent 4d932cc1ba
commit 4e3e575db5
501 changed files with 51904 additions and 6663 deletions

View File

@@ -23,11 +23,13 @@ import pathlib
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import uuid
import zipfile
from collections import OrderedDict
from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple
@@ -190,6 +192,8 @@ class ReleaseBuilder:
        self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata")
        self.debug_dir = ensure_directory(self.output_dir / "debug")
        self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id")
        self.cli_config = config.get("cli")
        self.cli_output_dir = ensure_directory(self.output_dir / "cli") if self.cli_config else None
        self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-"))
        self.skip_signing = skip_signing
        self.tlog_upload = tlog_upload
@@ -220,7 +224,8 @@ class ReleaseBuilder:
        helm_meta = self._package_helm()
        compose_meta = self._digest_compose_files()
        debug_meta = self._collect_debug_store(components_result)
        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta)
        cli_meta = self._build_cli_artifacts()
        manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta, cli_meta)
        return manifest

    def _prime_buildx_plugin(self) -> None:
@@ -262,6 +267,12 @@ class ReleaseBuilder:
    def _component_ref(self, repo: str, digest: str) -> str:
        return f"{self.registry}/{repo}@{digest}"

    def _relative_path(self, path: pathlib.Path) -> str:
        try:
            return str(path.relative_to(self.output_dir.parent))
        except ValueError:
            return str(path)

    def _build_component(self, component: Mapping[str, Any]) -> Mapping[str, Any]:
        name = component["name"]
        repo = component.get("repository", name)
@@ -601,6 +612,165 @@ class ReleaseBuilder:
("directory", store_rel),
))
# ----------------
# CLI packaging
# ----------------
def _build_cli_artifacts(self) -> List[Mapping[str, Any]]:
if not self.cli_config or self.dry_run:
return []
project_rel = self.cli_config.get("project")
if not project_rel:
return []
project_path = (self.repo_root / project_rel).resolve()
if not project_path.exists():
raise FileNotFoundError(f"CLI project not found at {project_path}")
runtimes: Sequence[str] = self.cli_config.get("runtimes", [])
if not runtimes:
runtimes = ("linux-x64",)
package_prefix = self.cli_config.get("packagePrefix", "stella")
ensure_directory(self.cli_output_dir or (self.output_dir / "cli"))
cli_entries: List[Mapping[str, Any]] = []
for runtime in runtimes:
entry = self._build_cli_for_runtime(project_path, runtime, package_prefix)
cli_entries.append(entry)
return cli_entries
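
A minimal sketch of the `cli` mapping this method consumes (illustrative values; the real block ships in the release config further down in this commit):

# Illustrative only: these are the keys _build_cli_artifacts reads.
cli_config = {
    "project": "src/StellaOps.Cli/StellaOps.Cli.csproj",
    "runtimes": ["linux-x64", "win-x64"],   # falls back to ("linux-x64",) when empty
    "packagePrefix": "stella",              # falls back to "stella" when omitted
}
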
    def _build_cli_for_runtime(
        self,
        project_path: pathlib.Path,
        runtime: str,
        package_prefix: str,
    ) -> Mapping[str, Any]:
        publish_dir = ensure_directory(self.temp_dir / f"cli-publish-{runtime}")
        publish_cmd = [
            "dotnet",
            "publish",
            str(project_path),
            "--configuration",
            "Release",
            "--runtime",
            runtime,
            "--self-contained",
            "true",
            "/p:PublishSingleFile=true",
            "/p:IncludeNativeLibrariesForSelfExtract=true",
            "/p:EnableCompressionInSingleFile=true",
            "/p:InvariantGlobalization=true",
            "--output",
            str(publish_dir),
        ]
        run(publish_cmd, cwd=self.repo_root)
        original_name = "StellaOps.Cli"
        if runtime.startswith("win"):
            source = publish_dir / f"{original_name}.exe"
            target = publish_dir / "stella.exe"
        else:
            source = publish_dir / original_name
            target = publish_dir / "stella"
        if source.exists():
            if target.exists():
                target.unlink()
            source.rename(target)
            if not runtime.startswith("win"):
                target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        package_dir = self.cli_output_dir or (self.output_dir / "cli")
        ensure_directory(package_dir)
        archive_name = f"{package_prefix}-{self.version}-{runtime}"
        if runtime.startswith("win"):
            package_path = package_dir / f"{archive_name}.zip"
            self._archive_zip(publish_dir, package_path)
        else:
            package_path = package_dir / f"{archive_name}.tar.gz"
            self._archive_tar(publish_dir, package_path)
        digest = compute_sha256(package_path)
        sha_path = package_path.with_suffix(package_path.suffix + ".sha256")
        sha_path.write_text(f"{digest} {package_path.name}\n", encoding="utf-8")
        archive_info = OrderedDict((
            ("path", self._relative_path(package_path)),
            ("sha256", digest),
        ))
        signature_info = self._sign_file(package_path)
        if signature_info:
            archive_info["signature"] = signature_info
        sbom_info = self._generate_cli_sbom(runtime, publish_dir)
        entry = OrderedDict((
            ("runtime", runtime),
            ("archive", archive_info),
        ))
        if sbom_info:
            entry["sbom"] = sbom_info
        return entry
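
Since each archive is written next to a `<archive>.sha256` companion, a consumer can re-check the digest before unpacking. A minimal sketch, with a hypothetical file name:

import hashlib
import pathlib

def check_archive(archive: pathlib.Path) -> bool:
    # Recompute the SHA-256 of the archive and compare it with the recorded value
    # stored as "<digest> <filename>" in the companion .sha256 file.
    recorded = archive.with_suffix(archive.suffix + ".sha256").read_text(encoding="utf-8").split()[0]
    actual = hashlib.sha256(archive.read_bytes()).hexdigest()
    return actual == recorded

# Example (path is illustrative):
# check_archive(pathlib.Path("out/release/cli/stella-1.2.3-linux-x64.tar.gz"))
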
    def _archive_tar(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None:
        with tarfile.open(archive_path, "w:gz") as tar:
            for item in sorted(source_dir.rglob("*")):
                arcname = item.relative_to(source_dir)
                tar.add(item, arcname=arcname)

    def _archive_zip(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None:
        with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf:
            for item in sorted(source_dir.rglob("*")):
                if item.is_dir():
                    continue
                arcname = item.relative_to(source_dir).as_posix()
                zip_info = zipfile.ZipInfo(arcname)
                zip_info.external_attr = (item.stat().st_mode & 0xFFFF) << 16
                with item.open("rb") as handle:
                    zipf.writestr(zip_info, handle.read())
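
One difference between the two paths: `_archive_zip` builds entries from bare `ZipInfo` objects, whose default timestamp is fixed, while the tar path keeps filesystem mtimes, uids and gids, so tarballs are not byte-for-byte reproducible across builds. If that mattered, a normalising `filter` could be passed to `tar.add` — a sketch, not part of this commit:

import tarfile

def _normalise(info: tarfile.TarInfo) -> tarfile.TarInfo:
    # Zero out owner and mtime so identical inputs yield identical tar entries.
    # The gzip wrapper would still embed its own timestamp unless the stream
    # is created with mtime=0.
    info.uid = info.gid = 0
    info.uname = info.gname = ""
    info.mtime = 0
    return info

# tar.add(item, arcname=arcname, filter=_normalise)
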
    def _generate_cli_sbom(self, runtime: str, publish_dir: pathlib.Path) -> Optional[Mapping[str, Any]]:
        if self.dry_run:
            return None
        sbom_dir = ensure_directory(self.sboms_dir / "cli")
        sbom_path = sbom_dir / f"cli-{runtime}.cyclonedx.json"
        run([
            "syft",
            f"dir:{publish_dir}",
            "--output",
            f"cyclonedx-json={sbom_path}",
        ])
        entry = OrderedDict((
            ("path", self._relative_path(sbom_path)),
            ("sha256", compute_sha256(sbom_path)),
        ))
        signature_info = self._sign_file(sbom_path)
        if signature_info:
            entry["signature"] = signature_info
        return entry

    def _sign_file(self, path: pathlib.Path) -> Optional[Mapping[str, Any]]:
        if self.skip_signing:
            return None
        if not (self.cosign_key_ref or self.cosign_identity_token):
            raise ValueError(
                "Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass."
            )
        signature_path = path.with_suffix(path.suffix + ".sig")
        sha_path = path.with_suffix(path.suffix + ".sha256")
        digest = compute_sha256(path)
        sha_path.write_text(f"{digest} {path.name}\n", encoding="utf-8")
        cmd = ["cosign", "sign-blob", "--yes", str(path)]
        if self.cosign_key_ref:
            cmd.extend(["--key", self.cosign_key_ref])
        if self.cosign_identity_token:
            cmd.extend(["--identity-token", self.cosign_identity_token])
        if not self.tlog_upload:
            cmd.append("--tlog-upload=false")
        signature_data = run(cmd, env=self.cosign_env).strip()
        signature_path.write_text(signature_data + "\n", encoding="utf-8")
        return OrderedDict((
            ("path", self._relative_path(signature_path)),
            ("sha256", compute_sha256(signature_path)),
            ("tlogUploaded", self.tlog_upload),
        ))
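
On the consuming side, the detached signature written by `cosign sign-blob` can be checked with `cosign verify-blob`. A hedged sketch, assuming key-based signing with a public key exported as `cosign.pub` (paths are illustrative):

import subprocess

def verify_blob(artifact: str, signature: str, public_key: str = "cosign.pub") -> None:
    # If the signature was produced with --tlog-upload=false, recent cosign
    # releases also need an ignore-tlog flag at verification time
    # (typically --insecure-ignore-tlog; check the installed cosign version).
    cmd = [
        "cosign", "verify-blob",
        "--key", public_key,
        "--signature", signature,
        artifact,
    ]
    subprocess.run(cmd, check=True)

# verify_blob("stella-1.2.3-linux-x64.tar.gz", "stella-1.2.3-linux-x64.tar.gz.sig")
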
    def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]:
        if self.dry_run:
            return []
@@ -832,6 +1002,7 @@ class ReleaseBuilder:
        helm_meta: Optional[Mapping[str, Any]],
        compose_meta: List[Mapping[str, Any]],
        debug_meta: Optional[Mapping[str, Any]],
        cli_meta: Sequence[Mapping[str, Any]],
    ) -> Dict[str, Any]:
        manifest = OrderedDict()
        manifest["release"] = OrderedDict((
@@ -847,6 +1018,8 @@ class ReleaseBuilder:
manifest["compose"] = compose_meta
if debug_meta:
manifest["debugStore"] = debug_meta
if cli_meta:
manifest["cli"] = list(cli_meta)
return manifest
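
For orientation, with `cli_meta` wired in, each entry under the manifest's `cli` key ends up shaped roughly as below. Values are placeholders; the key layout follows the OrderedDicts assembled in `_build_cli_for_runtime` and `_generate_cli_sbom`.

# Placeholder values only; "path" fields are relative to the release output root.
cli_manifest_entry = {
    "runtime": "linux-x64",
    "archive": {
        "path": "<relative path to stella-<version>-linux-x64.tar.gz>",
        "sha256": "<hex digest>",
        "signature": {"path": "<archive>.sig", "sha256": "<hex digest>", "tlogUploaded": True},
    },
    "sbom": {"path": "<relative path to cli-linux-x64.cyclonedx.json>", "sha256": "<hex digest>"},
}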

View File

@@ -80,6 +80,18 @@
"dockerfile": "ops/devops/release/docker/Dockerfile.angular-ui"
}
],
"cli": {
"project": "src/StellaOps.Cli/StellaOps.Cli.csproj",
"runtimes": [
"linux-x64",
"linux-arm64",
"osx-x64",
"osx-arm64",
"win-x64"
],
"packagePrefix": "stella",
"outputDir": "out/release/cli"
},
"helm": {
"chartPath": "deploy/helm/stellaops",
"outputDir": "out/release/helm"

View File

@@ -238,6 +238,60 @@ def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, e
f"(recorded {artefact_sha}, computed {actual_sha})"
)
def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
sig_path_value = signature.get("path")
if not sig_path_value:
errors.append(f"{component_name}: {label} signature missing path.")
return
sig_path = resolve_path(str(sig_path_value), release_dir)
if not sig_path.exists():
errors.append(f"{component_name}: {label} signature missing → {sig_path}")
return
recorded_sha = signature.get("sha256")
if recorded_sha:
actual_sha = compute_sha256(sig_path)
if actual_sha != recorded_sha:
errors.append(
f"{component_name}: {label} signature SHA mismatch for {sig_path} "
f"(recorded {recorded_sha}, computed {actual_sha})"
)
def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
cli_entries = manifest.get("cli")
if not cli_entries:
return
if not isinstance(cli_entries, list):
errors.append("CLI manifest section must be a list.")
return
for entry in cli_entries:
if not isinstance(entry, Mapping):
errors.append("CLI entry must be a mapping.")
continue
runtime = entry.get("runtime", "<unknown>")
component_name = f"cli[{runtime}]"
archive = entry.get("archive")
if not isinstance(archive, Mapping):
errors.append(f"{component_name}: archive metadata missing or invalid.")
else:
verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
signature = archive.get("signature")
if isinstance(signature, Mapping):
verify_signature(signature, release_dir, "archive", component_name, errors)
elif signature is not None:
errors.append(f"{component_name}: archive signature must be an object.")
sbom = entry.get("sbom")
if sbom:
if not isinstance(sbom, Mapping):
errors.append(f"{component_name}: sbom entry must be a mapping.")
else:
verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
signature = sbom.get("signature")
if isinstance(signature, Mapping):
verify_signature(signature, release_dir, "sbom", component_name, errors)
elif signature is not None:
errors.append(f"{component_name}: sbom signature must be an object.")
def verify_release(release_dir: pathlib.Path) -> None:
if not release_dir.exists():
@@ -246,6 +300,7 @@ def verify_release(release_dir: pathlib.Path) -> None:
    errors: list[str] = []
    verify_manifest_hashes(manifest, release_dir, errors)
    verify_components(manifest, release_dir, errors)
    verify_cli_entries(manifest, release_dir, errors)
    verify_collections(manifest, release_dir, errors)
    verify_debug_store(manifest, release_dir, errors)
    if errors:
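
With `verify_cli_entries` hooked into `verify_release`, CLI archives, SBOMs and their signatures are re-hashed alongside the existing checks. A minimal sketch of driving the verifier, assuming it runs inside (or imports) this verification module and that the release was staged under `out/release` as in the config above:

import pathlib

# Hypothetical invocation; verify_release collects mismatches into an error
# list and fails the run when any recorded digest or signature is off.
verify_release(pathlib.Path("out/release"))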