Refactor compare-view component to use observables for data loading, enhancing performance and responsiveness. Update compare service interfaces and methods for improved delta computation. Modify audit log component to handle optional event properties gracefully. Optimize Monaco editor worker loading to reduce bundle size. Introduce shared SCSS mixins for consistent styling across components. Add Gitea test instance setup and NuGet package publishing test scripts for CI/CD validation. Update documentation paths and ensure all references are accurate.

This commit is contained in:
StellaOps Bot
2025-12-26 21:39:17 +02:00
parent b4fc66feb6
commit 75de089ee8
61 changed files with 1318 additions and 958 deletions

View File

@@ -0,0 +1,61 @@
# docker-compose.gitea-test.yaml - Local Gitea instance for testing package registry
# Sprint: SPRINT_20251226_004_CICD
#
# Usage:
#   docker compose -f devops/compose/docker-compose.gitea-test.yaml up -d
#   # Wait for Gitea to start, then:
#   # 1. Open http://localhost:3000 and complete initial setup
#   # 2. Create a user and generate access token with package:write scope
#   # 3. Test NuGet push:
#   #    dotnet nuget push pkg.nupkg --source http://localhost:3000/api/packages/owner/nuget/index.json --api-key YOUR_TOKEN
#
# Cleanup:
#   docker compose -f devops/compose/docker-compose.gitea-test.yaml down -v
services:
  gitea:
    image: gitea/gitea:1.21
    container_name: stellaops-gitea-test
    environment:
      - USER_UID=1000
      - USER_GID=1000
      # Enable package registry
      - GITEA__packages__ENABLED=true
      - GITEA__packages__CHUNKED_UPLOAD_PATH=/data/tmp/package-upload
      # Enable NuGet
      - GITEA__packages__NUGET_ENABLED=true
      # Enable Container registry
      - GITEA__packages__CONTAINER_ENABLED=true
      # Database (SQLite for simplicity)
      - GITEA__database__DB_TYPE=sqlite3
      - GITEA__database__PATH=/data/gitea/gitea.db
      # Server config
      - GITEA__server__ROOT_URL=http://localhost:3000/
      - GITEA__server__HTTP_PORT=3000
      # Disable metrics/telemetry
      - GITEA__metrics__ENABLED=false
      # Session config
      - GITEA__session__PROVIDER=memory
      # Cache config
      - GITEA__cache__ADAPTER=memory
      # Log level
      - GITEA__log__LEVEL=Warn
    volumes:
      - gitea-data:/data
      - gitea-config:/etc/gitea
    ports:
      - "3000:3000"  # Web UI
      - "3022:22"    # SSH (optional)
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/api/healthz"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s

volumes:
  gitea-data:
    driver: local
  gitea-config:
    driver: local
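For scripted runs against this instance, a minimal readiness gate can poll the same healthcheck endpoint the compose file declares before any push is attempted. A sketch, assuming only the port and endpoint above; the retry budget and the stellaops owner segment are illustrative:

#!/usr/bin/env bash
# Wait for the test Gitea to answer its healthcheck, then probe the NuGet feed.
# Endpoint and port come from the compose file above; retries are arbitrary.
set -euo pipefail
GITEA_URL="${GITEA_TEST_URL:-http://localhost:3000}"
for attempt in $(seq 1 30); do
  if curl -fsS "$GITEA_URL/api/healthz" >/dev/null 2>&1; then
    echo "Gitea ready after $attempt attempt(s)"
    break
  fi
  [[ "$attempt" -eq 30 ]] && { echo "Gitea never became healthy" >&2; exit 1; }
  sleep 5
done
# The feed index may require authentication depending on instance visibility.
curl -sS -o /dev/null -w "nuget index: HTTP %{http_code}\n" \
  "$GITEA_URL/api/packages/stellaops/nuget/index.json"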

View File

@@ -18,9 +18,9 @@ ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \
DOTNET_NOLOGO=1 \
SOURCE_DATE_EPOCH=1704067200
WORKDIR /src
-# Expect restore sources to be available offline via local-nugets/
+# Expect restore sources to be available offline via /.nuget/
COPY . .
-RUN dotnet restore ${APP_PROJECT} --packages /src/local-nugets && \
+RUN dotnet restore ${APP_PROJECT} --packages /.nuget/packages && \
dotnet publish ${APP_PROJECT} -c ${CONFIGURATION} -o ${PUBLISH_DIR} \
/p:UseAppHost=true /p:PublishTrimmed=false
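The same offline restore can be dry-run outside the image to debug feed problems before a container build. A sketch under stated assumptions: the project path is a placeholder, and a directory of .nupkg files passed via --source stands in for whatever NuGet.config the repo actually ships:

# Hypothetical local equivalent of the Dockerfile restore/publish steps.
dotnet restore src/MyService/MyService.csproj \
  --packages /.nuget/packages \
  --source /.nuget/feed
dotnet publish src/MyService/MyService.csproj -c Release -o out/publish \
  /p:UseAppHost=true /p:PublishTrimmed=false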

View File

@@ -25,7 +25,7 @@ FROM ${SDK_IMAGE} AS build
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 DOTNET_NOLOGO=1 SOURCE_DATE_EPOCH=1704067200
WORKDIR /src
COPY . .
-RUN dotnet restore ${APP_PROJECT} --packages /src/local-nugets && \
+RUN dotnet restore ${APP_PROJECT} --packages /.nuget/packages && \
dotnet publish ${APP_PROJECT} -c ${CONFIGURATION} -o /app/publish /p:UseAppHost=true /p:PublishTrimmed=false
FROM ${RUNTIME_IMAGE} AS runtime
@@ -47,7 +47,7 @@ ENTRYPOINT ["sh","-c","exec ./\"$APP_BINARY\""]
Build stage (per service) should:
- Use `mcr.microsoft.com/dotnet/sdk:10.0-bookworm-slim` (or mirror) with `DOTNET_CLI_TELEMETRY_OPTOUT=1`.
-- Restore from `local-nugets/` (offline) and run `dotnet publish -c Release -o /app/out`.
+- Restore from `/.nuget/` (offline) and run `dotnet publish -c Release -o /app/out`.
- Set `SOURCE_DATE_EPOCH` to freeze timestamps.
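For illustration: the Dockerfiles above pin a fixed epoch (1704067200, i.e. 2024-01-01T00:00:00Z). Another common convention — shown here as a sketch, not necessarily what this pipeline does — derives the epoch from the commit being built, so rebuilds of the same ref stay byte-identical:

# Derive SOURCE_DATE_EPOCH from the last commit (illustrative alternative).
SOURCE_DATE_EPOCH="$(git log -1 --pretty=%ct)"
export SOURCE_DATE_EPOCH
docker build --build-arg SOURCE_DATE_EPOCH="$SOURCE_DATE_EPOCH" -t myservice:dev .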
Required checks:

View File

@@ -90,16 +90,16 @@ def clean_directory(path: Path) -> None:
    path.mkdir(parents=True, exist_ok=True)

def run_python_analyzer_smoke() -> None:
    script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh"
    run(["bash", str(script)], cwd=REPO_ROOT)

def run_rust_analyzer_smoke() -> None:
    script = REPO_ROOT / "ops" / "offline-kit" / "run-rust-analyzer-smoke.sh"
    run(["bash", str(script)], cwd=REPO_ROOT)

def copy_if_exists(source: Path, target: Path) -> None:
    if source.is_dir():
        shutil.copytree(source, target, dirs_exist_ok=True)
@@ -175,110 +175,110 @@ def copy_debug_store(release_dir: Path, staging_dir: Path) -> None:
    )

def copy_plugins_and_assets(staging_dir: Path) -> None:
    copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner")
    copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates")
-   copy_if_exists(REPO_ROOT / "seed-data", staging_dir / "seed-data")
+   copy_if_exists(REPO_ROOT / "src" / "__Tests" / "__Datasets" / "seed-data", staging_dir / "seed-data")
    docs_dir = staging_dir / "docs"
    docs_dir.mkdir(parents=True, exist_ok=True)
    copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md")
    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md")
    copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md")
    copy_if_exists(REPO_ROOT / "docs" / "airgap" / "mirror-bundles.md", docs_dir / "mirror-bundles.md")

def copy_cli_and_taskrunner_assets(release_dir: Path, staging_dir: Path) -> None:
    """Bundle CLI binaries, task pack docs, and Task Runner samples when available."""
    cli_src = release_dir / "cli"
    if cli_src.exists():
        copy_if_exists(cli_src, staging_dir / "cli")
    taskrunner_bootstrap = staging_dir / "bootstrap" / "task-runner"
    taskrunner_bootstrap.mkdir(parents=True, exist_ok=True)
    copy_if_exists(REPO_ROOT / "etc" / "task-runner.yaml.sample", taskrunner_bootstrap / "task-runner.yaml.sample")
    docs_dir = staging_dir / "docs"
    copy_if_exists(REPO_ROOT / "docs" / "task-packs", docs_dir / "task-packs")
    copy_if_exists(REPO_ROOT / "docs" / "modules" / "taskrunner", docs_dir / "modules" / "taskrunner")

def copy_orchestrator_assets(release_dir: Path, staging_dir: Path) -> None:
    """Copy orchestrator service, worker SDK, postgres snapshot, and dashboards when present."""
    mapping = {
        release_dir / "orchestrator" / "service": staging_dir / "orchestrator" / "service",
        release_dir / "orchestrator" / "worker-sdk": staging_dir / "orchestrator" / "worker-sdk",
        release_dir / "orchestrator" / "postgres": staging_dir / "orchestrator" / "postgres",
        release_dir / "orchestrator" / "dashboards": staging_dir / "orchestrator" / "dashboards",
    }
    for src, dest in mapping.items():
        copy_if_exists(src, dest)

def copy_export_and_notifier_assets(release_dir: Path, staging_dir: Path) -> None:
    """Copy Export Center and Notifier offline bundles and tooling when present."""
    copy_if_exists(release_dir / "export-center", staging_dir / "export-center")
    copy_if_exists(release_dir / "notifier", staging_dir / "notifier")

def copy_surface_secrets(release_dir: Path, staging_dir: Path) -> None:
    """Include Surface.Secrets bundles and manifests if present."""
    copy_if_exists(release_dir / "surface-secrets", staging_dir / "surface-secrets")

def copy_bootstrap_configs(staging_dir: Path) -> None:
    notify_config = REPO_ROOT / "etc" / "notify.airgap.yaml"
    notify_secret = REPO_ROOT / "etc" / "secrets" / "notify-web-airgap.secret.example"
    notify_doc = REPO_ROOT / "docs" / "modules" / "notify" / "bootstrap-pack.md"
    if not notify_config.exists():
        raise FileNotFoundError(f"Missing notifier air-gap config: {notify_config}")
    if not notify_secret.exists():
        raise FileNotFoundError(f"Missing notifier air-gap secret template: {notify_secret}")
    notify_bootstrap_dir = staging_dir / "bootstrap" / "notify"
    notify_bootstrap_dir.mkdir(parents=True, exist_ok=True)
    copy_if_exists(REPO_ROOT / "etc" / "bootstrap" / "notify", notify_bootstrap_dir)
    copy_if_exists(notify_config, notify_bootstrap_dir / "notify.yaml")
    copy_if_exists(notify_secret, notify_bootstrap_dir / "notify-web.secret.example")
    copy_if_exists(notify_doc, notify_bootstrap_dir / "README.md")

def verify_required_seed_data(repo_root: Path) -> None:
-   ruby_git_sources = repo_root / "seed-data" / "analyzers" / "ruby" / "git-sources"
+   ruby_git_sources = repo_root / "src" / "__Tests" / "__Datasets" / "seed-data" / "analyzers" / "ruby" / "git-sources"
    if not ruby_git_sources.is_dir():
        raise FileNotFoundError(f"Missing Ruby git-sources seed directory: {ruby_git_sources}")
    required_files = [
        ruby_git_sources / "Gemfile.lock",
        ruby_git_sources / "expected.json",
    ]
    for path in required_files:
        if not path.exists():
            raise FileNotFoundError(f"Offline kit seed artefact missing: {path}")

def copy_third_party_licenses(staging_dir: Path) -> None:
    licenses_src = REPO_ROOT / "third-party-licenses"
    if not licenses_src.is_dir():
        return
    target_dir = staging_dir / "third-party-licenses"
    target_dir.mkdir(parents=True, exist_ok=True)
    entries = sorted(licenses_src.iterdir(), key=lambda entry: entry.name.lower())
    for entry in entries:
        if entry.is_dir():
            shutil.copytree(entry, target_dir / entry.name, dirs_exist_ok=True)
        elif entry.is_file():
            shutil.copy2(entry, target_dir / entry.name)

def package_telemetry_bundle(staging_dir: Path) -> None:
    script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py"
    if not script.exists():
        return
    TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True)
    run(["python", str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT)
    telemetry_dir = staging_dir / "telemetry"
    telemetry_dir.mkdir(parents=True, exist_ok=True)
@@ -288,8 +288,8 @@ def package_telemetry_bundle(staging_dir: Path) -> None:
    shutil.copy2(sha_path, telemetry_dir / sha_path.name)

def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]:
    entries: list[OrderedDict[str, Any]] = []
    exclude = exclude or set()
    for path in sorted(staging_dir.rglob("*")):
        if not path.is_file():
@@ -306,39 +306,39 @@ def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[Or
                )
            )
        )
    return entries

def summarize_counts(staging_dir: Path) -> Mapping[str, int]:
    def count_files(rel: str) -> int:
        root = staging_dir / rel
        if not root.exists():
            return 0
        return sum(1 for path in root.rglob("*") if path.is_file())
    return {
        "cli": count_files("cli"),
        "taskPacksDocs": count_files("docs/task-packs"),
        "containers": count_files("containers"),
        "orchestrator": count_files("orchestrator"),
        "exportCenter": count_files("export-center"),
        "notifier": count_files("notifier"),
        "surfaceSecrets": count_files("surface-secrets"),
    }

def copy_container_bundles(release_dir: Path, staging_dir: Path) -> None:
    """Copy container air-gap bundles if present in the release directory."""
    candidates = [release_dir / "containers", release_dir / "images"]
    target_dir = staging_dir / "containers"
    for root in candidates:
        if not root.exists():
            continue
        for bundle in sorted(root.glob("**/*")):
            if bundle.is_file() and bundle.suffix in {".gz", ".tar", ".tgz"}:
                target_path = target_dir / bundle.relative_to(root)
                target_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(bundle, target_path)
def write_offline_manifest(
@@ -424,17 +424,17 @@ def sign_blob(
    return sig_path

def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
    release_dir = args.release_dir.resolve()
    staging_dir = args.staging_dir.resolve()
    output_dir = args.output_dir.resolve()
    verify_release(release_dir)
    verify_required_seed_data(REPO_ROOT)
    if not args.skip_smoke:
        run_rust_analyzer_smoke()
        run_python_analyzer_smoke()
    clean_directory(staging_dir)
    copy_debug_store(release_dir, staging_dir)
    manifest_data = load_manifest(release_dir)
@@ -443,22 +443,22 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
    if isinstance(checksums, Mapping):
        release_manifest_sha = checksums.get("sha256")
    copy_release_manifests(release_dir, staging_dir)
    copy_component_artifacts(manifest_data, release_dir, staging_dir)
    copy_collections(manifest_data, release_dir, staging_dir)
    copy_plugins_and_assets(staging_dir)
    copy_bootstrap_configs(staging_dir)
    copy_cli_and_taskrunner_assets(release_dir, staging_dir)
    copy_container_bundles(release_dir, staging_dir)
    copy_orchestrator_assets(release_dir, staging_dir)
    copy_export_and_notifier_assets(release_dir, staging_dir)
    copy_surface_secrets(release_dir, staging_dir)
    copy_third_party_licenses(staging_dir)
    package_telemetry_bundle(staging_dir)
    offline_manifest_path, offline_manifest_sha = write_offline_manifest(
        staging_dir,
        args.version,
        args.channel,
        release_manifest_sha,
    )
@@ -491,8 +491,8 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
    if manifest_sig:
        signature_paths["manifestSignature"] = str(manifest_sig)
    metadata = OrderedDict(
        (
            ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"),
            ("bundleName", bundle_path.name),
            ("bundleSha256", bundle_sha_prefixed),
@@ -501,11 +501,11 @@ def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]:
("manifestSha256", f"sha256:{offline_manifest_sha}"),
("manifestSize", offline_manifest_path.stat().st_size),
("channel", args.channel),
("version", args.version),
("capturedAt", utc_now_iso()),
("counts", summarize_counts(staging_dir)),
)
)
("version", args.version),
("capturedAt", utc_now_iso()),
("counts", summarize_counts(staging_dir)),
)
)
if sig:
metadata["bundleSignatureName"] = Path(sig).name

View File

@@ -0,0 +1,181 @@
#!/bin/bash
# test-package-publish.sh - Test NuGet package publishing to local Gitea
# Sprint: SPRINT_20251226_004_CICD
#
# Prerequisites:
# - Docker running
# - Gitea test instance running (docker compose -f devops/compose/docker-compose.gitea-test.yaml up -d)
# - GITEA_TEST_TOKEN environment variable set
# - GITEA_TEST_OWNER environment variable set (default: stellaops)
#
# Usage:
# export GITEA_TEST_TOKEN="your-access-token"
# ./test-package-publish.sh # Test with sample package
# ./test-package-publish.sh --module Authority # Test specific module
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Configuration
GITEA_URL="${GITEA_TEST_URL:-http://localhost:3000}"
GITEA_OWNER="${GITEA_TEST_OWNER:-stellaops}"
GITEA_TOKEN="${GITEA_TEST_TOKEN:-}"
TEST_MODULE=""
DRY_RUN=false
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m'
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    --module)
      TEST_MODULE="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --help)
      echo "Usage: $0 [OPTIONS]"
      echo ""
      echo "Options:"
      echo "  --module MODULE  Test specific module (e.g., Authority)"
      echo "  --dry-run        Validate without pushing"
      echo "  --help           Show this help message"
      echo ""
      echo "Environment Variables:"
      echo "  GITEA_TEST_URL    Gitea URL (default: http://localhost:3000)"
      echo "  GITEA_TEST_OWNER  Package owner (default: stellaops)"
      echo "  GITEA_TEST_TOKEN  Access token with package:write scope"
      exit 0
      ;;
    *)
      echo "Unknown option: $1"
      exit 1
      ;;
  esac
done
echo "=== Package Publishing Test ==="
echo "Gitea URL: $GITEA_URL"
echo "Owner: $GITEA_OWNER"
echo "Dry Run: $DRY_RUN"
# Check prerequisites
if [[ -z "$GITEA_TOKEN" && "$DRY_RUN" == "false" ]]; then
echo -e "${RED}ERROR: GITEA_TEST_TOKEN environment variable is required${NC}"
echo "Generate a token at: $GITEA_URL/user/settings/applications"
exit 1
fi
# Check if Gitea is running
if ! curl -s "$GITEA_URL/api/healthz" >/dev/null 2>&1; then
  echo -e "${YELLOW}WARNING: Gitea not reachable at $GITEA_URL${NC}"
  echo "Start it with: docker compose -f devops/compose/docker-compose.gitea-test.yaml up -d"
  if [[ "$DRY_RUN" == "false" ]]; then
    exit 1
  fi
fi
# NuGet source URL
NUGET_SOURCE="$GITEA_URL/api/packages/$GITEA_OWNER/nuget/index.json"
echo "NuGet Source: $NUGET_SOURCE"
echo ""
# Create a test package
TEST_DIR="$REPO_ROOT/out/package-test"
mkdir -p "$TEST_DIR"
# If no module specified, use a simple test
if [[ -z "$TEST_MODULE" ]]; then
echo "=== Creating Test Package ==="
# Create a minimal test package
TEST_PROJ_DIR="$TEST_DIR/StellaOps.PackageTest"
mkdir -p "$TEST_PROJ_DIR"
cat > "$TEST_PROJ_DIR/StellaOps.PackageTest.csproj" <<'EOF'
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<PackageId>StellaOps.PackageTest</PackageId>
<Version>0.0.1-test</Version>
<Authors>StellaOps</Authors>
<Description>Test package for registry validation</Description>
<PackageLicenseExpression>AGPL-3.0-or-later</PackageLicenseExpression>
</PropertyGroup>
</Project>
EOF
cat > "$TEST_PROJ_DIR/Class1.cs" <<'EOF'
namespace StellaOps.PackageTest;
public class TestClass { }
EOF
echo "Building test package..."
dotnet pack "$TEST_PROJ_DIR/StellaOps.PackageTest.csproj" -c Release -o "$TEST_DIR/packages"
PACKAGE_FILE=$(find "$TEST_DIR/packages" -name "*.nupkg" | head -1)
else
echo "=== Packing Module: $TEST_MODULE ==="
# Find the module's main project
MODULE_PROJ=$(find "$REPO_ROOT/src" -path "*/$TEST_MODULE/*" -name "StellaOps.$TEST_MODULE.csproj" | head -1)
if [[ -z "$MODULE_PROJ" ]]; then
echo -e "${RED}ERROR: Module project not found for $TEST_MODULE${NC}"
exit 1
fi
echo "Project: $MODULE_PROJ"
dotnet pack "$MODULE_PROJ" -c Release -p:Version=0.0.1-test -o "$TEST_DIR/packages"
PACKAGE_FILE=$(find "$TEST_DIR/packages" -name "*.nupkg" | head -1)
fi
if [[ -z "$PACKAGE_FILE" ]]; then
echo -e "${RED}ERROR: No package file created${NC}"
exit 1
fi
echo ""
echo "Package created: $PACKAGE_FILE"
echo ""
if [[ "$DRY_RUN" == "true" ]]; then
echo -e "${YELLOW}=== DRY RUN: Skipping push ===${NC}"
echo "Package validated successfully!"
echo ""
echo "To push manually:"
echo " dotnet nuget push \"$PACKAGE_FILE\" \\"
echo " --source $NUGET_SOURCE \\"
echo " --api-key YOUR_TOKEN"
else
echo "=== Pushing Package ==="
if dotnet nuget push "$PACKAGE_FILE" \
--source "$NUGET_SOURCE" \
--api-key "$GITEA_TOKEN" \
--skip-duplicate; then
echo ""
echo -e "${GREEN}SUCCESS: Package pushed to Gitea registry${NC}"
echo "View at: $GITEA_URL/$GITEA_OWNER/-/packages"
else
echo ""
echo -e "${RED}FAILED: Package push failed${NC}"
exit 1
fi
fi
echo ""
echo "=== Cleanup ==="
rm -rf "$TEST_DIR"
echo "Test directory cleaned up"
echo ""
echo -e "${GREEN}Done!${NC}"

View File

@@ -70,7 +70,11 @@ fi
# Validate each profile
for profile in "${PROFILES[@]}"; do
PROFILE_FILE="$COMPOSE_DIR/docker-compose.${profile}.yml"
# Check for both .yml and .yaml extensions
PROFILE_FILE="$COMPOSE_DIR/docker-compose.${profile}.yaml"
if [[ ! -f "$PROFILE_FILE" ]]; then
PROFILE_FILE="$COMPOSE_DIR/docker-compose.${profile}.yml"
fi
echo ""
echo "=== Validating profile: $profile ==="

View File

@@ -13,12 +13,11 @@ mkdir -p "$logs_dir"
export DOTNET_CLI_TELEMETRY_OPTOUT=${DOTNET_CLI_TELEMETRY_OPTOUT:-1}
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=${DOTNET_SKIP_FIRST_TIME_EXPERIENCE:-1}
export NUGET_PACKAGES=${NUGET_PACKAGES:-$repo_root/.nuget/packages}
-export NUGET_SOURCES=${NUGET_SOURCES:-"$repo_root/local-nugets;$repo_root/.nuget/packages"}
+export NUGET_SOURCES=${NUGET_SOURCES:-"$repo_root/.nuget/packages"}
export TEST_FILTER=${TEST_FILTER:-""}
export DOTNET_RESTORE_DISABLE_PARALLEL=${DOTNET_RESTORE_DISABLE_PARALLEL:-1}
mkdir -p "$NUGET_PACKAGES"
-rsync -a "$repo_root/local-nugets/" "$NUGET_PACKAGES/" >/dev/null 2>&1 || true
restore_sources=()
IFS=';' read -ra SRC_ARR <<< "$NUGET_SOURCES"
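The hunk cuts off right after the sources string is split; a typical continuation — purely illustrative, not this file's actual tail — would map each entry onto a restore argument (the solution name is a placeholder):

# Illustrative continuation: one --source flag per semicolon-separated entry.
for src in "${SRC_ARR[@]}"; do
  [[ -n "$src" ]] && restore_sources+=(--source "$src")
done
dotnet restore StellaOps.sln "${restore_sources[@]}" --packages "$NUGET_PACKAGES"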

View File

@@ -18,7 +18,7 @@ else
fi
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SEED_DIR="${ROOT_DIR}/seed-data/concelier/store-aoc-19-005"
SEED_DIR="${ROOT_DIR}/src/__Tests/__Datasets/seed-data/concelier/store-aoc-19-005"
OUT_DIR="${ROOT_DIR}/out/linksets"
OUT_PATH="${1:-${OUT_DIR}/linksets-stage-backfill.tar.zst}"
GEN_TIME="2025-12-07T00:00:00Z"
@@ -46,7 +46,7 @@ cat >"${WORKDIR}/manifest.json" <<EOF
{
  "datasetId": "store-aoc-19-005-dev",
  "generatedAt": "${GEN_TIME}",
- "source": "seed-data/concelier/store-aoc-19-005",
+ "source": "src/__Tests/__Datasets/seed-data/concelier/store-aoc-19-005",
  "records": {
    "linksets": ${linksets_count},
    "advisory_chunks": ${advisory_count}

View File

@@ -1,5 +1,5 @@
param(
-  [string]$Destination = "$(Join-Path (Split-Path -Parent $PSCommandPath) '..' | Resolve-Path)/seed-data/ics-cisa"
+  [string]$Destination = "$(Join-Path (Split-Path -Parent $PSCommandPath) '../..' | Resolve-Path)/src/__Tests/__Datasets/seed-data/ics-cisa"
)
$ErrorActionPreference = 'Stop'

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
DEST_DIR="${1:-$ROOT_DIR/seed-data/ics-cisa}"
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
DEST_DIR="${1:-$ROOT_DIR/src/__Tests/__Datasets/seed-data/ics-cisa}"
mkdir -p "$DEST_DIR"
info() { printf "[ics-seed] %s\n" "$*"; }

View File

@@ -45,7 +45,7 @@ def capture(idx: str, title: str, out_dir: Path) -> Path:
def main() -> int:
    parser = argparse.ArgumentParser()
-   parser.add_argument("--out", type=Path, default=Path("seed-data/kisa/html"))
+   parser.add_argument("--out", type=Path, default=Path("src/__Tests/__Datasets/seed-data/kisa/html"))
    parser.add_argument("--limit", type=int, default=10, help="Maximum advisories to download")
    args = parser.parse_args()

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
-   <RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
+   <RestorePackagesPath>../../.nuget/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
-   <RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
+   <RestorePackagesPath>../../.nuget/packages</RestorePackagesPath>
    <DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
    <EnableDefaultItems>false</EnableDefaultItems>
  </PropertyGroup>

View File

@@ -2,12 +2,11 @@
# Convenience wrapper to run the isolated Node analyzer suite with cleanup enabled.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# auto-clean workspace outputs before running tests (uses cleanup helper inside test script)
export CLEAN_BEFORE_NODE_TESTS="${CLEAN_BEFORE_NODE_TESTS:-1}"
export DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
-export NUGET_PACKAGES="${ROOT}/offline/packages"
exec "${ROOT}/src/Scanner/__Tests/node-tests-isolated.sh"

View File

@@ -2,8 +2,8 @@
"""Generate manifests for curated binaries.
- .nuget/manifest.json : NuGet packages (id, version, sha256)
- - vendor/manifest.json : Plugin/tool/deploy/ops binaries with sha256
- - offline/feeds/manifest.json : Offline bundles (tar/tgz/zip) with sha256
+ - devops/manifests/binary-plugins.manifest.json : Plugin/tool/deploy/ops binaries with sha256
+ - devops/offline/feeds/manifest.json : Offline bundles (tar/tgz/zip) with sha256
Intended to be idempotent and run in CI to ensure manifests stay current.
"""
@@ -99,16 +99,16 @@ def generate_vendor_manifest() -> None:
"entries": entries,
}
vendor_dir = ROOT / "vendor"
vendor_dir.mkdir(exist_ok=True)
write_json(vendor_dir / "manifest.json", manifest)
manifests_dir = ROOT / "devops" / "manifests"
manifests_dir.mkdir(parents=True, exist_ok=True)
write_json(manifests_dir / "binary-plugins.manifest.json", manifest)
FEED_SUFFIXES = (".tar.gz", ".tgz", ".tar", ".zip", ".gz")
def generate_offline_manifest() -> None:
feeds_dir = ROOT / "offline" / "feeds"
feeds_dir = ROOT / "devops" / "offline" / "feeds"
feeds_dir.mkdir(parents=True, exist_ok=True)
existing = {}
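Because the docstring declares the generator idempotent and CI-run, a common enforcement pattern is to regenerate and fail the job on drift. A sketch; the script path is a placeholder and the checked paths are the manifest locations named above:

# Regenerate manifests, then fail if the working tree changed (placeholder path).
python devops/scripts/generate_binary_manifests.py
git diff --exit-code -- .nuget/manifest.json devops/manifests devops/offline/feeds || {
  echo "Manifests are stale; commit the regenerated files." >&2
  exit 1
}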