Restructure solution layout by module
	
		
			
	
		
	
	
		
	
		
			Some checks failed
		
		
	
	
		
			
				
	
				Docs CI / lint-and-preview (push) Has been cancelled
				
			
		
		
	
	
				
					
				
			
		
			Some checks failed
		
		
	
	Docs CI / lint-and-preview (push) Has been cancelled
				
			This commit is contained in:
		
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -15,7 +15,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj", | ||||
|       "project": "src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj", | ||||
|       "entrypoint": "StellaOps.Authority.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -24,7 +24,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj", | ||||
|       "project": "src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj", | ||||
|       "entrypoint": "StellaOps.Signer.WebService.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -33,7 +33,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj", | ||||
|       "project": "src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj", | ||||
|       "entrypoint": "StellaOps.Attestor.WebService.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -42,7 +42,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj", | ||||
|       "project": "src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj", | ||||
|       "entrypoint": "StellaOps.Scanner.WebService.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -51,7 +51,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj", | ||||
|       "project": "src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj", | ||||
|       "entrypoint": "StellaOps.Scanner.Worker.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -60,7 +60,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj", | ||||
|       "project": "src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj", | ||||
|       "entrypoint": "StellaOps.Concelier.WebService.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -69,7 +69,7 @@ | ||||
|       "kind": "dotnet-service", | ||||
|       "context": ".", | ||||
|       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", | ||||
|       "project": "src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj", | ||||
|       "project": "src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj", | ||||
|       "entrypoint": "StellaOps.Excititor.WebService.dll" | ||||
|     }, | ||||
|     { | ||||
| @@ -81,7 +81,7 @@ | ||||
|     } | ||||
|   ], | ||||
|   "cli": { | ||||
|     "project": "src/StellaOps.Cli/StellaOps.Cli.csproj", | ||||
|     "project": "src/Cli/StellaOps.Cli/StellaOps.Cli.csproj", | ||||
|     "runtimes": [ | ||||
|       "linux-x64", | ||||
|       "linux-arm64", | ||||
| @@ -104,6 +104,6 @@ | ||||
|     ] | ||||
|   }, | ||||
|   "buildxPlugin": { | ||||
|     "project": "src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" | ||||
|     "project": "src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -11,9 +11,9 @@ FROM ${NODE_IMAGE} AS build | ||||
| WORKDIR /workspace | ||||
| ENV CI=1 \ | ||||
|     SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH} | ||||
| COPY src/StellaOps.Web/package.json src/StellaOps.Web/package-lock.json ./ | ||||
| COPY src/Web/StellaOps.Web/package.json src/Web/StellaOps.Web/package-lock.json ./ | ||||
| RUN npm ci --prefer-offline --no-audit --no-fund | ||||
| COPY src/StellaOps.Web/ ./ | ||||
| COPY src/Web/StellaOps.Web/ ./ | ||||
| RUN npm run build -- --configuration=production | ||||
|  | ||||
| FROM ${NGINX_IMAGE} AS runtime | ||||
|   | ||||
| @@ -1,52 +1,52 @@ | ||||
# syntax=docker/dockerfile:1.7-labs

# Base images for the two stages; both overridable by the release pipeline.
ARG SDK_IMAGE=mcr.microsoft.com/dotnet/nightly/sdk:10.0
ARG RUNTIME_IMAGE=gcr.io/distroless/dotnet/aspnet:latest

# Per-service parameters supplied at build time (project path, entry DLL,
# release metadata, and SOURCE_DATE_EPOCH for reproducible builds).
ARG PROJECT
ARG ENTRYPOINT_DLL
ARG VERSION=0.0.0
ARG CHANNEL=dev
ARG GIT_SHA=0000000
ARG SOURCE_DATE_EPOCH=0

FROM ${SDK_IMAGE} AS build
# Re-declare the global ARGs consumed by this stage (Dockerfile ARG scoping).
ARG PROJECT
ARG GIT_SHA
ARG SOURCE_DATE_EPOCH
WORKDIR /src
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \
    DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 \
    NUGET_XMLDOC_MODE=skip \
    SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH}
COPY . .
# Cache mount keeps the NuGet package cache warm across image builds.
RUN --mount=type=cache,target=/root/.nuget/packages \
    dotnet restore "${PROJECT}"
# Deterministic, framework-dependent publish stamped with the git revision.
RUN --mount=type=cache,target=/root/.nuget/packages \
    dotnet publish "${PROJECT}" \
      -c Release \
      -o /app/publish \
      /p:UseAppHost=false \
      /p:ContinuousIntegrationBuild=true \
      /p:SourceRevisionId=${GIT_SHA} \
      /p:Deterministic=true \
      /p:TreatWarningsAsErrors=true

FROM ${RUNTIME_IMAGE} AS runtime
WORKDIR /app
ARG ENTRYPOINT_DLL
ARG VERSION
ARG CHANNEL
ARG GIT_SHA
ENV DOTNET_EnableDiagnostics=0 \
    ASPNETCORE_URLS=http://0.0.0.0:8080
COPY --from=build /app/publish/ ./
# Generate a tiny launcher so the ENTRYPOINT stays static while the DLL varies.
# NOTE(review): the default RUNTIME_IMAGE is distroless, which ships no shell —
# this RUN and the sh shebang assume RUNTIME_IMAGE is overridden with a
# shell-capable base at build time; confirm against the pipeline.
RUN set -eu; \
    printf '#!/usr/bin/env sh\nset -e\nexec dotnet %s "$@"\n' "${ENTRYPOINT_DLL}" > /entrypoint.sh; \
    chmod +x /entrypoint.sh
EXPOSE 8080
LABEL org.opencontainers.image.version="${VERSION}" \
      org.opencontainers.image.revision="${GIT_SHA}" \
      org.opencontainers.image.source="https://git.stella-ops.org/stella-ops/feedser" \
      org.stellaops.release.channel="${CHANNEL}"
ENTRYPOINT ["/entrypoint.sh"]
# syntax=docker/dockerfile:1.7-labs

# Base images for the two stages; both overridable by the release pipeline.
ARG SDK_IMAGE=mcr.microsoft.com/dotnet/nightly/sdk:10.0
ARG RUNTIME_IMAGE=gcr.io/distroless/dotnet/aspnet:latest

# Per-service parameters supplied at build time (project path, entry DLL,
# release metadata, and SOURCE_DATE_EPOCH for reproducible builds).
ARG PROJECT
ARG ENTRYPOINT_DLL
ARG VERSION=0.0.0
ARG CHANNEL=dev
ARG GIT_SHA=0000000
ARG SOURCE_DATE_EPOCH=0

FROM ${SDK_IMAGE} AS build
# Re-declare the global ARGs consumed by this stage (Dockerfile ARG scoping).
ARG PROJECT
ARG GIT_SHA
ARG SOURCE_DATE_EPOCH
WORKDIR /src
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \
    DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 \
    NUGET_XMLDOC_MODE=skip \
    SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH}
COPY . .
# Cache mount keeps the NuGet package cache warm across image builds.
RUN --mount=type=cache,target=/root/.nuget/packages \
    dotnet restore "${PROJECT}"
# Deterministic, framework-dependent publish stamped with the git revision.
RUN --mount=type=cache,target=/root/.nuget/packages \
    dotnet publish "${PROJECT}" \
      -c Release \
      -o /app/publish \
      /p:UseAppHost=false \
      /p:ContinuousIntegrationBuild=true \
      /p:SourceRevisionId=${GIT_SHA} \
      /p:Deterministic=true \
      /p:TreatWarningsAsErrors=true

FROM ${RUNTIME_IMAGE} AS runtime
WORKDIR /app
ARG ENTRYPOINT_DLL
ARG VERSION
ARG CHANNEL
ARG GIT_SHA
ENV DOTNET_EnableDiagnostics=0 \
    ASPNETCORE_URLS=http://0.0.0.0:8080
COPY --from=build /app/publish/ ./
# Generate a tiny launcher so the ENTRYPOINT stays static while the DLL varies.
# NOTE(review): the default RUNTIME_IMAGE is distroless, which ships no shell —
# this RUN and the sh shebang assume RUNTIME_IMAGE is overridden with a
# shell-capable base at build time; confirm against the pipeline.
RUN set -eu; \
    printf '#!/usr/bin/env sh\nset -e\nexec dotnet %s "$@"\n' "${ENTRYPOINT_DLL}" > /entrypoint.sh; \
    chmod +x /entrypoint.sh
EXPOSE 8080
LABEL org.opencontainers.image.version="${VERSION}" \
      org.opencontainers.image.revision="${GIT_SHA}" \
      org.opencontainers.image.source="https://git.stella-ops.org/stella-ops/feedser" \
      org.stellaops.release.channel="${CHANNEL}"
ENTRYPOINT ["/entrypoint.sh"]
|   | ||||
| @@ -1,22 +1,22 @@ | ||||
# SPA host: serves the static bundle and falls back to index.html so
# client-side routes resolve.
server {
    listen       8080;
    listen       [::]:8080;
    server_name  _;

    root   /usr/share/nginx/html;
    index  index.html;

    # Unknown paths fall through to the app shell (client-side routing).
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Fingerprinted static assets: 30-day public cache.
    location ~* \.(?:js|css|svg|png|jpg|jpeg|gif|ico|woff2?)$ {
        add_header Cache-Control "public, max-age=2592000";
    }

    # Liveness probe; kept out of the access log.
    location = /healthz {
        access_log off;
        add_header Content-Type text/plain;
        return 200 'ok';
    }
}
# SPA host: serves the static bundle and falls back to index.html so
# client-side routes resolve.
server {
    listen       8080;
    listen       [::]:8080;
    server_name  _;

    root   /usr/share/nginx/html;
    index  index.html;

    # Unknown paths fall through to the app shell (client-side routing).
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Fingerprinted static assets: 30-day public cache.
    location ~* \.(?:js|css|svg|png|jpg|jpeg|gif|ico|woff2?)$ {
        add_header Cache-Control "public, max-age=2592000";
    }

    # Liveness probe; kept out of the access log.
    location = /healthz {
        access_log off;
        add_header Content-Type text/plain;
        return 200 'ok';
    }
}
|   | ||||
| @@ -1,232 +1,232 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| import json | ||||
| import tempfile | ||||
| import unittest | ||||
| from collections import OrderedDict | ||||
| from pathlib import Path | ||||
| import sys | ||||
|  | ||||
| sys.path.append(str(Path(__file__).resolve().parent)) | ||||
|  | ||||
| from build_release import write_manifest  # type: ignore import-not-found | ||||
| from verify_release import VerificationError, compute_sha256, verify_release | ||||
|  | ||||
|  | ||||
| class VerifyReleaseTests(unittest.TestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self._temp = tempfile.TemporaryDirectory() | ||||
|         self.base_path = Path(self._temp.name) | ||||
|         self.out_dir = self.base_path / "out" | ||||
|         self.release_dir = self.out_dir / "release" | ||||
|         self.release_dir.mkdir(parents=True, exist_ok=True) | ||||
|  | ||||
|     def tearDown(self) -> None: | ||||
|         self._temp.cleanup() | ||||
|  | ||||
|     def _relative_to_out(self, path: Path) -> str: | ||||
|         return path.relative_to(self.out_dir).as_posix() | ||||
|  | ||||
|     def _write_json(self, path: Path, payload: dict[str, object]) -> None: | ||||
|         path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         with path.open("w", encoding="utf-8") as handle: | ||||
|             json.dump(payload, handle, indent=2) | ||||
|             handle.write("\n") | ||||
|  | ||||
|     def _create_sample_release(self) -> None: | ||||
|         sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" | ||||
|         sbom_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") | ||||
|         sbom_sha = compute_sha256(sbom_path) | ||||
|  | ||||
|         provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" | ||||
|         self._write_json( | ||||
|             provenance_path, | ||||
|             { | ||||
|                 "buildDefinition": {"buildType": "https://example/build", "externalParameters": {}}, | ||||
|                 "runDetails": {"builder": {"id": "https://example/ci"}}, | ||||
|             }, | ||||
|         ) | ||||
|         provenance_sha = compute_sha256(provenance_path) | ||||
|  | ||||
|         signature_path = self.release_dir / "artifacts/signatures/sample.signature" | ||||
|         signature_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         signature_path.write_text("signature-data\n", encoding="utf-8") | ||||
|         signature_sha = compute_sha256(signature_path) | ||||
|  | ||||
|         metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" | ||||
|         self._write_json(metadata_path, {"digest": "sha256:1234"}) | ||||
|         metadata_sha = compute_sha256(metadata_path) | ||||
|  | ||||
|         chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" | ||||
|         chart_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         chart_path.write_bytes(b"helm-chart-data") | ||||
|         chart_sha = compute_sha256(chart_path) | ||||
|  | ||||
|         compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" | ||||
|         compose_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         compose_path.write_text("services: {}\n", encoding="utf-8") | ||||
|         compose_sha = compute_sha256(compose_path) | ||||
|  | ||||
|         debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" | ||||
|         debug_file.parent.mkdir(parents=True, exist_ok=True) | ||||
|         debug_file.write_bytes(b"\x7fELFDEBUGDATA") | ||||
|         debug_sha = compute_sha256(debug_file) | ||||
|  | ||||
|         debug_manifest_path = self.release_dir / "debug/debug-manifest.json" | ||||
|         debug_manifest = OrderedDict( | ||||
|             ( | ||||
|                 ("generatedAt", "2025-10-26T00:00:00Z"), | ||||
|                 ("version", "1.0.0"), | ||||
|                 ("channel", "edge"), | ||||
|                 ( | ||||
|                     "artifacts", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("buildId", "abcdef1234"), | ||||
|                                 ("platform", "linux/amd64"), | ||||
|                                 ("debugPath", "debug/.build-id/ab/cdef.debug"), | ||||
|                                 ("sha256", debug_sha), | ||||
|                                 ("size", debug_file.stat().st_size), | ||||
|                                 ("components", ["sample"]), | ||||
|                                 ("images", ["registry.example/sample@sha256:feedface"]), | ||||
|                                 ("sources", ["app/sample.dll"]), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|             ) | ||||
|         ) | ||||
|         self._write_json(debug_manifest_path, debug_manifest) | ||||
|         debug_manifest_sha = compute_sha256(debug_manifest_path) | ||||
|         (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( | ||||
|             f"{debug_manifest_sha}  {debug_manifest_path.name}\n", encoding="utf-8" | ||||
|         ) | ||||
|  | ||||
|         manifest = OrderedDict( | ||||
|             ( | ||||
|                 ( | ||||
|                     "release", | ||||
|                     OrderedDict( | ||||
|                         ( | ||||
|                             ("version", "1.0.0"), | ||||
|                             ("channel", "edge"), | ||||
|                             ("date", "2025-10-26T00:00:00Z"), | ||||
|                             ("calendar", "2025.10"), | ||||
|                         ) | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "components", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "sample"), | ||||
|                                 ("image", "registry.example/sample@sha256:feedface"), | ||||
|                                 ("tags", ["registry.example/sample:1.0.0"]), | ||||
|                                 ( | ||||
|                                     "sbom", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(sbom_path)), | ||||
|                                             ("sha256", sbom_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "provenance", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(provenance_path)), | ||||
|                                             ("sha256", provenance_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "signature", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(signature_path)), | ||||
|                                             ("sha256", signature_sha), | ||||
|                                             ("ref", "sigstore://example"), | ||||
|                                             ("tlogUploaded", True), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "metadata", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(metadata_path)), | ||||
|                                             ("sha256", metadata_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "charts", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "stellaops"), | ||||
|                                 ("version", "1.0.0"), | ||||
|                                 ("path", self._relative_to_out(chart_path)), | ||||
|                                 ("sha256", chart_sha), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "compose", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "docker-compose.dev.yaml"), | ||||
|                                 ("path", compose_path.relative_to(self.out_dir).as_posix()), | ||||
|                                 ("sha256", compose_sha), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "debugStore", | ||||
|                     OrderedDict( | ||||
|                         ( | ||||
|                             ("manifest", "debug/debug-manifest.json"), | ||||
|                             ("sha256", debug_manifest_sha), | ||||
|                             ("entries", 1), | ||||
|                             ("platforms", ["linux/amd64"]), | ||||
|                             ("directory", "debug/.build-id"), | ||||
|                         ) | ||||
|                     ), | ||||
|                 ), | ||||
|             ) | ||||
|         ) | ||||
|         write_manifest(manifest, self.release_dir) | ||||
|  | ||||
|     def test_verify_release_success(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         # Should not raise | ||||
|         verify_release(self.release_dir) | ||||
|  | ||||
|     def test_verify_release_detects_sha_mismatch(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         tampered = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" | ||||
|         tampered.write_text("tampered\n", encoding="utf-8") | ||||
|         with self.assertRaises(VerificationError): | ||||
|             verify_release(self.release_dir) | ||||
|  | ||||
|     def test_verify_release_detects_missing_debug_file(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" | ||||
|         debug_file.unlink() | ||||
|         with self.assertRaises(VerificationError): | ||||
|             verify_release(self.release_dir) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     unittest.main() | ||||
| from __future__ import annotations | ||||
|  | ||||
| import json | ||||
| import tempfile | ||||
| import unittest | ||||
| from collections import OrderedDict | ||||
| from pathlib import Path | ||||
| import sys | ||||
|  | ||||
| sys.path.append(str(Path(__file__).resolve().parent)) | ||||
|  | ||||
| from build_release import write_manifest  # type: ignore import-not-found | ||||
| from verify_release import VerificationError, compute_sha256, verify_release | ||||
|  | ||||
|  | ||||
| class VerifyReleaseTests(unittest.TestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self._temp = tempfile.TemporaryDirectory() | ||||
|         self.base_path = Path(self._temp.name) | ||||
|         self.out_dir = self.base_path / "out" | ||||
|         self.release_dir = self.out_dir / "release" | ||||
|         self.release_dir.mkdir(parents=True, exist_ok=True) | ||||
|  | ||||
|     def tearDown(self) -> None: | ||||
|         self._temp.cleanup() | ||||
|  | ||||
|     def _relative_to_out(self, path: Path) -> str: | ||||
|         return path.relative_to(self.out_dir).as_posix() | ||||
|  | ||||
|     def _write_json(self, path: Path, payload: dict[str, object]) -> None: | ||||
|         path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         with path.open("w", encoding="utf-8") as handle: | ||||
|             json.dump(payload, handle, indent=2) | ||||
|             handle.write("\n") | ||||
|  | ||||
|     def _create_sample_release(self) -> None: | ||||
|         sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" | ||||
|         sbom_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") | ||||
|         sbom_sha = compute_sha256(sbom_path) | ||||
|  | ||||
|         provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" | ||||
|         self._write_json( | ||||
|             provenance_path, | ||||
|             { | ||||
|                 "buildDefinition": {"buildType": "https://example/build", "externalParameters": {}}, | ||||
|                 "runDetails": {"builder": {"id": "https://example/ci"}}, | ||||
|             }, | ||||
|         ) | ||||
|         provenance_sha = compute_sha256(provenance_path) | ||||
|  | ||||
|         signature_path = self.release_dir / "artifacts/signatures/sample.signature" | ||||
|         signature_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         signature_path.write_text("signature-data\n", encoding="utf-8") | ||||
|         signature_sha = compute_sha256(signature_path) | ||||
|  | ||||
|         metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" | ||||
|         self._write_json(metadata_path, {"digest": "sha256:1234"}) | ||||
|         metadata_sha = compute_sha256(metadata_path) | ||||
|  | ||||
|         chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" | ||||
|         chart_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         chart_path.write_bytes(b"helm-chart-data") | ||||
|         chart_sha = compute_sha256(chart_path) | ||||
|  | ||||
|         compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" | ||||
|         compose_path.parent.mkdir(parents=True, exist_ok=True) | ||||
|         compose_path.write_text("services: {}\n", encoding="utf-8") | ||||
|         compose_sha = compute_sha256(compose_path) | ||||
|  | ||||
|         debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" | ||||
|         debug_file.parent.mkdir(parents=True, exist_ok=True) | ||||
|         debug_file.write_bytes(b"\x7fELFDEBUGDATA") | ||||
|         debug_sha = compute_sha256(debug_file) | ||||
|  | ||||
|         debug_manifest_path = self.release_dir / "debug/debug-manifest.json" | ||||
|         debug_manifest = OrderedDict( | ||||
|             ( | ||||
|                 ("generatedAt", "2025-10-26T00:00:00Z"), | ||||
|                 ("version", "1.0.0"), | ||||
|                 ("channel", "edge"), | ||||
|                 ( | ||||
|                     "artifacts", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("buildId", "abcdef1234"), | ||||
|                                 ("platform", "linux/amd64"), | ||||
|                                 ("debugPath", "debug/.build-id/ab/cdef.debug"), | ||||
|                                 ("sha256", debug_sha), | ||||
|                                 ("size", debug_file.stat().st_size), | ||||
|                                 ("components", ["sample"]), | ||||
|                                 ("images", ["registry.example/sample@sha256:feedface"]), | ||||
|                                 ("sources", ["app/sample.dll"]), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|             ) | ||||
|         ) | ||||
|         self._write_json(debug_manifest_path, debug_manifest) | ||||
|         debug_manifest_sha = compute_sha256(debug_manifest_path) | ||||
|         (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( | ||||
|             f"{debug_manifest_sha}  {debug_manifest_path.name}\n", encoding="utf-8" | ||||
|         ) | ||||
|  | ||||
|         manifest = OrderedDict( | ||||
|             ( | ||||
|                 ( | ||||
|                     "release", | ||||
|                     OrderedDict( | ||||
|                         ( | ||||
|                             ("version", "1.0.0"), | ||||
|                             ("channel", "edge"), | ||||
|                             ("date", "2025-10-26T00:00:00Z"), | ||||
|                             ("calendar", "2025.10"), | ||||
|                         ) | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "components", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "sample"), | ||||
|                                 ("image", "registry.example/sample@sha256:feedface"), | ||||
|                                 ("tags", ["registry.example/sample:1.0.0"]), | ||||
|                                 ( | ||||
|                                     "sbom", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(sbom_path)), | ||||
|                                             ("sha256", sbom_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "provenance", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(provenance_path)), | ||||
|                                             ("sha256", provenance_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "signature", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(signature_path)), | ||||
|                                             ("sha256", signature_sha), | ||||
|                                             ("ref", "sigstore://example"), | ||||
|                                             ("tlogUploaded", True), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                                 ( | ||||
|                                     "metadata", | ||||
|                                     OrderedDict( | ||||
|                                         ( | ||||
|                                             ("path", self._relative_to_out(metadata_path)), | ||||
|                                             ("sha256", metadata_sha), | ||||
|                                         ) | ||||
|                                     ), | ||||
|                                 ), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "charts", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "stellaops"), | ||||
|                                 ("version", "1.0.0"), | ||||
|                                 ("path", self._relative_to_out(chart_path)), | ||||
|                                 ("sha256", chart_sha), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "compose", | ||||
|                     [ | ||||
|                         OrderedDict( | ||||
|                             ( | ||||
|                                 ("name", "docker-compose.dev.yaml"), | ||||
|                                 ("path", compose_path.relative_to(self.out_dir).as_posix()), | ||||
|                                 ("sha256", compose_sha), | ||||
|                             ) | ||||
|                         ) | ||||
|                     ], | ||||
|                 ), | ||||
|                 ( | ||||
|                     "debugStore", | ||||
|                     OrderedDict( | ||||
|                         ( | ||||
|                             ("manifest", "debug/debug-manifest.json"), | ||||
|                             ("sha256", debug_manifest_sha), | ||||
|                             ("entries", 1), | ||||
|                             ("platforms", ["linux/amd64"]), | ||||
|                             ("directory", "debug/.build-id"), | ||||
|                         ) | ||||
|                     ), | ||||
|                 ), | ||||
|             ) | ||||
|         ) | ||||
|         write_manifest(manifest, self.release_dir) | ||||
|  | ||||
|     def test_verify_release_success(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         # Should not raise | ||||
|         verify_release(self.release_dir) | ||||
|  | ||||
|     def test_verify_release_detects_sha_mismatch(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         tampered = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" | ||||
|         tampered.write_text("tampered\n", encoding="utf-8") | ||||
|         with self.assertRaises(VerificationError): | ||||
|             verify_release(self.release_dir) | ||||
|  | ||||
|     def test_verify_release_detects_missing_debug_file(self) -> None: | ||||
|         self._create_sample_release() | ||||
|         debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" | ||||
|         debug_file.unlink() | ||||
|         with self.assertRaises(VerificationError): | ||||
|             verify_release(self.release_dir) | ||||
|  | ||||
|  | ||||
# Allow running this test module directly (python <file>.py) as well as via pytest.
if __name__ == "__main__":
    unittest.main()
|   | ||||
| @@ -1,334 +1,334 @@ | ||||
| #!/usr/bin/env python3 | ||||
| """Verify release artefacts (SBOMs, provenance, signatures, manifest hashes).""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import argparse | ||||
| import hashlib | ||||
| import json | ||||
| import pathlib | ||||
| import sys | ||||
| from collections import OrderedDict | ||||
| from typing import Any, Mapping, Optional | ||||
|  | ||||
| from build_release import dump_yaml  # type: ignore import-not-found | ||||
|  | ||||
|  | ||||
class VerificationError(Exception):
    """Raised when release artefacts fail verification.

    ``verify_release`` raises a single instance whose message lists every
    individual failure found in the release directory.
    """
|  | ||||
|  | ||||
def compute_sha256(path: pathlib.Path) -> str:
    """Return the hex SHA-256 digest of *path*, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()
|  | ||||
|  | ||||
def parse_sha_file(path: pathlib.Path) -> Optional[str]:
    """Return the first token of a ``*.sha256`` sidecar, or ``None`` if absent/empty."""
    if not path.exists():
        return None
    tokens = path.read_text(encoding="utf-8").split()
    return tokens[0] if tokens else None
|  | ||||
|  | ||||
def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path:
    """Resolve a manifest path against the release tree.

    Windows-style separators are normalised first.  Absolute paths are
    returned untouched; relative ones are probed against the release
    directory and its two ancestors, returning the first existing match.
    """
    candidate = pathlib.Path(path_str.replace("\\", "/"))
    if candidate.is_absolute():
        return candidate
    bases = (release_dir, release_dir.parent, release_dir.parent.parent)
    for resolved in ((base / candidate).resolve() for base in bases):
        if resolved.exists():
            return resolved
    # Nothing exists: return the release-relative resolution so the caller
    # can surface a precise missing-file error.
    return (release_dir / candidate).resolve()
|  | ||||
|  | ||||
def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]:
    """Load ``release.json`` preserving key order; raise VerificationError on failure."""
    manifest_path = release_dir / "release.json"
    if not manifest_path.exists():
        raise VerificationError(f"Release manifest JSON missing at {manifest_path}")
    try:
        text = manifest_path.read_text(encoding="utf-8")
        return json.loads(text, object_pairs_hook=OrderedDict)
    except json.JSONDecodeError as exc:
        raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc
|  | ||||
|  | ||||
def _check_sidecar_digest(path: pathlib.Path, errors: list[str]) -> None:
    """Compare *path* against its ``.sha256`` sidecar, appending a mismatch error."""
    recorded = parse_sha_file(path.with_name(path.name + ".sha256"))
    actual = compute_sha256(path)
    if recorded and recorded != actual:
        errors.append(
            f"{path.name}.sha256 recorded {recorded} but file hashes to {actual}"
        )


def verify_manifest_hashes(
    manifest: Mapping[str, Any],
    release_dir: pathlib.Path,
    errors: list[str],
) -> None:
    """Check release.yaml/release.json sidecar digests and the embedded checksum.

    Appends a message to *errors* for every mismatch; returns early if
    ``release.yaml`` is missing entirely.
    """
    yaml_path = release_dir / "release.yaml"
    if not yaml_path.exists():
        errors.append(f"Missing release.yaml at {yaml_path}")
        return

    # Identical sidecar validation for both manifest serialisations.
    _check_sidecar_digest(yaml_path, errors)
    _check_sidecar_digest(release_dir / "release.json", errors)

    checksums = manifest.get("checksums")
    if isinstance(checksums, Mapping):
        recorded_digest = checksums.get("sha256")
        # The recorded digest covers the YAML dump of the manifest *without*
        # its checksums section, so strip it before re-serialising.
        base_manifest = OrderedDict(manifest)
        base_manifest.pop("checksums", None)
        yaml_without_checksums = dump_yaml(base_manifest)
        computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest()
        if recorded_digest != computed_digest:
            errors.append(
                "Manifest checksum mismatch: "
                f"recorded {recorded_digest}, computed {computed_digest}"
            )
|  | ||||
|  | ||||
def verify_artifact_entry(
    entry: Mapping[str, Any],
    release_dir: pathlib.Path,
    label: str,
    component_name: str,
    errors: list[str],
) -> None:
    """Check one artefact mapping (``path`` plus optional ``sha256``) for a component."""
    raw_path = entry.get("path")
    if not raw_path:
        errors.append(f"{component_name}: {label} missing 'path' field.")
        return
    target = resolve_path(str(raw_path), release_dir)
    if not target.exists():
        errors.append(f"{component_name}: {label} path does not exist → {target}")
        return
    expected = entry.get("sha256")
    if not expected:
        return
    computed = compute_sha256(target)
    if computed != expected:
        errors.append(
            f"{component_name}: {label} SHA mismatch for {target} "
            f"(recorded {expected}, computed {computed})"
        )
|  | ||||
|  | ||||
def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the per-component artefact entries declared in the manifest."""
    artefact_kinds = (
        ("sbom", "SBOM"),
        ("provenance", "provenance"),
        ("signature", "signature"),
        ("metadata", "metadata"),
    )
    for component in manifest.get("components", []):
        if not isinstance(component, Mapping):
            errors.append("Component entry is not a mapping.")
            continue
        name = str(component.get("name", "<unknown>"))
        for key, label in artefact_kinds:
            entry = component.get(key)
            if not entry:
                continue
            if isinstance(entry, Mapping):
                verify_artifact_entry(entry, release_dir, label, name, errors)
            else:
                errors.append(f"{name}: {label} entry must be a mapping.")
|  | ||||
|  | ||||
def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Check the chart and compose artefact lists (path existence + digests)."""
    for collection, label in (("charts", "chart"), ("compose", "compose file")):
        for item in manifest.get(collection, []):
            if not isinstance(item, Mapping):
                errors.append(f"{collection} entry is not a mapping.")
                continue
            raw_path = item.get("path")
            if not raw_path:
                errors.append(f"{collection} entry missing path.")
                continue
            target = resolve_path(str(raw_path), release_dir)
            if not target.exists():
                errors.append(f"{label} missing file → {target}")
                continue
            expected = item.get("sha256")
            if not expected:
                continue
            computed = compute_sha256(target)
            if computed != expected:
                errors.append(
                    f"{label} SHA mismatch for {target} "
                    f"(recorded {expected}, computed {computed})"
                )
|  | ||||
|  | ||||
def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the debug-symbol store referenced by the manifest.

    Checks the debug manifest's digest (both the value recorded in
    ``release.json`` and the ``.sha256`` sidecar), the presence of the
    debug directory, the declared entry count, and the digest of every
    artefact listed inside the debug manifest.  All problems are appended
    to *errors*.
    """
    debug = manifest.get("debugStore")
    if not isinstance(debug, Mapping):
        return  # No debug store declared; nothing to verify.
    manifest_path_str = debug.get("manifest")
    manifest_data: Optional[Mapping[str, Any]] = None
    if manifest_path_str:
        manifest_path = resolve_path(str(manifest_path_str), release_dir)
        if not manifest_path.exists():
            errors.append(f"Debug manifest missing → {manifest_path}")
        else:
            recorded_sha = debug.get("sha256")
            if recorded_sha:
                actual_sha = compute_sha256(manifest_path)
                if actual_sha != recorded_sha:
                    errors.append(
                        f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})"
                    )
            # The sidecar digest must agree with the digest recorded in release.json.
            sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
            sidecar_sha = parse_sha_file(sha_sidecar)
            if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha:
                errors.append(
                    f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}"
                )
            try:
                with manifest_path.open("r", encoding="utf-8") as handle:
                    manifest_data = json.load(handle)
            except json.JSONDecodeError as exc:
                errors.append(f"Debug manifest JSON invalid: {exc}")
    directory = debug.get("directory")
    if directory:
        debug_dir = resolve_path(str(directory), release_dir)
        if not debug_dir.exists():
            errors.append(f"Debug directory missing → {debug_dir}")

    # Only reached when the debug manifest parsed successfully above.
    if manifest_data:
        artifacts = manifest_data.get("artifacts")
        if not isinstance(artifacts, list) or not artifacts:
            errors.append("Debug manifest contains no artefacts.")
            return

        declared_entries = debug.get("entries")
        if isinstance(declared_entries, int) and declared_entries != len(artifacts):
            errors.append(
                f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts."
            )

        # Re-hash every artefact listed in the debug manifest.
        for artefact in artifacts:
            if not isinstance(artefact, Mapping):
                errors.append("Debug manifest artefact entry is not a mapping.")
                continue
            debug_path = artefact.get("debugPath")
            artefact_sha = artefact.get("sha256")
            if not debug_path or not artefact_sha:
                errors.append("Debug manifest artefact missing debugPath or sha256.")
                continue
            resolved_debug = resolve_path(str(debug_path), release_dir)
            if not resolved_debug.exists():
                errors.append(f"Debug artefact missing → {resolved_debug}")
                continue
            actual_sha = compute_sha256(resolved_debug)
            if actual_sha != artefact_sha:
                errors.append(
                    f"Debug artefact SHA mismatch for {resolved_debug} "
                    f"(recorded {artefact_sha}, computed {actual_sha})"
                )
|  | ||||
def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
    """Check a signature mapping: the file must exist and match its recorded digest."""
    raw_path = signature.get("path")
    if not raw_path:
        errors.append(f"{component_name}: {label} signature missing path.")
        return
    sig_path = resolve_path(str(raw_path), release_dir)
    if not sig_path.exists():
        errors.append(f"{component_name}: {label} signature missing → {sig_path}")
        return
    expected = signature.get("sha256")
    if not expected:
        return
    computed = compute_sha256(sig_path)
    if computed != expected:
        errors.append(
            f"{component_name}: {label} signature SHA mismatch for {sig_path} "
            f"(recorded {expected}, computed {computed})"
        )
|  | ||||
|  | ||||
def _verify_cli_signature(parent: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
    """Validate the optional ``signature`` object nested under *parent*."""
    signature = parent.get("signature")
    if isinstance(signature, Mapping):
        verify_signature(signature, release_dir, label, component_name, errors)
    elif signature is not None:
        errors.append(f"{component_name}: {label} signature must be an object.")


def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the CLI archives and SBOMs (plus their signatures) in the manifest.

    Each ``cli`` entry must be a mapping with a mandatory ``archive``
    object and an optional ``sbom`` object; both may carry a nested
    ``signature``.  Problems are appended to *errors*.
    """
    cli_entries = manifest.get("cli")
    if not cli_entries:
        return
    if not isinstance(cli_entries, list):
        errors.append("CLI manifest section must be a list.")
        return
    for entry in cli_entries:
        if not isinstance(entry, Mapping):
            errors.append("CLI entry must be a mapping.")
            continue
        runtime = entry.get("runtime", "<unknown>")
        component_name = f"cli[{runtime}]"
        archive = entry.get("archive")
        if not isinstance(archive, Mapping):
            errors.append(f"{component_name}: archive metadata missing or invalid.")
        else:
            verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
            _verify_cli_signature(archive, release_dir, "archive", component_name, errors)
        sbom = entry.get("sbom")
        if not sbom:
            continue
        if not isinstance(sbom, Mapping):
            errors.append(f"{component_name}: sbom entry must be a mapping.")
        else:
            verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
            _verify_cli_signature(sbom, release_dir, "sbom", component_name, errors)
|  | ||||
|  | ||||
def verify_release(release_dir: pathlib.Path) -> None:
    """Run all verification passes over *release_dir*.

    Raises:
        VerificationError: if the directory is missing or any pass
            records at least one failure (all failures are collected
            into a single bulleted message).
    """
    if not release_dir.exists():
        raise VerificationError(f"Release directory not found: {release_dir}")
    manifest = load_manifest(release_dir)
    errors: list[str] = []
    passes = (
        verify_manifest_hashes,
        verify_components,
        verify_cli_entries,
        verify_collections,
        verify_debug_store,
    )
    for check in passes:
        check(manifest, release_dir, errors)
    if errors:
        bullet_list = "\n - ".join(errors)
        raise VerificationError(f"Release verification failed:\n - {bullet_list}")
|  | ||||
|  | ||||
def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
    """Parse command-line options for the release verifier."""
    parser = argparse.ArgumentParser(description=__doc__)
    default_dir = pathlib.Path("out/release")
    parser.add_argument(
        "--release-dir",
        type=pathlib.Path,
        default=default_dir,
        help="Path to the release artefact directory (default: %(default)s)",
    )
    return parser.parse_args(argv)
|  | ||||
|  | ||||
def main(argv: list[str] | None = None) -> int:
    """CLI entry point: return 0 on success, 1 when verification fails."""
    options = parse_args(argv)
    target = options.release_dir.resolve()
    try:
        verify_release(target)
    except VerificationError as exc:
        print(str(exc), file=sys.stderr)
        return 1
    print(f"✅ Release artefacts verified OK in {options.release_dir}")
    return 0
|  | ||||
|  | ||||
# Script entry point: exit with main()'s status code.
if __name__ == "__main__":
    raise SystemExit(main())
| #!/usr/bin/env python3 | ||||
| """Verify release artefacts (SBOMs, provenance, signatures, manifest hashes).""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import argparse | ||||
| import hashlib | ||||
| import json | ||||
| import pathlib | ||||
| import sys | ||||
| from collections import OrderedDict | ||||
| from typing import Any, Mapping, Optional | ||||
|  | ||||
| from build_release import dump_yaml  # type: ignore import-not-found | ||||
|  | ||||
|  | ||||
class VerificationError(Exception):
    """Raised when release artefacts fail verification.

    ``verify_release`` raises a single instance whose message lists every
    individual failure found in the release directory.
    """
|  | ||||
|  | ||||
def compute_sha256(path: pathlib.Path) -> str:
    """Return the hex SHA-256 digest of *path*, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()
|  | ||||
|  | ||||
def parse_sha_file(path: pathlib.Path) -> Optional[str]:
    """Return the first token of a ``*.sha256`` sidecar, or ``None`` if absent/empty."""
    if not path.exists():
        return None
    tokens = path.read_text(encoding="utf-8").split()
    return tokens[0] if tokens else None
|  | ||||
|  | ||||
def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path:
    """Resolve a manifest path against the release tree.

    Windows-style separators are normalised first.  Absolute paths are
    returned untouched; relative ones are probed against the release
    directory and its two ancestors, returning the first existing match.
    """
    candidate = pathlib.Path(path_str.replace("\\", "/"))
    if candidate.is_absolute():
        return candidate
    bases = (release_dir, release_dir.parent, release_dir.parent.parent)
    for resolved in ((base / candidate).resolve() for base in bases):
        if resolved.exists():
            return resolved
    # Nothing exists: return the release-relative resolution so the caller
    # can surface a precise missing-file error.
    return (release_dir / candidate).resolve()
|  | ||||
|  | ||||
def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]:
    """Load ``release.json`` preserving key order; raise VerificationError on failure."""
    manifest_path = release_dir / "release.json"
    if not manifest_path.exists():
        raise VerificationError(f"Release manifest JSON missing at {manifest_path}")
    try:
        text = manifest_path.read_text(encoding="utf-8")
        return json.loads(text, object_pairs_hook=OrderedDict)
    except json.JSONDecodeError as exc:
        raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc
|  | ||||
|  | ||||
def _check_sidecar_digest(path: pathlib.Path, errors: list[str]) -> None:
    """Compare *path* against its ``.sha256`` sidecar, appending a mismatch error."""
    recorded = parse_sha_file(path.with_name(path.name + ".sha256"))
    actual = compute_sha256(path)
    if recorded and recorded != actual:
        errors.append(
            f"{path.name}.sha256 recorded {recorded} but file hashes to {actual}"
        )


def verify_manifest_hashes(
    manifest: Mapping[str, Any],
    release_dir: pathlib.Path,
    errors: list[str],
) -> None:
    """Check release.yaml/release.json sidecar digests and the embedded checksum.

    Appends a message to *errors* for every mismatch; returns early if
    ``release.yaml`` is missing entirely.
    """
    yaml_path = release_dir / "release.yaml"
    if not yaml_path.exists():
        errors.append(f"Missing release.yaml at {yaml_path}")
        return

    # Identical sidecar validation for both manifest serialisations.
    _check_sidecar_digest(yaml_path, errors)
    _check_sidecar_digest(release_dir / "release.json", errors)

    checksums = manifest.get("checksums")
    if isinstance(checksums, Mapping):
        recorded_digest = checksums.get("sha256")
        # The recorded digest covers the YAML dump of the manifest *without*
        # its checksums section, so strip it before re-serialising.
        base_manifest = OrderedDict(manifest)
        base_manifest.pop("checksums", None)
        yaml_without_checksums = dump_yaml(base_manifest)
        computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest()
        if recorded_digest != computed_digest:
            errors.append(
                "Manifest checksum mismatch: "
                f"recorded {recorded_digest}, computed {computed_digest}"
            )
|  | ||||
|  | ||||
def verify_artifact_entry(
    entry: Mapping[str, Any],
    release_dir: pathlib.Path,
    label: str,
    component_name: str,
    errors: list[str],
) -> None:
    """Check one artefact mapping (``path`` plus optional ``sha256``) for a component."""
    raw_path = entry.get("path")
    if not raw_path:
        errors.append(f"{component_name}: {label} missing 'path' field.")
        return
    target = resolve_path(str(raw_path), release_dir)
    if not target.exists():
        errors.append(f"{component_name}: {label} path does not exist → {target}")
        return
    expected = entry.get("sha256")
    if not expected:
        return
    computed = compute_sha256(target)
    if computed != expected:
        errors.append(
            f"{component_name}: {label} SHA mismatch for {target} "
            f"(recorded {expected}, computed {computed})"
        )
|  | ||||
|  | ||||
def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the per-component artefact entries declared in the manifest."""
    artefact_kinds = (
        ("sbom", "SBOM"),
        ("provenance", "provenance"),
        ("signature", "signature"),
        ("metadata", "metadata"),
    )
    for component in manifest.get("components", []):
        if not isinstance(component, Mapping):
            errors.append("Component entry is not a mapping.")
            continue
        name = str(component.get("name", "<unknown>"))
        for key, label in artefact_kinds:
            entry = component.get(key)
            if not entry:
                continue
            if isinstance(entry, Mapping):
                verify_artifact_entry(entry, release_dir, label, name, errors)
            else:
                errors.append(f"{name}: {label} entry must be a mapping.")
|  | ||||
|  | ||||
def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Check the chart and compose artefact lists (path existence + digests)."""
    for collection, label in (("charts", "chart"), ("compose", "compose file")):
        for item in manifest.get(collection, []):
            if not isinstance(item, Mapping):
                errors.append(f"{collection} entry is not a mapping.")
                continue
            raw_path = item.get("path")
            if not raw_path:
                errors.append(f"{collection} entry missing path.")
                continue
            target = resolve_path(str(raw_path), release_dir)
            if not target.exists():
                errors.append(f"{label} missing file → {target}")
                continue
            expected = item.get("sha256")
            if not expected:
                continue
            computed = compute_sha256(target)
            if computed != expected:
                errors.append(
                    f"{label} SHA mismatch for {target} "
                    f"(recorded {expected}, computed {computed})"
                )
|  | ||||
|  | ||||
def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the debug-symbol store referenced by the manifest.

    Checks the debug manifest's digest (both the value recorded in
    ``release.json`` and the ``.sha256`` sidecar), the presence of the
    debug directory, the declared entry count, and the digest of every
    artefact listed inside the debug manifest.  All problems are appended
    to *errors*.
    """
    debug = manifest.get("debugStore")
    if not isinstance(debug, Mapping):
        return  # No debug store declared; nothing to verify.
    manifest_path_str = debug.get("manifest")
    manifest_data: Optional[Mapping[str, Any]] = None
    if manifest_path_str:
        manifest_path = resolve_path(str(manifest_path_str), release_dir)
        if not manifest_path.exists():
            errors.append(f"Debug manifest missing → {manifest_path}")
        else:
            recorded_sha = debug.get("sha256")
            if recorded_sha:
                actual_sha = compute_sha256(manifest_path)
                if actual_sha != recorded_sha:
                    errors.append(
                        f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})"
                    )
            # The sidecar digest must agree with the digest recorded in release.json.
            sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
            sidecar_sha = parse_sha_file(sha_sidecar)
            if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha:
                errors.append(
                    f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}"
                )
            try:
                with manifest_path.open("r", encoding="utf-8") as handle:
                    manifest_data = json.load(handle)
            except json.JSONDecodeError as exc:
                errors.append(f"Debug manifest JSON invalid: {exc}")
    directory = debug.get("directory")
    if directory:
        debug_dir = resolve_path(str(directory), release_dir)
        if not debug_dir.exists():
            errors.append(f"Debug directory missing → {debug_dir}")

    # Only reached when the debug manifest parsed successfully above.
    if manifest_data:
        artifacts = manifest_data.get("artifacts")
        if not isinstance(artifacts, list) or not artifacts:
            errors.append("Debug manifest contains no artefacts.")
            return

        declared_entries = debug.get("entries")
        if isinstance(declared_entries, int) and declared_entries != len(artifacts):
            errors.append(
                f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts."
            )

        # Re-hash every artefact listed in the debug manifest.
        for artefact in artifacts:
            if not isinstance(artefact, Mapping):
                errors.append("Debug manifest artefact entry is not a mapping.")
                continue
            debug_path = artefact.get("debugPath")
            artefact_sha = artefact.get("sha256")
            if not debug_path or not artefact_sha:
                errors.append("Debug manifest artefact missing debugPath or sha256.")
                continue
            resolved_debug = resolve_path(str(debug_path), release_dir)
            if not resolved_debug.exists():
                errors.append(f"Debug artefact missing → {resolved_debug}")
                continue
            actual_sha = compute_sha256(resolved_debug)
            if actual_sha != artefact_sha:
                errors.append(
                    f"Debug artefact SHA mismatch for {resolved_debug} "
                    f"(recorded {artefact_sha}, computed {actual_sha})"
                )
|  | ||||
def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
    """Check a signature mapping: the file must exist and match its recorded digest."""
    raw_path = signature.get("path")
    if not raw_path:
        errors.append(f"{component_name}: {label} signature missing path.")
        return
    sig_path = resolve_path(str(raw_path), release_dir)
    if not sig_path.exists():
        errors.append(f"{component_name}: {label} signature missing → {sig_path}")
        return
    expected = signature.get("sha256")
    if not expected:
        return
    computed = compute_sha256(sig_path)
    if computed != expected:
        errors.append(
            f"{component_name}: {label} signature SHA mismatch for {sig_path} "
            f"(recorded {expected}, computed {computed})"
        )
|  | ||||
|  | ||||
def _verify_cli_signature(parent: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
    """Validate the optional ``signature`` object nested under *parent*."""
    signature = parent.get("signature")
    if isinstance(signature, Mapping):
        verify_signature(signature, release_dir, label, component_name, errors)
    elif signature is not None:
        errors.append(f"{component_name}: {label} signature must be an object.")


def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
    """Validate the CLI archives and SBOMs (plus their signatures) in the manifest.

    Each ``cli`` entry must be a mapping with a mandatory ``archive``
    object and an optional ``sbom`` object; both may carry a nested
    ``signature``.  Problems are appended to *errors*.
    """
    cli_entries = manifest.get("cli")
    if not cli_entries:
        return
    if not isinstance(cli_entries, list):
        errors.append("CLI manifest section must be a list.")
        return
    for entry in cli_entries:
        if not isinstance(entry, Mapping):
            errors.append("CLI entry must be a mapping.")
            continue
        runtime = entry.get("runtime", "<unknown>")
        component_name = f"cli[{runtime}]"
        archive = entry.get("archive")
        if not isinstance(archive, Mapping):
            errors.append(f"{component_name}: archive metadata missing or invalid.")
        else:
            verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
            _verify_cli_signature(archive, release_dir, "archive", component_name, errors)
        sbom = entry.get("sbom")
        if not sbom:
            continue
        if not isinstance(sbom, Mapping):
            errors.append(f"{component_name}: sbom entry must be a mapping.")
        else:
            verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
            _verify_cli_signature(sbom, release_dir, "sbom", component_name, errors)
|  | ||||
|  | ||||
def verify_release(release_dir: pathlib.Path) -> None:
    """Run every verification pass over *release_dir*.

    Loads the manifest, collects problems from each check into one list, and
    raises ``VerificationError`` with a bulleted summary if anything failed.
    """
    if not release_dir.exists():
        raise VerificationError(f"Release directory not found: {release_dir}")
    manifest = load_manifest(release_dir)
    problems: list[str] = []
    # Order matters only for readability of the report; checks are independent.
    for check in (
        verify_manifest_hashes,
        verify_components,
        verify_cli_entries,
        verify_collections,
        verify_debug_store,
    ):
        check(manifest, release_dir, problems)
    if problems:
        joined = "\n - ".join(problems)
        raise VerificationError(f"Release verification failed:\n - {joined}")
|  | ||||
|  | ||||
| def parse_args(argv: list[str] | None = None) -> argparse.Namespace: | ||||
|     parser = argparse.ArgumentParser(description=__doc__) | ||||
|     parser.add_argument( | ||||
|         "--release-dir", | ||||
|         type=pathlib.Path, | ||||
|         default=pathlib.Path("out/release"), | ||||
|         help="Path to the release artefact directory (default: %(default)s)", | ||||
|     ) | ||||
|     return parser.parse_args(argv) | ||||
|  | ||||
|  | ||||
def main(argv: list[str] | None = None) -> int:
    """CLI entry point: verify the release directory.

    Returns 0 when all artefacts check out; prints the failure report to
    stderr and returns 1 otherwise.
    """
    options = parse_args(argv)
    target = options.release_dir.resolve()
    try:
        verify_release(target)
    except VerificationError as exc:
        print(str(exc), file=sys.stderr)
        return 1
    # Success message shows the directory as the user supplied it.
    print(f"✅ Release artefacts verified OK in {options.release_dir}")
    return 0
|  | ||||
|  | ||||
# Allow direct execution as a script; exit status mirrors main()'s return.
if __name__ == "__main__":
    sys.exit(main())
|   | ||||
		Reference in New Issue
	
	Block a user