- Introduced a comprehensive deployment guide for AdvisoryAI, detailing local builds, remote inference toggles (see the override sketch after this list), and scaling guidance.
- Created a multi-role Dockerfile for building WebService and Worker images.
- Added a docker-compose file for local and offline deployment.
- Implemented a Helm chart for Kubernetes deployment with persistence and remote inference options.
- Established a new API endpoint `/advisories/summary` for deterministic summaries of observations and linksets.
- Introduced a JSON schema for risk profiles and a validator to ensure compliance with the schema.
- Added unit tests for the risk profile validator to ensure functionality and error handling.
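As a minimal sketch of the remote inference toggle, the web service can be flipped from local to remote mode via a compose override; the variable names mirror the commented examples in the compose file below, while the base address, API key, and timeout values are placeholders to replace with real settings:

# docker-compose.override.yaml (hypothetical) — switches the web service to a remote inference host
services:
  advisoryai-web:
    environment:
      ADVISORYAI__INFERENCE__MODE: "Remote"
      ADVISORYAI__INFERENCE__REMOTE__BASEADDRESS: "https://inference.example.com"
      ADVISORYAI__INFERENCE__REMOTE__ENDPOINT: "/v1/inference"
      ADVISORYAI__INFERENCE__REMOTE__APIKEY: "set-me"
      ADVISORYAI__INFERENCE__REMOTE__TIMEOUT: "00:00:30"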
56 lines
2.0 KiB
YAML
version: "3.9"

# Local/offline deployment for AdvisoryAI WebService + Worker.

services:
  advisoryai-web:
    build:
      context: ../..
      dockerfile: ops/advisory-ai/Dockerfile
      args:
        PROJECT: src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj
        APP_DLL: StellaOps.AdvisoryAI.WebService.dll
    image: stellaops-advisoryai-web:dev
    depends_on:
      - advisoryai-worker
    environment:
      ASPNETCORE_URLS: "http://0.0.0.0:8080"
      ADVISORYAI__QUEUE__DIRECTORYPATH: "/app/data/queue"
      ADVISORYAI__STORAGE__PLANCACHEDIRECTORY: "/app/data/plans"
      ADVISORYAI__STORAGE__OUTPUTDIRECTORY: "/app/data/outputs"
      ADVISORYAI__INFERENCE__MODE: "Local" # switch to Remote to call an external inference host
      # ADVISORYAI__INFERENCE__REMOTE__BASEADDRESS: "https://inference.example.com"
      # ADVISORYAI__INFERENCE__REMOTE__ENDPOINT: "/v1/inference"
      # ADVISORYAI__INFERENCE__REMOTE__APIKEY: "set-me"
      # ADVISORYAI__INFERENCE__REMOTE__TIMEOUT: "00:00:30"
      # Example SBOM context feed; optional.
      # ADVISORYAI__SBOMBASEADDRESS: "https://sbom.local/v1/sbom/context"
      # ADVISORYAI__SBOMTENANT: "tenant-a"
      # ADVISORYAI__GUARDRAILS__PHRASESLIST: "/app/etc/guardrails/phrases.txt"
    volumes:
      - advisoryai-data:/app/data
      - ./etc:/app/etc:ro
    ports:
      - "7071:8080"
    restart: unless-stopped

  advisoryai-worker:
    build:
      context: ../..
      dockerfile: ops/advisory-ai/Dockerfile
      args:
        PROJECT: src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj
        APP_DLL: StellaOps.AdvisoryAI.Worker.dll
    image: stellaops-advisoryai-worker:dev
    environment:
      ADVISORYAI__QUEUE__DIRECTORYPATH: "/app/data/queue"
      ADVISORYAI__STORAGE__PLANCACHEDIRECTORY: "/app/data/plans"
      ADVISORYAI__STORAGE__OUTPUTDIRECTORY: "/app/data/outputs"
      ADVISORYAI__INFERENCE__MODE: "Local"
    volumes:
      - advisoryai-data:/app/data
      - ./etc:/app/etc:ro
    restart: unless-stopped

volumes:
  advisoryai-data:
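# Usage sketch (assumes this compose file lives under ops/advisory-ai/, matching the
# Dockerfile path above; adjust the -f path if the checkout differs):
#   docker compose -f ops/advisory-ai/docker-compose.yaml up --build -d
#   docker compose -f ops/advisory-ai/docker-compose.yaml logs -f advisoryai-web
# Per the port mapping above, the web service is then reachable at http://localhost:7071.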