# Local/offline deployment for AdvisoryAI WebService + Worker.
#
# Two services built from the same Dockerfile (project/DLL selected via build
# args) share one named volume for the file-based queue, plan cache, and
# outputs; ./etc is mounted read-only into both for auxiliary config.
version: "3.9"

services:
  advisoryai-web:
    build:
      context: ../..
      dockerfile: ops/advisory-ai/Dockerfile
      args:
        PROJECT: src/AdvisoryAI/StellaOps.AdvisoryAI.WebService/StellaOps.AdvisoryAI.WebService.csproj
        APP_DLL: StellaOps.AdvisoryAI.WebService.dll
    image: stellaops-advisoryai-web:dev
    depends_on:
      - advisoryai-worker
    environment:
      ASPNETCORE_URLS: "http://0.0.0.0:8080"
      # Shared file-system paths (same named volume as the worker).
      ADVISORYAI__QUEUE__DIRECTORYPATH: "/app/data/queue"
      ADVISORYAI__STORAGE__PLANCACHEDIRECTORY: "/app/data/plans"
      ADVISORYAI__STORAGE__OUTPUTDIRECTORY: "/app/data/outputs"
      ADVISORYAI__INFERENCE__MODE: "Local"  # switch to Remote to call an external inference host
      # ADVISORYAI__INFERENCE__REMOTE__BASEADDRESS: "https://inference.example.com"
      # ADVISORYAI__INFERENCE__REMOTE__ENDPOINT: "/v1/inference"
      # ADVISORYAI__INFERENCE__REMOTE__APIKEY: "set-me"
      # ADVISORYAI__INFERENCE__REMOTE__TIMEOUT: "00:00:30"
      # Example SBOM context feed; optional.
      # ADVISORYAI__SBOMBASEADDRESS: "https://sbom.local/v1/sbom/context"
      # ADVISORYAI__SBOMTENANT: "tenant-a"
      # ADVISORYAI__GUARDRAILS__PHRASESLIST: "/app/etc/guardrails/phrases.txt"
    volumes:
      - advisoryai-data:/app/data
      - ./etc:/app/etc:ro
    ports:
      - "7071:8080"  # host 7071 -> container 8080 (ASPNETCORE_URLS above)
    restart: unless-stopped

  advisoryai-worker:
    build:
      context: ../..
      dockerfile: ops/advisory-ai/Dockerfile
      args:
        PROJECT: src/AdvisoryAI/StellaOps.AdvisoryAI.Worker/StellaOps.AdvisoryAI.Worker.csproj
        APP_DLL: StellaOps.AdvisoryAI.Worker.dll
    image: stellaops-advisoryai-worker:dev
    environment:
      # Must match the web service's paths so queue items and plans are shared.
      ADVISORYAI__QUEUE__DIRECTORYPATH: "/app/data/queue"
      ADVISORYAI__STORAGE__PLANCACHEDIRECTORY: "/app/data/plans"
      ADVISORYAI__STORAGE__OUTPUTDIRECTORY: "/app/data/outputs"
      ADVISORYAI__INFERENCE__MODE: "Local"
    volumes:
      - advisoryai-data:/app/data
      - ./etc:/app/etc:ro
    restart: unless-stopped

volumes:
  advisoryai-data: