Compare commits: 11597679ed ... main
162 Commits

e0ec5261de  39359da171  17613acf57  ed3079543c  aa70af062e  d71853ad7e  ad7fbc47a1  702c3106a8
4dfa1b8e05  b8b2d83f4a  ef6ac36323  0103defcff  82a49f6743  2a06f780cf  223843f1d1  deb82b4f03
b9f71fc7e9  43e2af88f6  4231305fec  8197588e74  2c2bbf1005  5540ce9430  40362de568  02772c7a27
9a08d10b89  7503c19b8f  e59921374e  491e883653  5590a99a1a  7ac70ece71  dac8e10e36  b444284be5
fda92af9bc  fcb5ffe25d  84d97fd22c  ef933db0d8  c8a871dd30  396e9b75a4  21337f4de6  541a936d03
342c35f8ce  56e2dc01ee  7e384ab610  e47627cfff  5146204f1b  3ba7157b00  4602ccc3a3  0536a4f7d4
dfaa2079aa  00bc4f79dd  634233dfed  df94136727  aff0ceb2fe  9a1572e11e  53503cb407  5d398ec442
292a6e94e8  22d67f203f  f897808c54  1e0e61659f  01a2a2dc16  a216d7eea4  8a4edee665  2e98f6f3b2
14746936a9  94ea6c5e88  ba2f015184  b9c288782b  b7b27c8740  6928124d33  d55a353481  ad193449a7
2595094bb7  80b8254763  4b3db9ca85  09c7155f1b  da315965ff  efe9bd8cfe  3c6e14fca5  3698ebf4a8
ce8cdcd23d  0ada1b583f  439f10966b  5fc469ad98  edc91ea96f  5b57b04484  91f3610b9d  8779e9226f
951a38d561  43882078a4  2eafe98d44  6410a6d082  f85d53888c  1fcf550d3a  0dc71e760a  811f35cba7
00d2c99af9  7d5250238c  28823a8960  b4235c134c  dee252940b  8bbfe4d2d2  394b57f6bf  3a2100aa78
417ef83202  2170a58734  415eff1207  b55d9fa68d  5a480a3c2a  4391f35d8a  b1f40945b7  41864227d2
8137503221  08dab053c0  7ce83270d0  505fe7a885  0cb5c9abfb  d59cc816c1  8c8f0c632d  4344020dd1
b058dbe031  3411e825cd  9202cd7da8  00c41790f4  2e70c9fdb6  d233fa3529  e2e404e705  01f4943ab9
233873f620  f1a39c4ce3  6e45066e37  e00f6365da  999e26a48e  d776e93b16  564df71bfb  e1f1bef4c1
3f3473ee3a  efaf3cb789  ce5ec9c158  ab22181e8b  1995883476  0987cd6ac8  b83aa1aa0b  ce1f282ce0
b8b493913a  49922dff5a  92bc4d3a07  0ad4777259  2bd189387e  3a92c77a04  b7059d523e  96e5646977
a3c7fe5e88  199aaf74d8  f30805ad7f  689c656f20  108d1c64b3  bc0762e97d  3d01bf9edc  68bc53a07b
4b124fb056  7c24ed96ee
@@ -25,7 +25,10 @@
       "Bash(timeout /t)",
       "Bash(dotnet clean:*)",
       "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
-      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")"
+      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")",
+      "Bash(rm:*)",
+      "Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")",
+      "Bash(del \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\SPRINT_0510_0001_0001_airgap.md\")"
     ],
     "deny": [],
     "ask": []
.config/dotnet-tools.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-stryker": {
      "version": "4.4.0",
      "commands": [
        "stryker"
      ]
    }
  }
}
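The manifest above pins dotnet-stryker 4.4.0 as a repo-local tool. A minimal sketch of a CI step pair that consumes it, modeled on the mutation-testing job added later in this diff (the target path is one of the projects that job already covers):

```yaml
# Sketch only: restore repo-local tools from .config/dotnet-tools.json, then run
# Stryker against one library; paths mirror the mutation-testing job in this diff.
- name: Restore tools
  run: dotnet tool restore

- name: Mutation test Scanner.Core (example)
  run: |
    cd src/Scanner/__Libraries/StellaOps.Scanner.Core
    dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core
  continue-on-error: true
```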
.dockerignore (new file, 23 lines)
@@ -0,0 +1,23 @@
.git
.gitignore
.gitea
.venv
bin
obj
**/bin
**/obj
local-nugets
.nuget
**/node_modules
**/dist
**/coverage
**/*.user
**/*.suo
**/*.cache
**/.vscode
**/.idea
**/.DS_Store
**/TestResults
**/out
**/packages
/tmp
.gitattributes (vendored, +3 lines)
@@ -1,2 +1,5 @@
 # Ensure analyzer fixture assets keep LF endings for deterministic hashes
 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf
+
+# Ensure reachability sample assets keep LF endings for deterministic hashes
+tests/reachability/samples-public/** text eol=lf
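To confirm the new attribute resolves for a given path, `git check-attr` can be wired into a validation step; a sketch (the sample file name is hypothetical):

```yaml
# Sketch: print the text/eol attributes git resolves for a reachability sample.
# Replace example.json with a real file under tests/reachability/samples-public/.
- name: Check LF attributes
  run: git check-attr text eol -- tests/reachability/samples-public/example.json
```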
.gitea/AGENTS.md (new file, 22 lines)
@@ -0,0 +1,22 @@
# .gitea AGENTS

## Purpose & Scope
- Working directory: `.gitea/` (CI workflows, templates, pipeline configs).
- Roles: DevOps engineer, QA automation.

## Required Reading (treat as read before DOING)
- `docs/README.md`
- `docs/modules/ci/architecture.md`
- `docs/modules/devops/architecture.md`
- Relevant sprint file(s).

## Working Agreements
- Keep workflows deterministic and offline-friendly.
- Pin versions for tooling where possible.
- Use UTC timestamps in comments/logs.
- Avoid adding external network calls unless the sprint explicitly requires them.
- Record workflow changes in the sprint Execution Log and Decisions & Risks.

## Validation
- Manually validate YAML structure and paths.
- Ensure workflow paths match repository layout.
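A step sketch that follows these agreements: the action version is pinned, the only network access is the checkout itself, and the log line uses a UTC timestamp. Names are illustrative, not part of any existing workflow.

```yaml
# Sketch of an offline-friendly, pinned, UTC-logging step per the agreements above.
steps:
  - name: Checkout (pinned)
    uses: actions/checkout@v4
  - name: Record start time (UTC)
    run: echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] workflow validation started"
```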
.gitea/workflows/advisory-ai-release.yml (new file, 70 lines)
@@ -0,0 +1,70 @@
name: Advisory AI Feed Release

on:
  workflow_dispatch:
    inputs:
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/AdvisoryAI/feeds/**'
      - 'docs/samples/advisory-feeds/**'

jobs:
  package-feeds:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Fallback to dev key when secret is absent
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[warn] COSIGN_PRIVATE_KEY_B64 not set; using dev key for non-production"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          # Manual override
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package advisory feeds
        run: |
          chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
          ops/deployment/advisory-ai/package-advisory-feeds.sh

      - name: Generate SBOM
        run: |
          # Install syft
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0

          # Generate SBOM for feed bundle
          syft dir:out/advisory-ai/feeds/stage \
            -o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
            --name advisory-feeds

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: advisory-feeds-${{ github.run_number }}
          path: |
            out/advisory-ai/feeds/advisory-feeds.tar.gz
            out/advisory-ai/feeds/advisory-feeds.manifest.json
            out/advisory-ai/feeds/advisory-feeds.manifest.dsse.json
            out/advisory-ai/feeds/advisory-feeds.sbom.json
            out/advisory-ai/feeds/provenance.json
          if-no-files-found: warn
          retention-days: 30
.gitea/workflows/aoc-backfill-release.yml (new file, 83 lines)
@@ -0,0 +1,83 @@
name: AOC Backfill Release

on:
  workflow_dispatch:
    inputs:
      dataset_hash:
        description: 'Dataset hash from dev rehearsal (leave empty for dev mode)'
        required: false
        default: ''
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'

jobs:
  package-backfill:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Restore AOC CLI
        run: dotnet restore src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[info] No production key; using dev key"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package AOC backfill release
        run: |
          chmod +x ops/devops/aoc/package-backfill-release.sh
          DATASET_HASH="${{ github.event.inputs.dataset_hash }}" \
            ops/devops/aoc/package-backfill-release.sh
        env:
          DATASET_HASH: ${{ github.event.inputs.dataset_hash }}

      - name: Generate SBOM with syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
          syft dir:out/aoc/cli \
            -o spdx-json=out/aoc/aoc-backfill-runner.sbom.json \
            --name aoc-backfill-runner || true

      - name: Verify checksums
        run: |
          cd out/aoc
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: aoc-backfill-release-${{ github.run_number }}
          path: |
            out/aoc/aoc-backfill-runner.tar.gz
            out/aoc/aoc-backfill-runner.manifest.json
            out/aoc/aoc-backfill-runner.sbom.json
            out/aoc/aoc-backfill-runner.provenance.json
            out/aoc/aoc-backfill-runner.dsse.json
            out/aoc/SHA256SUMS
          if-no-files-found: warn
          retention-days: 30
@@ -56,10 +56,41 @@ jobs:
           dotnet build src/Authority/StellaOps.Authority.Ingestion/StellaOps.Authority.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
           dotnet build src/Excititor/StellaOps.Excititor.Ingestion/StellaOps.Excititor.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true

-      - name: Run analyzer tests
+      - name: Run analyzer tests with coverage
         run: |
           mkdir -p $ARTIFACT_DIR
-          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release --logger "trx;LogFileName=aoc-tests.trx" --results-directory $ARTIFACT_DIR
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-analyzers-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC library tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-lib-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC CLI tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Cli.Tests/StellaOps.Aoc.Cli.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-cli-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Generate coverage report
+        run: |
+          dotnet tool install --global dotnet-reportgenerator-globaltool || true
+          reportgenerator \
+            -reports:"$ARTIFACT_DIR/**/coverage.cobertura.xml" \
+            -targetdir:"$ARTIFACT_DIR/coverage-report" \
+            -reporttypes:"Html;Cobertura;TextSummary" || true
+          if [ -f "$ARTIFACT_DIR/coverage-report/Summary.txt" ]; then
+            cat "$ARTIFACT_DIR/coverage-report/Summary.txt"
+          fi

       - name: Upload artifacts
         uses: actions/upload-artifact@v4
@@ -96,13 +127,37 @@ jobs:
       - name: Run AOC verify
         env:
           STAGING_MONGO_URI: ${{ secrets.STAGING_MONGO_URI || vars.STAGING_MONGO_URI }}
+          STAGING_POSTGRES_URI: ${{ secrets.STAGING_POSTGRES_URI || vars.STAGING_POSTGRES_URI }}
         run: |
-          if [ -z "${STAGING_MONGO_URI:-}" ]; then
-            echo "::warning::STAGING_MONGO_URI not set; skipping aoc verify"
+          mkdir -p $ARTIFACT_DIR
+
+          # Prefer PostgreSQL, fall back to MongoDB (legacy)
+          if [ -n "${STAGING_POSTGRES_URI:-}" ]; then
+            echo "Using PostgreSQL for AOC verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "$STAGING_POSTGRES_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          elif [ -n "${STAGING_MONGO_URI:-}" ]; then
+            echo "Using MongoDB for AOC verification (deprecated)"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --mongo "$STAGING_MONGO_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          else
+            echo "::warning::Neither STAGING_POSTGRES_URI nor STAGING_MONGO_URI set; running dry-run verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "placeholder" \
+              --dry-run \
+              --verbose
             exit 0
           fi
-          mkdir -p $ARTIFACT_DIR
-          dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify --since "$AOC_VERIFY_SINCE" --mongo "$STAGING_MONGO_URI" --output "$ARTIFACT_DIR/aoc-verify.json" --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" || VERIFY_EXIT=$?
+
           if [ -n "${VERIFY_EXIT:-}" ] && [ "${VERIFY_EXIT}" -ne 0 ]; then
             echo "::error::AOC verify reported violations"; exit ${VERIFY_EXIT}
           fi
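The step above prefers PostgreSQL, falls back to MongoDB, and otherwise runs a dry-run verification. For reference, a standalone sketch of that dry-run branch using the same CLI flags as the hunk (the "placeholder" connection string is intentional and never dialed):

```yaml
# Sketch: AOC verify in dry-run mode when no staging database is configured.
- name: AOC verify (dry run)
  run: |
    dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
      --since "$AOC_VERIFY_SINCE" \
      --postgres "placeholder" \
      --dry-run \
      --verbose
```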
.gitea/workflows/benchmark-vs-competitors.yml (new file, 173 lines)
@@ -0,0 +1,173 @@
name: Benchmark vs Competitors

on:
  schedule:
    # Run weekly on Sunday at 00:00 UTC
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      competitors:
        description: 'Comma-separated list of competitors to benchmark against'
        required: false
        default: 'trivy,grype'
      corpus_size:
        description: 'Number of images from corpus to test'
        required: false
        default: '50'
  push:
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/**'
      - 'src/__Tests/__Benchmarks/competitors/**'

env:
  DOTNET_VERSION: '10.0.x'
  TRIVY_VERSION: '0.50.1'
  GRYPE_VERSION: '0.74.0'
  SYFT_VERSION: '0.100.0'

jobs:
  benchmark:
    name: Run Competitive Benchmark
    runs-on: ubuntu-latest
    timeout-minutes: 60

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Install Trivy
        run: |
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
          trivy --version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
          grype version

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
          syft version

      - name: Build benchmark library
        run: |
          dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmark/StellaOps.Scanner.Benchmark.csproj -c Release

      - name: Load corpus manifest
        id: corpus
        run: |
          echo "corpus_path=src/__Tests/__Benchmarks/competitors/corpus/corpus-manifest.json" >> $GITHUB_OUTPUT

      - name: Run Stella Ops scanner
        run: |
          echo "Running Stella Ops scanner on corpus..."
          # TODO: Implement actual scan command
          # stella scan --corpus ${{ steps.corpus.outputs.corpus_path }} --output src/__Tests/__Benchmarks/results/stellaops.json

      - name: Run Trivy on corpus
        run: |
          echo "Running Trivy on corpus images..."
          # Process each image in corpus
          mkdir -p src/__Tests/__Benchmarks/results/trivy

      - name: Run Grype on corpus
        run: |
          echo "Running Grype on corpus images..."
          mkdir -p src/__Tests/__Benchmarks/results/grype

      - name: Calculate metrics
        run: |
          echo "Calculating precision/recall/F1 metrics..."
          # dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
          #   --calculate-metrics \
          #   --ground-truth ${{ steps.corpus.outputs.corpus_path }} \
          #   --results src/__Tests/__Benchmarks/results/ \
          #   --output src/__Tests/__Benchmarks/results/metrics.json

      - name: Generate comparison report
        run: |
          echo "Generating comparison report..."
          mkdir -p src/__Tests/__Benchmarks/results
          cat > src/__Tests/__Benchmarks/results/summary.json << 'EOF'
          {
            "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "competitors": ["trivy", "grype", "syft"],
            "status": "pending_implementation"
          }
          EOF

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.run_id }}
          path: src/__Tests/__Benchmarks/results/
          retention-days: 90

      - name: Update claims index
        if: github.ref == 'refs/heads/main'
        run: |
          echo "Updating claims index with new evidence..."
          # dotnet run --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmark \
          #   --update-claims \
          #   --metrics src/__Tests/__Benchmarks/results/metrics.json \
          #   --output docs/claims-index.md

      - name: Comment on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const metrics = fs.existsSync('src/__Tests/__Benchmarks/results/metrics.json')
              ? JSON.parse(fs.readFileSync('src/__Tests/__Benchmarks/results/metrics.json', 'utf8'))
              : { status: 'pending' };

            const body = `## Benchmark Results

            | Tool | Precision | Recall | F1 Score |
            |------|-----------|--------|----------|
            | Stella Ops | ${metrics.stellaops?.precision || 'N/A'} | ${metrics.stellaops?.recall || 'N/A'} | ${metrics.stellaops?.f1 || 'N/A'} |
            | Trivy | ${metrics.trivy?.precision || 'N/A'} | ${metrics.trivy?.recall || 'N/A'} | ${metrics.trivy?.f1 || 'N/A'} |
            | Grype | ${metrics.grype?.precision || 'N/A'} | ${metrics.grype?.recall || 'N/A'} | ${metrics.grype?.f1 || 'N/A'} |

            [Full report](${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID})
            `;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

  verify-claims:
    name: Verify Claims
    runs-on: ubuntu-latest
    needs: benchmark
    if: github.ref == 'refs/heads/main'

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download benchmark results
        uses: actions/download-artifact@v4
        with:
          name: benchmark-results-${{ github.run_id }}
          path: src/__Tests/__Benchmarks/results/

      - name: Verify all claims
        run: |
          echo "Verifying all claims against new evidence..."
          # stella benchmark verify --all

      - name: Report claim status
        run: |
          echo "Generating claim verification report..."
          # Output claim status summary
@@ -93,12 +93,12 @@ jobs:
       - name: Ensure binary manifests are up to date
         run: |
           python3 scripts/update-binary-manifests.py
-          git diff --exit-code local-nugets/manifest.json vendor/manifest.json offline/feeds/manifest.json
+          git diff --exit-code .nuget/manifest.json vendor/manifest.json offline/feeds/manifest.json

-      - name: Ensure Mongo test URI configured
+      - name: Ensure PostgreSQL test URI configured
         run: |
-          if [ -z "${STELLAOPS_TEST_MONGO_URI:-}" ]; then
-            echo "::error::STELLAOPS_TEST_MONGO_URI must be provided via repository secrets or variables for Graph Indexer integration tests."
+          if [ -z "${STELLAOPS_TEST_POSTGRES_CONNECTION:-}" ]; then
+            echo "::error::STELLAOPS_TEST_POSTGRES_CONNECTION must be provided via repository secrets or variables for integration tests."
             exit 1
           fi
@@ -575,6 +575,209 @@ PY
           if-no-files-found: ignore
           retention-days: 7
+
+  # ============================================================================
+  # Quality Gates Foundation (Sprint 0350)
+  # ============================================================================
+  quality-gates:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    permissions:
+      contents: read
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Reachability quality gate
+        id: reachability
+        run: |
+          set -euo pipefail
+          echo "::group::Computing reachability metrics"
+          if [ -f scripts/ci/compute-reachability-metrics.sh ]; then
+            chmod +x scripts/ci/compute-reachability-metrics.sh
+            METRICS=$(./scripts/ci/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "Reachability metrics: $METRICS"
+          else
+            echo "Reachability script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: TTFS regression gate
+        id: ttfs
+        run: |
+          set -euo pipefail
+          echo "::group::Computing TTFS metrics"
+          if [ -f scripts/ci/compute-ttfs-metrics.sh ]; then
+            chmod +x scripts/ci/compute-ttfs-metrics.sh
+            METRICS=$(./scripts/ci/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "TTFS metrics: $METRICS"
+          else
+            echo "TTFS script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Performance SLO gate
+        id: slo
+        run: |
+          set -euo pipefail
+          echo "::group::Enforcing performance SLOs"
+          if [ -f scripts/ci/enforce-performance-slos.sh ]; then
+            chmod +x scripts/ci/enforce-performance-slos.sh
+            ./scripts/ci/enforce-performance-slos.sh --warn-only || true
+          else
+            echo "Performance SLO script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: RLS policy validation
+        id: rls
+        run: |
+          set -euo pipefail
+          echo "::group::Validating RLS policies"
+          if [ -f deploy/postgres-validation/001_validate_rls.sql ]; then
+            echo "RLS validation script found"
+            # Check that all tenant-scoped schemas have RLS enabled
+            SCHEMAS=("scheduler" "vex" "authority" "notify" "policy" "findings_ledger")
+            for schema in "${SCHEMAS[@]}"; do
+              echo "Checking RLS for schema: $schema"
+              # Validate migration files exist
+              if ls src/*/Migrations/*enable_rls*.sql 2>/dev/null | grep -q "$schema"; then
+                echo "  ✓ RLS migration exists for $schema"
+              fi
+            done
+            echo "RLS validation passed (static check)"
+          else
+            echo "RLS validation script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Upload quality gate results
+        uses: actions/upload-artifact@v4
+        with:
+          name: quality-gate-results
+          path: |
+            scripts/ci/*.json
+            scripts/ci/*.yaml
+          if-no-files-found: ignore
+          retention-days: 14
+
+  security-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'pull_request' || github.event_name == 'schedule'
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore dependencies
+        run: dotnet restore src/__Tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj
+
+      - name: Run OWASP security tests
+        run: |
+          set -euo pipefail
+          echo "::group::Running security tests"
+          dotnet test src/__Tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj \
+            --no-restore \
+            --logger "trx;LogFileName=security-tests.trx" \
+            --results-directory ./security-test-results \
+            --filter "Category=Security" \
+            --verbosity normal
+          echo "::endgroup::"
+
+      - name: Upload security test results
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: security-test-results
+          path: security-test-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+  mutation-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'schedule' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'mutation-test'))
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore tools
+        run: dotnet tool restore
+
+      - name: Run mutation tests - Scanner.Core
+        id: scanner-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Scanner.Core"
+          cd src/Scanner/__Libraries/StellaOps.Scanner.Core
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Policy.Engine
+        id: policy-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Policy.Engine"
+          cd src/Policy/__Libraries/StellaOps.Policy
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/policy-engine || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Authority.Core
+        id: authority-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Authority.Core"
+          cd src/Authority/StellaOps.Authority
+          dotnet stryker --reporter json --reporter html --output ../../mutation-results/authority-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Upload mutation results
+        uses: actions/upload-artifact@v4
+        with:
+          name: mutation-testing-results
+          path: mutation-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+      - name: Check mutation thresholds
+        run: |
+          set -euo pipefail
+          echo "Checking mutation score thresholds..."
+          # Parse JSON results and check against thresholds
+          if [ -f "mutation-results/scanner-core/mutation-report.json" ]; then
+            SCORE=$(jq '.mutationScore // 0' mutation-results/scanner-core/mutation-report.json)
+            echo "Scanner.Core mutation score: $SCORE%"
+            if (( $(echo "$SCORE < 65" | bc -l) )); then
+              echo "::error::Scanner.Core mutation score below threshold"
+            fi
+          fi

   sealed-mode-ci:
     runs-on: ubuntu-22.04
     needs: build-test
.gitea/workflows/connector-fixture-drift.yml (new file, 247 lines)
@@ -0,0 +1,247 @@
# -----------------------------------------------------------------------------
# connector-fixture-drift.yml
# Sprint: SPRINT_5100_0007_0005_connector_fixtures
# Task: CONN-FIX-016
# Description: Weekly schema drift detection for connector fixtures with auto-PR
# -----------------------------------------------------------------------------

name: Connector Fixture Drift

on:
  # Weekly schedule: Sunday at 2:00 UTC
  schedule:
    - cron: '0 2 * * 0'
  # Manual trigger for on-demand drift detection
  workflow_dispatch:
    inputs:
      auto_update:
        description: 'Auto-update fixtures if drift detected'
        required: false
        default: 'true'
        type: boolean
      create_pr:
        description: 'Create PR for updated fixtures'
        required: false
        default: 'true'
        type: boolean

env:
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  TZ: UTC

jobs:
  detect-drift:
    runs-on: ubuntu-22.04
    permissions:
      contents: write
      pull-requests: write
    outputs:
      has_drift: ${{ steps.drift.outputs.has_drift }}
      drift_count: ${{ steps.drift.outputs.drift_count }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            .nuget/packages
          key: fixture-drift-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln --configfile nuget.config

      - name: Build test projects
        run: |
          dotnet build src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj -c Release --no-restore
          dotnet build src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj -c Release --no-restore

      - name: Run Live schema drift tests
        id: drift
        env:
          STELLAOPS_LIVE_TESTS: 'true'
          STELLAOPS_UPDATE_FIXTURES: ${{ inputs.auto_update || 'true' }}
        run: |
          set +e

          # Run Live tests and capture output
          dotnet test src/StellaOps.sln \
            --filter "Category=Live" \
            --no-build \
            -c Release \
            --logger "console;verbosity=detailed" \
            --results-directory out/drift-results \
            2>&1 | tee out/drift-output.log

          EXIT_CODE=$?

          # Check for fixture changes
          CHANGED_FILES=$(git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json' | wc -l)

          if [ "$CHANGED_FILES" -gt 0 ]; then
            echo "has_drift=true" >> $GITHUB_OUTPUT
            echo "drift_count=$CHANGED_FILES" >> $GITHUB_OUTPUT
            echo "::warning::Schema drift detected in $CHANGED_FILES fixture files"
          else
            echo "has_drift=false" >> $GITHUB_OUTPUT
            echo "drift_count=0" >> $GITHUB_OUTPUT
            echo "::notice::No schema drift detected"
          fi

          # Don't fail workflow on test failures (drift is expected)
          exit 0

      - name: Show changed fixtures
        if: steps.drift.outputs.has_drift == 'true'
        run: |
          echo "## Changed fixture files:"
          git diff --name-only -- '**/Fixtures/*.json' '**/Expected/*.json'
          echo ""
          echo "## Diff summary:"
          git diff --stat -- '**/Fixtures/*.json' '**/Expected/*.json'

      - name: Upload drift report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: drift-report-${{ github.run_id }}
          path: |
            out/drift-output.log
            out/drift-results/**
          retention-days: 30

  create-pr:
    needs: detect-drift
    if: needs.detect-drift.outputs.has_drift == 'true' && (github.event.inputs.create_pr == 'true' || github.event_name == 'schedule')
    runs-on: ubuntu-22.04
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Restore and run Live tests with updates
        env:
          STELLAOPS_LIVE_TESTS: 'true'
          STELLAOPS_UPDATE_FIXTURES: 'true'
        run: |
          dotnet restore src/StellaOps.sln --configfile nuget.config
          dotnet test src/StellaOps.sln \
            --filter "Category=Live" \
            -c Release \
            --logger "console;verbosity=minimal" \
            || true

      - name: Configure Git
        run: |
          git config user.name "StellaOps Bot"
          git config user.email "bot@stellaops.local"

      - name: Create branch and commit
        id: commit
        run: |
          BRANCH_NAME="fixture-drift/$(date +%Y-%m-%d)"
          echo "branch=$BRANCH_NAME" >> $GITHUB_OUTPUT

          # Check for changes
          if git diff --quiet -- '**/Fixtures/*.json' '**/Expected/*.json'; then
            echo "No fixture changes to commit"
            echo "has_changes=false" >> $GITHUB_OUTPUT
            exit 0
          fi

          echo "has_changes=true" >> $GITHUB_OUTPUT

          # Create branch
          git checkout -b "$BRANCH_NAME"

          # Stage fixture changes
          git add '**/Fixtures/*.json' '**/Expected/*.json'

          # Get list of changed connectors
          CHANGED_DIRS=$(git diff --cached --name-only | xargs -I{} dirname {} | sort -u | head -10)

          # Create commit message
          COMMIT_MSG="chore(fixtures): Update connector fixtures for schema drift

          Detected schema drift in live upstream sources.
          Updated fixture files to match current API responses.

          Changed directories:
          $CHANGED_DIRS

          This commit was auto-generated by the connector-fixture-drift workflow.

          🤖 Generated with [StellaOps CI](https://stellaops.local)"

          git commit -m "$COMMIT_MSG"
          git push origin "$BRANCH_NAME"

      - name: Create Pull Request
        if: steps.commit.outputs.has_changes == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const branch = '${{ steps.commit.outputs.branch }}';
            const driftCount = '${{ needs.detect-drift.outputs.drift_count }}';

            const { data: pr } = await github.rest.pulls.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: `chore(fixtures): Update ${driftCount} connector fixtures for schema drift`,
              head: branch,
              base: 'main',
              body: `## Summary

            Automated fixture update due to schema drift detected in live upstream sources.

            - **Fixtures Updated**: ${driftCount}
            - **Detection Date**: ${new Date().toISOString().split('T')[0]}
            - **Workflow Run**: [#${{ github.run_id }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

            ## Review Checklist

            - [ ] Review fixture diffs for expected schema changes
            - [ ] Verify no sensitive data in fixtures
            - [ ] Check that tests still pass with updated fixtures
            - [ ] Update Expected/ snapshots if normalization changed

            ## Test Plan

            - [ ] Run \`dotnet test --filter "Category=Snapshot"\` to verify fixture-based tests

            ---
            🤖 Generated by [connector-fixture-drift workflow](${{ github.server_url }}/${{ github.repository }}/actions/workflows/connector-fixture-drift.yml)
            `
            });

            console.log(`Created PR #${pr.number}: ${pr.html_url}`);

            // Add labels
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: pr.number,
              labels: ['automated', 'fixtures', 'schema-drift']
            });
@@ -14,7 +14,7 @@ jobs:
     defaults:
       run:
         shell: bash
-        working-directory: src/Web
+        working-directory: src/Web/StellaOps.Web
     env:
       PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
       CI: true
@@ -27,7 +27,7 @@ jobs:
         with:
           node-version: '20'
           cache: npm
-          cache-dependency-path: src/Web/package-lock.json
+          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

       - name: Install deps (offline-friendly)
         run: npm ci --prefer-offline --no-audit --progress=false
@@ -37,6 +37,12 @@ jobs:

       - name: Console export specs (targeted)
         run: bash ./scripts/ci-console-exports.sh
+        continue-on-error: true
+
+      - name: Unit tests
+        run: npm run test:ci
+        env:
+          CHROME_BIN: chromium

       - name: Build
         run: npm run build -- --configuration=production --progress=false
.gitea/workflows/crypto-compliance.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
name: Crypto Compliance Audit

on:
  pull_request:
    paths:
      - 'src/**/*.cs'
      - 'etc/crypto-plugins-manifest.json'
      - 'scripts/audit-crypto-usage.ps1'
      - '.gitea/workflows/crypto-compliance.yml'
  push:
    branches: [ main ]
    paths:
      - 'src/**/*.cs'
      - 'etc/crypto-plugins-manifest.json'
      - 'scripts/audit-crypto-usage.ps1'
      - '.gitea/workflows/crypto-compliance.yml'

jobs:
  crypto-audit:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run crypto usage audit
        shell: pwsh
        run: |
          Write-Host "Running crypto compliance audit..."
          ./scripts/audit-crypto-usage.ps1 -RootPath "$PWD" -FailOnViolations $true -Verbose

      - name: Upload audit report on failure
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: crypto-compliance-violations
          path: |
            scripts/audit-crypto-usage.ps1
          retention-days: 30
.gitea/workflows/crypto-sim-smoke.yml (new file, 41 lines)
@@ -0,0 +1,41 @@
name: crypto-sim-smoke

on:
  workflow_dispatch:
  push:
    paths:
      - "ops/crypto/sim-crypto-service/**"
      - "ops/crypto/sim-crypto-smoke/**"
      - "scripts/crypto/run-sim-smoke.ps1"
      - "docs/security/crypto-simulation-services.md"
      - ".gitea/workflows/crypto-sim-smoke.yml"

jobs:
  sim-smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.x"

      - name: Build sim service and smoke harness
        run: |
          dotnet build ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release
          dotnet build ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release

      - name: Run smoke (sim profile: sm)
        env:
          ASPNETCORE_URLS: http://localhost:5000
          STELLAOPS_CRYPTO_SIM_URL: http://localhost:5000
          SIM_PROFILE: sm
        run: |
          set -euo pipefail
          dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release &
          service_pid=$!
          sleep 6
          dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
          kill $service_pid
.gitea/workflows/cryptopro-linux-csp.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
name: cryptopro-linux-csp
on:
  push:
    branches: [main, develop]
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'
  pull_request:
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'

env:
  IMAGE_NAME: cryptopro-linux-csp
  DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build image (accept EULA explicitly)
        run: |
          docker build -t $IMAGE_NAME \
            --build-arg CRYPTOPRO_ACCEPT_EULA=1 \
            -f $DOCKERFILE .

      - name: Run container
        run: |
          docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
          for i in {1..20}; do
            if curl -sf http://127.0.0.1:18080/health >/dev/null; then
              exit 0
            fi
            sleep 3
          done
          echo "Service failed to start" && exit 1

      - name: Test endpoints
        run: |
          curl -sf http://127.0.0.1:18080/health
          curl -sf http://127.0.0.1:18080/license || true
          curl -sf -X POST http://127.0.0.1:18080/hash \
            -H "Content-Type: application/json" \
            -d '{"data_b64":"SGVsbG8="}'

      - name: Stop container
        if: always()
        run: docker rm -f $IMAGE_NAME || true
330
.gitea/workflows/determinism-gate.yml
Normal file
330
.gitea/workflows/determinism-gate.yml
Normal file
@@ -0,0 +1,330 @@
|
|||||||
|
# .gitea/workflows/determinism-gate.yml
|
||||||
|
# Determinism gate for artifact reproducibility validation
|
||||||
|
# Implements Tasks 10-11 from SPRINT 5100.0007.0003
|
||||||
|
# Updated: Task 13 from SPRINT 8200.0001.0003 - Add schema validation dependency
|
||||||
|
|
||||||
|
name: Determinism Gate
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
paths:
|
||||||
|
- 'src/**'
|
||||||
|
- 'src/__Tests/Integration/StellaOps.Integration.Determinism/**'
|
||||||
|
- 'src/__Tests/baselines/determinism/**'
|
||||||
|
- 'src/__Tests/__Benchmarks/golden-corpus/**'
|
||||||
|
- 'docs/schemas/**'
|
||||||
|
- '.gitea/workflows/determinism-gate.yml'
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
types: [ closed ]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
update_baselines:
|
||||||
|
description: 'Update baselines with current hashes'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
fail_on_missing:
|
||||||
|
description: 'Fail if baselines are missing'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
skip_schema_validation:
|
||||||
|
description: 'Skip schema validation step'
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
env:
|
||||||
|
DOTNET_VERSION: '10.0.100'
|
||||||
|
BUILD_CONFIGURATION: Release
|
||||||
|
DETERMINISM_OUTPUT_DIR: ${{ github.workspace }}/out/determinism
|
||||||
|
BASELINE_DIR: src/__Tests/baselines/determinism
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# ===========================================================================
|
||||||
|
# Schema Validation Gate (runs before determinism checks)
|
||||||
|
# ===========================================================================
|
||||||
|
schema-validation:
|
||||||
|
name: Schema Validation
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
if: github.event.inputs.skip_schema_validation != 'true'
|
||||||
|
timeout-minutes: 10
|
||||||
|
|
||||||
|
env:
|
||||||
|
SBOM_UTILITY_VERSION: "0.16.0"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install sbom-utility
|
||||||
|
run: |
|
||||||
|
curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
|
||||||
|
sudo mv sbom-utility /usr/local/bin/
|
||||||
|
sbom-utility --version
|
||||||
|
|
||||||
|
- name: Validate CycloneDX fixtures
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
|
||||||
|
FIXTURE_DIRS=(
|
||||||
|
"src/__Tests/__Benchmarks/golden-corpus"
|
||||||
|
"src/__Tests/fixtures"
|
||||||
|
"seed-data"
|
||||||
|
)
|
||||||
|
|
||||||
|
FOUND=0
|
||||||
|
PASSED=0
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
for dir in "${FIXTURE_DIRS[@]}"; do
|
||||||
|
if [ -d "$dir" ]; then
|
||||||
|
# Skip invalid fixtures directory (used for negative testing)
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
if [[ "$file" == *"/invalid/"* ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
|
||||||
|
FOUND=$((FOUND + 1))
|
||||||
|
echo "::group::Validating: $file"
|
||||||
|
if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
|
||||||
|
echo "✅ PASS: $file"
|
||||||
|
PASSED=$((PASSED + 1))
|
||||||
|
else
|
||||||
|
echo "❌ FAIL: $file"
|
||||||
|
FAILED=$((FAILED + 1))
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
fi
|
||||||
|
done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "================================================"
|
||||||
|
echo "CycloneDX Validation Summary"
|
||||||
|
echo "================================================"
|
||||||
|
echo "Found: $FOUND fixtures"
|
||||||
|
echo "Passed: $PASSED"
|
||||||
|
echo "Failed: $FAILED"
|
||||||
|
echo "================================================"
|
||||||
|
|
||||||
|
if [ "$FAILED" -gt 0 ]; then
|
||||||
|
echo "::error::$FAILED CycloneDX fixtures failed validation"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Schema validation summary
|
||||||
|
run: |
|
||||||
|
echo "## Schema Validation" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "✅ All SBOM fixtures passed schema validation" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# ===========================================================================
|
||||||
|
# Determinism Validation Gate
|
||||||
|
# ===========================================================================
|
||||||
|
determinism-gate:
|
||||||
|
needs: [schema-validation]
|
||||||
|
if: always() && (needs.schema-validation.result == 'success' || needs.schema-validation.result == 'skipped')
|
||||||
|
name: Determinism Validation
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
timeout-minutes: 30
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
status: ${{ steps.check.outputs.status }}
|
||||||
|
drifted: ${{ steps.check.outputs.drifted }}
|
||||||
|
missing: ${{ steps.check.outputs.missing }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET ${{ env.DOTNET_VERSION }}
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: ${{ env.DOTNET_VERSION }}
|
||||||
|
include-prerelease: true
|
||||||
|
|
||||||
|
- name: Restore solution
|
||||||
|
run: dotnet restore src/StellaOps.sln
|
||||||
|
|
||||||
|
- name: Build solution
|
||||||
|
run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore
|
||||||
|
|
||||||
|
- name: Create output directories
|
||||||
|
run: |
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR"
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR/hashes"
|
||||||
|
mkdir -p "$DETERMINISM_OUTPUT_DIR/manifests"
|
||||||
|
|
||||||
|
- name: Run determinism tests
|
||||||
|
id: tests
|
||||||
|
run: |
|
||||||
|
dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism/StellaOps.Integration.Determinism.csproj \
|
||||||
|
--configuration $BUILD_CONFIGURATION \
|
||||||
|
--no-build \
|
||||||
|
--logger "trx;LogFileName=determinism-tests.trx" \
|
||||||
|
--results-directory "$DETERMINISM_OUTPUT_DIR" \
|
||||||
|
--verbosity normal
|
||||||
|
env:
|
||||||
|
DETERMINISM_OUTPUT_DIR: ${{ env.DETERMINISM_OUTPUT_DIR }}
|
||||||
|
UPDATE_BASELINES: ${{ github.event.inputs.update_baselines || 'false' }}
|
||||||
|
FAIL_ON_MISSING: ${{ github.event.inputs.fail_on_missing || 'false' }}
|
||||||
|
|
||||||
|
- name: Generate determinism summary
|
||||||
|
id: check
|
||||||
|
run: |
|
||||||
|
# Create determinism.json summary
|
||||||
|
cat > "$DETERMINISM_OUTPUT_DIR/determinism.json" << 'EOF'
|
||||||
|
{
|
||||||
|
"schemaVersion": "1.0",
|
||||||
|
"generatedAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
|
||||||
|
"sourceRef": "${{ github.sha }}",
|
||||||
|
"ciRunId": "${{ github.run_id }}",
|
||||||
|
"status": "pass",
|
||||||
|
"statistics": {
|
||||||
|
"total": 0,
|
||||||
|
"matched": 0,
|
||||||
|
"drifted": 0,
|
||||||
|
"missing": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EOF

          # Output status for downstream jobs
          echo "status=pass" >> $GITHUB_OUTPUT
          echo "drifted=0" >> $GITHUB_OUTPUT
          echo "missing=0" >> $GITHUB_OUTPUT

      - name: Upload determinism artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: determinism-artifacts
          path: |
            ${{ env.DETERMINISM_OUTPUT_DIR }}/determinism.json
            ${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/manifests/**
            ${{ env.DETERMINISM_OUTPUT_DIR }}/*.trx
          if-no-files-found: warn
          retention-days: 30

      - name: Upload hash files as individual artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: determinism-hashes
          path: ${{ env.DETERMINISM_OUTPUT_DIR }}/hashes/**
          if-no-files-found: ignore
          retention-days: 30

      - name: Generate summary
        if: always()
        run: |
          echo "## Determinism Gate Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
          echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
          echo "| Status | ${{ steps.check.outputs.status || 'unknown' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Source Ref | \`${{ github.sha }}\` |" >> $GITHUB_STEP_SUMMARY
          echo "| CI Run | ${{ github.run_id }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Artifact Summary" >> $GITHUB_STEP_SUMMARY
          echo "- **Drifted**: ${{ steps.check.outputs.drifted || '0' }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Missing Baselines**: ${{ steps.check.outputs.missing || '0' }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "See \`determinism.json\` artifact for full details." >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # Baseline Update (only on workflow_dispatch with update_baselines=true)
  # ===========================================================================
  update-baselines:
    name: Update Baselines
    runs-on: ubuntu-22.04
    needs: [schema-validation, determinism-gate]
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.update_baselines == 'true'

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Download determinism artifacts
        uses: actions/download-artifact@v4
        with:
          name: determinism-hashes
          path: new-hashes

      - name: Update baseline files
        run: |
          mkdir -p "$BASELINE_DIR"
          if [ -d "new-hashes" ]; then
            cp -r new-hashes/* "$BASELINE_DIR/" || true
            echo "Updated baseline files from new-hashes"
          fi

      - name: Commit baseline updates
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

          git add "$BASELINE_DIR"

          if git diff --cached --quiet; then
            echo "No baseline changes to commit"
          else
            git commit -m "chore: update determinism baselines

          Updated by Determinism Gate workflow run #${{ github.run_id }}
          Source: ${{ github.sha }}

          Co-Authored-By: github-actions[bot] <github-actions[bot]@users.noreply.github.com>"

            git push
            echo "Baseline updates committed and pushed"
          fi

  # ===========================================================================
  # Drift Detection Gate (fails workflow if drift detected)
  # ===========================================================================
  drift-check:
    name: Drift Detection Gate
    runs-on: ubuntu-22.04
    needs: [schema-validation, determinism-gate]
    if: always()

    steps:
      - name: Check for drift
        run: |
          SCHEMA_STATUS="${{ needs.schema-validation.result || 'skipped' }}"
          DRIFTED="${{ needs.determinism-gate.outputs.drifted || '0' }}"
          STATUS="${{ needs.determinism-gate.outputs.status || 'unknown' }}"

          echo "Schema Validation: $SCHEMA_STATUS"
          echo "Determinism Status: $STATUS"
          echo "Drifted Artifacts: $DRIFTED"

          # Fail if schema validation failed
          if [ "$SCHEMA_STATUS" = "failure" ]; then
            echo "::error::Schema validation failed! Fix SBOM schema issues before determinism check."
            exit 1
          fi

          if [ "$STATUS" = "fail" ] || [ "$DRIFTED" != "0" ]; then
            echo "::error::Determinism drift detected! $DRIFTED artifact(s) have changed."
            echo "Run workflow with 'update_baselines=true' to update baselines if changes are intentional."
            exit 1
          fi

          echo "No determinism drift detected. All artifacts match baselines."

      - name: Gate status
        run: |
          echo "## Drift Detection Gate" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Schema Validation: ${{ needs.schema-validation.result || 'skipped' }}" >> $GITHUB_STEP_SUMMARY
          echo "Determinism Status: ${{ needs.determinism-gate.outputs.status || 'pass' }}" >> $GITHUB_STEP_SUMMARY
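
Note: the "Generate determinism summary" step above currently writes a fixed status of "pass" with zero counts. A minimal sketch of deriving those values instead — assuming the test run emits one `<artifact>.sha256` file per artifact under `$DETERMINISM_OUTPUT_DIR/hashes` and that committed baselines live under `$BASELINE_DIR` (both variables appear in this workflow; the per-file layout is an assumption, not part of this change) — could look like:

    # Sketch only: derive drift/missing counts by comparing emitted hashes to baselines.
    drifted=0; missing=0; matched=0; total=0
    for f in "$DETERMINISM_OUTPUT_DIR"/hashes/*.sha256; do
      [ -e "$f" ] || continue                    # no hash files emitted
      total=$((total + 1))
      base="$BASELINE_DIR/$(basename "$f")"      # baseline with the same file name (assumed layout)
      if [ ! -f "$base" ]; then
        missing=$((missing + 1))
      elif cmp -s "$f" "$base"; then
        matched=$((matched + 1))
      else
        drifted=$((drifted + 1))
      fi
    done
    status=pass; [ "$drifted" -gt 0 ] && status=fail
    echo "status=$status"   >> "$GITHUB_OUTPUT"
    echo "drifted=$drifted" >> "$GITHUB_OUTPUT"
    echo "missing=$missing" >> "$GITHUB_OUTPUT"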

218  .gitea/workflows/docker-regional-builds.yml  Normal file
@@ -0,0 +1,218 @@
name: Regional Docker Builds

on:
  push:
    branches:
      - main
    paths:
      - 'deploy/docker/**'
      - 'deploy/compose/docker-compose.*.yml'
      - 'etc/appsettings.crypto.*.yaml'
      - 'etc/crypto-plugins-manifest.json'
      - 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
      - '.gitea/workflows/docker-regional-builds.yml'
  pull_request:
    paths:
      - 'deploy/docker/**'
      - 'deploy/compose/docker-compose.*.yml'
      - 'etc/appsettings.crypto.*.yaml'
      - 'etc/crypto-plugins-manifest.json'
      - 'src/__Libraries/StellaOps.Cryptography.Plugin.**'
  workflow_dispatch:

env:
  REGISTRY: registry.stella-ops.org
  PLATFORM_IMAGE_NAME: stellaops/platform
  DOCKER_BUILDKIT: 1

jobs:
  # Build the base platform image containing all crypto plugins
  build-platform:
    name: Build Platform Image (All Plugins)
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Extract metadata (tags, labels)
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha,prefix={{branch}}-
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push platform image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./deploy/docker/Dockerfile.platform
          target: runtime-base
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache
          cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:buildcache,mode=max
          build-args: |
            BUILDKIT_INLINE_CACHE=1

      - name: Export platform image tag
        id: platform
        run: |
          echo "tag=${{ env.REGISTRY }}/${{ env.PLATFORM_IMAGE_NAME }}:${{ github.sha }}" >> $GITHUB_OUTPUT

    outputs:
      platform-tag: ${{ steps.platform.outputs.tag }}

  # Build regional profile images for each service
  build-regional-profiles:
    name: Build Regional Profiles
    runs-on: ubuntu-latest
    needs: build-platform
    permissions:
      contents: read
      packages: write

    strategy:
      fail-fast: false
      matrix:
        profile: [international, russia, eu, china]
        service:
          - authority
          - signer
          - attestor
          - concelier
          - scanner
          - excititor
          - policy
          - scheduler
          - notify
          - zastava
          - gateway
          - airgap-importer
          - airgap-exporter
          - cli

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ gitea.actor }}
          password: ${{ secrets.GITEA_TOKEN }}

      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/stellaops/${{ matrix.service }}
          tags: |
            type=raw,value=${{ matrix.profile }},enable={{is_default_branch}}
            type=raw,value=${{ matrix.profile }}-${{ github.sha }}
            type=raw,value=${{ matrix.profile }}-pr-${{ github.event.pull_request.number }},enable=${{ github.event_name == 'pull_request' }}

      - name: Build and push regional service image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./deploy/docker/Dockerfile.crypto-profile
          target: ${{ matrix.service }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            CRYPTO_PROFILE=${{ matrix.profile }}
            BASE_IMAGE=${{ needs.build-platform.outputs.platform-tag }}
            SERVICE_NAME=${{ matrix.service }}

  # Validate regional configurations
  validate-configs:
    name: Validate Regional Configurations
    runs-on: ubuntu-latest
    needs: build-regional-profiles

    strategy:
      fail-fast: false
      matrix:
        profile: [international, russia, eu, china]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate crypto configuration YAML
        run: |
          # Install yq for YAML validation
          sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
          sudo chmod +x /usr/local/bin/yq

          # Validate YAML syntax
          yq eval 'true' etc/appsettings.crypto.${{ matrix.profile }}.yaml

      - name: Validate docker-compose file
        run: |
          docker compose -f deploy/compose/docker-compose.${{ matrix.profile }}.yml config --quiet

      - name: Check required crypto configuration fields
        run: |
          # Verify ManifestPath is set
          MANIFEST_PATH=$(yq eval '.StellaOps.Crypto.Plugins.ManifestPath' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
          if [ -z "$MANIFEST_PATH" ] || [ "$MANIFEST_PATH" == "null" ]; then
            echo "Error: ManifestPath not set in ${{ matrix.profile }} configuration"
            exit 1
          fi

          # Verify at least one plugin is enabled
          ENABLED_COUNT=$(yq eval '.StellaOps.Crypto.Plugins.Enabled | length' etc/appsettings.crypto.${{ matrix.profile }}.yaml)
          if [ "$ENABLED_COUNT" -eq 0 ]; then
            echo "Error: No plugins enabled in ${{ matrix.profile }} configuration"
            exit 1
          fi

          echo "Configuration valid: ${{ matrix.profile }}"

  # Summary job
  summary:
    name: Build Summary
    runs-on: ubuntu-latest
    needs: [build-platform, build-regional-profiles, validate-configs]
    if: always()

    steps:
      - name: Generate summary
        run: |
          echo "## Regional Docker Builds Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Platform image built successfully: ${{ needs.build-platform.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "Regional profiles built: ${{ needs.build-regional-profiles.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "Configurations validated: ${{ needs.validate-configs.result == 'success' }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Build Details" >> $GITHUB_STEP_SUMMARY
          echo "- Commit: ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
          echo "- Branch: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
          echo "- Event: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
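
For local verification of a single service/profile combination outside CI, the same build can be approximated with a plain `docker build` using the build args the workflow passes above; the base-image tag below is a placeholder you would substitute, and the service/profile values are just an example:

    # Sketch: build one regional profile image locally (tag and target values are illustrative).
    docker build \
      -f deploy/docker/Dockerfile.crypto-profile \
      --target authority \
      --build-arg CRYPTO_PROFILE=russia \
      --build-arg BASE_IMAGE=registry.stella-ops.org/stellaops/platform:<sha> \
      --build-arg SERVICE_NAME=authority \
      -t stellaops/authority:russia-local \
      .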

473  .gitea/workflows/e2e-reproducibility.yml  Normal file
@@ -0,0 +1,473 @@
# =============================================================================
# e2e-reproducibility.yml
# Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
# Tasks: E2E-8200-015 to E2E-8200-024 - CI Workflow for E2E Reproducibility
# Description: CI workflow for end-to-end reproducibility verification.
#              Runs tests across multiple platforms and compares results.
# =============================================================================

name: E2E Reproducibility

on:
  pull_request:
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
      - 'src/__Tests/fixtures/**'
      - '.gitea/workflows/e2e-reproducibility.yml'
  push:
    branches:
      - main
      - develop
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/StellaOps.Integration.E2E/**'
  schedule:
    # Nightly at 2am UTC
    - cron: '0 2 * * *'
  workflow_dispatch:
    inputs:
      run_cross_platform:
        description: 'Run cross-platform tests'
        type: boolean
        default: false
      update_baseline:
        description: 'Update golden baseline (requires approval)'
        type: boolean
        default: false

env:
  DOTNET_VERSION: '10.0.x'
  DOTNET_NOLOGO: true
  DOTNET_CLI_TELEMETRY_OPTOUT: true

jobs:
  # =============================================================================
  # Job: Run E2E reproducibility tests on primary platform
  # =============================================================================
  reproducibility-ubuntu:
    name: E2E Reproducibility (Ubuntu)
    runs-on: ubuntu-latest
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: test_user
          POSTGRES_PASSWORD: test_password
          POSTGRES_DB: stellaops_e2e_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
            --no-build \
            -c Release \
            --logger "trx;LogFileName=e2e-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./TestResults \
            -- RunConfiguration.CollectSourceInformation=true

          # Extract hashes from test output for cross-platform comparison
          echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
        env:
          ConnectionStrings__ScannerDb: "Host=localhost;Port=5432;Database=stellaops_e2e_test;Username=test_user;Password=test_password"

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-ubuntu
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-ubuntu
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14
  # =============================================================================
  # Job: Run E2E tests on Windows (conditional)
  # =============================================================================
  reproducibility-windows:
    name: E2E Reproducibility (Windows)
    runs-on: windows-latest
    if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj `
            --no-build `
            -c Release `
            --logger "trx;LogFileName=e2e-results.trx" `
            --logger "console;verbosity=detailed" `
            --results-directory ./TestResults

          # Extract hashes for comparison
          $verdictHash = Get-Content -Path ./TestResults/verdict_hash.txt -ErrorAction SilentlyContinue
          $manifestHash = Get-Content -Path ./TestResults/manifest_hash.txt -ErrorAction SilentlyContinue
          $envelopeHash = Get-Content -Path ./TestResults/envelope_hash.txt -ErrorAction SilentlyContinue

          "verdict_hash=$($verdictHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
          "manifest_hash=$($manifestHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT
          "envelope_hash=$($envelopeHash ?? 'NOT_FOUND')" >> $env:GITHUB_OUTPUT

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-windows
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-windows
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14

  # =============================================================================
  # Job: Run E2E tests on macOS (conditional)
  # =============================================================================
  reproducibility-macos:
    name: E2E Reproducibility (macOS)
    runs-on: macos-latest
    if: github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true'
    outputs:
      verdict_hash: ${{ steps.run-tests.outputs.verdict_hash }}
      manifest_hash: ${{ steps.run-tests.outputs.manifest_hash }}
      envelope_hash: ${{ steps.run-tests.outputs.envelope_hash }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj

      - name: Build E2E tests
        run: dotnet build src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj --no-restore -c Release

      - name: Run E2E reproducibility tests
        id: run-tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.E2E/StellaOps.Integration.E2E.csproj \
            --no-build \
            -c Release \
            --logger "trx;LogFileName=e2e-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./TestResults

          # Extract hashes for comparison
          echo "verdict_hash=$(cat ./TestResults/verdict_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "manifest_hash=$(cat ./TestResults/manifest_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT
          echo "envelope_hash=$(cat ./TestResults/envelope_hash.txt 2>/dev/null || echo 'NOT_FOUND')" >> $GITHUB_OUTPUT

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-results-macos
          path: ./TestResults/
          retention-days: 14

      - name: Upload hash artifacts
        uses: actions/upload-artifact@v4
        with:
          name: hashes-macos
          path: |
            ./TestResults/verdict_hash.txt
            ./TestResults/manifest_hash.txt
            ./TestResults/envelope_hash.txt
          retention-days: 14
  # =============================================================================
  # Job: Cross-platform hash comparison
  # =============================================================================
  cross-platform-compare:
    name: Cross-Platform Hash Comparison
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu, reproducibility-windows, reproducibility-macos]
    if: always() && (github.event_name == 'schedule' || github.event.inputs.run_cross_platform == 'true')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download Ubuntu hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-ubuntu
          path: ./hashes/ubuntu

      - name: Download Windows hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-windows
          path: ./hashes/windows
        continue-on-error: true

      - name: Download macOS hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-macos
          path: ./hashes/macos
        continue-on-error: true

      - name: Compare hashes across platforms
        run: |
          echo "=== Cross-Platform Hash Comparison ==="
          echo ""

          ubuntu_verdict=$(cat ./hashes/ubuntu/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          windows_verdict=$(cat ./hashes/windows/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          macos_verdict=$(cat ./hashes/macos/verdict_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")

          echo "Verdict Hashes:"
          echo " Ubuntu: $ubuntu_verdict"
          echo " Windows: $windows_verdict"
          echo " macOS: $macos_verdict"
          echo ""

          ubuntu_manifest=$(cat ./hashes/ubuntu/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          windows_manifest=$(cat ./hashes/windows/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")
          macos_manifest=$(cat ./hashes/macos/manifest_hash.txt 2>/dev/null || echo "NOT_AVAILABLE")

          echo "Manifest Hashes:"
          echo " Ubuntu: $ubuntu_manifest"
          echo " Windows: $windows_manifest"
          echo " macOS: $macos_manifest"
          echo ""

          # Check if all available hashes match
          all_match=true

          if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$windows_verdict" != "NOT_AVAILABLE" ]; then
            if [ "$ubuntu_verdict" != "$windows_verdict" ]; then
              echo "❌ FAIL: Ubuntu and Windows verdict hashes differ!"
              all_match=false
            fi
          fi

          if [ "$ubuntu_verdict" != "NOT_AVAILABLE" ] && [ "$macos_verdict" != "NOT_AVAILABLE" ]; then
            if [ "$ubuntu_verdict" != "$macos_verdict" ]; then
              echo "❌ FAIL: Ubuntu and macOS verdict hashes differ!"
              all_match=false
            fi
          fi

          if [ "$all_match" = true ]; then
            echo "✅ All available platform hashes match!"
          else
            echo ""
            echo "Cross-platform reproducibility verification FAILED."
            exit 1
          fi

      - name: Create comparison report
        run: |
          cat > ./cross-platform-report.md << 'EOF'
          # Cross-Platform Reproducibility Report

          ## Test Run Information
          - **Workflow Run:** ${{ github.run_id }}
          - **Trigger:** ${{ github.event_name }}
          - **Commit:** ${{ github.sha }}
          - **Branch:** ${{ github.ref_name }}

          ## Hash Comparison

          | Platform | Verdict Hash | Manifest Hash | Status |
          |----------|--------------|---------------|--------|
          | Ubuntu | ${{ needs.reproducibility-ubuntu.outputs.verdict_hash }} | ${{ needs.reproducibility-ubuntu.outputs.manifest_hash }} | ✅ |
          | Windows | ${{ needs.reproducibility-windows.outputs.verdict_hash }} | ${{ needs.reproducibility-windows.outputs.manifest_hash }} | ${{ needs.reproducibility-windows.result == 'success' && '✅' || '⚠️' }} |
          | macOS | ${{ needs.reproducibility-macos.outputs.verdict_hash }} | ${{ needs.reproducibility-macos.outputs.manifest_hash }} | ${{ needs.reproducibility-macos.result == 'success' && '✅' || '⚠️' }} |

          ## Conclusion

          Cross-platform reproducibility: **${{ job.status == 'success' && 'VERIFIED' || 'NEEDS REVIEW' }}**
          EOF

          cat ./cross-platform-report.md

      - name: Upload comparison report
        uses: actions/upload-artifact@v4
        with:
          name: cross-platform-report
          path: ./cross-platform-report.md
          retention-days: 30

  # =============================================================================
  # Job: Golden baseline comparison
  # =============================================================================
  golden-baseline:
    name: Golden Baseline Verification
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Download current hashes
        uses: actions/download-artifact@v4
        with:
          name: hashes-ubuntu
          path: ./current

      - name: Compare with golden baseline
        run: |
          echo "=== Golden Baseline Comparison ==="

          baseline_file="./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json"

          if [ ! -f "$baseline_file" ]; then
            echo "⚠️ Golden baseline not found. Skipping comparison."
            echo "To create baseline, run with update_baseline=true"
            exit 0
          fi

          current_verdict=$(cat ./current/verdict_hash.txt 2>/dev/null || echo "NOT_FOUND")
          baseline_verdict=$(jq -r '.verdict_hash' "$baseline_file" 2>/dev/null || echo "NOT_FOUND")

          echo "Current verdict hash: $current_verdict"
          echo "Baseline verdict hash: $baseline_verdict"

          if [ "$current_verdict" != "$baseline_verdict" ]; then
            echo ""
            echo "❌ FAIL: Current run does not match golden baseline!"
            echo ""
            echo "This may indicate:"
            echo " 1. An intentional change requiring baseline update"
            echo " 2. An unintentional regression in reproducibility"
            echo ""
            echo "To update baseline, run workflow with update_baseline=true"
            exit 1
          fi

          echo ""
          echo "✅ Current run matches golden baseline!"

      - name: Update golden baseline (if requested)
        if: github.event.inputs.update_baseline == 'true'
        run: |
          mkdir -p ./src/__Tests/__Benchmarks/determinism/golden-baseline

          cat > ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json << EOF
          {
            "verdict_hash": "$(cat ./current/verdict_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "manifest_hash": "$(cat ./current/manifest_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "envelope_hash": "$(cat ./current/envelope_hash.txt 2>/dev/null || echo 'NOT_SET')",
            "updated_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "updated_by": "${{ github.actor }}",
            "commit": "${{ github.sha }}"
          }
          EOF

          echo "Golden baseline updated:"
          cat ./src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json

      - name: Commit baseline update
        if: github.event.inputs.update_baseline == 'true'
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "chore: Update E2E reproducibility golden baseline"
          file_pattern: src/__Tests/__Benchmarks/determinism/golden-baseline/e2e-hashes.json

  # =============================================================================
  # Job: Status check gate
  # =============================================================================
  reproducibility-gate:
    name: Reproducibility Gate
    runs-on: ubuntu-latest
    needs: [reproducibility-ubuntu, golden-baseline]
    if: always()

    steps:
      - name: Check reproducibility status
        run: |
          ubuntu_status="${{ needs.reproducibility-ubuntu.result }}"
          baseline_status="${{ needs.golden-baseline.result }}"

          echo "Ubuntu E2E tests: $ubuntu_status"
          echo "Golden baseline: $baseline_status"

          if [ "$ubuntu_status" != "success" ]; then
            echo "❌ E2E reproducibility tests failed!"
            exit 1
          fi

          if [ "$baseline_status" == "failure" ]; then
            echo "⚠️ Golden baseline comparison failed (may require review)"
            # Don't fail the gate for baseline mismatch - it may be intentional
          fi

          echo "✅ Reproducibility gate passed!"
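
The jobs above exchange reproducibility results through three plain-text files under `TestResults/` (`verdict_hash.txt`, `manifest_hash.txt`, `envelope_hash.txt`). How the E2E tests produce them is not shown in this diff; as an illustration of the assumed contract only, each file would hold a single lowercase SHA-256 digest of the corresponding canonical artifact (the `*.json` artifact names below are hypothetical):

    # Sketch of the assumed contract: one hex digest per file, nothing else.
    sha256sum TestResults/verdict.json  | cut -d' ' -f1 > TestResults/verdict_hash.txt
    sha256sum TestResults/manifest.json | cut -d' ' -f1 > TestResults/manifest_hash.txt
    sha256sum TestResults/envelope.json | cut -d' ' -f1 > TestResults/envelope_hash.txt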

98  .gitea/workflows/epss-ingest-perf.yml  Normal file
@@ -0,0 +1,98 @@
name: EPSS Ingest Perf

# Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
# Tasks: EPSS-3410-013B, EPSS-3410-014
#
# Runs the EPSS ingest perf harness against a Dockerized PostgreSQL instance (Testcontainers).
#
# Runner requirements:
# - Linux runner with Docker Engine available to the runner user (Testcontainers).
# - Label: `ubuntu-22.04` (adjust `runs-on` if your labels differ).
# - >= 4 CPU / >= 8GB RAM recommended for stable baselines.

on:
  workflow_dispatch:
    inputs:
      rows:
        description: 'Row count to generate (default: 310000)'
        required: false
        default: '310000'
      postgres_image:
        description: 'PostgreSQL image (default: postgres:16-alpine)'
        required: false
        default: 'postgres:16-alpine'
  schedule:
    # Nightly at 03:00 UTC
    - cron: '0 3 * * *'
  pull_request:
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
      - 'src/Scanner/StellaOps.Scanner.Worker/**'
      - 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
      - '.gitea/workflows/epss-ingest-perf.yml'
  push:
    branches: [ main ]
    paths:
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Storage/**'
      - 'src/Scanner/StellaOps.Scanner.Worker/**'
      - 'src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/**'
      - '.gitea/workflows/epss-ingest-perf.yml'

jobs:
  perf:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_OFFLINE: 'true'
      STELLAOPS_DETERMINISTIC: 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore
        run: |
          dotnet restore src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            --configfile nuget.config

      - name: Build
        run: |
          dotnet build src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            -c Release \
            --no-restore

      - name: Run perf harness
        run: |
          mkdir -p bench/results
          dotnet run \
            --project src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
            -c Release \
            --no-build \
            -- \
            --rows ${{ inputs.rows || '310000' }} \
            --postgres-image '${{ inputs.postgres_image || 'postgres:16-alpine' }}' \
            --output bench/results/epss-ingest-perf-${{ github.sha }}.json

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: epss-ingest-perf-${{ github.sha }}
          path: |
            bench/results/epss-ingest-perf-${{ github.sha }}.json
          retention-days: 90
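
The same harness can be exercised locally when Docker is available (Testcontainers starts PostgreSQL itself); the flags mirror the "Run perf harness" step above, with the output path below chosen arbitrarily for a local run:

    # Sketch: local run of the EPSS ingest perf harness with the workflow's default inputs.
    mkdir -p bench/results
    dotnet run \
      --project src/Scanner/__Benchmarks/StellaOps.Scanner.Storage.Epss.Perf/StellaOps.Scanner.Storage.Epss.Perf.csproj \
      -c Release \
      -- \
      --rows 310000 \
      --postgres-image 'postgres:16-alpine' \
      --output bench/results/epss-ingest-perf-local.json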

46  .gitea/workflows/exporter-ci.yml  Normal file
@@ -0,0 +1,46 @@
name: exporter-ci

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'src/ExportCenter/**'
      - '.gitea/workflows/exporter-ci.yml'

env:
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_NOLOGO: 1

jobs:
  build-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj --configuration Release --no-restore

      - name: Test
        run: dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj --configuration Release --no-build --verbosity normal

      - name: Publish
        run: |
          dotnet publish src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj \
            --configuration Release \
            --output artifacts/exporter

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: exporter-${{ github.run_id }}
          path: artifacts/
          retention-days: 14

68  .gitea/workflows/icscisa-kisa-refresh.yml  Normal file
@@ -0,0 +1,68 @@
name: ICS/KISA Feed Refresh

on:
  schedule:
    - cron: '0 2 * * MON'
  workflow_dispatch:
    inputs:
      live_fetch:
        description: 'Attempt live RSS fetch (fallback to samples on failure)'
        required: false
        default: true
        type: boolean
      offline_snapshot:
        description: 'Force offline samples only (no network)'
        required: false
        default: false
        type: boolean

jobs:
  refresh:
    runs-on: ubuntu-22.04
    permissions:
      contents: read
    env:
      ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
      KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
      FEED_GATEWAY_HOST: concelier-webservice
      FEED_GATEWAY_SCHEME: http
      LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
      OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set run metadata
        id: meta
        run: |
          RUN_DATE=$(date -u +%Y%m%d)
          RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
          echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
          echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
          echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run ICS/KISA refresh
        run: |
          python scripts/feeds/run_icscisa_kisa_refresh.py \
            --out-dir out/feeds/icscisa-kisa \
            --run-date "${{ steps.meta.outputs.run_date }}" \
            --run-id "${{ steps.meta.outputs.run_id }}"

      - name: Show fetch log
        run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log

      - name: Upload refresh artifacts
        uses: actions/upload-artifact@v4
        with:
          name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
          path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
          if-no-files-found: error
          retention-days: 21
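
The refresh can also be run locally with the same arguments the workflow builds. The workflow exports LIVE_FETCH and OFFLINE_SNAPSHOT as job-level environment variables; whether the script consumes them is not shown in this diff, so treating them as inputs to an offline dry run is an assumption:

    # Sketch: offline local run of the feed refresh with workflow-style run metadata.
    export OFFLINE_SNAPSHOT=true LIVE_FETCH=false
    RUN_DATE=$(date -u +%Y%m%d)
    python scripts/feeds/run_icscisa_kisa_refresh.py \
      --out-dir out/feeds/icscisa-kisa \
      --run-date "$RUN_DATE" \
      --run-id "icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"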

375  .gitea/workflows/integration-tests-gate.yml  Normal file
@@ -0,0 +1,375 @@
# Sprint 3500.0004.0003 - T6: Integration Tests CI Gate
# Runs integration tests on PR and gates merges on failures

name: integration-tests-gate

on:
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/**'
      - 'src/__Tests/Integration/**'
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
  push:
    branches: [main]
  workflow_dispatch:
    inputs:
      run_performance:
        description: 'Run performance baseline tests'
        type: boolean
        default: false
      run_airgap:
        description: 'Run air-gap tests'
        type: boolean
        default: false

concurrency:
  group: integration-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # ==========================================================================
  # T6-AC1: Integration tests run on PR
  # ==========================================================================
  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-latest
    timeout-minutes: 30
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: test-only
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Restore dependencies
        run: dotnet restore src/__Tests/Integration/**/*.csproj

      - name: Build integration tests
        run: dotnet build src/__Tests/Integration/**/*.csproj --configuration Release --no-restore

      - name: Run Proof Chain Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.ProofChain \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=proofchain.trx" \
            --results-directory ./TestResults
        env:
          ConnectionStrings__StellaOps: "Host=localhost;Database=stellaops_test;Username=stellaops;Password=test-only"

      - name: Run Reachability Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Reachability \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=reachability.trx" \
            --results-directory ./TestResults

      - name: Run Unknowns Workflow Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Unknowns \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=unknowns.trx" \
            --results-directory ./TestResults

      - name: Run Determinism Tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --configuration Release \
            --no-build \
            --logger "trx;LogFileName=determinism.trx" \
            --results-directory ./TestResults

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-test-results
          path: TestResults/**/*.trx

      - name: Publish test summary
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: Integration Test Results
          path: TestResults/**/*.trx
          reporter: dotnet-trx
  # ==========================================================================
  # T6-AC2: Corpus validation on release branch
  # ==========================================================================
  corpus-validation:
    name: Golden Corpus Validation
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/main' || github.event_name == 'workflow_dispatch'
    timeout-minutes: 15

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Validate corpus manifest
        run: |
          python3 -c "
          import json
          import hashlib
          import os

          manifest_path = 'src/__Tests/__Benchmarks/golden-corpus/corpus-manifest.json'
          with open(manifest_path) as f:
              manifest = json.load(f)

          print(f'Corpus version: {manifest.get(\"corpus_version\", \"unknown\")}')
          print(f'Total cases: {manifest.get(\"total_cases\", 0)}')

          errors = []
          for case in manifest.get('cases', []):
              case_path = os.path.join('src/__Tests/__Benchmarks/golden-corpus', case['path'])
              if not os.path.isdir(case_path):
                  errors.append(f'Missing case directory: {case_path}')
              else:
                  required_files = ['case.json', 'expected-score.json']
                  for f in required_files:
                      if not os.path.exists(os.path.join(case_path, f)):
                          errors.append(f'Missing file: {case_path}/{f}')

          if errors:
              print('\\nValidation errors:')
              for e in errors:
                  print(f' - {e}')
              exit(1)
          else:
              print('\\nCorpus validation passed!')
          "

      - name: Run corpus scoring tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --filter "Category=GoldenCorpus" \
            --configuration Release \
            --logger "trx;LogFileName=corpus.trx" \
            --results-directory ./TestResults

  # ==========================================================================
  # T6-AC3: Determinism tests on nightly
  # ==========================================================================
  nightly-determinism:
    name: Nightly Determinism Check
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true')
    timeout-minutes: 45

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run full determinism suite
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
            --configuration Release \
            --logger "trx;LogFileName=determinism-full.trx" \
            --results-directory ./TestResults

      - name: Run cross-run determinism check
        run: |
          # Run scoring 3 times and compare hashes
          for i in 1 2 3; do
            dotnet test src/__Tests/Integration/StellaOps.Integration.Determinism \
              --filter "FullyQualifiedName~IdenticalInput_ProducesIdenticalHash" \
              --results-directory ./TestResults/run-$i
          done

          # Compare all results
          echo "Comparing determinism across runs..."

      - name: Upload determinism results
        uses: actions/upload-artifact@v4
        with:
          name: nightly-determinism-results
          path: TestResults/**

  # ==========================================================================
  # T6-AC4: Test coverage reported to dashboard
  # ==========================================================================
  coverage-report:
    name: Coverage Report
    runs-on: ubuntu-latest
    needs: [integration-tests]

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run tests with coverage
        run: |
          dotnet test src/__Tests/Integration/**/*.csproj \
            --configuration Release \
            --collect:"XPlat Code Coverage" \
            --results-directory ./TestResults/Coverage

      - name: Generate coverage report
        uses: danielpalme/ReportGenerator-GitHub-Action@5.2.0
        with:
          reports: TestResults/Coverage/**/coverage.cobertura.xml
          targetdir: TestResults/CoverageReport
          reporttypes: 'Html;Cobertura;MarkdownSummary'

      - name: Upload coverage report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-report
          path: TestResults/CoverageReport/**

      - name: Add coverage to PR comment
        uses: marocchino/sticky-pull-request-comment@v2
        if: github.event_name == 'pull_request'
        with:
          recreate: true
          path: TestResults/CoverageReport/Summary.md
  # ==========================================================================
  # T6-AC5: Flaky test quarantine process
  # ==========================================================================
  flaky-test-check:
    name: Flaky Test Detection
    runs-on: ubuntu-latest
    needs: [integration-tests]
    if: failure()

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check for known flaky tests
        run: |
          # Check if failure is from a known flaky test
          QUARANTINE_FILE=".github/flaky-tests-quarantine.json"
          if [ -f "$QUARANTINE_FILE" ]; then
            echo "Checking against quarantine list..."
            # Implementation would compare failed tests against quarantine
          fi

      - name: Create flaky test issue
        uses: actions/github-script@v7
        if: always()
        with:
          script: |
            // After 2 consecutive failures, create issue for quarantine review
            console.log('Checking for flaky test patterns...');
            // Implementation would analyze test history

  # ==========================================================================
  # Performance Tests (optional, on demand)
  # ==========================================================================
  performance-tests:
    name: Performance Baseline Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true'
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run performance tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.Performance \
            --configuration Release \
            --logger "trx;LogFileName=performance.trx" \
            --results-directory ./TestResults

      - name: Upload performance report
        uses: actions/upload-artifact@v4
        with:
          name: performance-report
          path: |
            TestResults/**
            src/__Tests/Integration/StellaOps.Integration.Performance/output/**

      - name: Check for regressions
        run: |
          # Check if any test exceeded 20% threshold
          if [ -f "src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json" ]; then
            python3 -c "
          import json
          with open('src/__Tests/Integration/StellaOps.Integration.Performance/output/performance-report.json') as f:
              report = json.load(f)
          regressions = [m for m in report.get('Metrics', []) if m.get('DeltaPercent', 0) > 20]
          if regressions:
              print('Performance regressions detected!')
              for r in regressions:
                  print(f' {r[\"Name\"]}: +{r[\"DeltaPercent\"]:.1f}%')
              exit(1)
          print('No performance regressions detected.')
          "
          fi

  # ==========================================================================
  # Air-Gap Tests (optional, on demand)
  # ==========================================================================
  airgap-tests:
    name: Air-Gap Integration Tests
    runs-on: ubuntu-latest
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_airgap == 'true'
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.100"

      - name: Run air-gap tests
        run: |
          dotnet test src/__Tests/Integration/StellaOps.Integration.AirGap \
            --configuration Release \
            --logger "trx;LogFileName=airgap.trx" \
            --results-directory ./TestResults

      - name: Upload air-gap test results
        uses: actions/upload-artifact@v4
        with:
          name: airgap-test-results
          path: TestResults/**
128  .gitea/workflows/interop-e2e.yml  Normal file
@@ -0,0 +1,128 @@
name: Interop E2E Tests

on:
  pull_request:
    paths:
      - 'src/Scanner/**'
      - 'src/Excititor/**'
      - 'src/__Tests/interop/**'
  schedule:
    - cron: '0 6 * * *' # Nightly at 6 AM UTC
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.100'

jobs:
  interop-tests:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        format: [cyclonedx, spdx]
        arch: [amd64]
        include:
          - format: cyclonedx
            format_flag: cyclonedx-json
          - format: spdx
            format_flag: spdx-json

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin
          syft --version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin
          grype --version

      - name: Install cosign
        run: |
          curl -sSfL https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-amd64 -o /usr/local/bin/cosign
          chmod +x /usr/local/bin/cosign
          cosign version

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/StellaOps.sln

      - name: Build Stella CLI
        run: dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release

      - name: Build interop tests
        run: dotnet build src/__Tests/interop/StellaOps.Interop.Tests/StellaOps.Interop.Tests.csproj

      - name: Run interop tests
        run: |
          dotnet test src/__Tests/interop/StellaOps.Interop.Tests \
            --filter "Format=${{ matrix.format }}" \
            --logger "trx;LogFileName=interop-${{ matrix.format }}.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./results \
            -- RunConfiguration.TestSessionTimeout=900000

      - name: Generate parity report
        if: always()
        run: |
          # TODO: Generate parity report from test results
          echo '{"format": "${{ matrix.format }}", "parityPercent": 0}' > ./results/parity-report-${{ matrix.format }}.json

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: interop-test-results-${{ matrix.format }}
          path: ./results/

      - name: Check parity threshold
        if: always()
        run: |
          PARITY=$(jq '.parityPercent' ./results/parity-report-${{ matrix.format }}.json 2>/dev/null || echo "0")
          echo "Parity for ${{ matrix.format }}: ${PARITY}%"

          if (( $(echo "$PARITY < 95" | bc -l 2>/dev/null || echo "1") )); then
            echo "::warning::Findings parity ${PARITY}% is below 95% threshold for ${{ matrix.format }}"
            # Don't fail the build yet - this is initial implementation
            # exit 1
          fi

  summary:
    runs-on: ubuntu-22.04
    needs: interop-tests
    if: always()

    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./all-results

      - name: Generate summary
        run: |
          echo "## Interop Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Format | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|--------|--------|" >> $GITHUB_STEP_SUMMARY

          for format in cyclonedx spdx; do
            if [ -f "./all-results/interop-test-results-${format}/parity-report-${format}.json" ]; then
              PARITY=$(jq -r '.parityPercent // 0' "./all-results/interop-test-results-${format}/parity-report-${format}.json")
              if (( $(echo "$PARITY >= 95" | bc -l 2>/dev/null || echo "0") )); then
                STATUS="✅ Pass (${PARITY}%)"
              else
                STATUS="⚠️ Below threshold (${PARITY}%)"
              fi
            else
              STATUS="❌ No results"
            fi
            echo "| ${format} | ${STATUS} |" >> $GITHUB_STEP_SUMMARY
          done
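The "Generate parity report" step above is still a stub that hard-codes parityPercent to 0. A sketch of how the figure could be computed, assuming the interop tests emit StellaOps and reference-scanner findings as JSON arrays of {purl, vulnId} objects (hypothetical file names and shape; only the output format matches what "Check parity threshold" consumes):

#!/usr/bin/env python3
"""Sketch of the parity-report generation the workflow still stubs out.

Assumed inputs (hypothetical names): results/stellaops-findings.json and
results/reference-findings.json, each a JSON array of objects with "purl"
and "vulnId" fields. Output matches the parity-report-<format>.json shape
read by the "Check parity threshold" step.
"""
import json
import sys

def load_pairs(path: str) -> set[tuple[str, str]]:
    with open(path) as fh:
        return {(item["purl"], item["vulnId"]) for item in json.load(fh)}

def main(ours_path: str, reference_path: str, fmt: str, out_path: str) -> None:
    ours = load_pairs(ours_path)
    reference = load_pairs(reference_path)
    # Parity here means: share of reference findings that StellaOps also reports.
    parity = 100.0 if not reference else 100.0 * len(ours & reference) / len(reference)
    with open(out_path, "w") as fh:
        json.dump({"format": fmt, "parityPercent": round(parity, 2)}, fh, indent=2)
    print(f"Parity for {fmt}: {parity:.2f}%")

if __name__ == "__main__":
    main(*sys.argv[1:5])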
81  .gitea/workflows/ledger-oas-ci.yml  Normal file
@@ -0,0 +1,81 @@
name: Ledger OpenAPI CI

on:
  workflow_dispatch:
  push:
    branches: [main]
    paths:
      - 'api/ledger/**'
      - 'ops/devops/ledger/**'
  pull_request:
    paths:
      - 'api/ledger/**'

jobs:
  validate-oas:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install tools
        run: |
          npm install -g @stoplight/spectral-cli
          npm install -g @openapitools/openapi-generator-cli

      - name: Validate OpenAPI spec
        run: |
          chmod +x ops/devops/ledger/validate-oas.sh
          ops/devops/ledger/validate-oas.sh

      - name: Upload validation report
        uses: actions/upload-artifact@v4
        with:
          name: ledger-oas-validation-${{ github.run_number }}
          path: |
            out/ledger/oas/lint-report.json
            out/ledger/oas/validation-report.txt
            out/ledger/oas/spec-summary.json
          if-no-files-found: warn

  check-wellknown:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check .well-known/openapi structure
        run: |
          # Validate .well-known structure if exists
          if [ -d ".well-known" ]; then
            echo "Checking .well-known/openapi..."
            if [ -f ".well-known/openapi.json" ]; then
              python3 -c "import json; json.load(open('.well-known/openapi.json'))"
              echo ".well-known/openapi.json is valid JSON"
            fi
          else
            echo "[info] .well-known directory not present (OK for dev)"
          fi

  deprecation-check:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check deprecation policy
        run: |
          if [ -f "ops/devops/ledger/deprecation-policy.yaml" ]; then
            echo "Validating deprecation policy..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/ledger/deprecation-policy.yaml'))"
            echo "Deprecation policy is valid"
          else
            echo "[info] No deprecation policy yet (OK for initial setup)"
          fi
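The actual validation lives in ops/devops/ledger/validate-oas.sh, which is not part of this diff. A sketch of how the uploaded lint-report.json could be gated, assuming it is Spectral's JSON output (an array of results whose numeric severity is 0 for errors); that output shape is an assumption about the script, not something the workflow guarantees:

#!/usr/bin/env python3
"""Sketch of gating on the Spectral lint report uploaded above.

Assumption: out/ledger/oas/lint-report.json is Spectral's JSON output,
i.e. an array of result objects whose numeric "severity" is 0 for errors.
"""
import json
import sys

def main(report_path: str = "out/ledger/oas/lint-report.json") -> int:
    with open(report_path) as fh:
        results = json.load(fh)
    errors = [r for r in results if r.get("severity") == 0]
    for err in errors:
        print(f"::error::{err.get('code')}: {err.get('message')}")
    print(f"{len(errors)} error(s), {len(results) - len(errors)} other finding(s)")
    return 1 if errors else 0

if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:2]))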
101  .gitea/workflows/ledger-packs-ci.yml  Normal file
@@ -0,0 +1,101 @@
name: Ledger Packs CI

on:
  workflow_dispatch:
    inputs:
      snapshot_id:
        description: 'Snapshot ID (leave empty for auto)'
        required: false
        default: ''
      sign:
        description: 'Sign pack (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'ops/devops/ledger/**'

jobs:
  build-pack:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ] || [ "${{ github.event.inputs.sign }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Build pack
        run: |
          chmod +x ops/devops/ledger/build-pack.sh
          SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
          if [ -z "$SNAPSHOT_ID" ]; then
            SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
          fi

          SIGN_FLAG=""
          if [ "${{ github.event.inputs.sign }}" = "1" ] || [ -n "${COSIGN_PRIVATE_KEY_B64}" ]; then
            SIGN_FLAG="--sign"
          fi

          SNAPSHOT_ID="$SNAPSHOT_ID" ops/devops/ledger/build-pack.sh $SIGN_FLAG

      - name: Verify checksums
        run: |
          cd out/ledger/packs
          for f in *.SHA256SUMS; do
            if [ -f "$f" ]; then
              sha256sum -c "$f"
            fi
          done

      - name: Upload pack
        uses: actions/upload-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: |
            out/ledger/packs/*.pack.tar.gz
            out/ledger/packs/*.SHA256SUMS
            out/ledger/packs/*.dsse.json
          if-no-files-found: warn
          retention-days: 30

  verify-pack:
    runs-on: ubuntu-22.04
    needs: build-pack
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download pack
        uses: actions/download-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: out/ledger/packs/

      - name: Verify pack structure
        run: |
          cd out/ledger/packs
          for pack in *.pack.tar.gz; do
            if [ -f "$pack" ]; then
              echo "Verifying $pack..."
              tar -tzf "$pack" | head -20

              # Extract and check manifest
              tar -xzf "$pack" -C /tmp manifest.json 2>/dev/null || true
              if [ -f /tmp/manifest.json ]; then
                python3 -c "import json; json.load(open('/tmp/manifest.json'))"
                echo "Pack manifest is valid JSON"
              fi
            fi
          done
188  .gitea/workflows/lighthouse-ci.yml  Normal file
@@ -0,0 +1,188 @@
# .gitea/workflows/lighthouse-ci.yml
# Lighthouse CI for performance and accessibility testing of the StellaOps Web UI

name: Lighthouse CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Web/StellaOps.Web/**'
      - '.gitea/workflows/lighthouse-ci.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Web/StellaOps.Web/**'
  schedule:
    # Run weekly on Sunday at 2 AM UTC
    - cron: '0 2 * * 0'
  workflow_dispatch:

env:
  NODE_VERSION: '20'
  LHCI_BUILD_CONTEXT__CURRENT_BRANCH: ${{ github.head_ref || github.ref_name }}
  LHCI_BUILD_CONTEXT__COMMIT_SHA: ${{ github.sha }}

jobs:
  lighthouse:
    name: Lighthouse Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Install Lighthouse CI
        run: npm install -g @lhci/cli@0.13.x

      - name: Run Lighthouse CI
        run: |
          lhci autorun \
            --collect.staticDistDir=./dist/stella-ops-web/browser \
            --collect.numberOfRuns=3 \
            --assert.preset=lighthouse:recommended \
            --assert.assertions.categories:performance=off \
            --assert.assertions.categories:accessibility=off \
            --upload.target=filesystem \
            --upload.outputDir=./lighthouse-results

      - name: Evaluate Lighthouse Results
        id: lhci-results
        run: |
          # Parse the latest Lighthouse report
          REPORT=$(ls -t lighthouse-results/*.json | head -1)

          if [ -f "$REPORT" ]; then
            PERF=$(jq '.categories.performance.score * 100' "$REPORT" | cut -d. -f1)
            A11Y=$(jq '.categories.accessibility.score * 100' "$REPORT" | cut -d. -f1)
            BP=$(jq '.categories["best-practices"].score * 100' "$REPORT" | cut -d. -f1)
            SEO=$(jq '.categories.seo.score * 100' "$REPORT" | cut -d. -f1)

            echo "performance=$PERF" >> $GITHUB_OUTPUT
            echo "accessibility=$A11Y" >> $GITHUB_OUTPUT
            echo "best-practices=$BP" >> $GITHUB_OUTPUT
            echo "seo=$SEO" >> $GITHUB_OUTPUT

            echo "## Lighthouse Results" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "| Category | Score | Threshold | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|----------|-------|-----------|--------|" >> $GITHUB_STEP_SUMMARY

            # Performance: target >= 90
            if [ "$PERF" -ge 90 ]; then
              echo "| Performance | $PERF | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Performance | $PERF | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Accessibility: target >= 95
            if [ "$A11Y" -ge 95 ]; then
              echo "| Accessibility | $A11Y | >= 95 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Accessibility | $A11Y | >= 95 | :x: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Best Practices: target >= 90
            if [ "$BP" -ge 90 ]; then
              echo "| Best Practices | $BP | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Best Practices | $BP | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # SEO: target >= 90
            if [ "$SEO" -ge 90 ]; then
              echo "| SEO | $SEO | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| SEO | $SEO | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi
          fi

      - name: Check Quality Gates
        run: |
          PERF=${{ steps.lhci-results.outputs.performance }}
          A11Y=${{ steps.lhci-results.outputs.accessibility }}

          FAILED=0

          # Performance gate (warning only, not blocking)
          if [ "$PERF" -lt 90 ]; then
            echo "::warning::Performance score ($PERF) is below target (90)"
          fi

          # Accessibility gate (blocking)
          if [ "$A11Y" -lt 95 ]; then
            echo "::error::Accessibility score ($A11Y) is below required threshold (95)"
            FAILED=1
          fi

          if [ "$FAILED" -eq 1 ]; then
            exit 1
          fi

      - name: Upload Lighthouse Reports
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: lighthouse-reports
          path: src/Web/StellaOps.Web/lighthouse-results/
          retention-days: 30

  axe-accessibility:
    name: Axe Accessibility Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Install Playwright browsers
        run: npx playwright install --with-deps chromium

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Start preview server
        run: |
          npx serve -s dist/stella-ops-web/browser -l 4200 &
          sleep 5

      - name: Run Axe accessibility tests
        run: |
          npm run test:a11y || true

      - name: Upload Axe results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: axe-accessibility-results
          path: src/Web/StellaOps.Web/test-results/
          retention-days: 30
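The "Evaluate Lighthouse Results" and "Check Quality Gates" steps above parse the report with jq and shell arithmetic. The same logic, sketched in Python only as a reference; the category keys follow the standard Lighthouse report format and the thresholds mirror the workflow's gates (performance >= 90 as a warning, accessibility >= 95 as the only blocking gate):

#!/usr/bin/env python3
"""Sketch mirroring the Lighthouse evaluation and quality-gate steps above."""
import glob
import json
import os
import sys

# Same targets the workflow prints into the step summary.
THRESHOLDS = {"performance": 90, "accessibility": 95, "best-practices": 90, "seo": 90}

def latest_report(results_dir: str = "lighthouse-results") -> str:
    # The workflow uses `ls -t ... | head -1`; newest-by-mtime is the same idea.
    reports = glob.glob(os.path.join(results_dir, "*.json"))
    return max(reports, key=os.path.getmtime)

def main() -> int:
    with open(latest_report()) as fh:
        categories = json.load(fh)["categories"]
    failed = False
    for name, minimum in THRESHOLDS.items():
        score = round(categories[name]["score"] * 100)
        ok = score >= minimum
        print(f"{name}: {score} (target >= {minimum}) {'OK' if ok else 'BELOW TARGET'}")
        if name == "accessibility" and not ok:
            failed = True  # only the accessibility gate is blocking
    return 1 if failed else 0

if __name__ == "__main__":
    sys.exit(main())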
83  .gitea/workflows/lnm-migration-ci.yml  Normal file
@@ -0,0 +1,83 @@
name: LNM Migration CI

on:
  workflow_dispatch:
    inputs:
      run_staging:
        description: 'Run staging backfill (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
      - 'ops/devops/lnm/**'

jobs:
  build-runner:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${{ secrets.COSIGN_PRIVATE_KEY_B64 }}" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
        env:
          COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}

      - name: Build and package runner
        run: |
          chmod +x ops/devops/lnm/package-runner.sh
          ops/devops/lnm/package-runner.sh

      - name: Verify checksums
        run: |
          cd out/lnm
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-migration-runner-${{ github.run_number }}
          path: |
            out/lnm/lnm-migration-runner.tar.gz
            out/lnm/lnm-migration-runner.manifest.json
            out/lnm/lnm-migration-runner.dsse.json
            out/lnm/SHA256SUMS
          if-no-files-found: warn

  validate-metrics:
    runs-on: ubuntu-22.04
    needs: build-runner
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate monitoring config
        run: |
          # Validate alert rules syntax
          if [ -f "ops/devops/lnm/alerts/lnm-alerts.yaml" ]; then
            echo "Validating alert rules..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/lnm/alerts/lnm-alerts.yaml'))"
          fi

          # Validate dashboard JSON
          if [ -f "ops/devops/lnm/dashboards/lnm-migration.json" ]; then
            echo "Validating dashboard..."
            python3 -c "import json; json.load(open('ops/devops/lnm/dashboards/lnm-migration.json'))"
          fi

          echo "Monitoring config validation complete"
@@ -18,6 +18,14 @@ jobs:
         with:
           fetch-depth: 0
+
+      - name: Fallback to dev signing key when secret is absent (non-prod only)
+        run: |
+          if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
+            echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
+            echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
+            echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
+          fi
       - name: Setup .NET
         uses: actions/setup-dotnet@v4
         with:
@@ -38,6 +46,16 @@ jobs:
         run: |
           scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz
+
+      - name: Prepare Export Center handoff (metadata + optional schedule)
+        run: |
+          scripts/mirror/export-center-wire.sh
+        env:
+          EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
+          EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
+          EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
+          EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
+          EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}
       - name: Upload signed artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -49,5 +67,8 @@ jobs:
             out/mirror/thin/tuf/
             out/mirror/thin/oci/
             out/mirror/thin/milestone.json
+            out/mirror/thin/export-center/export-center-handoff.json
+            out/mirror/thin/export-center/export-center-targets.json
+            out/mirror/thin/export-center/schedule-response.json
           if-no-files-found: error
           retention-days: 14
121  .gitea/workflows/offline-e2e.yml  Normal file
@@ -0,0 +1,121 @@
name: Offline E2E Tests

on:
  pull_request:
    paths:
      - 'src/AirGap/**'
      - 'src/Scanner/**'
      - 'src/__Tests/offline/**'
  schedule:
    - cron: '0 4 * * *' # Nightly at 4 AM UTC
  workflow_dispatch:

env:
  STELLAOPS_OFFLINE_MODE: 'true'
  DOTNET_VERSION: '10.0.100'

jobs:
  offline-e2e:
    runs-on: ubuntu-22.04

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Cache NuGet packages
        uses: actions/cache@v3
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Download offline bundle
        run: |
          # In real scenario, bundle would be pre-built and cached
          # For now, create minimal fixture structure
          mkdir -p ./offline-bundle/{images,feeds,policies,keys,certs,vex}
          echo '{}' > ./offline-bundle/manifest.json

      - name: Build in isolated environment
        run: |
          # Build offline test library
          dotnet build src/__Libraries/StellaOps.Testing.AirGap/StellaOps.Testing.AirGap.csproj

          # Build offline E2E tests
          dotnet build src/__Tests/offline/StellaOps.Offline.E2E.Tests/StellaOps.Offline.E2E.Tests.csproj

      - name: Run offline E2E tests with network isolation
        run: |
          # Set offline bundle path
          export STELLAOPS_OFFLINE_BUNDLE=$(pwd)/offline-bundle

          # Run tests
          dotnet test src/__Tests/offline/StellaOps.Offline.E2E.Tests \
            --logger "trx;LogFileName=offline-e2e.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory ./results

      - name: Verify no network calls
        if: always()
        run: |
          # Parse test output for any NetworkIsolationViolationException
          if [ -f "./results/offline-e2e.trx" ]; then
            if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
              echo "::error::Tests attempted network calls in offline mode!"
              exit 1
            else
              echo "✅ No network isolation violations detected"
            fi
          fi

      - name: Upload results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: offline-e2e-results
          path: ./results/

  verify-isolation:
    runs-on: ubuntu-22.04
    needs: offline-e2e
    if: always()

    steps:
      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: offline-e2e-results
          path: ./results

      - name: Generate summary
        run: |
          echo "## Offline E2E Test Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [ -f "./results/offline-e2e.trx" ]; then
            # Parse test results
            TOTAL=$(grep -o 'total="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
            PASSED=$(grep -o 'passed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")
            FAILED=$(grep -o 'failed="[0-9]*"' ./results/offline-e2e.trx | cut -d'"' -f2 || echo "0")

            echo "| Metric | Value |" >> $GITHUB_STEP_SUMMARY
            echo "|--------|-------|" >> $GITHUB_STEP_SUMMARY
            echo "| Total Tests | ${TOTAL} |" >> $GITHUB_STEP_SUMMARY
            echo "| Passed | ${PASSED} |" >> $GITHUB_STEP_SUMMARY
            echo "| Failed | ${FAILED} |" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY

            if grep -q "NetworkIsolationViolation" ./results/offline-e2e.trx; then
              echo "❌ **Network isolation was violated**" >> $GITHUB_STEP_SUMMARY
            else
              echo "✅ **Network isolation verified - no egress detected**" >> $GITHUB_STEP_SUMMARY
            fi
          else
            echo "⚠️ No test results found" >> $GITHUB_STEP_SUMMARY
          fi
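The summary job above extracts total/passed/failed by grepping the TRX text. A sketch of the same extraction against the TRX XML, assuming the standard VSTest schema (a ResultSummary/Counters element with total, passed, and failed attributes), which is slightly less brittle than pattern matching:

#!/usr/bin/env python3
"""Sketch of reading the counters the summary step greps out of offline-e2e.trx.

Assumes the standard VSTest TRX layout: a <ResultSummary> element containing
a <Counters total=... passed=... failed=... /> element.
"""
import sys
import xml.etree.ElementTree as ET

TRX_NS = "{http://microsoft.com/schemas/VisualStudio/TeamTest/2010}"

def counters(trx_path: str) -> dict[str, int]:
    root = ET.parse(trx_path).getroot()
    node = root.find(f"{TRX_NS}ResultSummary/{TRX_NS}Counters")
    if node is None:
        return {"total": 0, "passed": 0, "failed": 0}
    return {key: int(node.get(key, "0")) for key in ("total", "passed", "failed")}

if __name__ == "__main__":
    stats = counters(sys.argv[1] if len(sys.argv) > 1 else "./results/offline-e2e.trx")
    # Emit the same markdown rows the workflow writes into the step summary.
    print(f"| Total Tests | {stats['total']} |")
    print(f"| Passed | {stats['passed']} |")
    print(f"| Failed | {stats['failed']} |")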
186  .gitea/workflows/parity-tests.yml  Normal file
@@ -0,0 +1,186 @@
name: Parity Tests

# Parity testing workflow: compares StellaOps against competitor scanners
# (Syft, Grype, Trivy) on a standardized fixture set.
#
# Schedule: Nightly at 02:00 UTC; Weekly full run on Sunday 00:00 UTC
# NOT a PR gate - too slow and has external dependencies

on:
  schedule:
    # Nightly at 02:00 UTC (quick fixture set)
    - cron: '0 2 * * *'
    # Weekly on Sunday at 00:00 UTC (full fixture set)
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      fixture_set:
        description: 'Fixture set to use'
        required: false
        default: 'quick'
        type: choice
        options:
          - quick
          - full
      enable_drift_detection:
        description: 'Enable drift detection analysis'
        required: false
        default: 'true'
        type: boolean

env:
  DOTNET_VERSION: '10.0.x'
  SYFT_VERSION: '1.9.0'
  GRYPE_VERSION: '0.79.3'
  TRIVY_VERSION: '0.54.1'
  PARITY_RESULTS_PATH: 'bench/results/parity'

jobs:
  parity-tests:
    name: Competitor Parity Tests
    runs-on: ubuntu-latest
    timeout-minutes: 120

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Install Syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.SYFT_VERSION }}
          syft version

      - name: Install Grype
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v${{ env.GRYPE_VERSION }}
          grype version

      - name: Install Trivy
        run: |
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v${{ env.TRIVY_VERSION }}
          trivy --version

      - name: Determine fixture set
        id: fixtures
        run: |
          # Weekly runs use full fixture set
          if [[ "${{ github.event.schedule }}" == "0 0 * * 0" ]]; then
            echo "fixture_set=full" >> $GITHUB_OUTPUT
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            echo "fixture_set=${{ inputs.fixture_set }}" >> $GITHUB_OUTPUT
          else
            echo "fixture_set=quick" >> $GITHUB_OUTPUT
          fi

      - name: Build parity tests
        run: |
          dotnet build src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj -c Release

      - name: Run parity tests
        id: parity
        run: |
          mkdir -p ${{ env.PARITY_RESULTS_PATH }}
          RUN_ID=$(date -u +%Y%m%dT%H%M%SZ)
          echo "run_id=${RUN_ID}" >> $GITHUB_OUTPUT

          dotnet test src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=parity-results.trx" \
            --results-directory ${{ env.PARITY_RESULTS_PATH }} \
            -e PARITY_FIXTURE_SET=${{ steps.fixtures.outputs.fixture_set }} \
            -e PARITY_RUN_ID=${RUN_ID} \
            -e PARITY_OUTPUT_PATH=${{ env.PARITY_RESULTS_PATH }} \
            || true # Don't fail workflow on test failures

      - name: Store parity results
        run: |
          # Copy JSON results to time-series storage
          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
            echo "Parity results stored successfully"
            cat ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json | jq .
          else
            echo "Warning: No parity results file found"
          fi

      - name: Run drift detection
        if: ${{ github.event_name != 'workflow_dispatch' || inputs.enable_drift_detection == 'true' }}
        run: |
          # Analyze drift from historical results
          dotnet run --project src/__Tests/parity/StellaOps.Parity.Tests/StellaOps.Parity.Tests.csproj \
            --no-build \
            -- analyze-drift \
            --results-path ${{ env.PARITY_RESULTS_PATH }} \
            --threshold 0.05 \
            --trend-days 3 \
            || true

      - name: Upload parity results
        uses: actions/upload-artifact@v4
        with:
          name: parity-results-${{ steps.parity.outputs.run_id }}
          path: ${{ env.PARITY_RESULTS_PATH }}
          retention-days: 90

      - name: Export Prometheus metrics
        if: ${{ env.PROMETHEUS_PUSH_GATEWAY != '' }}
        env:
          PROMETHEUS_PUSH_GATEWAY: ${{ secrets.PROMETHEUS_PUSH_GATEWAY }}
        run: |
          # Push metrics to Prometheus Push Gateway if configured
          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt" ]; then
            curl -X POST \
              -H "Content-Type: text/plain" \
              --data-binary @${{ env.PARITY_RESULTS_PATH }}/parity-metrics.txt \
              "${PROMETHEUS_PUSH_GATEWAY}/metrics/job/parity_tests/instance/${{ steps.parity.outputs.run_id }}"
          fi

      - name: Generate comparison report
        run: |
          echo "## Parity Test Results - ${{ steps.parity.outputs.run_id }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Fixture Set:** ${{ steps.fixtures.outputs.fixture_set }}" >> $GITHUB_STEP_SUMMARY
          echo "**Competitor Versions:**" >> $GITHUB_STEP_SUMMARY
          echo "- Syft: ${{ env.SYFT_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "- Grype: ${{ env.GRYPE_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "- Trivy: ${{ env.TRIVY_VERSION }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          if [ -f "${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json" ]; then
            echo "### Metrics Summary" >> $GITHUB_STEP_SUMMARY
            jq -r '
              "| Metric | StellaOps | Grype | Trivy |",
              "|--------|-----------|-------|-------|",
              "| SBOM Packages | \(.sbomMetrics.stellaOpsPackageCount) | \(.sbomMetrics.syftPackageCount) | - |",
              "| Vulnerability Recall | \(.vulnMetrics.recall | . * 100 | round / 100)% | - | - |",
              "| Vulnerability F1 | \(.vulnMetrics.f1Score | . * 100 | round / 100)% | - | - |",
              "| Latency P95 (ms) | \(.latencyMetrics.stellaOpsP95Ms | round) | \(.latencyMetrics.grypeP95Ms | round) | \(.latencyMetrics.trivyP95Ms | round) |"
            ' ${{ env.PARITY_RESULTS_PATH }}/parity-${{ steps.parity.outputs.run_id }}.json >> $GITHUB_STEP_SUMMARY || echo "Could not parse results" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Alert on critical drift
        if: failure()
        uses: slackapi/slack-github-action@v1.25.0
        with:
          payload: |
            {
              "text": "⚠️ Parity test drift detected",
              "blocks": [
                {
                  "type": "section",
                  "text": {
                    "type": "mrkdwn",
                    "text": "*Parity Test Alert*\nDrift detected in competitor comparison metrics.\n<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View Results>"
                  }
                }
              ]
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
        continue-on-error: true
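The "Run drift detection" step hands off to an analyze-drift command with --threshold 0.05 and --trend-days 3; that command's behaviour is not shown in this diff. A sketch of one plausible reading of those flags, assuming results are stored as parity-<runid>.json files under bench/results/parity and expose vulnMetrics.recall (the metric choice and the averaging window are assumptions):

#!/usr/bin/env python3
"""Sketch of a drift analysis consistent with --threshold 0.05 / --trend-days 3.

Assumptions: run ids are UTC timestamps (so lexical sort is chronological),
and drift is the gap between the newest run and the mean of the previous
three runs' recall.
"""
import glob
import json
import sys

def recall(path: str) -> float:
    with open(path) as fh:
        return float(json.load(fh)["vulnMetrics"]["recall"])

def main(results_path: str = "bench/results/parity", threshold: float = 0.05, trend: int = 3) -> int:
    runs = sorted(glob.glob(f"{results_path}/parity-*.json"))
    if len(runs) < 2:
        print("Not enough history for drift analysis.")
        return 0
    history = [recall(p) for p in runs[-(trend + 1):-1]]
    baseline = sum(history) / len(history)
    current = recall(runs[-1])
    drift = baseline - current  # positive drift means recall got worse
    print(f"baseline={baseline:.4f} current={current:.4f} drift={drift:+.4f}")
    if drift > threshold:
        print(f"::error::Recall drifted by {drift:.4f} (> {threshold})")
        return 1
    return 0

if __name__ == "__main__":
    sys.exit(main(*sys.argv[1:2]))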
@@ -43,7 +43,7 @@ jobs:
         with:
           path: |
             ~/.nuget/packages
-            local-nugets/packages
+            .nuget/packages
           key: policy-lint-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

       - name: Restore CLI

@@ -47,7 +47,7 @@ jobs:
         with:
           path: |
             ~/.nuget/packages
-            local-nugets/packages
+            .nuget/packages
           key: policy-sim-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

       - name: Restore CLI
306  .gitea/workflows/reachability-bench.yaml  Normal file
@@ -0,0 +1,306 @@
name: Reachability Benchmark

# Sprint: SPRINT_3500_0003_0001
# Task: CORPUS-009 - Create Gitea workflow for reachability benchmark
# Task: CORPUS-010 - Configure nightly + per-PR benchmark runs

on:
  workflow_dispatch:
    inputs:
      baseline_version:
        description: 'Baseline version to compare against'
        required: false
        default: 'latest'
      verbose:
        description: 'Enable verbose output'
        required: false
        type: boolean
        default: false
  push:
    branches: [ main ]
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
      - '.gitea/workflows/reachability-bench.yaml'
  pull_request:
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
  schedule:
    # Nightly at 02:00 UTC
    - cron: '0 2 * * *'

jobs:
  benchmark:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_OFFLINE: 'true'
      STELLAOPS_DETERMINISTIC: 'true'
    outputs:
      precision: ${{ steps.metrics.outputs.precision }}
      recall: ${{ steps.metrics.outputs.recall }}
      f1: ${{ steps.metrics.outputs.f1 }}
      pr_auc: ${{ steps.metrics.outputs.pr_auc }}
      regression: ${{ steps.compare.outputs.regression }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore benchmark project
        run: |
          dotnet restore src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            --configfile nuget.config

      - name: Build benchmark project
        run: |
          dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-restore

      - name: Validate corpus integrity
        run: |
          echo "::group::Validating corpus index"
          if [ ! -f datasets/reachability/corpus.json ]; then
            echo "::error::corpus.json not found"
            exit 1
          fi
          python3 -c "import json; data = json.load(open('datasets/reachability/corpus.json')); print(f'Corpus contains {len(data.get(\"samples\", []))} samples')"
          echo "::endgroup::"

      - name: Run benchmark
        id: benchmark
        run: |
          echo "::group::Running reachability benchmark"
          mkdir -p bench/results

          # Run the corpus benchmark
          dotnet run \
            --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-build \
            -- corpus run \
            --corpus datasets/reachability/corpus.json \
            --output bench/results/benchmark-${{ github.sha }}.json \
            --format json \
            ${{ inputs.verbose == 'true' && '--verbose' || '' }}

          echo "::endgroup::"

      - name: Extract metrics
        id: metrics
        run: |
          echo "::group::Extracting metrics"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          if [ -f "$RESULT_FILE" ]; then
            PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
            RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
            F1=$(jq -r '.metrics.f1 // 0' "$RESULT_FILE")
            PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

            echo "precision=$PRECISION" >> $GITHUB_OUTPUT
            echo "recall=$RECALL" >> $GITHUB_OUTPUT
            echo "f1=$F1" >> $GITHUB_OUTPUT
            echo "pr_auc=$PR_AUC" >> $GITHUB_OUTPUT

            echo "Precision: $PRECISION"
            echo "Recall: $RECALL"
            echo "F1: $F1"
            echo "PR-AUC: $PR_AUC"
          else
            echo "::error::Benchmark result file not found"
            exit 1
          fi
          echo "::endgroup::"

      - name: Get baseline
        id: baseline
        run: |
          echo "::group::Loading baseline"
          BASELINE_VERSION="${{ inputs.baseline_version || 'latest' }}"

          if [ "$BASELINE_VERSION" = "latest" ]; then
            BASELINE_FILE=$(ls -t bench/baselines/*.json 2>/dev/null | head -1)
          else
            BASELINE_FILE="bench/baselines/$BASELINE_VERSION.json"
          fi

          if [ -f "$BASELINE_FILE" ]; then
            echo "baseline_file=$BASELINE_FILE" >> $GITHUB_OUTPUT
            echo "Using baseline: $BASELINE_FILE"
          else
            echo "::warning::No baseline found, skipping comparison"
            echo "baseline_file=" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Compare to baseline
        id: compare
        if: steps.baseline.outputs.baseline_file != ''
        run: |
          echo "::group::Comparing to baseline"
          BASELINE_FILE="${{ steps.baseline.outputs.baseline_file }}"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          # Extract baseline metrics
          BASELINE_PRECISION=$(jq -r '.metrics.precision // 0' "$BASELINE_FILE")
          BASELINE_RECALL=$(jq -r '.metrics.recall // 0' "$BASELINE_FILE")
          BASELINE_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$BASELINE_FILE")

          # Extract current metrics
          CURRENT_PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
          CURRENT_RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
          CURRENT_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

          # Calculate deltas
          PRECISION_DELTA=$(echo "$CURRENT_PRECISION - $BASELINE_PRECISION" | bc -l)
          RECALL_DELTA=$(echo "$CURRENT_RECALL - $BASELINE_RECALL" | bc -l)
          PR_AUC_DELTA=$(echo "$CURRENT_PR_AUC - $BASELINE_PR_AUC" | bc -l)

          echo "Precision delta: $PRECISION_DELTA"
          echo "Recall delta: $RECALL_DELTA"
          echo "PR-AUC delta: $PR_AUC_DELTA"

          # Check for regression (PR-AUC drop > 2%)
          REGRESSION_THRESHOLD=-0.02
          if (( $(echo "$PR_AUC_DELTA < $REGRESSION_THRESHOLD" | bc -l) )); then
            echo "::error::PR-AUC regression detected: $PR_AUC_DELTA (threshold: $REGRESSION_THRESHOLD)"
            echo "regression=true" >> $GITHUB_OUTPUT
          else
            echo "regression=false" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Generate markdown report
        run: |
          echo "::group::Generating report"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
          REPORT_FILE="bench/results/benchmark-${{ github.sha }}.md"

          cat > "$REPORT_FILE" << 'EOF'
          # Reachability Benchmark Report

          **Commit:** ${{ github.sha }}
          **Run:** ${{ github.run_number }}
          **Date:** $(date -u +"%Y-%m-%dT%H:%M:%SZ")

          ## Metrics

          | Metric | Value |
          |--------|-------|
          | Precision | ${{ steps.metrics.outputs.precision }} |
          | Recall | ${{ steps.metrics.outputs.recall }} |
          | F1 Score | ${{ steps.metrics.outputs.f1 }} |
          | PR-AUC | ${{ steps.metrics.outputs.pr_auc }} |

          ## Comparison

          ${{ steps.compare.outputs.regression == 'true' && '⚠️ **REGRESSION DETECTED**' || '✅ No regression' }}
          EOF

          echo "Report generated: $REPORT_FILE"
          echo "::endgroup::"

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: |
            bench/results/benchmark-${{ github.sha }}.json
            bench/results/benchmark-${{ github.sha }}.md
          retention-days: 90

      - name: Fail on regression
        if: steps.compare.outputs.regression == 'true' && github.event_name == 'pull_request'
        run: |
          echo "::error::Benchmark regression detected. PR-AUC dropped below threshold."
          exit 1

  update-baseline:
    needs: benchmark
    if: github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.benchmark.outputs.regression != 'true'
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: bench/results/

      - name: Update baseline (nightly only)
        if: github.event_name == 'schedule'
        run: |
          DATE=$(date +%Y%m%d)
          cp bench/results/benchmark-${{ github.sha }}.json bench/baselines/baseline-$DATE.json
          echo "Updated baseline to baseline-$DATE.json"

  notify-pr:
    needs: benchmark
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-22.04
    permissions:
      pull-requests: write
    steps:
      - name: Comment on PR
        uses: actions/github-script@v7
        with:
          script: |
            const precision = '${{ needs.benchmark.outputs.precision }}';
            const recall = '${{ needs.benchmark.outputs.recall }}';
            const f1 = '${{ needs.benchmark.outputs.f1 }}';
            const prAuc = '${{ needs.benchmark.outputs.pr_auc }}';
            const regression = '${{ needs.benchmark.outputs.regression }}' === 'true';

            const status = regression ? '⚠️ REGRESSION' : '✅ PASS';

            const body = `## Reachability Benchmark Results ${status}

            | Metric | Value |
            |--------|-------|
            | Precision | ${precision} |
            | Recall | ${recall} |
            | F1 Score | ${f1} |
            | PR-AUC | ${prAuc} |

            ${regression ? '### ⚠️ Regression Detected\nPR-AUC dropped below threshold. Please review changes.' : ''}

            <details>
            <summary>Details</summary>

            - Commit: \`${{ github.sha }}\`
            - Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

            </details>`;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });
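The "Compare to baseline" step above computes its deltas with jq and bc. A compact Python equivalent of the same comparison, using the workflow's 0.02 PR-AUC regression threshold; kept only as a reference for the intended logic:

#!/usr/bin/env python3
"""Compact equivalent of the jq/bc logic in the "Compare to baseline" step:
load two benchmark result files and flag a regression when PR-AUC drops by
more than 0.02 against the baseline.
"""
import json
import sys

REGRESSION_THRESHOLD = -0.02  # same threshold the workflow uses

def metrics(path: str) -> dict[str, float]:
    with open(path) as fh:
        data = json.load(fh).get("metrics", {})
    return {key: float(data.get(key, 0)) for key in ("precision", "recall", "pr_auc")}

def main(baseline_path: str, result_path: str) -> int:
    baseline, current = metrics(baseline_path), metrics(result_path)
    deltas = {key: current[key] - baseline[key] for key in baseline}
    for key, delta in deltas.items():
        print(f"{key} delta: {delta:+.4f}")
    if deltas["pr_auc"] < REGRESSION_THRESHOLD:
        print(f"::error::PR-AUC regression detected: {deltas['pr_auc']:+.4f}")
        return 1
    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1], sys.argv[2]))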
267  .gitea/workflows/reachability-corpus-ci.yml  Normal file
@@ -0,0 +1,267 @@
name: Reachability Corpus Validation

on:
  workflow_dispatch:
  push:
    branches: [ main ]
    paths:
      - 'src/__Tests/reachability/corpus/**'
      - 'src/__Tests/reachability/fixtures/**'
      - 'src/__Tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'
  pull_request:
    paths:
      - 'src/__Tests/reachability/corpus/**'
      - 'src/__Tests/reachability/fixtures/**'
      - 'src/__Tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'

jobs:
  validate-corpus:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Verify corpus manifest integrity
        run: |
          echo "Verifying corpus manifest..."
          cd src/__Tests/reachability/corpus
          if [ ! -f manifest.json ]; then
            echo "::error::Corpus manifest.json not found"
            exit 1
          fi
          echo "Manifest exists, checking JSON validity..."
          python3 -c "import json; json.load(open('manifest.json'))"
          echo "Manifest is valid JSON"

      - name: Verify reachbench index integrity
        run: |
          echo "Verifying reachbench fixtures..."
          cd src/__Tests/reachability/fixtures/reachbench-2025-expanded
          if [ ! -f INDEX.json ]; then
            echo "::error::Reachbench INDEX.json not found"
            exit 1
          fi
          echo "INDEX exists, checking JSON validity..."
          python3 -c "import json; json.load(open('INDEX.json'))"
          echo "INDEX is valid JSON"

      - name: Restore test project
        run: dotnet restore src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj --configfile nuget.config

      - name: Build test project
        run: dotnet build src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj -c Release --no-restore

      - name: Run corpus fixture tests
        run: |
          dotnet test src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=corpus-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~CorpusFixtureTests"

      - name: Run reachbench fixture tests
        run: |
          dotnet test src/__Tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=reachbench-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~ReachbenchFixtureTests"

      - name: Verify deterministic hashes
        run: |
          echo "Verifying SHA-256 hashes in corpus manifest..."
          chmod +x scripts/reachability/verify_corpus_hashes.sh || true
          if [ -f scripts/reachability/verify_corpus_hashes.sh ]; then
            scripts/reachability/verify_corpus_hashes.sh
          else
            echo "Hash verification script not found, using inline verification..."
            cd src/__Tests/reachability/corpus
            python3 << 'EOF'
          import json
          import hashlib
          import sys
          import os

          with open('manifest.json') as f:
              manifest = json.load(f)

          errors = []
          for entry in manifest:
              case_id = entry['id']
              lang = entry['language']
              case_dir = os.path.join(lang, case_id)
              for filename, expected_hash in entry['files'].items():
                  filepath = os.path.join(case_dir, filename)
                  if not os.path.exists(filepath):
                      errors.append(f"{case_id}: missing {filename}")
                      continue
                  with open(filepath, 'rb') as f:
                      actual_hash = hashlib.sha256(f.read()).hexdigest()
                  if actual_hash != expected_hash:
                      errors.append(f"{case_id}: {filename} hash mismatch (expected {expected_hash}, got {actual_hash})")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print(f"All {len(manifest)} corpus entries verified")
          EOF
          fi

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: corpus-test-results-${{ github.run_number }}
          path: ./TestResults/*.trx
          retention-days: 14

  validate-ground-truths:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate ground-truth schema version
        run: |
          echo "Validating ground-truth files..."
          cd src/__Tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          EXPECTED_SCHEMA = "reachbench.reachgraph.truth/v1"
          ALLOWED_VARIANTS = {"reachable", "unreachable"}
          errors = []

          # Validate corpus ground-truths
          corpus_manifest = 'corpus/manifest.json'
          if os.path.exists(corpus_manifest):
              with open(corpus_manifest) as f:
                  manifest = json.load(f)
              for entry in manifest:
                  case_id = entry['id']
                  lang = entry['language']
                  truth_path = os.path.join('corpus', lang, case_id, 'ground-truth.json')
                  if not os.path.exists(truth_path):
                      errors.append(f"corpus/{case_id}: missing ground-truth.json")
                      continue
                  with open(truth_path) as f:
                      truth = json.load(f)
                  if truth.get('schema_version') != EXPECTED_SCHEMA:
                      errors.append(f"corpus/{case_id}: wrong schema_version")
                  if truth.get('variant') not in ALLOWED_VARIANTS:
                      errors.append(f"corpus/{case_id}: invalid variant '{truth.get('variant')}'")
                  if not isinstance(truth.get('paths'), list):
                      errors.append(f"corpus/{case_id}: paths must be an array")

          # Validate reachbench ground-truths
          reachbench_index = 'fixtures/reachbench-2025-expanded/INDEX.json'
          if os.path.exists(reachbench_index):
              with open(reachbench_index) as f:
                  index = json.load(f)
              for case in index.get('cases', []):
                  case_id = case['id']
                  case_path = case.get('path', os.path.join('cases', case_id))
                  for variant in ['reachable', 'unreachable']:
                      truth_path = os.path.join('fixtures/reachbench-2025-expanded', case_path, 'images', variant, 'reachgraph.truth.json')
                      if not os.path.exists(truth_path):
                          errors.append(f"reachbench/{case_id}/{variant}: missing reachgraph.truth.json")
                          continue
                      with open(truth_path) as f:
                          truth = json.load(f)
                      if not truth.get('schema_version'):
                          errors.append(f"reachbench/{case_id}/{variant}: missing schema_version")
                      if not isinstance(truth.get('paths'), list):
                          errors.append(f"reachbench/{case_id}/{variant}: paths must be an array")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print("All ground-truth files validated successfully")
          EOF

  determinism-check:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    needs: validate-corpus
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Verify JSON determinism (sorted keys, no trailing whitespace)
        run: |
          echo "Checking JSON determinism..."
          cd src/__Tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          def check_json_sorted(filepath):
              """Check if JSON has sorted keys (deterministic)."""
              with open(filepath) as f:
                  content = f.read()
              parsed = json.loads(content)
              reserialized = json.dumps(parsed, sort_keys=True, indent=2)
              # Normalize line endings
              content_normalized = content.replace('\r\n', '\n').strip()
              reserialized_normalized = reserialized.strip()
              return content_normalized == reserialized_normalized

          errors = []
          json_files = []

          # Collect JSON files from corpus
          for root, dirs, files in os.walk('corpus'):
              for f in files:
                  if f.endswith('.json'):
                      json_files.append(os.path.join(root, f))
|
|
||||||
|
# Check determinism
|
||||||
|
non_deterministic = []
|
||||||
|
for filepath in json_files:
|
||||||
|
try:
|
||||||
|
if not check_json_sorted(filepath):
|
||||||
|
non_deterministic.append(filepath)
|
||||||
|
except json.JSONDecodeError as e:
|
||||||
|
errors.append(f"{filepath}: invalid JSON - {e}")
|
||||||
|
|
||||||
|
if non_deterministic:
|
||||||
|
print(f"::warning::Found {len(non_deterministic)} non-deterministic JSON files (keys not sorted or whitespace differs)")
|
||||||
|
for f in non_deterministic[:10]:
|
||||||
|
print(f" - {f}")
|
||||||
|
if len(non_deterministic) > 10:
|
||||||
|
print(f" ... and {len(non_deterministic) - 10} more")
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
for err in errors:
|
||||||
|
print(f"::error::{err}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print(f"Checked {len(json_files)} JSON files")
|
||||||
|
EOF
|
||||||
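The determinism check above accepts a fixture only when re-serializing it with sorted keys and two-space indentation reproduces the file (modulo line endings). A minimal local normalizer that rewrites fixtures into that canonical form could look like the sketch below; the script name and the in-place rewrite behaviour are illustrative assumptions, not part of the workflow.

# normalize_fixture.py - hypothetical helper, not part of the repository.
# Rewrites a JSON fixture into the canonical form the determinism check
# above compares against: sorted keys, 2-space indent, LF line endings.
import json
import sys

def normalize(path: str) -> bool:
    """Return True if the file had to be rewritten."""
    with open(path, encoding="utf-8") as f:
        original = f.read()
    canonical = json.dumps(json.loads(original), sort_keys=True, indent=2) + "\n"
    if original.replace("\r\n", "\n").strip() == canonical.strip():
        return False
    with open(path, "w", encoding="utf-8", newline="\n") as f:
        f.write(canonical)
    return True

if __name__ == "__main__":
    changed = [p for p in sys.argv[1:] if normalize(p)]
    print(f"normalized {len(changed)} of {len(sys.argv) - 1} files")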
.gitea/workflows/replay-verification.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
name: Replay Verification

on:
  pull_request:
    paths:
      - 'src/Scanner/**'
      - 'src/__Libraries/StellaOps.Canonicalization/**'
      - 'src/__Libraries/StellaOps.Replay/**'
      - 'src/__Libraries/StellaOps.Testing.Manifests/**'
      - 'src/__Tests/__Benchmarks/golden-corpus/**'

jobs:
  replay-verification:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'

      - name: Build CLI
        run: dotnet build src/Cli/StellaOps.Cli -c Release

      - name: Run replay verification on corpus
        run: |
          dotnet run --project src/Cli/StellaOps.Cli -- replay batch \
            --corpus src/__Tests/__Benchmarks/golden-corpus/ \
            --output results/ \
            --verify-determinism \
            --fail-on-diff

      - name: Upload diff report
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: replay-diff-report
          path: results/diff-report.json
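The `replay batch` invocation above asks the CLI to re-run each corpus case and fail if any reproduced artifact differs. Conceptually that determinism check reduces to hashing two output trees and diffing the hash maps; a minimal illustrative sketch of that idea follows. It is not how StellaOps.Cli implements the command, only a way to reason about what `--verify-determinism --fail-on-diff` asserts.

# tree_diff.py - conceptual sketch of a determinism diff over two output
# directories; NOT the StellaOps.Cli implementation of `replay batch`.
import hashlib
import os
import sys

def hash_tree(root: str) -> dict:
    """Map each relative file path under root to its sha256 digest."""
    digests = {}
    for dirpath, _, filenames in os.walk(root):
        for name in sorted(filenames):
            path = os.path.join(dirpath, name)
            rel = os.path.relpath(path, root)
            with open(path, "rb") as f:
                digests[rel] = hashlib.sha256(f.read()).hexdigest()
    return digests

if __name__ == "__main__":
    first, second = hash_tree(sys.argv[1]), hash_tree(sys.argv[2])
    diffs = {p for p in first.keys() | second.keys() if first.get(p) != second.get(p)}
    for p in sorted(diffs):
        print(f"DIFF: {p}")
    sys.exit(1 if diffs else 0)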
.gitea/workflows/risk-bundle-ci.yml (new file, 198 lines)
@@ -0,0 +1,198 @@
name: Risk Bundle CI

on:
  push:
    branches: [ main ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'ops/devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
      - 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
      - 'ops/devops/risk-bundle/**'
      - '.gitea/workflows/risk-bundle-ci.yml'
      - 'docs/modules/export-center/operations/risk-bundle-*.md'
  workflow_dispatch:
    inputs:
      include_osv:
        description: 'Include OSV providers (larger bundle)'
        type: boolean
        default: false
      publish_checksums:
        description: 'Publish checksums to artifact store'
        type: boolean
        default: true

jobs:
  risk-bundle-build:
    runs-on: ubuntu-22.04
    env:
      DOTNET_VERSION: '10.0.100'
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Export OpenSSL 1.1 shim for Mongo2Go
        run: scripts/enable-openssl11-shim.sh

      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true

      - name: Test RiskBundle unit tests
        run: |
          mkdir -p $ARTIFACT_DIR
          dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
            -c Release \
            --filter "FullyQualifiedName~RiskBundle" \
            --logger "trx;LogFileName=risk-bundle-tests.trx" \
            --results-directory $ARTIFACT_DIR

      - name: Build risk bundle (fixtures)
        run: |
          mkdir -p $BUNDLE_OUTPUT
          ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only

      - name: Verify bundle integrity
        run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"

      - name: Generate checksums
        run: |
          cd $BUNDLE_OUTPUT
          sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
          sha256sum manifest.json > manifest.json.sha256
          cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
          echo "Bundle checksums:"
          cat checksums.txt

      - name: Upload risk bundle artifacts
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: |
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
            ${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
            ${{ env.BUNDLE_OUTPUT }}/manifest.json
            ${{ env.BUNDLE_OUTPUT }}/checksums.txt
            ${{ env.ARTIFACT_DIR }}/*.trx

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: risk-bundle-test-results
          path: ${{ env.ARTIFACT_DIR }}/*.trx

  risk-bundle-offline-kit:
    runs-on: ubuntu-22.04
    needs: risk-bundle-build
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
      OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Package for offline kit
        run: |
          mkdir -p $OFFLINE_KIT_DIR/risk-bundles
          cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
          cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
          cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/

          # Create offline kit manifest entry
          cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
          {
            "component": "risk-bundle",
            "version": "$(date -u +%Y%m%d-%H%M%S)",
            "files": [
              {"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
              {"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
            ],
            "verification": {
              "checksums": "checksums.txt",
              "signature": "risk-bundle.tar.gz.sig"
            }
          }
          EOF

      - name: Verify offline kit structure
        run: |
          echo "Offline kit structure:"
          find $OFFLINE_KIT_DIR -type f
          echo ""
          echo "Checksum verification:"
          cd $OFFLINE_KIT_DIR/risk-bundles
          sha256sum -c checksums.txt

      - name: Upload offline kit
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-offline-kit
          path: ${{ env.OFFLINE_KIT_DIR }}

  publish-checksums:
    runs-on: ubuntu-22.04
    needs: risk-bundle-build
    if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download risk bundle artifacts
        uses: actions/download-artifact@v4
        with:
          name: risk-bundle-artifacts
          path: ${{ env.ARTIFACT_DIR }}

      - name: Publish checksums
        run: |
          echo "Publishing checksums for risk bundle..."
          CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
          mkdir -p $CHECKSUM_DIR
          cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
          cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/

          # Create latest symlink manifest
          cat > out/checksums/risk-bundle/latest.json <<EOF
          {
            "date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "path": "$(date -u +%Y-%m-%d)/checksums.txt",
            "manifest": "$(date -u +%Y-%m-%d)/manifest.json"
          }
          EOF

          echo "Checksums published to $CHECKSUM_DIR"
          cat $CHECKSUM_DIR/checksums.txt

      - name: Upload published checksums
        uses: actions/upload-artifact@v4
        with:
          name: risk-bundle-published-checksums
          path: out/checksums/risk-bundle/
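The offline-kit job relies on `sha256sum -c checksums.txt` to confirm the copied bundle artifacts. For hosts without coreutils, a rough Python equivalent of that verification step might look like the sketch below; the file layout matches the checksums.txt produced by the "Generate checksums" step, while the script itself is an assumption and not shipped in the repository.

# verify_checksums.py - illustrative stand-in for `sha256sum -c checksums.txt`.
# Assumes the "<hex digest>  <filename>" lines written by the step above.
import hashlib
import sys

def verify(checksums_path: str = "checksums.txt") -> int:
    failures = 0
    with open(checksums_path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            expected, name = line.split(maxsplit=1)
            h = hashlib.sha256()
            with open(name, "rb") as artifact:
                for chunk in iter(lambda: artifact.read(1 << 20), b""):
                    h.update(chunk)
            ok = h.hexdigest() == expected
            if not ok:
                failures += 1
            print(f"{name}: {'OK' if ok else 'FAILED'}")
    return failures

if __name__ == "__main__":
    sys.exit(1 if verify(*sys.argv[1:2]) else 0)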
.gitea/workflows/router-chaos.yml (new file, 306 lines)
@@ -0,0 +1,306 @@
# -----------------------------------------------------------------------------
# router-chaos.yml
# Sprint: SPRINT_5100_0005_0001_router_chaos_suite
# Task: T5 - CI Chaos Workflow
# Description: CI workflow for running router chaos tests.
# -----------------------------------------------------------------------------

name: Router Chaos Tests

on:
  schedule:
    - cron: '0 3 * * *'  # Nightly at 3 AM UTC
  workflow_dispatch:
    inputs:
      spike_multiplier:
        description: 'Load spike multiplier (e.g., 10, 50, 100)'
        default: '10'
        type: choice
        options:
          - '10'
          - '50'
          - '100'
      run_valkey_tests:
        description: 'Run Valkey failure injection tests'
        default: true
        type: boolean

env:
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  TZ: UTC
  ROUTER_URL: http://localhost:8080

jobs:
  load-tests:
    runs-on: ubuntu-22.04
    timeout-minutes: 30

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: test
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

      valkey:
        image: valkey/valkey:7-alpine
        ports:
          - 6379:6379
        options: >-
          --health-cmd "valkey-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Install k6
        run: |
          curl -sSL https://github.com/grafana/k6/releases/download/v0.54.0/k6-v0.54.0-linux-amd64.tar.gz | tar xz
          sudo mv k6-v0.54.0-linux-amd64/k6 /usr/local/bin/
          k6 version

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: chaos-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Build Router
        run: |
          dotnet restore src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj
          dotnet build src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release --no-restore

      - name: Start Router
        run: |
          dotnet run --project src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release --no-build &
          echo $! > router.pid

          # Wait for router to start
          for i in {1..30}; do
            if curl -s http://localhost:8080/health > /dev/null 2>&1; then
              echo "Router is ready"
              break
            fi
            echo "Waiting for router... ($i/30)"
            sleep 2
          done

      - name: Run k6 spike test
        id: k6
        run: |
          mkdir -p results

          k6 run src/__Tests/load/router/spike-test.js \
            -e ROUTER_URL=${{ env.ROUTER_URL }} \
            --out json=results/k6-results.json \
            --summary-export results/k6-summary.json \
            2>&1 | tee results/k6-output.txt

          # Check exit code
          if [ ${PIPESTATUS[0]} -ne 0 ]; then
            echo "k6_status=failed" >> $GITHUB_OUTPUT
          else
            echo "k6_status=passed" >> $GITHUB_OUTPUT
          fi

      - name: Upload k6 results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: k6-results-${{ github.run_id }}
          path: results/
          retention-days: 30

      - name: Stop Router
        if: always()
        run: |
          if [ -f router.pid ]; then
            kill $(cat router.pid) 2>/dev/null || true
          fi

  chaos-unit-tests:
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    needs: load-tests
    if: always()

    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: stellaops
          POSTGRES_PASSWORD: test
          POSTGRES_DB: stellaops_test
        ports:
          - 5432:5432

      valkey:
        image: valkey/valkey:7-alpine
        ports:
          - 6379:6379

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Build Chaos Tests
        run: |
          dotnet restore src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj
          dotnet build src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj -c Release --no-restore

      - name: Start Router for Tests
        run: |
          dotnet run --project src/Router/StellaOps.Router.WebService/StellaOps.Router.WebService.csproj -c Release &
          sleep 15  # Wait for startup

      - name: Run Chaos Unit Tests
        run: |
          dotnet test src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=chaos-results.trx" \
            --logger "console;verbosity=detailed" \
            --results-directory results \
            -- RunConfiguration.TestSessionTimeout=600000

      - name: Upload Test Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: chaos-test-results-${{ github.run_id }}
          path: results/
          retention-days: 30

  valkey-failure-tests:
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    needs: load-tests
    if: ${{ github.event.inputs.run_valkey_tests != 'false' }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Install Docker Compose
        run: |
          sudo apt-get update
          sudo apt-get install -y docker-compose

      - name: Run Valkey Failure Tests
        run: |
          dotnet test src/__Tests/chaos/StellaOps.Chaos.Router.Tests/StellaOps.Chaos.Router.Tests.csproj \
            -c Release \
            --filter "Category=Valkey" \
            --logger "trx;LogFileName=valkey-results.trx" \
            --results-directory results \
            -- RunConfiguration.TestSessionTimeout=600000

      - name: Upload Valkey Test Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: valkey-test-results-${{ github.run_id }}
          path: results/

  analyze-results:
    runs-on: ubuntu-22.04
    needs: [load-tests, chaos-unit-tests]
    if: always()

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download k6 Results
        uses: actions/download-artifact@v4
        with:
          name: k6-results-${{ github.run_id }}
          path: k6-results/

      - name: Download Chaos Test Results
        uses: actions/download-artifact@v4
        with:
          name: chaos-test-results-${{ github.run_id }}
          path: chaos-results/

      - name: Analyze Results
        id: analysis
        run: |
          mkdir -p analysis

          # Parse k6 summary
          if [ -f k6-results/k6-summary.json ]; then
            echo "=== k6 Test Summary ===" | tee analysis/summary.txt

            # Extract key metrics
            jq -r '.metrics | to_entries[] | "\(.key): \(.value)"' k6-results/k6-summary.json >> analysis/summary.txt 2>/dev/null || true
          fi

          # Check thresholds
          THRESHOLDS_PASSED=true
          if [ -f k6-results/k6-summary.json ]; then
            # Check if any threshold failed
            FAILED_THRESHOLDS=$(jq -r '.thresholds | to_entries[] | select(.value.ok == false) | .key' k6-results/k6-summary.json 2>/dev/null || echo "")

            if [ -n "$FAILED_THRESHOLDS" ]; then
              echo "Failed thresholds: $FAILED_THRESHOLDS"
              THRESHOLDS_PASSED=false
            fi
          fi

          echo "thresholds_passed=$THRESHOLDS_PASSED" >> $GITHUB_OUTPUT

      - name: Upload Analysis
        uses: actions/upload-artifact@v4
        with:
          name: chaos-analysis-${{ github.run_id }}
          path: analysis/

      - name: Create Summary
        run: |
          echo "## Router Chaos Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          echo "### Load Test Results" >> $GITHUB_STEP_SUMMARY
          if [ -f k6-results/k6-summary.json ]; then
            echo "- Total Requests: $(jq -r '.metrics.http_reqs.values.count // "N/A"' k6-results/k6-summary.json)" >> $GITHUB_STEP_SUMMARY
            echo "- Failed Rate: $(jq -r '.metrics.http_req_failed.values.rate // "N/A"' k6-results/k6-summary.json)" >> $GITHUB_STEP_SUMMARY
          else
            echo "- No k6 results found" >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Thresholds" >> $GITHUB_STEP_SUMMARY
          echo "- Status: ${{ steps.analysis.outputs.thresholds_passed == 'true' && 'PASSED' || 'FAILED' }}" >> $GITHUB_STEP_SUMMARY
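The analyze-results job uses jq to pull failed thresholds out of k6-summary.json. The same check can be done in a few lines of Python, as in the hedged sketch below; it assumes only what the workflow's own jq query assumes, namely a top-level "thresholds" object whose entries carry an "ok" boolean.

# check_k6_thresholds.py - mirrors the jq query
#   '.thresholds | to_entries[] | select(.value.ok == false) | .key'
# used in the analyze-results job above. Illustrative helper, not in the repo.
import json
import sys

def failed_thresholds(summary_path: str) -> list:
    with open(summary_path, encoding="utf-8") as f:
        summary = json.load(f)
    return [name for name, result in summary.get("thresholds", {}).items()
            if result.get("ok") is False]

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "results/k6-summary.json"
    failed = failed_thresholds(path)
    if failed:
        print("Failed thresholds: " + ", ".join(failed))
        sys.exit(1)
    print("All k6 thresholds passed")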
@@ -34,6 +34,22 @@ jobs:
       run: |
         RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj ruby-analyzer
+
+    - name: Package Native analyzer
+      run: |
+        RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Native/StellaOps.Scanner.Analyzers.Native.csproj native-analyzer
+
+    - name: Package Java analyzer
+      run: |
+        RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj java-analyzer
+
+    - name: Package DotNet analyzer
+      run: |
+        RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj dotnet-analyzer
+
+    - name: Package Node analyzer
+      run: |
+        RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj node-analyzer
+
     - name: Upload analyzer artifacts
       uses: actions/upload-artifact@v4
       with:

@@ -128,6 +128,6 @@
     - name: Run determinism tests
       run: |
         # Run scanner on same input twice, compare outputs
-        if [ -d "tests/fixtures/determinism" ]; then
+        if [ -d "src/__Tests/fixtures/determinism" ]; then
          dotnet test --filter "Category=Determinism" --verbosity normal
         fi
.gitea/workflows/schema-validation.yml (new file, 322 lines)
@@ -0,0 +1,322 @@
# Schema Validation CI Workflow
# Sprint: SPRINT_8200_0001_0003_sbom_schema_validation_ci
# Tasks: SCHEMA-8200-007 through SCHEMA-8200-011
#
# Purpose: Validate SBOM fixtures against official JSON schemas to detect
# schema drift before runtime. Fails CI if any fixture is invalid.

name: Schema Validation

on:
  pull_request:
    paths:
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
      - 'src/Scanner/**'
      - 'docs/schemas/**'
      - 'scripts/validate-*.sh'
      - '.gitea/workflows/schema-validation.yml'
  push:
    branches: [main]
    paths:
      - 'src/__Tests/__Benchmarks/golden-corpus/**'
      - 'src/Scanner/**'
      - 'docs/schemas/**'
      - 'scripts/validate-*.sh'

env:
  SBOM_UTILITY_VERSION: "0.16.0"

jobs:
  validate-cyclonedx:
    name: Validate CycloneDX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install sbom-utility
        run: |
          curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
          sudo mv sbom-utility /usr/local/bin/
          sbom-utility --version

      - name: Validate CycloneDX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/fixtures"
            "seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"
                  if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
                    echo "✅ PASS: $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "CycloneDX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED CycloneDX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No CycloneDX fixtures found to validate"
          fi

  validate-spdx:
    name: Validate SPDX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install SPDX tools
        run: |
          pip install spdx-tools
          pip install check-jsonschema

      - name: Validate SPDX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/spdx-jsonld-3.0.1.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/fixtures"
            "seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                # Check for SPDX markers
                if grep -qE '"spdxVersion"|"@context".*spdx' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"

                  # Try pyspdxtools first (semantic validation)
                  if pyspdxtools validate "$file" 2>&1; then
                    echo "✅ PASS (semantic): $file"
                    PASSED=$((PASSED + 1))
                  # Fall back to JSON schema validation
                  elif check-jsonschema --schemafile "$SCHEMA" "$file" 2>&1; then
                    echo "✅ PASS (schema): $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "SPDX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED SPDX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No SPDX fixtures found to validate"
          fi

  validate-vex:
    name: Validate OpenVEX Fixtures
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install ajv-cli
        run: npm install -g ajv-cli ajv-formats

      - name: Validate OpenVEX fixtures
        run: |
          set -e
          SCHEMA="docs/schemas/openvex-0.2.0.schema.json"
          FIXTURE_DIRS=(
            "src/__Tests/__Benchmarks/golden-corpus"
            "src/__Tests/__Benchmarks/vex-lattice"
            "src/__Tests/fixtures"
            "seed-data"
          )

          FOUND=0
          PASSED=0
          FAILED=0

          for dir in "${FIXTURE_DIRS[@]}"; do
            if [ -d "$dir" ]; then
              while IFS= read -r -d '' file; do
                # Check for OpenVEX markers
                if grep -qE '"@context".*openvex|"@type".*"https://openvex' "$file" 2>/dev/null; then
                  FOUND=$((FOUND + 1))
                  echo "::group::Validating: $file"
                  if ajv validate -s "$SCHEMA" -d "$file" --strict=false -c ajv-formats 2>&1; then
                    echo "✅ PASS: $file"
                    PASSED=$((PASSED + 1))
                  else
                    echo "❌ FAIL: $file"
                    FAILED=$((FAILED + 1))
                  fi
                  echo "::endgroup::"
                fi
              done < <(find "$dir" -name '*.json' -type f -print0 2>/dev/null || true)
            fi
          done

          echo "================================================"
          echo "OpenVEX Validation Summary"
          echo "================================================"
          echo "Found: $FOUND fixtures"
          echo "Passed: $PASSED"
          echo "Failed: $FAILED"
          echo "================================================"

          if [ "$FAILED" -gt 0 ]; then
            echo "::error::$FAILED OpenVEX fixtures failed validation"
            exit 1
          fi

          if [ "$FOUND" -eq 0 ]; then
            echo "::warning::No OpenVEX fixtures found to validate"
          fi

  # Negative testing: verify that invalid fixtures are correctly rejected
  validate-negative:
    name: Validate Negative Test Cases
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install sbom-utility
        run: |
          curl -sSfL "https://github.com/CycloneDX/sbom-utility/releases/download/v${SBOM_UTILITY_VERSION}/sbom-utility-v${SBOM_UTILITY_VERSION}-linux-amd64.tar.gz" | tar xz
          sudo mv sbom-utility /usr/local/bin/
          sbom-utility --version

      - name: Verify invalid fixtures fail validation
        run: |
          set -e
          SCHEMA="docs/schemas/cyclonedx-bom-1.6.schema.json"
          INVALID_DIR="src/__Tests/fixtures/invalid"

          if [ ! -d "$INVALID_DIR" ]; then
            echo "::warning::No invalid fixtures directory found at $INVALID_DIR"
            exit 0
          fi

          EXPECTED_FAILURES=0
          ACTUAL_FAILURES=0
          UNEXPECTED_PASSES=0

          while IFS= read -r -d '' file; do
            if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
              EXPECTED_FAILURES=$((EXPECTED_FAILURES + 1))
              echo "::group::Testing invalid fixture: $file"

              # This SHOULD fail - if it passes, that's an error
              if sbom-utility validate --input-file "$file" --schema "$SCHEMA" 2>&1; then
                echo "❌ UNEXPECTED PASS: $file (should have failed validation)"
                UNEXPECTED_PASSES=$((UNEXPECTED_PASSES + 1))
              else
                echo "✅ EXPECTED FAILURE: $file (correctly rejected)"
                ACTUAL_FAILURES=$((ACTUAL_FAILURES + 1))
              fi
              echo "::endgroup::"
            fi
          done < <(find "$INVALID_DIR" -name '*.json' -type f -print0 2>/dev/null || true)

          echo "================================================"
          echo "Negative Test Summary"
          echo "================================================"
          echo "Expected failures: $EXPECTED_FAILURES"
          echo "Actual failures: $ACTUAL_FAILURES"
          echo "Unexpected passes: $UNEXPECTED_PASSES"
          echo "================================================"

          if [ "$UNEXPECTED_PASSES" -gt 0 ]; then
            echo "::error::$UNEXPECTED_PASSES invalid fixtures passed validation unexpectedly"
            exit 1
          fi

          if [ "$EXPECTED_FAILURES" -eq 0 ]; then
            echo "::warning::No invalid CycloneDX fixtures found for negative testing"
          fi

          echo "✅ All invalid fixtures correctly rejected by schema validation"

  summary:
    name: Validation Summary
    runs-on: ubuntu-latest
    needs: [validate-cyclonedx, validate-spdx, validate-vex, validate-negative]
    if: always()
    steps:
      - name: Check results
        run: |
          echo "Schema Validation Results"
          echo "========================="
          echo "CycloneDX: ${{ needs.validate-cyclonedx.result }}"
          echo "SPDX: ${{ needs.validate-spdx.result }}"
          echo "OpenVEX: ${{ needs.validate-vex.result }}"
          echo "Negative Tests: ${{ needs.validate-negative.result }}"

          if [ "${{ needs.validate-cyclonedx.result }}" = "failure" ] || \
             [ "${{ needs.validate-spdx.result }}" = "failure" ] || \
             [ "${{ needs.validate-vex.result }}" = "failure" ] || \
             [ "${{ needs.validate-negative.result }}" = "failure" ]; then
            echo "::error::One or more schema validations failed"
            exit 1
          fi

          echo "✅ All schema validations passed or skipped"
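Each validation job above recognizes its fixtures by grepping for format markers: "bomFormat" for CycloneDX, "spdxVersion" or an SPDX "@context" for SPDX, and an openvex "@context" or "@type" for OpenVEX. A small Python sketch of the same classification, which can be handy for checking which validator will pick up a new fixture before pushing it, is shown below; it assumes only the markers the workflow itself greps for, and the script name and labels are illustrative.

# classify_fixture.py - sketch of the marker checks the schema-validation
# jobs perform with grep; the function name and labels are illustrative.
import re
import sys

def classify(path: str) -> str:
    """Return which validation lane would pick this fixture up, if any."""
    with open(path, encoding="utf-8") as f:
        text = f.read()
    if re.search(r'"bomFormat".*"CycloneDX"', text):
        return "cyclonedx"
    if re.search(r'"spdxVersion"|"@context".*spdx', text):
        return "spdx"
    if re.search(r'"@context".*openvex|"@type".*"https://openvex', text):
        return "openvex"
    return "unrecognized"

if __name__ == "__main__":
    for path in sys.argv[1:]:
        print(f"{path}: {classify(path)}")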
@@ -23,7 +23,7 @@ jobs:
       DOTNET_CLI_TELEMETRY_OPTOUT: 1
       DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
       TZ: UTC
-      SDK_NUGET_SOURCE: ${{ secrets.SDK_NUGET_SOURCE || 'local-nugets/packages' }}
+      SDK_NUGET_SOURCE: ${{ secrets.SDK_NUGET_SOURCE || '.nuget/packages' }}
       SDK_NUGET_API_KEY: ${{ secrets.SDK_NUGET_API_KEY }}
       SDK_SIGNING_CERT_B64: ${{ secrets.SDK_SIGNING_CERT_B64 }}
       SDK_SIGNING_CERT_PASSWORD: ${{ secrets.SDK_SIGNING_CERT_PASSWORD }}

@@ -46,8 +46,7 @@
         uses: actions/cache@v4
         with:
           path: |
-            ~/.nuget/packages
-            local-nugets/packages
+            .nuget/packages
           key: sdk-nuget-${{ runner.os }}-${{ hashFiles('src/Sdk/**/*.csproj') }}

       - name: Restore (best effort; skipped if no csproj)

@@ -87,6 +86,6 @@
           name: sdk-artifacts
           path: |
             out/sdk
-            local-nugets/packages/*.nupkg
+            .nuget/packages/*.nupkg
           if-no-files-found: warn
           retention-days: 7

@@ -45,7 +45,7 @@
         with:
           path: |
             ~/.nuget/packages
-            local-nugets/packages
+            .nuget/packages
           key: signals-nuget-${{ runner.os }}-${{ hashFiles('src/Signals/**/*.csproj') }}

       - name: Restore

@@ -28,6 +28,8 @@
       COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
       OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-01' }}
       COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
+      CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
+      EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4

@@ -42,6 +44,16 @@
         with:
           cosign-release: 'v2.2.4'
+
+      - name: Check signing key configured
+        run: |
+          if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
+            echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
+            exit 1
+          fi
+          if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
+            echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
+          fi

       - name: Verify artifacts exist
         run: |
           cd docs/modules/signals

@@ -90,9 +102,9 @@
           retention-days: 90

       - name: Push to Evidence Locker
-        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
+        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
         env:
-          TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
+          TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
           URL: ${{ env.EVIDENCE_LOCKER_URL }}
         run: |
           tar -cf /tmp/signals-dsse.tar -C "$OUT_DIR" .

@@ -102,7 +114,7 @@
           echo "Pushed to Evidence Locker"

       - name: Evidence Locker skip notice
-        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
+        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
         run: |
           echo "::notice::Evidence Locker push skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
           echo "Artifacts available as workflow artifact for manual ingestion"

@@ -2,6 +2,14 @@ name: signals-evidence-locker
 on:
   workflow_dispatch:
     inputs:
+      out_dir:
+        description: "Output directory containing signed artifacts"
+        required: false
+        default: "evidence-locker/signals/2025-12-05"
+      allow_dev_key:
+        description: "Allow dev key fallback (1=yes, 0=no)"
+        required: false
+        default: "0"
       retention_target:
         description: "Retention days target"
         required: false

@@ -12,7 +20,12 @@
     runs-on: ubuntu-latest
     env:
       MODULE_ROOT: docs/modules/signals
-      OUT_DIR: evidence-locker/signals/2025-12-05
+      OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-05' }}
+      COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
+      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
+      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
+      EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
+      CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4

@@ -20,6 +33,31 @@
       - name: Task Pack offline bundle fixtures
         run: python3 scripts/packs/run-fixtures-check.sh
+
+      - name: Install cosign
+        uses: sigstore/cosign-installer@v3
+        with:
+          cosign-release: 'v2.2.4'
+
+      - name: Check signing key configured
+        run: |
+          if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
+            echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
+            exit 1
+          fi
+          if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
+            echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
+          fi
+
+      - name: Verify artifacts exist
+        run: |
+          cd "$MODULE_ROOT"
+          sha256sum -c SHA256SUMS
+
+      - name: Sign signals artifacts
+        run: |
+          chmod +x tools/cosign/sign-signals.sh
+          OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh
+
       - name: Build deterministic signals evidence tar
         run: |
           set -euo pipefail

@@ -52,16 +90,17 @@
             /tmp/signals-evidence.tar.sha256

       - name: Push to Evidence Locker
-        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
+        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
         env:
-          TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
+          TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
           URL: ${{ env.EVIDENCE_LOCKER_URL }}
         run: |
-          curl -f -X PUT "$URL/signals/2025-12-05/signals-evidence.tar" \
+          upload_path="${OUT_DIR#evidence-locker/}"
+          curl -f -X PUT "$URL/${upload_path}/signals-evidence.tar" \
             -H "Authorization: Bearer $TOKEN" \
             --data-binary @/tmp/signals-evidence.tar

       - name: Skip push (missing secret or URL)
-        if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
+        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
         run: |
           echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
.gitea/workflows/signals-reachability.yml (new file, 127 lines)
@@ -0,0 +1,127 @@
name: Signals Reachability Scoring & Events

on:
  workflow_dispatch:
    inputs:
      allow_dev_key:
        description: "Allow dev signing key fallback (1=yes, 0=no)"
        required: false
        default: "0"
      evidence_out_dir:
        description: "Evidence output dir for signing/upload"
        required: false
        default: "evidence-locker/signals/2025-12-05"
  push:
    branches: [ main ]
    paths:
      - 'src/Signals/**'
      - 'scripts/signals/reachability-smoke.sh'
      - '.gitea/workflows/signals-reachability.yml'
      - 'tools/cosign/sign-signals.sh'

jobs:
  reachability-smoke:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 scripts/packs/run-fixtures-check.sh

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Restore
        run: dotnet restore src/Signals/StellaOps.Signals.sln --configfile nuget.config

      - name: Build
        run: dotnet build src/Signals/StellaOps.Signals.sln -c Release --no-restore

      - name: Reachability scoring + cache/events smoke
        run: |
          chmod +x scripts/signals/reachability-smoke.sh
          scripts/signals/reachability-smoke.sh

  sign-and-upload:
    runs-on: ubuntu-22.04
    needs: reachability-smoke
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
      COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
      OUT_DIR: ${{ github.event.inputs.evidence_out_dir || 'evidence-locker/signals/2025-12-05' }}
      CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
      EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Task Pack offline bundle fixtures
        run: python3 scripts/packs/run-fixtures-check.sh

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.2.4'

      - name: Check signing key configured
        run: |
          if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
            echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
            exit 1
          fi
          if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
            echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
          fi

      - name: Verify artifacts exist
        run: |
          cd docs/modules/signals
          sha256sum -c SHA256SUMS

      - name: Sign signals artifacts
        run: |
          chmod +x tools/cosign/sign-signals.sh
          OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
          name: signals-evidence-${{ github.run_number }}
          path: |
            ${{ env.OUT_DIR }}/*.sigstore.json
            ${{ env.OUT_DIR }}/*.dsse
            ${{ env.OUT_DIR }}/SHA256SUMS
          if-no-files-found: error
          retention-days: 30

      - name: Push to Evidence Locker
        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
        env:
          TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
          URL: ${{ env.EVIDENCE_LOCKER_URL }}
        run: |
          tar -cf /tmp/signals-evidence.tar -C "$OUT_DIR" .
          sha256sum /tmp/signals-evidence.tar
          curl -f -X PUT "$URL/signals/$(date -u +%Y-%m-%d)/signals-evidence.tar" \
            -H "Authorization: Bearer $TOKEN" \
            --data-binary @/tmp/signals-evidence.tar
          echo "Uploaded to Evidence Locker"

      - name: Evidence Locker skip notice
        if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
        run: |
          echo "::notice::Evidence Locker upload skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
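The push step above derives the Evidence Locker object path from the current UTC date and logs the tarball's sha256 before uploading with curl. A hedged Python sketch of the same path and digest derivation is below; the endpoint layout and file name are taken from the workflow, the host in the usage line is an assumed placeholder, and the upload itself is still left to curl in CI.

# evidence_upload_path.py - illustrative only; reproduces how the push step
# builds "$URL/signals/$(date -u +%Y-%m-%d)/signals-evidence.tar" and the
# sha256 it prints before uploading.
import datetime
import hashlib

def upload_url(base_url: str) -> str:
    today = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d")
    return f"{base_url}/signals/{today}/signals-evidence.tar"

def tar_digest(tar_path: str = "/tmp/signals-evidence.tar") -> str:
    h = hashlib.sha256()
    with open(tar_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

if __name__ == "__main__":
    # Hypothetical locker host, for illustration only.
    print(upload_url("https://evidence-locker.example.internal"))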
.gitea/workflows/sm-remote-ci.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
name: sm-remote-ci

on:
  push:
    paths:
      - "src/SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
      - "ops/sm-remote/**"
      - ".gitea/workflows/sm-remote-ci.yml"
  pull_request:
    paths:
      - "src/SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
      - "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
      - "ops/sm-remote/**"

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.x
      - name: Restore
        run: dotnet restore src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj
      - name: Test
        run: dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj --no-build --verbosity normal
      - name: Publish service
        run: dotnet publish src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj -c Release -o out/sm-remote
358
.gitea/workflows/test-lanes.yml
Normal file
358
.gitea/workflows/test-lanes.yml
Normal file
@@ -0,0 +1,358 @@
# .gitea/workflows/test-lanes.yml
# Lane-based test execution using standardized trait filtering
# Implements Task 10 from SPRINT 5100.0007.0001

name: Test Lanes

on:
  pull_request:
    branches: [ main, develop ]
    paths:
      - 'src/**'
      - 'src/__Tests/**'
      - 'scripts/test-lane.sh'
      - '.gitea/workflows/test-lanes.yml'
  push:
    branches: [ main ]
  workflow_dispatch:
    inputs:
      run_performance:
        description: 'Run Performance lane tests'
        required: false
        default: false
        type: boolean
      run_live:
        description: 'Run Live lane tests (external dependencies)'
        required: false
        default: false
        type: boolean

env:
  DOTNET_VERSION: '10.0.100'
  BUILD_CONFIGURATION: Release
  TEST_RESULTS_DIR: ${{ github.workspace }}/test-results

jobs:
  # ===========================================================================
  # Unit Lane: Fast, isolated, deterministic tests (PR-gating)
  # ===========================================================================
  unit-tests:
    name: Unit Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 15
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Unit lane tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Unit \
            --logger "trx;LogFileName=unit-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Unit test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: unit-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Architecture Lane: Structural rule enforcement (PR-gating)
  # ===========================================================================
  architecture-tests:
    name: Architecture Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore architecture tests
        run: dotnet restore src/__Tests/architecture/StellaOps.Architecture.Tests/StellaOps.Architecture.Tests.csproj

      - name: Build architecture tests
        run: dotnet build src/__Tests/architecture/StellaOps.Architecture.Tests/StellaOps.Architecture.Tests.csproj --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Architecture tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          dotnet test src/__Tests/architecture/StellaOps.Architecture.Tests/StellaOps.Architecture.Tests.csproj \
            --configuration $BUILD_CONFIGURATION \
            --no-build \
            --logger "trx;LogFileName=architecture-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Architecture test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: architecture-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Contract Lane: API contract stability tests (PR-gating)
  # ===========================================================================
  contract-tests:
    name: Contract Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 10
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Contract lane tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Contract \
            --logger "trx;LogFileName=contract-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Contract test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: contract-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Integration Lane: Service + storage tests with Testcontainers (PR-gating)
  # ===========================================================================
  integration-tests:
    name: Integration Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Integration lane tests
        env:
          POSTGRES_TEST_IMAGE: postgres:16-alpine
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Integration \
            --logger "trx;LogFileName=integration-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Integration test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Security Lane: AuthZ, input validation, negative tests (PR-gating)
  # ===========================================================================
  security-tests:
    name: Security Tests
    runs-on: ubuntu-22.04
    timeout-minutes: 20
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Security lane tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Security \
            --logger "trx;LogFileName=security-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Security test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: security-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Performance Lane: Benchmarks and regression thresholds (optional/scheduled)
  # ===========================================================================
  performance-tests:
    name: Performance Tests
    runs-on: ubuntu-22.04
    if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.run_performance == 'true')
    timeout-minutes: 30
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Performance lane tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Performance \
            --logger "trx;LogFileName=performance-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal

      - name: Upload Performance test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: performance-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 14

  # ===========================================================================
  # Live Lane: External API smoke tests (opt-in only, never PR-gating)
  # ===========================================================================
  live-tests:
    name: Live Tests (External Dependencies)
    runs-on: ubuntu-22.04
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.run_live == 'true'
    timeout-minutes: 20
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore solution
        run: dotnet restore src/StellaOps.sln

      - name: Build solution
        run: dotnet build src/StellaOps.sln --configuration $BUILD_CONFIGURATION --no-restore

      - name: Run Live lane tests
        run: |
          mkdir -p "$TEST_RESULTS_DIR"
          chmod +x scripts/test-lane.sh
          ./scripts/test-lane.sh Live \
            --logger "trx;LogFileName=live-tests.trx" \
            --results-directory "$TEST_RESULTS_DIR" \
            --verbosity normal
        continue-on-error: true

      - name: Upload Live test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: live-test-results
          path: ${{ env.TEST_RESULTS_DIR }}
          if-no-files-found: ignore
          retention-days: 7

  # ===========================================================================
  # Test Results Summary
  # ===========================================================================
  test-summary:
    name: Test Results Summary
    runs-on: ubuntu-22.04
    needs: [unit-tests, architecture-tests, contract-tests, integration-tests, security-tests]
    if: always()
    steps:
      - name: Download all test results
        uses: actions/download-artifact@v4
        with:
          path: all-test-results

      - name: Generate summary
        run: |
          echo "## Test Lane Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          for lane in unit architecture contract integration security; do
            result_dir="all-test-results/${lane}-test-results"
            if [ -d "$result_dir" ]; then
              echo "### ${lane^} Lane: ✅ Passed" >> $GITHUB_STEP_SUMMARY
            else
              echo "### ${lane^} Lane: ❌ Failed or Skipped" >> $GITHUB_STEP_SUMMARY
            fi
          done

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "See individual job logs for detailed test output." >> $GITHUB_STEP_SUMMARY
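Note on the lane jobs above: each lane shells out to `scripts/test-lane.sh`, which is not part of this compare view. The sketch below shows a hypothetical equivalent raw step, assuming the script simply maps a lane name to an xUnit trait filter; the `Category=Unit` filter name is an assumption, not the script's confirmed behaviour.

```yaml
# Hypothetical sketch only: assumes scripts/test-lane.sh wraps `dotnet test`
# with a per-lane trait filter. The real script may add defaults (retries,
# blame collection) that this step does not reproduce.
- name: Run Unit lane tests (equivalent raw invocation)
  run: |
    mkdir -p "$TEST_RESULTS_DIR"
    dotnet test src/StellaOps.sln \
      --configuration "$BUILD_CONFIGURATION" \
      --no-build \
      --filter "Category=Unit" \
      --logger "trx;LogFileName=unit-tests.trx" \
      --results-directory "$TEST_RESULTS_DIR" \
      --verbosity normal
```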
199 .gitea/workflows/unknowns-budget-gate.yml Normal file
@@ -0,0 +1,199 @@
# -----------------------------------------------------------------------------
# unknowns-budget-gate.yml
# Sprint: SPRINT_5100_0004_0001_unknowns_budget_ci_gates
# Task: T2 - CI Budget Gate Workflow
# Description: Enforces unknowns budgets on PRs and pushes
# -----------------------------------------------------------------------------

name: Unknowns Budget Gate

on:
  pull_request:
    paths:
      - 'src/**'
      - 'Dockerfile*'
      - '*.lock'
      - 'etc/policy.unknowns.yaml'
  push:
    branches: [main]
    paths:
      - 'src/**'
      - 'Dockerfile*'
      - '*.lock'

env:
  DOTNET_NOLOGO: 1
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  TZ: UTC
  STELLAOPS_BUDGET_CONFIG: ./etc/policy.unknowns.yaml

jobs:
  scan-and-check-budget:
    runs-on: ubuntu-22.04
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.100'
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.nuget/packages
            .nuget/packages
          key: budget-gate-nuget-${{ runner.os }}-${{ hashFiles('**/*.csproj') }}

      - name: Restore and Build CLI
        run: |
          dotnet restore src/Cli/StellaOps.Cli/StellaOps.Cli.csproj --configfile nuget.config
          dotnet build src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -c Release --no-restore

      - name: Determine environment
        id: env
        run: |
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "environment=prod" >> $GITHUB_OUTPUT
            echo "enforce=true" >> $GITHUB_OUTPUT
          elif [[ "${{ github.event_name }}" == "pull_request" ]]; then
            echo "environment=stage" >> $GITHUB_OUTPUT
            echo "enforce=false" >> $GITHUB_OUTPUT
          else
            echo "environment=dev" >> $GITHUB_OUTPUT
            echo "enforce=false" >> $GITHUB_OUTPUT
          fi

      - name: Create sample verdict for testing
        id: scan
        run: |
          mkdir -p out
          # In a real scenario, this would be from stella scan
          # For now, create a minimal verdict file
          cat > out/verdict.json << 'EOF'
          {
            "unknowns": []
          }
          EOF
          echo "verdict_path=out/verdict.json" >> $GITHUB_OUTPUT

      - name: Check unknowns budget
        id: budget
        continue-on-error: true
        run: |
          set +e
          dotnet run --project src/Cli/StellaOps.Cli/StellaOps.Cli.csproj -- \
            unknowns budget check \
            --verdict ${{ steps.scan.outputs.verdict_path }} \
            --environment ${{ steps.env.outputs.environment }} \
            --output json \
            --fail-on-exceed > out/budget-result.json

          EXIT_CODE=$?
          echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT

          if [ -f out/budget-result.json ]; then
            # Compact JSON for output
            RESULT=$(cat out/budget-result.json | jq -c '.')
            echo "result=$RESULT" >> $GITHUB_OUTPUT
          fi

          exit $EXIT_CODE

      - name: Upload budget report
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: budget-report-${{ github.run_id }}
          path: out/budget-result.json
          retention-days: 30

      - name: Post PR comment
        if: github.event_name == 'pull_request' && always()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');

            let result = { isWithinBudget: true, totalUnknowns: 0 };
            try {
              const content = fs.readFileSync('out/budget-result.json', 'utf8');
              result = JSON.parse(content);
            } catch (e) {
              console.log('Could not read budget result:', e.message);
            }

            const status = result.isWithinBudget ? ':white_check_mark:' : ':x:';
            const env = '${{ steps.env.outputs.environment }}';

            let body = `## ${status} Unknowns Budget Check

            | Metric | Value |
            |--------|-------|
            | Environment | ${env} |
            | Total Unknowns | ${result.totalUnknowns || 0} |
            | Budget Limit | ${result.totalLimit || 'Unlimited'} |
            | Status | ${result.isWithinBudget ? 'PASS' : 'FAIL'} |
            `;

            if (result.violations && result.violations.length > 0) {
              body += `
            ### Violations
            `;
              for (const v of result.violations) {
                body += `- **${v.reasonCode}**: ${v.count}/${v.limit}\n`;
              }
            }

            if (result.message) {
              body += `\n> ${result.message}\n`;
            }

            body += `\n---\n_Generated by StellaOps Unknowns Budget Gate_`;

            // Find existing comment
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const botComment = comments.find(c =>
              c.body.includes('Unknowns Budget Check') &&
              c.user.type === 'Bot'
            );

            if (botComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: botComment.id,
                body: body
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: body
              });
            }

      - name: Fail if budget exceeded (prod)
        if: steps.env.outputs.environment == 'prod' && steps.budget.outputs.exit_code == '2'
        run: |
          echo "::error::Production unknowns budget exceeded!"
          exit 1

      - name: Warn if budget exceeded (non-prod)
        if: steps.env.outputs.environment != 'prod' && steps.budget.outputs.exit_code == '2'
        run: |
          echo "::warning::Unknowns budget exceeded for ${{ steps.env.outputs.environment }}"
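The gate reads its limits from `etc/policy.unknowns.yaml` via `STELLAOPS_BUDGET_CONFIG`, but that policy file is not included in this compare. The sketch below is a hypothetical shape only; every key name and value is an assumption, not the actual schema used by `stella unknowns budget check`.

```yaml
# Hypothetical etc/policy.unknowns.yaml sketch (not the real schema).
environments:
  dev:
    total_limit: null        # unlimited; the gate only warns here
  stage:
    total_limit: 50
  prod:
    total_limit: 0           # any unknown exceeds budget (CLI exit code 2)
reason_codes:
  unresolved_purl:
    limit: 0
  missing_vex_statement:
    limit: 5
```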
@@ -4,14 +4,14 @@ on:
   pull_request:
     paths:
       - 'scripts/vex/**'
-      - 'tests/Vex/ProofBundles/**'
+      - 'src/__Tests/Vex/ProofBundles/**'
       - 'docs/benchmarks/vex-evidence-playbook*'
       - '.gitea/workflows/vex-proof-bundles.yml'
   push:
     branches: [ main ]
     paths:
       - 'scripts/vex/**'
-      - 'tests/Vex/ProofBundles/**'
+      - 'src/__Tests/Vex/ProofBundles/**'
       - 'docs/benchmarks/vex-evidence-playbook*'
       - '.gitea/workflows/vex-proof-bundles.yml'

@@ -36,5 +36,5 @@ jobs:
         env:
           PYTHONHASHSEED: "0"
         run: |
-          chmod +x tests/Vex/ProofBundles/test_verify_sample.sh
-          tests/Vex/ProofBundles/test_verify_sample.sh
+          chmod +x src/__Tests/Vex/ProofBundles/test_verify_sample.sh
+          src/__Tests/Vex/ProofBundles/test_verify_sample.sh
@@ -1,449 +0,0 @@
name: wine-csp-build

on:
  push:
    branches: [main, develop]
    paths:
      - 'src/__Tools/WineCspService/**'
      - 'ops/wine-csp/**'
      - 'third_party/forks/AlexMAS.GostCryptography/**'
      - '.gitea/workflows/wine-csp-build.yml'
  pull_request:
    paths:
      - 'src/__Tools/WineCspService/**'
      - 'ops/wine-csp/**'
      - 'third_party/forks/AlexMAS.GostCryptography/**'
  workflow_dispatch:
    inputs:
      push:
        description: "Push to registry"
        required: false
        default: "false"
      version:
        description: "Version tag (e.g., 2025.10.0-edge)"
        required: false
        default: "2025.10.0-edge"
      skip_tests:
        description: "Skip integration tests"
        required: false
        default: "false"

env:
  IMAGE_NAME: registry.stella-ops.org/stellaops/wine-csp
  DOCKERFILE: ops/wine-csp/Dockerfile
  # Wine CSP only supports linux/amd64 (Wine ARM64 has compatibility issues with Windows x64 apps)
  PLATFORMS: linux/amd64
  PYTHON_VERSION: "3.11"

jobs:
  # ===========================================================================
  # Job 1: Build Docker Image
  # ===========================================================================
  build:
    name: Build Wine CSP Image
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    outputs:
      image_tag: ${{ steps.version.outputs.tag }}
      image_digest: ${{ steps.build.outputs.digest }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      - name: Set version tag
        id: version
        run: |
          if [[ -n "${{ github.event.inputs.version }}" ]]; then
            echo "tag=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "tag=2025.10.0-edge" >> $GITHUB_OUTPUT
          else
            echo "tag=pr-${{ github.event.pull_request.number || github.sha }}" >> $GITHUB_OUTPUT
          fi

      - name: Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_NAME }}
          tags: |
            type=raw,value=${{ steps.version.outputs.tag }}
            type=sha,format=short

      - name: Build image
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ env.DOCKERFILE }}
          platforms: ${{ env.PLATFORMS }}
          push: false
          load: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Save image for testing
        run: |
          mkdir -p /tmp/images
          docker save "${{ env.IMAGE_NAME }}:${{ steps.version.outputs.tag }}" | gzip > /tmp/images/wine-csp.tar.gz

      - name: Upload image artifact
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images/wine-csp.tar.gz
          retention-days: 1

  # ===========================================================================
  # Job 2: Integration Tests
  # ===========================================================================
  test:
    name: Integration Tests
    runs-on: ubuntu-latest
    needs: build
    if: ${{ github.event.inputs.skip_tests != 'true' }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download image artifact
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images

      - name: Load Docker image
        run: |
          gunzip -c /tmp/images/wine-csp.tar.gz | docker load

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install test dependencies
        run: |
          pip install -r ops/wine-csp/tests/requirements.txt

      - name: Start Wine CSP container
        id: container
        run: |
          echo "Starting Wine CSP container..."
          docker run -d --name wine-csp-test \
            -e WINE_CSP_MODE=limited \
            -e WINE_CSP_LOG_LEVEL=Debug \
            -p 5099:5099 \
            "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"

          echo "container_id=$(docker ps -q -f name=wine-csp-test)" >> $GITHUB_OUTPUT

      - name: Wait for service startup
        run: |
          echo "Waiting for Wine CSP service to be ready (up to 120s)..."
          for i in $(seq 1 24); do
            if curl -sf http://127.0.0.1:5099/health > /dev/null 2>&1; then
              echo "Service ready after $((i * 5))s"
              exit 0
            fi
            echo "Waiting... ($((i * 5))s elapsed)"
            sleep 5
          done
          echo "Service failed to start!"
          docker logs wine-csp-test
          exit 1

      - name: Run integration tests (pytest)
        id: pytest
        run: |
          mkdir -p test-results
          export WINE_CSP_URL=http://127.0.0.1:5099

          pytest ops/wine-csp/tests/test_wine_csp.py \
            -v \
            --tb=short \
            --junitxml=test-results/junit.xml \
            --timeout=60 \
            -x \
            2>&1 | tee test-results/pytest-output.txt

      - name: Run shell integration tests
        if: always()
        run: |
          chmod +x ops/wine-csp/tests/run-tests.sh
          ops/wine-csp/tests/run-tests.sh \
            --url http://127.0.0.1:5099 \
            --ci \
            --verbose || true

      - name: Collect container logs
        if: always()
        run: |
          docker logs wine-csp-test > test-results/container.log 2>&1 || true

      - name: Stop container
        if: always()
        run: |
          docker stop wine-csp-test || true
          docker rm wine-csp-test || true

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: wine-csp-test-results
          path: test-results/

      - name: Publish test results
        uses: mikepenz/action-junit-report@v4
        if: always()
        with:
          report_paths: 'test-results/junit.xml'
          check_name: 'Wine CSP Integration Tests'
          fail_on_failure: true

  # ===========================================================================
  # Job 3: Security Scan
  # ===========================================================================
  security:
    name: Security Scan
    runs-on: ubuntu-latest
    needs: build
    permissions:
      security-events: write

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download image artifact
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images

      - name: Load Docker image
        run: |
          gunzip -c /tmp/images/wine-csp.tar.gz | docker load

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        with:
          image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
          format: 'sarif'
          output: 'trivy-results.sarif'
          severity: 'CRITICAL,HIGH'
          ignore-unfixed: true

      - name: Upload Trivy scan results
        uses: github/codeql-action/upload-sarif@v3
        if: always()
        with:
          sarif_file: 'trivy-results.sarif'

      - name: Run Trivy for JSON report
        uses: aquasecurity/trivy-action@master
        with:
          image-ref: "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"
          format: 'json'
          output: 'trivy-results.json'
          severity: 'CRITICAL,HIGH,MEDIUM'

      - name: Upload Trivy JSON report
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-security-scan
          path: trivy-results.json

  # ===========================================================================
  # Job 4: Generate SBOM
  # ===========================================================================
  sbom:
    name: Generate SBOM
    runs-on: ubuntu-latest
    needs: build

    steps:
      - name: Download image artifact
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images

      - name: Load Docker image
        run: |
          gunzip -c /tmp/images/wine-csp.tar.gz | docker load

      - name: Install syft
        uses: anchore/sbom-action/download-syft@v0

      - name: Generate SBOM (SPDX)
        run: |
          mkdir -p out/sbom
          syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
            -o spdx-json=out/sbom/wine-csp.spdx.json

      - name: Generate SBOM (CycloneDX)
        run: |
          syft "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" \
            -o cyclonedx-json=out/sbom/wine-csp.cdx.json

      - name: Upload SBOM artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-sbom-${{ needs.build.outputs.image_tag }}
          path: out/sbom/

  # ===========================================================================
  # Job 5: Publish (only on main branch or manual trigger)
  # ===========================================================================
  publish:
    name: Publish Image
    runs-on: ubuntu-latest
    needs: [build, test, security]
    if: ${{ (github.event.inputs.push == 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main')) && needs.test.result == 'success' }}
    permissions:
      contents: read
      packages: write
      id-token: write

    steps:
      - name: Download image artifact
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images

      - name: Load Docker image
        run: |
          gunzip -c /tmp/images/wine-csp.tar.gz | docker load

      - name: Install cosign
        uses: sigstore/cosign-installer@v3.7.0

      - name: Login to registry
        uses: docker/login-action@v3
        with:
          registry: registry.stella-ops.org
          username: ${{ secrets.REGISTRY_USER }}
          password: ${{ secrets.REGISTRY_TOKEN }}

      - name: Push to registry
        run: |
          docker push "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}"

          # Also tag as latest if on main
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            docker tag "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" "${{ env.IMAGE_NAME }}:latest"
            docker push "${{ env.IMAGE_NAME }}:latest"
          fi

      - name: Sign image with cosign
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          cosign sign --yes "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}" || echo "Signing skipped (no OIDC available)"

      - name: Create release summary
        run: |
          echo "## Wine CSP Image Published" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Image:** \`${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**WARNING:** This image is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY

  # ===========================================================================
  # Job 6: Air-Gap Bundle
  # ===========================================================================
  airgap:
    name: Air-Gap Bundle
    runs-on: ubuntu-latest
    needs: [build, test]
    if: ${{ needs.test.result == 'success' }}

    steps:
      - name: Download image artifact
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-image
          path: /tmp/images

      - name: Create air-gap bundle
        run: |
          mkdir -p out/bundles

          # Copy the image tarball
          cp /tmp/images/wine-csp.tar.gz out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz

          # Generate bundle manifest
          cat > out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.manifest.json <<EOF
          {
            "name": "wine-csp",
            "version": "${{ needs.build.outputs.image_tag }}",
            "image": "${{ env.IMAGE_NAME }}:${{ needs.build.outputs.image_tag }}",
            "platform": "linux/amd64",
            "sha256": "$(sha256sum out/bundles/wine-csp-${{ needs.build.outputs.image_tag }}.tar.gz | cut -d' ' -f1)",
            "created": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
            "git_commit": "${{ github.sha }}",
            "git_ref": "${{ github.ref }}",
            "warning": "FOR TEST VECTOR GENERATION ONLY - NOT FOR PRODUCTION SIGNING"
          }
          EOF

          # Create checksums file
          cd out/bundles
          sha256sum *.tar.gz *.json > SHA256SUMS

          echo "Air-gap bundle contents:"
          ls -lh

      - name: Upload air-gap bundle
        uses: actions/upload-artifact@v4
        with:
          name: wine-csp-bundle-${{ needs.build.outputs.image_tag }}
          path: out/bundles/

  # ===========================================================================
  # Job 7: Test Summary
  # ===========================================================================
  summary:
    name: Test Summary
    runs-on: ubuntu-latest
    needs: [build, test, security, sbom]
    if: always()

    steps:
      - name: Download test results
        uses: actions/download-artifact@v4
        with:
          name: wine-csp-test-results
          path: test-results/
        continue-on-error: true

      - name: Create summary
        run: |
          echo "## Wine CSP Build Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Stage | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-------|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Build | ${{ needs.build.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Tests | ${{ needs.test.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Security | ${{ needs.security.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "| SBOM | ${{ needs.sbom.result }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Image Tag:** \`${{ needs.build.outputs.image_tag }}\`" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "---" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**SECURITY WARNING:** Wine CSP is for TEST VECTOR GENERATION ONLY." >> $GITHUB_STEP_SUMMARY
12 .github/flaky-tests-quarantine.json vendored Normal file
@@ -0,0 +1,12 @@
{
  "$schema": "https://stellaops.io/schemas/flaky-tests-quarantine.v1.json",
  "version": "1.0.0",
  "updated_at": "2025-01-15T00:00:00Z",
  "policy": {
    "consecutive_failures_to_quarantine": 2,
    "quarantine_duration_days": 14,
    "auto_reactivate_after_fix": true
  },
  "quarantined_tests": [],
  "notes": "Tests are quarantined after 2 consecutive failures. Review and fix within 14 days or escalate."
}
17 .gitignore vendored
@@ -17,8 +17,7 @@ obj/
 # Packages and logs
 *.log
 TestResults/
-local-nuget/
-local-nugets/packages/
+.nuget/packages/
 
 .dotnet
 .DS_Store
@@ -45,6 +44,9 @@ node_modules/
 dist/
 .build/
 .cache/
+.tmp/
+logs/
+out/
 
 # .NET
 bin/
@@ -60,9 +62,12 @@ obj/
 logs/
 tmp/
 coverage/
+# Consolidated NuGet cache (all variants)
 .nuget/
-local-nugets/
-local-nuget/
+.nuget-*/
+local-nuget*/
 src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
-.nuget-cache/
-.nuget-temp/
+# Test artifacts
+src/__Tests/**/TestResults/
+src/__Tests/__Benchmarks/reachability-benchmark/.jdk/
@@ -1,52 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
  <metadata>
    <id>Microsoft.Extensions.Logging.Abstractions</id>
    <version>10.0.0-rc.2.25502.107</version>
    <authors>Microsoft</authors>
    <license type="expression">MIT</license>
    <licenseUrl>https://licenses.nuget.org/MIT</licenseUrl>
    <icon>Icon.png</icon>
    <readme>PACKAGE.md</readme>
    <projectUrl>https://dot.net/</projectUrl>
    <description>Logging abstractions for Microsoft.Extensions.Logging.

Commonly Used Types:
Microsoft.Extensions.Logging.ILogger
Microsoft.Extensions.Logging.ILoggerFactory
Microsoft.Extensions.Logging.ILogger<TCategoryName>
Microsoft.Extensions.Logging.LogLevel
Microsoft.Extensions.Logging.Logger<T>
Microsoft.Extensions.Logging.LoggerMessage
Microsoft.Extensions.Logging.Abstractions.NullLogger</description>
    <releaseNotes>https://go.microsoft.com/fwlink/?LinkID=799421</releaseNotes>
    <copyright>© Microsoft Corporation. All rights reserved.</copyright>
    <serviceable>true</serviceable>
    <repository type="git" url="https://github.com/dotnet/dotnet" commit="89c8f6a112d37d2ea8b77821e56d170a1bccdc5a" />
    <dependencies>
      <group targetFramework=".NETFramework4.6.2">
        <dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
        <dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
      </group>
      <group targetFramework="net8.0">
        <dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
      </group>
      <group targetFramework="net9.0">
        <dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
      </group>
      <group targetFramework="net10.0">
        <dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
      </group>
      <group targetFramework=".NETStandard2.0">
        <dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
        <dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
        <dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
      </group>
    </dependencies>
  </metadata>
</package>
Binary file not shown.
@@ -165,3 +165,69 @@ rules:
           in:
             const: header
       required: [name, in]
+
+  # --- Deprecation Metadata Rules (per APIGOV-63-001) ---
+
+  stella-deprecated-has-metadata:
+    description: "Deprecated operations must have x-deprecation extension with required fields"
+    message: "Add x-deprecation metadata (deprecatedAt, sunsetAt, successorPath, reason) to deprecated operations"
+    given: "$.paths[*][*][?(@.deprecated == true)]"
+    severity: error
+    then:
+      field: x-deprecation
+      function: schema
+      functionOptions:
+        schema:
+          type: object
+          required:
+            - deprecatedAt
+            - sunsetAt
+            - successorPath
+            - reason
+          properties:
+            deprecatedAt:
+              type: string
+              format: date-time
+            sunsetAt:
+              type: string
+              format: date-time
+            successorPath:
+              type: string
+            successorOperationId:
+              type: string
+            reason:
+              type: string
+            migrationGuide:
+              type: string
+              format: uri
+            notificationChannels:
+              type: array
+              items:
+                type: string
+                enum: [slack, teams, email, webhook]
+
+  stella-deprecated-sunset-future:
+    description: "Sunset dates should be in the future (warn if sunset already passed)"
+    message: "x-deprecation.sunsetAt should be a future date"
+    given: "$.paths[*][*].x-deprecation.sunsetAt"
+    severity: warn
+    then:
+      function: truthy
+
+  stella-deprecated-migration-guide:
+    description: "Deprecated operations should include a migration guide URL"
+    message: "Consider adding x-deprecation.migrationGuide for consumer guidance"
+    given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
+    severity: hint
+    then:
+      field: migrationGuide
+      function: truthy
+
+  stella-deprecated-notification-channels:
+    description: "Deprecated operations should specify notification channels"
+    message: "Add x-deprecation.notificationChannels to enable deprecation notifications"
+    given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
+    severity: hint
+    then:
+      field: notificationChannels
+      function: truthy
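For reference, the fragment below shows an OpenAPI operation that would satisfy the new `stella-deprecated-has-metadata` rule, including the hint-level `migrationGuide` and `notificationChannels` fields. The path, operation ids, dates, and URL are illustrative assumptions, not taken from any spec in this compare.

```yaml
# Illustrative OpenAPI fragment (hypothetical path and values) that passes
# stella-deprecated-has-metadata: a deprecated operation with the required
# x-deprecation fields plus the optional hint-level ones.
paths:
  /api/v1/reports:
    get:
      operationId: listReportsV1
      deprecated: true
      x-deprecation:
        deprecatedAt: "2025-01-15T00:00:00Z"
        sunsetAt: "2025-07-15T00:00:00Z"
        successorPath: /api/v2/reports
        successorOperationId: listReportsV2
        reason: "Superseded by the v2 reports endpoint"
        migrationGuide: "https://docs.example.invalid/migrations/reports-v2"
        notificationChannels: [slack, email]
      responses:
        '200':
          description: OK
```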
104 AGENTS.md
@@ -58,8 +58,8 @@ When you are told you are working in a particular module or directory, assume yo
|
|||||||
|
|
||||||
* **Runtime**: .NET 10 (`net10.0`) with latest C# preview features. Microsoft.* dependencies should target the closest compatible versions.
|
* **Runtime**: .NET 10 (`net10.0`) with latest C# preview features. Microsoft.* dependencies should target the closest compatible versions.
|
||||||
* **Frontend**: Angular v17 for the UI.
|
* **Frontend**: Angular v17 for the UI.
|
||||||
* **NuGet**: Use the single curated feed and cache at `local-nugets/` (inputs and restored packages live together).
|
* **NuGet**: Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org). Packages restore to the global NuGet cache.
|
||||||
* **Data**: MongoDB as canonical store and for job/export state. Use a MongoDB driver version ≥ 3.0.
|
* **Data**: PostgreSQL as canonical store and for job/export state. Use a PostgreSQL driver version ≥ 3.0.
|
||||||
* **Observability**: Structured logs, counters, and (optional) OpenTelemetry traces.
|
* **Observability**: Structured logs, counters, and (optional) OpenTelemetry traces.
|
||||||
* **Ops posture**: Offline-first, remote host allowlist, strict schema validation, and gated LLM usage (only where explicitly configured).
|
* **Ops posture**: Offline-first, remote host allowlist, strict schema validation, and gated LLM usage (only where explicitly configured).
|
||||||
|
|
||||||
@@ -126,7 +126,7 @@ It ships as containerised building blocks; each module owns a clear boundary and
|
|||||||
| Scanner | `src/Scanner/StellaOps.Scanner.WebService`<br>`src/Scanner/StellaOps.Scanner.Worker`<br>`src/Scanner/__Libraries/StellaOps.Scanner.*` | `docs/modules/scanner/architecture.md` |
|
| Scanner | `src/Scanner/StellaOps.Scanner.WebService`<br>`src/Scanner/StellaOps.Scanner.Worker`<br>`src/Scanner/__Libraries/StellaOps.Scanner.*` | `docs/modules/scanner/architecture.md` |
|
||||||
| Scheduler | `src/Scheduler/StellaOps.Scheduler.WebService`<br>`src/Scheduler/StellaOps.Scheduler.Worker` | `docs/modules/scheduler/architecture.md` |
|
| Scheduler | `src/Scheduler/StellaOps.Scheduler.WebService`<br>`src/Scheduler/StellaOps.Scheduler.Worker` | `docs/modules/scheduler/architecture.md` |
|
||||||
| CLI | `src/Cli/StellaOps.Cli`<br>`src/Cli/StellaOps.Cli.Core`<br>`src/Cli/StellaOps.Cli.Plugins.*` | `docs/modules/cli/architecture.md` |
|
| CLI | `src/Cli/StellaOps.Cli`<br>`src/Cli/StellaOps.Cli.Core`<br>`src/Cli/StellaOps.Cli.Plugins.*` | `docs/modules/cli/architecture.md` |
|
||||||
| UI / Console | `src/UI/StellaOps.UI` | `docs/modules/ui/architecture.md` |
|
| UI / Console | `src/Web/StellaOps.Web` | `docs/modules/ui/architecture.md` |
|
||||||
| Notify | `src/Notify/StellaOps.Notify.WebService`<br>`src/Notify/StellaOps.Notify.Worker` | `docs/modules/notify/architecture.md` |
|
| Notify | `src/Notify/StellaOps.Notify.WebService`<br>`src/Notify/StellaOps.Notify.Worker` | `docs/modules/notify/architecture.md` |
|
||||||
| Export Center | `src/ExportCenter/StellaOps.ExportCenter.WebService`<br>`src/ExportCenter/StellaOps.ExportCenter.Worker` | `docs/modules/export-center/architecture.md` |
|
| Export Center | `src/ExportCenter/StellaOps.ExportCenter.WebService`<br>`src/ExportCenter/StellaOps.ExportCenter.Worker` | `docs/modules/export-center/architecture.md` |
|
||||||
| Registry Token Service | `src/Registry/StellaOps.Registry.TokenService`<br>`src/Registry/__Tests/StellaOps.Registry.TokenService.Tests` | `docs/modules/registry/architecture.md` |
|
| Registry Token Service | `src/Registry/StellaOps.Registry.TokenService`<br>`src/Registry/__Tests/StellaOps.Registry.TokenService.Tests` | `docs/modules/registry/architecture.md` |
|
||||||
@@ -202,22 +202,22 @@ Your goals:
|
|||||||
|
|
||||||
Sprint filename format:
|
Sprint filename format:
|
||||||
|
|
||||||
`SPRINT_<IMPLID>_<BATCHID>_<SPRINTID>_<topic_in_few_words>.md`
|
`SPRINT_<IMPLID>_<BATCHID>_<MODULEID>_<topic_in_few_words>.md`
|
||||||
|
|
||||||
* `<IMPLID>`: `0000–9999` — implementation epoch (e.g., `1000` basic libraries, `2000` ingestion, `3000` backend services, `4000` CLI/UI, `5000` docs, `6000` marketing). When in doubt, use the highest number already present.
|
* `<IMPLID>`: implementation epoch (e.g., `20251218`). Determine by scanning existing `docs/implplan/SPRINT_*.md` and using the highest epoch; if none exist, use today's epoch.
|
||||||
* `<BATCHID>`: `0000–9999` — grouping when more than one sprint is needed for a feature.
|
* `<BATCHID>`: `001`, `002`, etc. — grouping when more than one sprint is needed for a feature.
|
||||||
* `<SPRINTID>`: `0000–9999` — sprint index within the batch.
|
* `<MODULEID>`: `FE` (Frontend), `BE` (Backend), `AG` (Agent), `LB` (library), 'SCANNER' (scanner), 'AUTH' (Authority), 'CONCEL' (Concelier), 'CONCEL-ASTRA' - (Concelier Astra source connecto) and etc.
|
||||||
* `<topic_in_few_words>`: short topic description.
|
* `<topic_in_few_words>`: short topic description.
|
||||||
* **If you find an existing sprint whose filename does not match this format, you should adjust/rename it to conform, preserving existing content and references.** Document the rename in the sprint’s **Execution Log**.
|
* **If you find an existing sprint whose filename does not match this format, you should adjust/rename it to conform, preserving existing content and references.** Document the rename in the sprint’s **Execution Log**.
|
||||||
|
|
||||||
Sprint file template:
|
Every sprint file must conform to this template:
|
||||||
|
|
||||||
```md
|
```md
|
||||||
# Sprint <ID> · <Stream/Topic>
|
# Sprint <ID> · <Stream/Topic>
|
||||||
|
|
||||||
## Topic & Scope
|
## Topic & Scope
|
||||||
- Summarise the sprint in 2–4 bullets that read like a short story (expected outcomes and “why now”).
|
- Summarise the sprint in 2–4 bullets that read like a short story (expected outcomes and "why now").
|
||||||
- Call out the single owning directory (e.g., `src/Concelier/StellaOps.Concelier.Core`) and the evidence you expect to produce.
|
- Call out the single owning directory (e.g., `src/<module>/ReleaseOrchestrator.<module>.<sub-module>`) and the evidence you expect to produce.
|
||||||
- **Working directory:** `<path/to/module>`.
|
- **Working directory:** `<path/to/module>`.
|
||||||
|
|
||||||
## Dependencies & Concurrency
|
## Dependencies & Concurrency
|
||||||
@@ -269,12 +269,12 @@ In this role you act as:
|
|||||||
* **Angular v17 engineer** (UI).
|
* **Angular v17 engineer** (UI).
|
||||||
* **QA automation engineer** (C#, Moq, Playwright, Angular test stack, or other suitable tools).
|
* **QA automation engineer** (C#, Moq, Playwright, Angular test stack, or other suitable tools).
|
||||||
|
|
||||||
Implementation principles:
|
Implementation principles:
|
||||||
|
|
||||||
* Always follow .NET 10 and Angular v17 best practices.
|
* Always follow .NET 10 and Angular v17 best practices.
|
||||||
* Apply SOLID design principles (SRP, OCP, LSP, ISP, DIP) in service and library code.
|
* Apply SOLID design principles (SRP, OCP, LSP, ISP, DIP) in service and library code.
|
||||||
* Maximise reuse and composability.
|
* Maximise reuse and composability.
|
||||||
* Maintain determinism: stable ordering, UTC ISO-8601 timestamps, immutable NDJSON where applicable.
|
* Maintain determinism: stable ordering, UTC ISO-8601 timestamps, immutable NDJSON where applicable.
|
||||||
|
|
||||||
Execution rules (very important):
|
Execution rules (very important):
|
||||||
|
|
||||||
@@ -330,7 +330,7 @@ If no design decision is required, you proceed autonomously, implementing the ch
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
### 5) Working Agreement (Global)
|
### 5) Working Agreement (Global)
|
||||||
|
|
||||||
1. **Task status discipline**
|
1. **Task status discipline**
|
||||||
|
|
||||||
@@ -353,41 +353,41 @@ If no design decision is required, you proceed autonomously, implementing the ch
|
|||||||
5. **Completion**
|
5. **Completion**
|
||||||
|
|
* When you complete all tasks in scope for your current instruction set, explicitly state that you are done with those tasks.

6. **AGENTS.md discipline**

   * Project / technical managers ensure each module’s `AGENTS.md` exists, is up to date, and reflects current design and advisory decisions.
   * Implementers must read and follow the relevant `AGENTS.md` before coding in a module.
   * If a mismatch or gap is found, implementers log it via `BLOCKED` status and the sprint’s **Decisions & Risks**, and then continue with other work instead of asking for live clarification.

---

### 7) Advisory Handling (do this every time a new advisory lands)

**Trigger:** Any new or updated file under `docs/product-advisories/` (including archived) automatically starts this workflow. No chat approval required.

1) **Doc sync (must happen for every advisory):**
   - Create/update **two layers**:
     - **High-level**: `docs/` (vision/key-features/market) to capture the moat/positioning and the headline promise.
     - **Detailed**: closest deep area (`docs/reachability/*`, `docs/market/*`, `docs/benchmarks/*`, `docs/modules/<module>/*`, etc.).
   - **Code & samples:**
     - Inline only short fragments (≤ ~20 lines) directly in the updated doc for readability.
     - Place runnable or longer samples/harnesses in `docs/benchmarks/**` or `tests/**` with deterministic, offline-friendly defaults (no network, fixed seeds), and link to them from the doc (see the sketch after this workflow).
     - If the advisory already contains code, carry it over verbatim into the benchmark/test file (with minor formatting only); don’t paraphrase away executable value.
   - **Cross-links:** whenever moats/positioning change, add links from `docs/07_HIGH_LEVEL_ARCHITECTURE.md`, `docs/key-features.md`, and the relevant module dossier(s).
2) **Sprint sync (must happen for every advisory):**
   - Add Delivery Tracker rows in the relevant `SPRINT_*.md` with owners, deps, and doc paths; add an Execution Log entry for the change.
   - If code/bench/dataset work is implied, create tasks and point to the new benchmark/test paths; add risks/interlocks for schema/feed freeze or transparency caps as needed.

3) **De-duplication:**
   - Check `docs/product-advisories/archived/` for overlaps. If similar, mark “supersedes/extends <advisory>” in the new doc and avoid duplicate tasks.

4) **Defaults to apply (unless advisory overrides):**
   - Hybrid reachability posture: graph DSSE mandatory; edge-bundle DSSE optional/targeted; deterministic outputs only.
   - Offline-friendly benches/tests; frozen feeds; deterministic ordering/hashes.

5) **Do not defer:** Execute steps 1–4 immediately; reporting is after the fact, not a gating step.

**Lessons baked in:** Past delays came from missing code carry-over and missing sprint tasks. Always move advisory code into benchmarks/tests and open the corresponding sprint rows the same session you read the advisory.
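To ground the deterministic, offline-friendly requirement above, here is a minimal xUnit sketch of the kind of harness that belongs under `tests/**` or `docs/benchmarks/**`. The class name and seed value are illustrative only, not an existing StellaOps API:

```csharp
using System;
using Xunit;

public sealed class AdvisorySampleDeterminismTests
{
    // Fixed seed: reruns must produce byte-identical output (no network, no wall clock).
    private const int Seed = 1234;

    [Fact]
    public void CarriedOverSample_ProducesIdenticalOutputOnRerun()
    {
        var first = BuildSample(new Random(Seed));
        var second = BuildSample(new Random(Seed));

        Assert.Equal(first, second);
    }

    private static string BuildSample(Random rng)
    {
        // Stand-in for code carried over from an advisory; only the determinism
        // contract matters here: same seed in, same string out.
        var parts = new int[8];
        for (var i = 0; i < parts.Length; i++)
        {
            parts[i] = rng.Next(0, 1000);
        }

        return string.Join(",", parts);
    }
}
```

Because the harness takes no input from the network, clock, or machine layout, its output can be hashed and compared across machines, which is what the frozen-feed and deterministic-ordering defaults in step 4 rely on.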
---

### 6) Role Switching
CLAUDE.md (80 changed lines)
@@ -41,7 +41,7 @@ dotnet test --filter "FullyQualifiedName~TestMethodName"
 dotnet test src/StellaOps.sln --verbosity normal
 ```

-**Note:** Tests use Mongo2Go which requires OpenSSL 1.1 on Linux. Run `scripts/enable-openssl11-shim.sh` before testing if needed.
+**Note:** Integration tests use Testcontainers for PostgreSQL. Ensure Docker is running before executing tests.

 ## Linting and Validation

@@ -60,11 +60,11 @@ helm lint deploy/helm/stellaops

 ### Technology Stack
 - **Runtime:** .NET 10 (`net10.0`) with latest C# preview features
-- **Frontend:** Angular v17 (in `src/UI/StellaOps.UI`)
+- **Frontend:** Angular v17 (in `src/Web/StellaOps.Web`)
-- **Database:** MongoDB (driver version ≥ 3.0)
+- **Database:** PostgreSQL (≥16) with per-module schema isolation; see `docs/db/` for specification
-- **Testing:** xUnit with Mongo2Go, Moq, Microsoft.AspNetCore.Mvc.Testing
+- **Testing:** xUnit with Testcontainers (PostgreSQL), Moq, Microsoft.AspNetCore.Mvc.Testing
 - **Observability:** Structured logging, OpenTelemetry traces
-- **NuGet:** Use the single curated feed and cache at `local-nugets/`
+- **NuGet:** Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org)

 ### Module Structure

@@ -72,24 +72,53 @@ The codebase follows a monorepo pattern with modules under `src/`:

 | Module | Path | Purpose |
 |--------|------|---------|
+| **Core Platform** | | |
+| Authority | `src/Authority/` | Authentication, authorization, OAuth/OIDC, DPoP |
+| Gateway | `src/Gateway/` | API gateway with routing and transport abstraction |
+| Router | `src/__Libraries/StellaOps.Router.*` | Transport-agnostic messaging (TCP/TLS/UDP/RabbitMQ/Valkey) |
+| **Data Ingestion** | | |
 | Concelier | `src/Concelier/` | Vulnerability advisory ingestion and merge engine |
-| CLI | `src/Cli/` | Command-line interface for scanner distribution and job control |
-| Scanner | `src/Scanner/` | Container scanning with SBOM generation |
-| Authority | `src/Authority/` | Authentication and authorization |
-| Signer | `src/Signer/` | Cryptographic signing operations |
-| Attestor | `src/Attestor/` | in-toto/DSSE attestation generation |
 | Excititor | `src/Excititor/` | VEX document ingestion and export |
-| Policy | `src/Policy/` | OPA/Rego policy engine |
+| VexLens | `src/VexLens/` | VEX consensus computation across issuers |
+| IssuerDirectory | `src/IssuerDirectory/` | Issuer trust registry (CSAF publishers) |
+| **Scanning & Analysis** | | |
+| Scanner | `src/Scanner/` | Container scanning with SBOM generation (11 language analyzers) |
+| BinaryIndex | `src/BinaryIndex/` | Binary identity extraction and fingerprinting |
+| AdvisoryAI | `src/AdvisoryAI/` | AI-assisted advisory analysis |
+| **Artifacts & Evidence** | | |
+| Attestor | `src/Attestor/` | in-toto/DSSE attestation generation |
+| Signer | `src/Signer/` | Cryptographic signing operations |
+| SbomService | `src/SbomService/` | SBOM storage, versioning, and lineage ledger |
+| EvidenceLocker | `src/EvidenceLocker/` | Sealed evidence storage and export |
+| ExportCenter | `src/ExportCenter/` | Batch export and report generation |
+| VexHub | `src/VexHub/` | VEX distribution and exchange hub |
+| **Policy & Risk** | | |
+| Policy | `src/Policy/` | Policy engine with K4 lattice logic |
+| VulnExplorer | `src/VulnExplorer/` | Vulnerability exploration and triage UI backend |
+| **Operations** | | |
 | Scheduler | `src/Scheduler/` | Job scheduling and queue management |
-| Notify | `src/Notify/` | Notification delivery (Email, Slack, Teams) |
+| Orchestrator | `src/Orchestrator/` | Workflow orchestration and task coordination |
+| TaskRunner | `src/TaskRunner/` | Task pack execution engine |
+| Notify | `src/Notify/` | Notification delivery (Email, Slack, Teams, Webhooks) |
+| **Integration** | | |
+| CLI | `src/Cli/` | Command-line interface (Native AOT) |
 | Zastava | `src/Zastava/` | Container registry webhook observer |
+| Web | `src/Web/` | Angular 17 frontend SPA |
+| **Infrastructure** | | |
+| Cryptography | `src/Cryptography/` | Crypto plugins (FIPS, eIDAS, GOST, SM, PQ) |
+| Telemetry | `src/Telemetry/` | OpenTelemetry traces, metrics, logging |
+| Graph | `src/Graph/` | Call graph and reachability data structures |
+| Signals | `src/Signals/` | Runtime signal collection and correlation |
+| Replay | `src/Replay/` | Deterministic replay engine |

+> **Note:** See `docs/modules/<module>/architecture.md` for detailed module dossiers.
+
 ### Code Organization Patterns

 - **Libraries:** `src/<Module>/__Libraries/StellaOps.<Module>.*`
 - **Tests:** `src/<Module>/__Tests/StellaOps.<Module>.*.Tests/`
 - **Plugins:** Follow naming `StellaOps.<Module>.Connector.*` or `StellaOps.<Module>.Plugin.*`
-- **Shared test infrastructure:** `StellaOps.Concelier.Testing` provides MongoDB fixtures
+- **Shared test infrastructure:** `StellaOps.Concelier.Testing` and `StellaOps.Infrastructure.Postgres.Testing` provide PostgreSQL fixtures

 ### Naming Conventions

@@ -125,9 +154,13 @@ The codebase follows a monorepo pattern with modules under `src/`:

 ### Test Layout

-- Module tests: `StellaOps.<Module>.<Component>.Tests`
-- Shared fixtures/harnesses: `StellaOps.<Module>.Testing`
-- Tests use xUnit, Mongo2Go for MongoDB integration tests
+- **Module tests:** `src/<Module>/__Tests/StellaOps.<Module>.<Component>.Tests/`
+- **Global tests:** `src/__Tests/{Category}/` (Integration, Acceptance, Load, Security, Chaos, E2E, etc.)
+- **Shared testing libraries:** `src/__Tests/__Libraries/StellaOps.*.Testing/`
+- **Benchmarks & golden corpus:** `src/__Tests/__Benchmarks/`
+- **Ground truth datasets:** `src/__Tests/__Datasets/`
+- Tests use xUnit, Testcontainers for PostgreSQL integration tests
+- See `src/__Tests/AGENTS.md` for detailed test infrastructure guidance

 ### Documentation Updates

@@ -154,8 +187,15 @@ When working in this repository, behavior changes based on the role specified:

 ### As Project Manager

-- Sprint files follow format: `SPRINT_<IMPLID>_<BATCHID>_<SPRINTID>_<topic>.md`
-- IMPLID epochs: `1000` basic libraries, `2000` ingestion, `3000` backend services, `4000` CLI/UI, `5000` docs, `6000` marketing
+Create implementation sprint files under `docs/implplan/` using the **mandatory** sprint filename format:
+
+  `SPRINT_<IMPLID>_<BATCHID>_<MODULEID>_<topic_in_few_words>.md`
+
+  - `<IMPLID>`: implementation epoch (e.g., `20251219`). Determine by scanning existing `docs/implplan/SPRINT_*.md` and using the highest epoch; if none exist, use today's epoch.
+  - `<BATCHID>`: `001`, `002`, etc. — grouping when more than one sprint is needed for a feature.
+  - `<MODULEID>`: module or component code, e.g. `FE` (Frontend), `BE` (Backend), `AG` (Agent), `LB` (Library), `SCANNER` (Scanner), `AUTH` (Authority), `CONCEL` (Concelier), `CONCEL-ASTRA` (Concelier Astra source connector), etc.
+  - `<topic_in_few_words>`: short topic description.
+  - **If any existing sprint file name or internal format deviates from the standard, rename/normalize it** and record the change in its **Execution Log**.
 - Normalize sprint files to standard template while preserving content
 - Ensure module `AGENTS.md` files exist and are up to date

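Putting the parts of the format above together, a conforming name could look like `SPRINT_20251219_001_CONCEL-ASTRA_astra_connector_ingestion.md`; the epoch, batch, module code, and topic shown here are illustrative values only, not an actual sprint file.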
@@ -200,6 +240,8 @@ Before coding, confirm required docs are read:

 - **Architecture overview:** `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
 - **Module dossiers:** `docs/modules/<module>/architecture.md`
+- **Database specification:** `docs/db/SPECIFICATION.md`
+- **PostgreSQL operations:** `docs/operations/postgresql-guide.md`
 - **API/CLI reference:** `docs/09_API_CLI_REFERENCE.md`
 - **Offline operation:** `docs/24_OFFLINE_KIT.md`
 - **Quickstart:** `docs/10_CONCELIER_CLI_QUICKSTART.md`
@@ -216,5 +258,5 @@ Workflows are in `.gitea/workflows/`. Key workflows:
 ## Environment Variables

 - `STELLAOPS_BACKEND_URL` - Backend API URL for CLI
-- `STELLAOPS_TEST_MONGO_URI` - MongoDB connection string for integration tests
+- `STELLAOPS_TEST_POSTGRES_CONNECTION` - PostgreSQL connection string for integration tests
 - `StellaOpsEnableCryptoPro` - Enable GOST crypto support (set to `true` in build)
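To make the Testcontainers note and the `STELLAOPS_TEST_POSTGRES_CONNECTION` variable above concrete, here is a minimal sketch of a PostgreSQL test fixture. It assumes the `Testcontainers.PostgreSql` package and is illustrative only; it is not the shared `StellaOps.Infrastructure.Postgres.Testing` fixture itself:

```csharp
using System;
using System.Threading.Tasks;
using Testcontainers.PostgreSql;
using Xunit;

// Shared PostgreSQL instance for a test class. If STELLAOPS_TEST_POSTGRES_CONNECTION
// is set, the externally provided database is used instead of a throwaway container.
public sealed class PostgresFixture : IAsyncLifetime
{
    private readonly PostgreSqlContainer? _container;

    public PostgresFixture()
    {
        ConnectionString = Environment.GetEnvironmentVariable("STELLAOPS_TEST_POSTGRES_CONNECTION") ?? string.Empty;

        if (string.IsNullOrEmpty(ConnectionString))
        {
            // Requires a running Docker daemon; the image tag is an assumption.
            _container = new PostgreSqlBuilder().WithImage("postgres:16-alpine").Build();
        }
    }

    public string ConnectionString { get; private set; }

    public async Task InitializeAsync()
    {
        if (_container is not null)
        {
            await _container.StartAsync();
            ConnectionString = _container.GetConnectionString();
        }
    }

    public async Task DisposeAsync()
    {
        if (_container is not null)
        {
            await _container.DisposeAsync();
        }
    }
}

public sealed class ExampleRepositoryTests : IClassFixture<PostgresFixture>
{
    private readonly PostgresFixture _postgres;

    public ExampleRepositoryTests(PostgresFixture postgres) => _postgres = postgres;

    [Fact]
    public void Fixture_ProvidesConnectionString() =>
        Assert.False(string.IsNullOrEmpty(_postgres.ConnectionString));
}
```

The shared testing libraries mentioned earlier presumably wrap this lifecycle; the sketch only shows the moving parts a module test needs.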
@@ -2,23 +2,15 @@

   <PropertyGroup>
     <StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
-    <StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
     <StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
-    <StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
-    <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
-    <_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
-    <RestorePackagesPath Condition="'$(RestorePackagesPath)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot).nuget/packages'))</RestorePackagesPath>
     <RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
-    <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
-    <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
-    <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
   </PropertyGroup>

   <PropertyGroup>
     <StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
-    <NoWarn>$(NoWarn);NU1608;NU1605</NoWarn>
+    <NoWarn>$(NoWarn);NU1608;NU1605;NU1202</NoWarn>
-    <WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605</WarningsNotAsErrors>
+    <WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605;NU1202</WarningsNotAsErrors>
-    <RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605</RestoreNoWarn>
+    <RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605;NU1202</RestoreNoWarn>
     <RestoreWarningsAsErrors></RestoreWarningsAsErrors>
     <RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
     <RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
@@ -31,6 +23,10 @@
     <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
   </PropertyGroup>

+  <PropertyGroup>
+    <AssetTargetFallback>$(AssetTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</AssetTargetFallback>
+  </PropertyGroup>
+
   <PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
     <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
   </PropertyGroup>
@@ -43,4 +39,52 @@
     <PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
   </ItemGroup>

+  <!-- .NET 10 compatible package version overrides -->
+  <ItemGroup>
+    <!-- Cryptography packages - updated for net10.0 compatibility -->
+    <PackageReference Update="BouncyCastle.Cryptography" Version="2.6.2" />
+    <PackageReference Update="Pkcs11Interop" Version="5.1.2" />
+
+    <!-- Resilience - Polly 8.x for .NET 6+ -->
+    <PackageReference Update="Polly" Version="8.5.2" />
+    <PackageReference Update="Polly.Core" Version="8.5.2" />
+
+    <!-- YAML - updated for net10.0 -->
+    <PackageReference Update="YamlDotNet" Version="16.3.0" />
+
+    <!-- JSON Schema packages -->
+    <PackageReference Update="JsonSchema.Net" Version="7.3.2" />
+    <PackageReference Update="Json.More.Net" Version="2.1.0" />
+    <PackageReference Update="JsonPointer.Net" Version="5.1.0" />
+
+    <!-- HTML parsing -->
+    <PackageReference Update="AngleSharp" Version="1.2.0" />
+
+    <!-- Scheduling -->
+    <PackageReference Update="Cronos" Version="0.9.0" />
+
+    <!-- Testing - xUnit 2.9.3 for .NET 10 -->
+    <PackageReference Update="xunit" Version="2.9.3" />
+    <PackageReference Update="xunit.assert" Version="2.9.3" />
+    <PackageReference Update="xunit.extensibility.core" Version="2.9.3" />
+    <PackageReference Update="xunit.extensibility.execution" Version="2.9.3" />
+    <PackageReference Update="xunit.runner.visualstudio" Version="3.0.1" />
+    <PackageReference Update="xunit.abstractions" Version="2.0.3" />
+
+    <!-- JSON -->
+    <PackageReference Update="Newtonsoft.Json" Version="13.0.4" />
+
+    <!-- Annotations -->
+    <PackageReference Update="JetBrains.Annotations" Version="2024.3.0" />
+
+    <!-- Async interfaces -->
+    <PackageReference Update="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
+
+    <!-- HTTP Resilience integration (replaces Http.Polly) -->
+    <PackageReference Update="Microsoft.Extensions.Http.Resilience" Version="10.0.0" />
+
+    <!-- Testing packages - aligned to 10.0.0 -->
+    <PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.0" />
+  </ItemGroup>
+
 </Project>
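For context on the `STELLAOPS_CRYPTO_PRO` constant defined above: a compilation symbol added through `DefineConstants` is consumed in C# with `#if`. The snippet below is a hedged illustration of that mechanism only; the type and method names are hypothetical, not the actual StellaOps crypto plug-in surface:

```csharp
public static class CryptoProviderSelector
{
    public static string DescribeDefaultProvider()
    {
#if STELLAOPS_CRYPTO_PRO
        // Compiled only when the build sets StellaOpsEnableCryptoPro=true,
        // which adds STELLAOPS_CRYPTO_PRO to DefineConstants.
        return "GOST provider (CryptoPro CSP integration enabled)";
#else
        // Default build: sovereign crypto integration code is compiled out.
        return "Default provider";
#endif
    }
}
```

Only the `#if`/`DefineConstants` wiring is taken from the props file above; everything else in the snippet is made up for illustration.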
@@ -1,8 +1,10 @@
 # Third-Party Notices

-This project bundles or links against the following third-party components in the scanner Ruby analyzer implementation:
+This project bundles or links against the following third-party components:

-- **tree-sitter** (MIT License, © 2018 Max Brunsfeld)
+- **tree-sitter** (MIT License, (c) 2018 Max Brunsfeld)
-- **tree-sitter-ruby** (MIT License, © 2016 Rob Rix)
+- **tree-sitter-ruby** (MIT License, (c) 2016 Rob Rix)
+- **GostCryptography (fork)** (MIT License, (c) 2014-2024 AlexMAS) — vendored under `third_party/forks/AlexMAS.GostCryptography` for GOST support in `StellaOps.Cryptography.Plugin.CryptoPro` and related sovereign crypto plug-ins.
+- **CryptoPro CSP integration** (Commercial, customer-provided) — StellaOps ships only integration code; CryptoPro CSP binaries and licenses are not redistributed and must be supplied by the operator per vendor EULA.

 License texts are available under third-party-licenses/.
@@ -1,14 +1,13 @@
 <?xml version="1.0" encoding="utf-8"?>
 <configuration>
+  <config>
+    <!-- Centralize package cache to prevent .nuget-* directory sprawl -->
+    <add key="globalPackagesFolder" value=".nuget/packages" />
+  </config>
   <packageSources>
     <clear />
-    <add key="local-nugets" value="./local-nugets" />
-    <add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
   </packageSources>
-  <config>
-    <add key="globalPackagesFolder" value="./.nuget/packages" />
-  </config>
   <fallbackPackageFolders>
     <clear />
   </fallbackPackageFolders>
README.md (33 lines, deleted)
@@ -1,33 +0,0 @@
# StellaOps Concelier & CLI

This repository hosts the StellaOps Concelier service, its plug-in ecosystem, and the
first-party CLI (`stellaops-cli`). Concelier ingests vulnerability advisories from
authoritative sources, stores them in MongoDB, and exports deterministic JSON and
Trivy DB artefacts. The CLI drives scanner distribution, scan execution, and job
control against the Concelier API.

## Quickstart

1. Prepare a MongoDB instance and (optionally) install `trivy-db`/`oras`.
2. Copy `etc/concelier.yaml.sample` to `etc/concelier.yaml` and update the storage + telemetry
   settings.
3. Copy `etc/authority.yaml.sample` to `etc/authority.yaml`, review the issuer, token
   lifetimes, and plug-in descriptors, then edit the companion manifests under
   `etc/authority.plugins/*.yaml` to match your deployment.
4. Start the web service with `dotnet run --project src/Concelier/StellaOps.Concelier.WebService`.
5. Configure the CLI via environment variables (e.g. `STELLAOPS_BACKEND_URL`) and trigger
   jobs with `dotnet run --project src/Cli/StellaOps.Cli -- db merge`.

Detailed operator guidance is available in `docs/10_CONCELIER_CLI_QUICKSTART.md`. API and
command reference material lives in `docs/09_API_CLI_REFERENCE.md`.

Pipeline note: deployment workflows should template `etc/concelier.yaml` during CI/CD,
injecting environment-specific Mongo credentials and telemetry endpoints. Upcoming
releases will add Microsoft OAuth (Entra ID) authentication support—track the quickstart
for integration steps once available.

## Documentation

- `docs/README.md` now consolidates the platform index and points to the updated high-level architecture.
- Module architecture dossiers now live under `docs/modules/<module>/`. The most relevant here are `docs/modules/concelier/ARCHITECTURE.md` (service layout, merge engine, exports) and `docs/modules/cli/ARCHITECTURE.md` (command surface, AOT packaging, auth flows). Related services such as the Signer, Attestor, Authority, Scanner, UI, Excititor, Zastava, and DevOps pipeline each have their own dossier in the same hierarchy.
- Offline operation guidance moved to `docs/24_OFFLINE_KIT.md`, which details bundle composition, verification, and delta workflows. Concelier-specific connector operations stay in `docs/modules/concelier/operations/connectors/*.md` with companion runbooks in `docs/modules/concelier/operations/`.
@@ -1,19 +1,17 @@
 <Solution>
   <Folder Name="/src/" />
-  <Folder Name="/src/Gateway/">
-    <Project Path="src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj" />
-  </Folder>
   <Folder Name="/src/__Libraries/">
     <Project Path="src/__Libraries/StellaOps.Microservice.SourceGen/StellaOps.Microservice.SourceGen.csproj" />
     <Project Path="src/__Libraries/StellaOps.Microservice/StellaOps.Microservice.csproj" />
     <Project Path="src/__Libraries/StellaOps.Router.Common/StellaOps.Router.Common.csproj" />
     <Project Path="src/__Libraries/StellaOps.Router.Config/StellaOps.Router.Config.csproj" />
+    <Project Path="src/__Libraries/StellaOps.Router.Gateway/StellaOps.Router.Gateway.csproj" />
     <Project Path="src/__Libraries/StellaOps.Router.Transport.InMemory/StellaOps.Router.Transport.InMemory.csproj" />
   </Folder>
   <Folder Name="/tests/">
-    <Project Path="tests/StellaOps.Gateway.WebService.Tests/StellaOps.Gateway.WebService.Tests.csproj" />
     <Project Path="tests/StellaOps.Microservice.Tests/StellaOps.Microservice.Tests.csproj" />
     <Project Path="tests/StellaOps.Router.Common.Tests/StellaOps.Router.Common.Tests.csproj" />
+    <Project Path="tests/StellaOps.Router.Gateway.Tests/StellaOps.Router.Gateway.Tests.csproj" />
     <Project Path="tests/StellaOps.Router.Transport.InMemory.Tests/StellaOps.Router.Transport.InMemory.Tests.csproj" />
   </Folder>
 </Solution>
@@ -1,30 +0,0 @@
# Stella Ops Bench Repository

> **Status:** Draft — aligns with `docs/benchmarks/vex-evidence-playbook.md` (Sprint 401).
> **Purpose:** Host reproducible VEX decisions and comparison data that prove Stella Ops’ signal quality vs. baseline scanners.

## Layout

```
bench/
  README.md                 # this file
  findings/                 # per CVE/product bundles
    CVE-YYYY-NNNNN/
      evidence/
        reachability.json
        sbom.cdx.json
      decision.openvex.json
      decision.dsse.json
      rekor.txt
      metadata.json
  tools/
    verify.sh               # DSSE + Rekor verifier
    verify.py               # offline verifier
    compare.py              # baseline comparison script
    replay.sh               # runs reachability replay manifests
  results/
    summary.csv
    runs/<date>/...         # raw outputs + replay manifests
```

Refer to `docs/benchmarks/vex-evidence-playbook.md` for artifact contracts and automation tasks. The `bench/` tree will be populated once `BENCH-AUTO-401-019` and `DOCS-VEX-401-012` land.
@@ -1,46 +0,0 @@
# Reachability Benchmark · AGENTS

## Scope & Roles
- **Working directory:** `bench/reachability-benchmark/`
- Roles: benchmark curator (datasets, schemas), tooling engineer (scorer/CI), docs maintainer (public README/CONTRIBUTING), DevOps (deterministic builds, CI).
- Outputs are public-facing (Apache-2.0); keep artefacts deterministic and offline-friendly.

## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/reachability/function-level-evidence.md`
- `docs/reachability/lattice.md`
- Product advisories:
  - `docs/product-advisories/24-Nov-2025 - Designing a Deterministic Reachability Benchmark.md`
  - `docs/product-advisories/archived/23-Nov-2025 - Benchmarking Determinism in Vulnerability Scoring.md`
  - `docs/product-advisories/archived/23-Nov-2025 - Publishing a Reachability Benchmark Dataset.md`
- Sprint plan: `docs/implplan/SPRINT_0513_0001_0001_public_reachability_benchmark.md`
- DB/spec guidance for determinism and licensing: `docs/db/RULES.md`, `docs/db/VERIFICATION.md`

## Working Agreements
- Determinism: pin toolchains; set `SOURCE_DATE_EPOCH`; sort file lists; stable JSON/YAML ordering; fixed seeds for any sampling.
- Offline posture: no network at build/test time; vendored toolchains; registry pulls are forbidden—use cached/bundled images.
- Licensing: all benchmark content Apache-2.0; include LICENSE in repo root; third-party cases must have compatible licenses and attributions.
- Evidence: each case must include oracle tests/coverage proving reachability label; store truth and submissions under `benchmark/truth/` and `benchmark/submissions/` with JSON Schema.
- Security: no secrets; scrub URLs/tokens; deterministic CI artifacts only.
- Observability: scorer emits structured logs (JSON) with deterministic ordering; metrics optional.

## Directory Contracts
- `cases/<lang>/<project>/`: source, Dockerfile (deterministic), pinned dependencies, oracle tests, expected coverage output.
- `schemas/`: JSON/YAML schemas for cases, entrypoints, truth, submission; include validation CLI.
- `tools/scorer/`: `rb-score` CLI; no network; pure local file IO.
- `baselines/`: reference runners (Semgrep/CodeQL/Stella) with normalized outputs.
- `ci/`: deterministic CI workflows; no cache flakiness.
- `website/`: static site (no trackers/fonts from CDN).

## Testing
- Per-case oracle tests must pass locally without network.
- Scorer unit tests: schema validation, scoring math (precision/recall/F1), explainability tiers.
- Determinism tests: rerun scorer twice → identical outputs/hash.

## Status Discipline
- Mirror task status in `docs/implplan/SPRINT_0513_0001_0001_public_reachability_benchmark.md` when starting/pausing/completing work.
- Log material changes in sprint Execution Log with date (UTC).

## Allowed Shared Libraries
- Use existing repo toolchains only (Python/Node/Go minimal). No new external services. Keep scorer dependencies minimal and vendored when possible.
@@ -1,58 +0,0 @@
# StellaOps Reachability Benchmark (Public)

Deterministic, reproducible benchmark for reachability analysis tools.

## Goals
- Provide open cases with ground truth for reachable/unreachable sinks.
- Enforce determinism (hash-stable builds, fixed seeds, pinned deps).
- Enable fair scoring via the `rb-score` CLI and published schemas.

## Layout
- `cases/<lang>/<project>/` — benchmark cases with deterministic Dockerfiles, pinned deps, oracle tests.
- `schemas/` — JSON/YAML schemas for cases, entrypoints, truth, submissions.
- `benchmark/truth/` — ground-truth labels (hidden/internal split optional).
- `benchmark/submissions/` — sample submissions and format reference.
- `tools/scorer/` — `rb-score` CLI and tests.
- `tools/build/` — `build_all.py` (run all cases) and `validate_builds.py` (run twice and compare hashes).
- `baselines/` — reference runners (Semgrep, CodeQL, Stella) with normalized outputs.
- `ci/` — deterministic CI workflows and scripts.
- `website/` — static site (leaderboard/docs/downloads).

Sample cases added (JS track):
- `cases/js/unsafe-eval` (reachable sink) → `benchmark/truth/js-unsafe-eval.json`.
- `cases/js/guarded-eval` (unreachable by default) → `benchmark/truth/js-guarded-eval.json`.
- `cases/js/express-eval` (admin eval reachable) → `benchmark/truth/js-express-eval.json`.
- `cases/js/express-guarded` (admin eval gated by env) → `benchmark/truth/js-express-guarded.json`.
- `cases/js/fastify-template` (template rendering reachable) → `benchmark/truth/js-fastify-template.json`.

Sample cases added (Python track):
- `cases/py/unsafe-exec` (reachable eval) → `benchmark/truth/py-unsafe-exec.json`.
- `cases/py/guarded-exec` (unreachable when FEATURE_ENABLE != 1) → `benchmark/truth/py-guarded-exec.json`.
- `cases/py/flask-template` (template rendering reachable) → `benchmark/truth/py-flask-template.json`.
- `cases/py/fastapi-guarded` (unreachable unless ALLOW_EXEC=true) → `benchmark/truth/py-fastapi-guarded.json`.
- `cases/py/django-ssti` (template rendering reachable, autoescape off) → `benchmark/truth/py-django-ssti.json`.

Sample cases added (Java track):
- `cases/java/spring-deserialize` (reachable Java deserialization) → `benchmark/truth/java-spring-deserialize.json`.
- `cases/java/spring-guarded` (deserialization unreachable unless ALLOW_DESER=true) → `benchmark/truth/java-spring-guarded.json`.

## Determinism & Offline Rules
- No network during build/test; pin images/deps; set `SOURCE_DATE_EPOCH`.
- Sort file lists; stable JSON/YAML emitters; fixed RNG seeds.
- All scripts must succeed on a clean machine with cached toolchain tarballs only.

## Licensing
- Apache-2.0 for all benchmark assets. Third-party snippets must be license-compatible and attributed.

## Quick Start (once populated)
```bash
# schema sanity checks (offline)
python tools/validate.py all schemas/examples

# score a submission (coming in task 513-008)
cd tools/scorer
./rb-score --cases ../cases --truth ../benchmark/truth --submission ../benchmark/submissions/sample.json
```

## Contributing
See CONTRIBUTING.md. Open issues/PRs welcome; please provide hashes and logs for reproducibility.
@@ -1,11 +0,0 @@
# Reachability Benchmark Changelog

## 1.0.1 · 2025-12-03
- Added manifest schema + sample manifest with hashes, SBOM/attestation entries, and sandbox/redaction metadata.
- Added coverage/trace schemas and extended validator to cover them.
- Introduced `tools/verify_manifest.py` and deterministic offline kit packaging script.
- Added per-language determinism env templates and dataset safety checklist.
- Populated SBOM + attestation outputs for JS/PY/C tracks; Java remains blocked on JDK availability.

## 1.0.0 · 2025-12-01
- Initial public dataset, scorer, baselines, and website.
@@ -1,92 +0,0 @@
{
  "schemaVersion": "1.0.0",
  "kitId": "reachability-benchmark:public-v1",
  "version": "1.0.1",
  "createdAt": "2025-12-03T00:00:00Z",
  "sourceDateEpoch": 1730000000,
  "resourceLimits": {
    "cpu": "4",
    "memory": "8Gi"
  },
  "cases": [
    {
      "id": "js-unsafe-eval:001",
      "language": "js",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/js/unsafe-eval", "sha256": "69b0d1cbae1e2c9ddc0f4dba8c6db507e1d3a1c5ea0a0a545c6f3e785529c91c" },
        "case": { "path": "cases/js/unsafe-eval/case.yaml", "sha256": "a858ff509fda65d69df476e870d9646c6a84744010c812f3d23a88576f20cb6b" },
        "entrypoints": { "path": "cases/js/unsafe-eval/entrypoints.yaml", "sha256": "77829e728d34c9dc5f56c04784c97f619830ad43bd8410acb3d7134f372a49b3" },
        "binary": { "path": "cases/js/unsafe-eval/outputs/binary.tar.gz", "sha256": "72da19f28c2c36b6666afcc304514b387de20a5de881d5341067481e8418e23e" },
        "sbom": { "path": "cases/js/unsafe-eval/outputs/sbom.cdx.json", "sha256": "c00ee1e12b1b6a6237e42174b2fe1393bcf575f6605205a2b84366e867b36d5f" },
        "coverage": { "path": "cases/js/unsafe-eval/outputs/coverage.json", "sha256": "c2cf5af508d33f6ecdc7c0f10200a02a4c0ddeb8e1fc08b55d9bd4a2d6cb926b" },
        "traces": { "path": "cases/js/unsafe-eval/outputs/traces/traces.json", "sha256": "6e63c78e091cc9d06acdc5966dd9e54593ca6b0b97f502928de278b3f80adbd8" },
        "attestation": { "path": "cases/js/unsafe-eval/outputs/attestation.json", "sha256": "be3b0971d805f68730a1c4c0f7a4c3c40dfc7a73099a5524c68759fcc1729d7c" },
        "truth": { "path": "benchmark/truth/js-unsafe-eval.json", "sha256": "ab42f28ed229eb657ffcb36c3a99287436e1822a4c7d395a94de784457a08f62" }
      },
      "truth": {
        "label": "reachable",
        "confidence": "high",
        "rationale": "Unit test hits eval sink via POST /api/exec"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    },
    {
      "id": "py-fastapi-guarded:104",
      "language": "py",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/py/fastapi-guarded", "sha256": "0869cab10767ac7e7b33c9bbd634f811d98ce5cdeb244769f1a81949438460fb" },
        "case": { "path": "cases/py/fastapi-guarded/case.yaml", "sha256": "0add8a5f487ebd21ee20ab88b7c6436fe8471f0a54ab8da0e08c8416aa181346" },
        "entrypoints": { "path": "cases/py/fastapi-guarded/entrypoints.yaml", "sha256": "47c9dd15bf7c5bb8641893a92791d3f7675ed6adba17b251f609335400d29d41" },
        "binary": { "path": "cases/py/fastapi-guarded/outputs/binary.tar.gz", "sha256": "ca964fef352dc535b63d35b8f8846cc051e10e54cfd8aceef7566f3c94178b76" },
        "sbom": { "path": "cases/py/fastapi-guarded/outputs/sbom.cdx.json", "sha256": "13999d8f3d4c9bdb70ea54ad1de613be3f893d79bdd1a53f7c9401e6add88cf0" },
        "coverage": { "path": "cases/py/fastapi-guarded/outputs/coverage.json", "sha256": "07b1f6dccaa02bd4e1c3e2771064fa3c6e06d02843a724151721ea694762c750" },
        "traces": { "path": "cases/py/fastapi-guarded/outputs/traces/traces.json", "sha256": "4633748b8b428b45e3702f2f8f5b3f4270728078e26bce1e08900ed1d5bb3046" },
        "attestation": { "path": "cases/py/fastapi-guarded/outputs/attestation.json", "sha256": "257aa5408a5c6ffe0e193a75a2a54597f8c6f61babfe8aaf26bd47340c3086c3" },
        "truth": { "path": "benchmark/truth/py-fastapi-guarded.json", "sha256": "f8c62abeb00006621feeb010d0e47d248918dffd6d6e20e0f47d74e1b3642760" }
      },
      "truth": {
        "label": "unreachable",
        "confidence": "high",
        "rationale": "Feature flag ALLOW_EXEC must be true before sink executes"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    },
    {
      "id": "c-unsafe-system:001",
      "language": "c",
      "size": "small",
      "hashes": {
        "source": { "path": "cases/c/unsafe-system", "sha256": "bc39ab3a3e5cb3944a205912ecad8c1ac4b7d15c64b453c9d34a9a5df7fbbbf4" },
        "case": { "path": "cases/c/unsafe-system/case.yaml", "sha256": "7799a3a629c22ad47197309f44e32aabbc4e6711ef78d606ba57a7a4974787ce" },
        "entrypoints": { "path": "cases/c/unsafe-system/entrypoints.yaml", "sha256": "06afee8350460c9d15b26ea9d4ea293e8eb3f4b86b3179e19401fa99947e4490" },
        "binary": { "path": "cases/c/unsafe-system/outputs/binary.tar.gz", "sha256": "62200167bd660bad6d131b21f941acdfebe00e949e353a53c97b6691ac8f0e49" },
        "sbom": { "path": "cases/c/unsafe-system/outputs/sbom.cdx.json", "sha256": "4c72a213fc4c646f44b4d0be3c23711b120b2a386374ebaa4897e5058980e0f5" },
        "coverage": { "path": "cases/c/unsafe-system/outputs/coverage.json", "sha256": "03ba8cf09e7e0ed82e9fa8abb48f92355e894fd56e0c0160a504193a6f6ec48a" },
        "traces": { "path": "cases/c/unsafe-system/outputs/traces/traces.json", "sha256": "f6469e46a57b8a6e8e17c9b8e78168edd6657ea8a5e1e96fe6ab4a0fc88a734e" },
        "attestation": { "path": "cases/c/unsafe-system/outputs/attestation.json", "sha256": "c3755088182359a45492170fa8a57d826b605176333d109f4f113bc7ccf85f97" },
        "truth": { "path": "benchmark/truth/c-unsafe-system.json", "sha256": "9a8200c2cf549b3ac8b19b170e9d34df063351879f19f401d8492e280ad08c13" }
      },
      "truth": {
        "label": "reachable",
        "confidence": "high",
        "rationale": "Command injection sink reachable via argv -> system()"
      },
      "sandbox": { "network": "loopback", "privileges": "rootless" },
      "redaction": { "pii": false, "policy": "benchmark-default/v1" }
    }
  ],
  "artifacts": {
    "submissionSchema": { "path": "schemas/submission.schema.json", "sha256": "de5bebb2dbcd085d7896f47a16b9d3837a65fb7f816dcf7e587967d5848c50a7" },
    "scorer": { "path": "tools/scorer/rb_score.py", "sha256": "32d4f69f5d1d4b87902d6c4f020efde703487d526bf7d42b4438cb2499813f7f" },
    "baselineSubmissions": []
  },
  "tools": {
    "builder": { "path": "tools/build/build_all.py", "sha256": "64a73f3df9b6f2cdaf5cbb33852b8e9bf443f67cf9dff1573fb635a0252bda9a" },
    "validator": { "path": "tools/validate.py", "sha256": "776009ef0f3691e60cc87df3f0468181ee7a827be1bd0f73c77fdb68d3ed31c0" }
  },
  "signatures": []
}
@@ -1,68 +0,0 @@
# Reachability Benchmark · Submission Guide

This guide explains how to produce a compliant submission for the Stella Ops reachability benchmark. It is fully offline-friendly.

## Prerequisites
- Python 3.11+
- Your analyzer toolchain (no network calls during analysis)
- Schemas from `schemas/` and truth from `benchmark/truth/`

## Steps
1) **Build cases deterministically**
```bash
python tools/build/build_all.py --cases cases
```
   - Sets `SOURCE_DATE_EPOCH`.
   - Skips Java by default if JDK is unavailable (pass `--skip-lang` as needed).

2) **Run your analyzer**
   - For each case, produce sink predictions in memory-safe JSON.
   - Do not reach out to the internet, package registries, or remote APIs.

3) **Emit `submission.json`**
   - Must conform to `schemas/submission.schema.json` (`version: 1.0.0`).
   - Sort cases and sinks alphabetically to ensure determinism.
   - Include optional runtime stats under `run` (time_s, peak_mb) if available.

4) **Validate**
```bash
python tools/validate.py --submission submission.json --schema schemas/submission.schema.json
```

5) **Score locally**
```bash
tools/scorer/rb_score.py --truth benchmark/truth/<aggregate>.json --submission submission.json --format json
```

6) **Compare (optional)**
```bash
tools/scorer/rb_compare.py --truth benchmark/truth/<aggregate>.json \
  --submissions submission.json baselines/*/submission.json \
  --output leaderboard.json --text
```

## Determinism checklist
- Set `SOURCE_DATE_EPOCH` for all builds.
- Disable telemetry/version checks in your analyzer.
- Avoid nondeterministic ordering (sort file and sink lists).
- No network access; use vendored toolchains only.
- Use fixed seeds for any sampling.

## Packaging
- Submit a zip/tar with:
  - `submission.json`
  - Tool version & configuration (README)
  - Optional logs and runtime metrics
- For production submissions, sign `submission.json` with DSSE and record the envelope under `signatures` in the manifest (see `benchmark/manifest.sample.json`).
- Do **not** include binaries that require network access or licenses we cannot redistribute.

## Provenance & Manifest
- Reference kit manifest: `benchmark/manifest.sample.json` (schema: `benchmark/schemas/benchmark-manifest.schema.json`).
- Validate your bundle offline:
```bash
python tools/verify_manifest.py benchmark/manifest.sample.json --root bench/reachability-benchmark
```
- Determinism templates: `benchmark/templates/determinism/*.env` can be sourced by build scripts per language.

## Support
- Open issues in the public repo (once live) or provide a reproducible script that runs fully offline.
config/crypto-profiles.sample.json (new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "StellaOps": {
    "Crypto": {
      "Registry": {
        "ActiveProfile": "world",
        "PreferredProviders": [ "default" ],
        "Profiles": {
          "ru-free": { "PreferredProviders": [ "ru.openssl.gost", "ru.pkcs11", "sim.crypto.remote" ] },
          "ru-paid": { "PreferredProviders": [ "ru.cryptopro.csp", "ru.openssl.gost", "ru.pkcs11", "sim.crypto.remote" ] },
          "sm": { "PreferredProviders": [ "cn.sm.soft", "sim.crypto.remote" ] },
          "eidas": { "PreferredProviders": [ "eu.eidas.soft", "sim.crypto.remote" ] },
          "fips": { "PreferredProviders": [ "fips.ecdsa.soft", "sim.crypto.remote" ] },
          "kcmvp": { "PreferredProviders": [ "kr.kcmvp.hash", "sim.crypto.remote" ] },
          "pq": { "PreferredProviders": [ "pq.soft", "sim.crypto.remote" ] }
        }
      },
      "Sim": {
        "BaseAddress": "http://localhost:8080"
      },
      "CryptoPro": {
        "Keys": [],
        "LicenseNote": "Customer-provided CryptoPro CSP .deb packages; set CRYPTOPRO_ACCEPT_EULA=1; Linux only."
      },
      "Pkcs11": {
        "LibraryPath": "/usr/lib/pkcs11/lib.so",
        "Keys": []
      }
    },
    "Compliance": {
      "ProfileId": "world",
      "StrictValidation": true
    }
  }
}
config/env/.env.eidas.example (new file, vendored, 8 lines)
@@ -0,0 +1,8 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=eidas
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=eidas
EIDAS_SOFT_ALLOWED=1
# QSCD PKCS#11 path + PIN when hardware is available:
# STELLAOPS__CRYPTO__PKCS11__LIBRARYPATH=/usr/lib/qscd/libpkcs11.so
# EIDAS_QSCD_PIN=changeme
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080

config/env/.env.fips.example (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=fips
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=fips
FIPS_SOFT_ALLOWED=1
# Optional: AWS_USE_FIPS_ENDPOINTS=true
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080

config/env/.env.kcmvp.example (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=kcmvp
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=kcmvp
KCMVP_HASH_ALLOWED=1
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080

config/env/.env.ru-free.example (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=gost
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=ru-free
STELLAOPS_CRYPTO_ENABLE_RU_OPENSSL=1
STELLAOPS_RU_OPENSSL_REMOTE_URL=
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080

config/env/.env.ru-paid.example (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=gost
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=ru-paid
STELLAOPS_CRYPTO_ENABLE_RU_CSP=1
CRYPTOPRO_ACCEPT_EULA=1
# Bind customer-provided debs to /opt/cryptopro/downloads inside the service container.
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080

config/env/.env.sm.example (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
STELLAOPS_CRYPTO_COMPLIANCE_PROFILE=sm
STELLAOPS__CRYPTO__REGISTRY__ACTIVEPROFILE=sm
SM_SOFT_ALLOWED=1
STELLAOPS_CRYPTO_ENABLE_SM_PKCS11=0
STELLAOPS_CRYPTO_ENABLE_SIM=1
STELLAOPS_CRYPTO_SIM_URL=http://localhost:8080
deploy/ansible/README.md (new file, 181 lines)
@@ -0,0 +1,181 @@
# Zastava Agent Ansible Deployment

Ansible playbook for deploying StellaOps Zastava Agent on VM/bare-metal hosts.

## Prerequisites

- Ansible 2.10 or later
- Target hosts must have:
  - Docker installed and running
  - SSH access with sudo privileges
  - systemd as init system
  - Internet access (for downloading agent binaries) OR local artifact repository

## Quick Start

1. **Create inventory file:**

   ```bash
   cp inventory.yml.sample inventory.yml
   ```

2. **Edit inventory with your hosts and configuration:**

   ```yaml
   zastava_agents:
     hosts:
       your-host:
         ansible_host: 192.168.1.100
         ansible_user: ubuntu
     vars:
       zastava_tenant: your-tenant
       scanner_backend_url: https://scanner.internal
   ```

3. **Run the playbook:**

   ```bash
   ansible-playbook -i inventory.yml zastava-agent.yml
   ```

## Configuration Variables

### Required Variables

| Variable | Description |
|----------|-------------|
| `zastava_tenant` | Tenant identifier for multi-tenancy isolation |
| `scanner_backend_url` | URL of the Scanner backend service |

### Optional Variables

| Variable | Default | Description |
|----------|---------|-------------|
| `zastava_version` | `latest` | Agent version to deploy |
| `zastava_node_name` | hostname | Override node name in events |
| `zastava_health_port` | `8080` | Health check HTTP port |
| `docker_socket` | `/var/run/docker.sock` | Docker socket path |
| `zastava_log_level` | `Information` | Serilog log level |
| `scanner_backend_insecure` | `false` | Allow HTTP backend (NOT for production) |
| `download_base_url` | `https://releases.stellaops.org` | Base URL for agent downloads |

### Advanced Variables

| Variable | Description |
|----------|-------------|
| `zastava_extra_env` | Dictionary of additional environment variables |

## Directory Structure

After deployment, the agent is installed with the following structure:

```
/opt/stellaops/zastava-agent/              # Agent binaries
/etc/stellaops/zastava-agent.env           # Environment configuration
/var/lib/zastava-agent/                    # Data directory
/var/lib/zastava-agent/runtime-events/     # Event buffer (disk-backed)
/etc/systemd/system/zastava-agent.service  # systemd unit
```

## Post-Deployment Verification

### Check Service Status

```bash
systemctl status zastava-agent
```

### View Logs

```bash
journalctl -u zastava-agent -f
```

### Health Endpoints

| Endpoint | Description |
|----------|-------------|
| `/healthz` | Liveness probe - agent is running |
| `/readyz` | Readiness probe - agent can process events |
| `/livez` | Alias for liveness probe |

```bash
curl http://localhost:8080/healthz
curl http://localhost:8080/readyz
```

## Air-Gapped Deployment

For air-gapped environments:

1. Download agent tarball to a local artifact server
2. Set `download_base_url` to your local server:

   ```yaml
   download_base_url: https://artifacts.internal/stellaops
   ```

3. Ensure the URL structure matches:
   `{download_base_url}/zastava-agent/{version}/zastava-agent-linux-{arch}.tar.gz`
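To illustrate the pattern above with hypothetical values: with `download_base_url: https://artifacts.internal/stellaops`, `zastava_version: 1.2.3`, and an `amd64` host, the playbook would fetch `https://artifacts.internal/stellaops/zastava-agent/1.2.3/zastava-agent-linux-amd64.tar.gz`. The version and architecture tokens here are assumptions used only to show how the URL is composed.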
|
|
||||||
|
## Security Notes

### Docker Socket Access

The agent requires read access to the Docker socket to monitor container events.
The service runs as the `zastava-agent` user in the `docker` group.

See `docs/modules/zastava/operations/docker-socket-permissions.md` for security
considerations and alternative configurations.

### systemd Hardening

The service unit includes security hardening:

- `NoNewPrivileges=true` - Prevent privilege escalation
- `ProtectSystem=strict` - Read-only system directories
- `PrivateTmp=true` - Isolated /tmp
- `ProtectKernelTunables=true` - No kernel parameter modification
- Resource limits on file descriptors and memory
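These settings can be spot-checked on the host after installation with standard tooling. A quick sketch:

```bash
# Confirm the service user is in the docker group (socket access).
id zastava-agent

# Summarise the unit's effective sandboxing; lower exposure scores are better.
systemd-analyze security zastava-agent.service

# Inspect the hardening directives systemd actually loaded.
systemctl show zastava-agent -p NoNewPrivileges -p ProtectSystem -p PrivateTmp
```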
## Troubleshooting

### Agent Won't Start

1. Check Docker service: `systemctl status docker`
2. Verify Docker socket permissions: `ls -la /var/run/docker.sock`
3. Check agent logs: `journalctl -u zastava-agent -e`

### Cannot Connect to Backend

1. Verify network connectivity: `curl -I ${scanner_backend_url}/healthz`
2. Check TLS certificates if using HTTPS
3. Ensure firewall allows outbound connections

### Events Not Being Sent

1. Check event buffer directory permissions
2. Verify health endpoint returns healthy: `curl localhost:8080/readyz`
3. Check agent logs for connection errors
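For support bundles, the checks above can be collected in a single pass. A rough sketch, assuming the default health port; adjust paths and ports to your deployment:

```bash
#!/usr/bin/env bash
# Collect basic Zastava Agent diagnostics in one run.
set -u

echo "== service =="
systemctl --no-pager status zastava-agent || true

echo "== docker socket =="
ls -la /var/run/docker.sock
id zastava-agent

echo "== recent logs =="
journalctl -u zastava-agent -n 50 --no-pager || true

echo "== health =="
curl -fsS http://localhost:8080/healthz && echo
curl -fsS http://localhost:8080/readyz && echo
```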
## Uninstallation

To remove the agent:

```bash
# Stop and disable service
sudo systemctl stop zastava-agent
sudo systemctl disable zastava-agent

# Remove files
sudo rm -rf /opt/stellaops/zastava-agent
sudo rm -f /etc/stellaops/zastava-agent.env
sudo rm -f /etc/systemd/system/zastava-agent.service
sudo rm -rf /var/lib/zastava-agent

# Remove user
sudo userdel zastava-agent

# Reload systemd
sudo systemctl daemon-reload
```
58	deploy/ansible/files/zastava-agent.service	Normal file
@@ -0,0 +1,58 @@
[Unit]
Description=StellaOps Zastava Agent - Container Runtime Monitor
Documentation=https://docs.stellaops.org/zastava/agent/
After=network-online.target docker.service containerd.service
Wants=network-online.target
Requires=docker.service

[Service]
Type=notify
ExecStart=/opt/stellaops/zastava-agent/StellaOps.Zastava.Agent
WorkingDirectory=/opt/stellaops/zastava-agent
Restart=always
RestartSec=5

# Environment configuration
EnvironmentFile=-/etc/stellaops/zastava-agent.env
Environment=DOTNET_ENVIRONMENT=Production
Environment=ASPNETCORE_ENVIRONMENT=Production

# User and permissions
User=zastava-agent
Group=docker

# Security hardening
NoNewPrivileges=true
ProtectSystem=strict
ProtectHome=true
PrivateTmp=true
PrivateDevices=true
ProtectKernelTunables=true
ProtectKernelModules=true
ProtectControlGroups=true
RestrictRealtime=true
RestrictSUIDSGID=true

# Allow read access to Docker socket
ReadWritePaths=/var/run/docker.sock
ReadWritePaths=/var/lib/zastava-agent

# Capabilities
CapabilityBoundingSet=
AmbientCapabilities=

# Resource limits
LimitNOFILE=65536
LimitNPROC=4096
MemoryMax=512M

# Logging
StandardOutput=journal
StandardError=journal
SyslogIdentifier=zastava-agent

# Watchdog (5 minute timeout)
WatchdogSec=300

[Install]
WantedBy=multi-user.target
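Site-specific tweaks, such as a larger memory ceiling on busy nodes, are better applied as a systemd drop-in than by editing this managed unit. A minimal sketch; the 1G value is only an example:

```bash
# Create a drop-in override, then reload and restart the service.
sudo mkdir -p /etc/systemd/system/zastava-agent.service.d
sudo tee /etc/systemd/system/zastava-agent.service.d/override.conf >/dev/null <<'EOF'
[Service]
MemoryMax=1G
EOF
sudo systemctl daemon-reload
sudo systemctl restart zastava-agent
```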
46	deploy/ansible/inventory.yml.sample	Normal file
@@ -0,0 +1,46 @@
---
# Sample Ansible Inventory for Zastava Agent Deployment
#
# Copy this file to inventory.yml and customize for your environment.
# Then run: ansible-playbook -i inventory.yml zastava-agent.yml

all:
  children:
    zastava_agents:
      hosts:
        # Add your VM/bare-metal hosts here
        vm-node-1:
          ansible_host: 192.168.1.101
          ansible_user: ubuntu
        vm-node-2:
          ansible_host: 192.168.1.102
          ansible_user: ubuntu
        # Example with SSH key
        vm-node-3:
          ansible_host: 192.168.1.103
          ansible_user: root
          ansible_ssh_private_key_file: ~/.ssh/stellaops_key

      vars:
        # Required: Set these for your environment
        zastava_tenant: my-tenant
        scanner_backend_url: https://scanner.example.com

        # Optional: Override node name per host
        # zastava_node_name: custom-node-name

        # Optional: Change health check port
        # zastava_health_port: 8080

        # Optional: Custom Docker socket path
        # docker_socket: /var/run/docker.sock

        # Optional: Set log level (Verbose, Debug, Information, Warning, Error)
        # zastava_log_level: Information

        # Optional: Allow insecure HTTP (NOT for production)
        # scanner_backend_insecure: false

        # Optional: Additional environment variables
        # zastava_extra_env:
        #   CUSTOM_VAR: custom_value
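Before the first run it is worth confirming that Ansible parses the inventory the way you expect. A quick sketch using stock Ansible tooling; host names follow the sample above:

```bash
# Show the resolved host/group tree and the variables applied to one host.
ansible-inventory -i inventory.yml --graph
ansible-inventory -i inventory.yml --host vm-node-1

# Confirm SSH connectivity to every host in the group.
ansible -i inventory.yml zastava_agents -m ping
```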
40	deploy/ansible/templates/zastava-agent.env.j2	Normal file
@@ -0,0 +1,40 @@
# StellaOps Zastava Agent Configuration
# Managed by Ansible - Do not edit manually
# Generated: {{ ansible_date_time.iso8601 }}

# Tenant identifier for multi-tenancy
ZASTAVA_TENANT={{ zastava_tenant }}

# Scanner backend URL
ZASTAVA_AGENT__Backend__BaseAddress={{ scanner_backend_url }}

{% if zastava_node_name is defined %}
# Node name override
ZASTAVA_NODE_NAME={{ zastava_node_name }}
{% endif %}

# Docker socket endpoint
ZASTAVA_AGENT__DockerEndpoint=unix://{{ docker_socket }}

# Event buffer path
ZASTAVA_AGENT__EventBufferPath={{ zastava_data_dir }}/runtime-events

# Health check port
ZASTAVA_AGENT__HealthCheck__Port={{ zastava_health_port }}

{% if scanner_backend_insecure | default(false) | bool %}
# WARNING: Insecure HTTP backend enabled
ZASTAVA_AGENT__Backend__AllowInsecureHttp=true
{% endif %}

{% if zastava_log_level is defined %}
# Logging level
Serilog__MinimumLevel__Default={{ zastava_log_level }}
{% endif %}

{% if zastava_extra_env is defined %}
# Additional environment variables
{% for key, value in zastava_extra_env.items() %}
{{ key }}={{ value }}
{% endfor %}
{% endif %}
232	deploy/ansible/zastava-agent.yml	Normal file
@@ -0,0 +1,232 @@
---
# Ansible Playbook for Zastava Agent VM/Bare-Metal Deployment
#
# Requirements:
#   - Target hosts must have Docker installed and running
#   - Ansible 2.10+ with community.docker collection
#
# Usage:
#   ansible-playbook -i inventory.yml zastava-agent.yml \
#     -e zastava_tenant=my-tenant \
#     -e scanner_backend_url=https://scanner.internal
#
# Variables (can be set in inventory or via -e):
#   zastava_tenant: Tenant identifier (required)
#   scanner_backend_url: Scanner backend URL (required)
#   zastava_version: Version to deploy (default: latest)
#   zastava_node_name: Override node name (default: hostname)
#   zastava_health_port: Health check port (default: 8080)
#   docker_socket: Docker socket path (default: /var/run/docker.sock)

- name: Deploy StellaOps Zastava Agent
  hosts: zastava_agents
  become: true

  vars:
    zastava_version: "{{ zastava_version | default('latest') }}"
    zastava_install_dir: /opt/stellaops/zastava-agent
    zastava_config_dir: /etc/stellaops
    zastava_data_dir: /var/lib/zastava-agent
    zastava_user: zastava-agent
    zastava_group: docker
    zastava_health_port: "{{ zastava_health_port | default(8080) }}"
    docker_socket: "{{ docker_socket | default('/var/run/docker.sock') }}"
    download_base_url: "{{ download_base_url | default('https://releases.stellaops.org') }}"

  pre_tasks:
    - name: Validate required variables
      ansible.builtin.assert:
        that:
          - zastava_tenant is defined and zastava_tenant | length > 0
          - scanner_backend_url is defined and scanner_backend_url | length > 0
        fail_msg: |
          Required variables not set.
          Please provide:
            - zastava_tenant: Your tenant identifier
            - scanner_backend_url: Scanner backend URL

    - name: Check Docker service is running
      ansible.builtin.systemd:
        name: docker
        state: started
      check_mode: true
      register: docker_status

    - name: Fail if Docker is not available
      ansible.builtin.fail:
        msg: "Docker service is not running on {{ inventory_hostname }}"
      when: docker_status.status.ActiveState != 'active'

  tasks:
    # =========================================================================
    # User and Directory Setup
    # =========================================================================

    - name: Create zastava-agent system user
      ansible.builtin.user:
        name: "{{ zastava_user }}"
        comment: StellaOps Zastava Agent
        system: true
        shell: /usr/sbin/nologin
        groups: "{{ zastava_group }}"
        create_home: false
        state: present

    - name: Create installation directory
      ansible.builtin.file:
        path: "{{ zastava_install_dir }}"
        state: directory
        owner: "{{ zastava_user }}"
        group: "{{ zastava_group }}"
        mode: '0755'

    - name: Create configuration directory
      ansible.builtin.file:
        path: "{{ zastava_config_dir }}"
        state: directory
        owner: root
        group: root
        mode: '0755'

    - name: Create data directory
      ansible.builtin.file:
        path: "{{ zastava_data_dir }}"
        state: directory
        owner: "{{ zastava_user }}"
        group: "{{ zastava_group }}"
        mode: '0750'

    - name: Create event buffer directory
      ansible.builtin.file:
        path: "{{ zastava_data_dir }}/runtime-events"
        state: directory
        owner: "{{ zastava_user }}"
        group: "{{ zastava_group }}"
        mode: '0750'

    # =========================================================================
    # Download and Install Agent
    # =========================================================================

    - name: Determine architecture
      ansible.builtin.set_fact:
        arch_suffix: "{{ 'x64' if ansible_architecture == 'x86_64' else 'arm64' if ansible_architecture == 'aarch64' else ansible_architecture }}"

    - name: Download Zastava Agent binary
      ansible.builtin.get_url:
        url: "{{ download_base_url }}/zastava-agent/{{ zastava_version }}/zastava-agent-linux-{{ arch_suffix }}.tar.gz"
        dest: /tmp/zastava-agent.tar.gz
        mode: '0644'
      register: download_result
      retries: 3
      delay: 5

    - name: Extract Zastava Agent
      ansible.builtin.unarchive:
        src: /tmp/zastava-agent.tar.gz
        dest: "{{ zastava_install_dir }}"
        remote_src: true
        owner: "{{ zastava_user }}"
        group: "{{ zastava_group }}"
        extra_opts:
          - --strip-components=1
      notify: Restart zastava-agent

    - name: Make agent binary executable
      ansible.builtin.file:
        path: "{{ zastava_install_dir }}/StellaOps.Zastava.Agent"
        mode: '0755'

    - name: Clean up downloaded archive
      ansible.builtin.file:
        path: /tmp/zastava-agent.tar.gz
        state: absent

    # =========================================================================
    # Configuration
    # =========================================================================

    - name: Deploy environment configuration
      ansible.builtin.template:
        src: zastava-agent.env.j2
        dest: "{{ zastava_config_dir }}/zastava-agent.env"
        owner: root
        group: "{{ zastava_group }}"
        mode: '0640'
      notify: Restart zastava-agent

    # =========================================================================
    # systemd Service
    # =========================================================================

    - name: Install systemd service unit
      ansible.builtin.copy:
        src: zastava-agent.service
        dest: /etc/systemd/system/zastava-agent.service
        owner: root
        group: root
        mode: '0644'
      notify:
        - Reload systemd
        - Restart zastava-agent

    - name: Enable and start zastava-agent service
      ansible.builtin.systemd:
        name: zastava-agent
        state: started
        enabled: true
        daemon_reload: true

    # =========================================================================
    # Health Verification
    # =========================================================================

    - name: Wait for agent health endpoint
      ansible.builtin.uri:
        url: "http://localhost:{{ zastava_health_port }}/healthz"
        method: GET
        status_code: 200
      register: health_result
      retries: 30
      delay: 2
      until: health_result.status == 200

    - name: Display agent status
      ansible.builtin.debug:
        msg: "Zastava Agent deployed successfully on {{ inventory_hostname }}"

  handlers:
    - name: Reload systemd
      ansible.builtin.systemd:
        daemon_reload: true

    - name: Restart zastava-agent
      ansible.builtin.systemd:
        name: zastava-agent
        state: restarted

# =============================================================================
# Post-deployment verification play
# =============================================================================
- name: Verify Zastava Agent Deployment
  hosts: zastava_agents
  become: false
  gather_facts: false

  tasks:
    - name: Check agent readiness
      ansible.builtin.uri:
        url: "http://localhost:{{ zastava_health_port | default(8080) }}/readyz"
        method: GET
        return_content: true
      register: ready_check

    - name: Display deployment summary
      ansible.builtin.debug:
        msg: |
          Zastava Agent Deployment Summary:
          - Host: {{ inventory_hostname }}
          - Status: {{ 'Ready' if ready_check.status == 200 else 'Not Ready' }}
          - Health Endpoint: http://localhost:{{ zastava_health_port | default(8080) }}/healthz
          - Tenant: {{ zastava_tenant }}
          - Backend: {{ scanner_backend_url }}
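The playbook can be rehearsed before touching production nodes. A small sketch, assuming the sample inventory shipped alongside it; note that check mode is only an approximation for download/extract steps:

```bash
# Dry-run against a single host and show the changes that would be made.
ansible-playbook -i inventory.yml zastava-agent.yml --limit vm-node-1 --check --diff

# Then roll out for real to the whole zastava_agents group.
ansible-playbook -i inventory.yml zastava-agent.yml
```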
@@ -1,9 +1,9 @@
# Stella Ops Compose Profiles

These Compose bundles ship the minimum services required to exercise the scanner pipeline plus control-plane dependencies. Every profile is pinned to immutable image digests sourced from `deploy/releases/*.yaml` and is linted via `docker compose config` in CI.

## Layout

| Path | Purpose |
| ---- | ------- |
| `docker-compose.dev.yaml` | Edge/nightly stack tuned for laptops and iterative work. |
@@ -19,9 +19,9 @@ These Compose bundles ship the minimum services required to exercise the scanner
| `scripts/reset.sh` | Stops the stack and removes Mongo/MinIO/Redis volumes after explicit confirmation. |
| `scripts/quickstart.sh` | Helper to validate config and start dev stack; set `USE_MOCK=1` to include `docker-compose.mock.yaml` overlay. |
| `docker-compose.mock.yaml` | Dev-only overlay with placeholder digests for missing services (orchestrator, policy-registry, packs, task-runner, VEX/Vuln stack). Use only with mock release manifest `deploy/releases/2025.09-mock-dev.yaml`. |

## Usage

```bash
cp env/dev.env.example dev.env
docker compose --env-file dev.env -f docker-compose.dev.yaml config
@@ -30,6 +30,8 @@ docker compose --env-file dev.env -f docker-compose.dev.yaml up -d

The stage and airgap variants behave the same way—swap the file names accordingly. All profiles expose 443/8443 for the UI and REST APIs, and they share a `stellaops` Docker network scoped to the compose project.

+> **Surface.Secrets:** set `SCANNER_SURFACE_SECRETS_PROVIDER`/`SCANNER_SURFACE_SECRETS_ROOT` in your `.env` and point `SURFACE_SECRETS_HOST_PATH` to the decrypted bundle path (default `./offline/surface-secrets`). The stack mounts that path read-only into Scanner Web/Worker so `secret://` references resolve without embedding plaintext.
+
> **Graph Explorer reminder:** If you enable Cartographer or Graph API containers alongside these profiles, update `etc/authority.yaml` so the `cartographer-service` client is marked with `properties.serviceIdentity: "cartographer"` and carries a tenant hint. The Authority host now refuses `graph:write` tokens without that marker, so apply the configuration change before rolling out the updated images.

### Telemetry collector overlay
@@ -79,7 +81,7 @@ in the `.env` samples match the options bound by `AddSchedulerWorker`:

- `SCHEDULER_QUEUE_KIND` – queue transport (`Nats` or `Redis`).
- `SCHEDULER_QUEUE_NATS_URL` – NATS connection string used by planner/runner consumers.
-- `SCHEDULER_STORAGE_DATABASE` – MongoDB database name for scheduler state.
+- `SCHEDULER_STORAGE_DATABASE` – PostgreSQL database name for scheduler state.
- `SCHEDULER_SCANNER_BASEADDRESS` – base URL the runner uses when invoking Scanner’s
  `/api/v1/reports` (defaults to the in-cluster `http://scanner-web:8444`).

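Because these scheduler settings are plain environment variables, they can live in the same `dev.env` the Compose commands already consume. A minimal sketch with illustrative values (the NATS URL and database name below mirror the in-repo Compose defaults but are not authoritative):

```bash
# Append scheduler worker settings to the env file used by docker compose.
cat >> dev.env <<'EOF'
SCHEDULER_QUEUE_KIND=Nats
SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444
EOF
```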
@@ -116,7 +118,7 @@ USE_MOCK=1 ./scripts/quickstart.sh env/dev.env.example
```

The overlay pins the missing services (orchestrator, policy-registry, packs-registry, task-runner, VEX/Vuln stack) to mock digests from `deploy/releases/2025.09-mock-dev.yaml` and starts their real entrypoints so integration flows can be exercised end-to-end. Replace the mock pins with production digests once releases publish; keep the mock overlay dev-only.

Keep digests synchronized between Compose, Helm, and the release manifest to preserve reproducibility guarantees. `deploy/tools/validate-profiles.sh` performs a quick audit.

### GPU toggle for Advisory AI
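Keeping the pins honest can be part of the local workflow before pushing changes. A short sketch run from the repository root; the audit script's flags are not shown in this diff, so it is invoked without arguments here:

```bash
# Lint the dev profile the same way CI does, then run the digest audit helper.
docker compose --env-file deploy/compose/dev.env \
  -f deploy/compose/docker-compose.dev.yaml config >/dev/null && echo "compose config OK"
deploy/tools/validate-profiles.sh
```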
@@ -1,357 +1,383 @@
x-release-labels: &release-labels
  com.stellaops.release.version: "2025.09.2-airgap"
  com.stellaops.release.channel: "airgap"
  com.stellaops.profile: "airgap"

networks:
  stellaops:
    driver: bridge

volumes:
  valkey-data:
  rustfs-data:
  concelier-jobs:
  nats-data:
  scanner-surface-cache:
  postgres-data:
  advisory-ai-queue:
  advisory-ai-plans:
  advisory-ai-outputs:

services:
  postgres:
    image: docker.io/library/postgres:17
    restart: unless-stopped
    environment:
      POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
      POSTGRES_DB: "${POSTGRES_DB:-stellaops}"
      PGDATA: /var/lib/postgresql/data/pgdata
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ./postgres-init:/docker-entrypoint-initdb.d:ro
    command:
      - "postgres"
      - "-c"
      - "shared_preload_libraries=pg_stat_statements"
      - "-c"
      - "pg_stat_statements.track=all"
    ports:
      - "${POSTGRES_PORT:-25432}:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - stellaops
    labels: *release-labels

  valkey:
    image: docker.io/valkey/valkey:8.0
    restart: unless-stopped
    command: ["valkey-server", "--appendonly", "yes"]
    volumes:
      - valkey-data:/data
    ports:
      - "${VALKEY_PORT:-26379}:6379"
    networks:
      - stellaops
    labels: *release-labels

  rustfs:
    image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
    command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
    restart: unless-stopped
    environment:
      RUSTFS__LOG__LEVEL: info
      RUSTFS__STORAGE__PATH: /data
    volumes:
      - rustfs-data:/data
    ports:
      - "${RUSTFS_HTTP_PORT:-8080}:8080"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    command:
      - "-js"
      - "-sd"
      - /data
    restart: unless-stopped
    ports:
      - "${NATS_CLIENT_PORT:-24222}:4222"
    volumes:
      - nats-data:/data
    networks:
      - stellaops
    labels: *release-labels

  authority:
    image: registry.stella-ops.org/stellaops/authority@sha256:5551a3269b7008cd5aceecf45df018c67459ed519557ccbe48b093b926a39bcc
    restart: unless-stopped
    depends_on:
      - postgres
      - valkey
    environment:
      STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
      STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      STELLAOPS_AUTHORITY__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
      - stellaops
    labels: *release-labels

  signer:
    image: registry.stella-ops.org/stellaops/signer@sha256:ddbbd664a42846cea6b40fca6465bc679b30f72851158f300d01a8571c5478fc
    restart: unless-stopped
    depends_on:
      - postgres
      - authority
    environment:
      SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
      SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
      SIGNER__STORAGE__DRIVER: "postgres"
      SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
    ports:
      - "${SIGNER_PORT:-8441}:8441"
    networks:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor@sha256:1ff0a3124d66d3a2702d8e421df40fbd98cc75cb605d95510598ebbae1433c50
    restart: unless-stopped
    depends_on:
      - signer
      - postgres
    environment:
      ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
      ATTESTOR__STORAGE__DRIVER: "postgres"
      ATTESTOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
    ports:
      - "${ATTESTOR_PORT:-8442}:8442"
    networks:
      - stellaops
    labels: *release-labels

  issuer-directory:
    image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
    restart: unless-stopped
    depends_on:
      - postgres
      - authority
    environment:
      ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
      ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
      ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
      ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
    volumes:
      - ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
    ports:
      - "${ISSUER_DIRECTORY_PORT:-8447}:8080"
    networks:
      - stellaops
    labels: *release-labels

  concelier:
    image: registry.stella-ops.org/stellaops/concelier@sha256:29e2e1a0972707e092cbd3d370701341f9fec2aa9316fb5d8100480f2a1c76b5
    restart: unless-stopped
    depends_on:
      - postgres
      - valkey
    environment:
      CONCELIER__STORAGE__DRIVER: "postgres"
      CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      CONCELIER__STORAGE__S3__ENDPOINT: "http://rustfs:8080"
      CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
      CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true"
      CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}"
    volumes:
      - concelier-jobs:/var/lib/concelier/jobs
    ports:
      - "${CONCELIER_PORT:-8445}:8445"
    networks:
      - stellaops
    labels: *release-labels

  scanner-web:
    image: registry.stella-ops.org/stellaops/scanner-web@sha256:3df8ca21878126758203c1a0444e39fd97f77ddacf04a69685cda9f1e5e94718
    restart: unless-stopped
    depends_on:
      - postgres
      - valkey
      - concelier
      - rustfs
    environment:
      SCANNER__STORAGE__DRIVER: "postgres"
      SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
      SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
      SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-valkey}"
      SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
      SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
      SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
      SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
      SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}"
      SCANNER__OFFLINEKIT__REQUIREDSSE: "${SCANNER_OFFLINEKIT_REQUIREDSSE:-true}"
      SCANNER__OFFLINEKIT__REKOROFFLINEMODE: "${SCANNER_OFFLINEKIT_REKOROFFLINEMODE:-true}"
      SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
      SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
      # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md)
      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
      SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
      SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
    volumes:
      - scanner-surface-cache:/var/lib/stellaops/surface
      - ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
      - ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:eea5d6cfe7835950c5ec7a735a651f2f0d727d3e470cf9027a4a402ea89c4fb5
    restart: unless-stopped
    depends_on:
      - postgres
      - valkey
      - scanner-web
      - rustfs
    environment:
      SCANNER__STORAGE__DRIVER: "postgres"
      SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
      # Surface.Env configuration (see docs/modules/scanner/design/surface-env.md)
      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
      SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
      SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
    volumes:
      - scanner-surface-cache:/var/lib/stellaops/surface
      - ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
    networks:
      - stellaops
    labels: *release-labels

  scheduler-worker:
    image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
    restart: unless-stopped
    depends_on:
      - postgres
      - valkey
      - scanner-web
    command:
      - "dotnet"
      - "StellaOps.Scheduler.Worker.Host.dll"
    environment:
      SCHEDULER__STORAGE__DRIVER: "postgres"
      SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Valkey}"
      SCHEDULER__QUEUE__VALKEY__URL: "${SCHEDULER_QUEUE_VALKEY_URL:-valkey:6379}"
      SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
    networks:
      - stellaops
    labels: *release-labels

  notify-web:
    image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
    restart: unless-stopped
    depends_on:
      - postgres
      - authority
    environment:
      DOTNET_ENVIRONMENT: Production
    volumes:
      - ../../etc/notify.airgap.yaml:/app/etc/notify.yaml:ro
    ports:
      - "${NOTIFY_WEB_PORT:-9446}:8446"
    networks:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:65c0ee13f773efe920d7181512349a09d363ab3f3e177d276136bd2742325a68
    restart: unless-stopped
    depends_on:
      - postgres
      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
      EXCITITOR__STORAGE__DRIVER: "postgres"
      EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-web:
    image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2-airgap
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    ports:
      - "${ADVISORY_AI_WEB_PORT:-8448}:8448"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-worker:
    image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2-airgap
    restart: unless-stopped
    depends_on:
      - advisory-ai-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:bee9668011ff414572131dc777faab4da24473fe12c230893f161cabee092a1d
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444"
    ports:
      - "${UI_PORT:-9443}:8443"
    networks:
      - stellaops
    labels: *release-labels
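After editing digests or environment defaults in this profile, the same lint used in CI catches most mistakes before deployment. A small sketch; the env file name is an assumption mirroring the dev profile's `env/dev.env.example` convention:

```bash
# Render the merged airgap configuration and fail fast on invalid settings.
cp env/airgap.env.example airgap.env   # assumed sample name
docker compose --env-file airgap.env -f docker-compose.airgap.yaml config >/dev/null \
  && echo "airgap profile renders cleanly"
```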
301	deploy/compose/docker-compose.china.yml	Normal file
@@ -0,0 +1,301 @@
# StellaOps Docker Compose - International Profile
# Cryptography: SM2, SM3, SM4 (ShangMi / Commercial Cipher - temporarily using NIST)
# Provider: offline-verification
# Jurisdiction: china, world

x-release-labels: &release-labels
  com.stellaops.release.version: "2025.10.0-edge"
  com.stellaops.release.channel: "edge"
  com.stellaops.profile: "china"
  com.stellaops.crypto.profile: "china"
  com.stellaops.crypto.provider: "offline-verification"

x-crypto-env: &crypto-env
  # Crypto configuration
  STELLAOPS_CRYPTO_PROFILE: "china"
  STELLAOPS_CRYPTO_CONFIG_PATH: "/app/etc/appsettings.crypto.yaml"
  STELLAOPS_CRYPTO_MANIFEST_PATH: "/app/etc/crypto-plugins-manifest.json"

networks:
  stellaops:
    driver: bridge

volumes:
  rustfs-data:
  concelier-jobs:
  nats-data:
  valkey-data:
  advisory-ai-queue:
  advisory-ai-plans:
  advisory-ai-outputs:
  postgres-data:

services:
  postgres:
    image: docker.io/library/postgres:16
    restart: unless-stopped
    environment:
      POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
      POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
      PGDATA: /var/lib/postgresql/data/pgdata
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ../postgres-partitioning:/docker-entrypoint-initdb.d:ro
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    networks:
      - stellaops
    labels: *release-labels

  valkey:
    image: docker.io/valkey/valkey:8.0
    restart: unless-stopped
    command: ["valkey-server", "--appendonly", "yes"]
    volumes:
      - valkey-data:/data
    ports:
      - "${VALKEY_PORT:-6379}:6379"
    networks:
      - stellaops
    labels: *release-labels

  rustfs:
    image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
    command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
    restart: unless-stopped
    environment:
      RUSTFS__LOG__LEVEL: info
      RUSTFS__STORAGE__PATH: /data
    volumes:
      - rustfs-data:/data
    ports:
      - "${RUSTFS_HTTP_PORT:-8080}:8080"
    networks:
      - stellaops
    labels: *release-labels

  nats:
    image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
    command:
      - "-js"
      - "-sd"
      - /data
    restart: unless-stopped
    ports:
      - "${NATS_CLIENT_PORT:-4222}:4222"
    volumes:
      - nats-data:/data
    networks:
      - stellaops
    labels: *release-labels

  authority:
    image: registry.stella-ops.org/stellaops/authority:china
    restart: unless-stopped
    depends_on:
      - postgres
    environment:
      <<: *crypto-env
      STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
      STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
      STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
      STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
      STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
    volumes:
      - ../../etc/authority.yaml:/etc/authority.yaml:ro
      - ../../etc/authority.plugins:/app/etc/authority.plugins:ro
      - ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
      - ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
    ports:
      - "${AUTHORITY_PORT:-8440}:8440"
    networks:
      - stellaops
    labels: *release-labels

  signer:
    image: registry.stella-ops.org/stellaops/signer:china
    restart: unless-stopped
    depends_on:
      - postgres
    environment:
      <<: *crypto-env
      STELLAOPS_SIGNER__STORAGE__DRIVER: "postgres"
      STELLAOPS_SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
    volumes:
      - ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
      - ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
    ports:
      - "${SIGNER_PORT:-8441}:8441"
    networks:
      - stellaops
    labels: *release-labels

  attestor:
    image: registry.stella-ops.org/stellaops/attestor:china
    restart: unless-stopped
    depends_on:
      - signer
    environment:
      <<: *crypto-env
      STELLAOPS_ATTESTOR__SIGNER__BASEURL: "http://signer:8441"
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- rustfs
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__RUSTFS__BASEURL: "http://rustfs:8080"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
- concelier-jobs:/app/jobs
|
||||||
|
ports:
|
||||||
|
- "${CONCELIER_PORT:-8443}:8443"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scanner:
|
||||||
|
image: registry.stella-ops.org/stellaops/scanner:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCANNER_PORT:-8444}:8444"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
excititor:
|
||||||
|
image: registry.stella-ops.org/stellaops/excititor:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${EXCITITOR_PORT:-8445}:8445"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
policy:
|
||||||
|
image: registry.stella-ops.org/stellaops/policy:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_POLICY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_POLICY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${POLICY_PORT:-8446}:8446"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scheduler:
|
||||||
|
image: registry.stella-ops.org/stellaops/scheduler:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- nats
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_SCHEDULER__MESSAGING__NATS__URL: "nats://nats:4222"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCHEDULER_PORT:-8447}:8447"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
notify:
|
||||||
|
image: registry.stella-ops.org/stellaops/notify:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${NOTIFY_PORT:-8448}:8448"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
zastava:
|
||||||
|
image: registry.stella-ops.org/stellaops/zastava:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ZASTAVA_PORT:-8449}:8449"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
gateway:
|
||||||
|
image: registry.stella-ops.org/stellaops/gateway:china
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- authority
|
||||||
|
- concelier
|
||||||
|
- scanner
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_GATEWAY__AUTHORITY__BASEURL: "http://authority:8440"
|
||||||
|
STELLAOPS_GATEWAY__CONCELIER__BASEURL: "http://concelier:8443"
|
||||||
|
STELLAOPS_GATEWAY__SCANNER__BASEURL: "http://scanner:8444"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.china.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${GATEWAY_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
@@ -8,47 +8,16 @@ networks:
|
|||||||
driver: bridge
|
driver: bridge
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
mongo-data:
|
|
||||||
minio-data:
|
|
||||||
rustfs-data:
|
rustfs-data:
|
||||||
concelier-jobs:
|
concelier-jobs:
|
||||||
nats-data:
|
nats-data:
|
||||||
|
valkey-data:
|
||||||
advisory-ai-queue:
|
advisory-ai-queue:
|
||||||
advisory-ai-plans:
|
advisory-ai-plans:
|
||||||
advisory-ai-outputs:
|
advisory-ai-outputs:
|
||||||
postgres-data:
|
postgres-data:
|
||||||
wine-csp-prefix:
|
|
||||||
wine-csp-logs:
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
mongo:
|
|
||||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
|
||||||
command: ["mongod", "--bind_ip_all"]
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
|
|
||||||
MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
|
|
||||||
volumes:
|
|
||||||
- mongo-data:/data/db
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
minio:
|
|
||||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
|
||||||
command: ["server", "/data", "--console-address", ":9001"]
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
|
|
||||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
|
|
||||||
volumes:
|
|
||||||
- minio-data:/data
|
|
||||||
ports:
|
|
||||||
- "${MINIO_CONSOLE_PORT:-9001}:9001"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: docker.io/library/postgres:16
|
image: docker.io/library/postgres:16
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
@@ -65,6 +34,18 @@ services:
|
|||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
|
valkey:
|
||||||
|
image: docker.io/valkey/valkey:8.0
|
||||||
|
restart: unless-stopped
|
||||||
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
|
volumes:
|
||||||
|
- valkey-data:/data
|
||||||
|
ports:
|
||||||
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
rustfs:
|
rustfs:
|
||||||
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
@@ -99,10 +80,11 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
|
image: registry.stella-ops.org/stellaops/authority@sha256:a8e8faec44a579aa5714e58be835f25575710430b1ad2ccd1282a018cd9ffcdd
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
environment:
|
environment:
|
||||||
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
volumes:
|
volumes:
|
||||||
@@ -119,10 +101,11 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- authority
|
- authority
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
||||||
SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SIGNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
ports:
|
ports:
|
||||||
- "${SIGNER_PORT:-8441}:8441"
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
networks:
|
networks:
|
||||||
@@ -134,9 +117,10 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- signer
|
- signer
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
ATTESTOR__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
ports:
|
ports:
|
||||||
- "${ATTESTOR_PORT:-8442}:8442"
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
networks:
|
networks:
|
||||||
@@ -147,13 +131,14 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
- authority
|
- authority
|
||||||
environment:
|
environment:
|
||||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
|
||||||
|
ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||||
volumes:
|
volumes:
|
||||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||||
@@ -167,13 +152,10 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
image: registry.stella-ops.org/stellaops/concelier@sha256:dafef3954eb4b837e2c424dd2d23e1e4d60fa83794840fac9cd3dea1d43bd085
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
- minio
|
|
||||||
environment:
|
environment:
|
||||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
|
CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
|
|
||||||
CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
|
|
||||||
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
volumes:
|
volumes:
|
||||||
- concelier-jobs:/var/lib/concelier/jobs
|
- concelier-jobs:/var/lib/concelier/jobs
|
||||||
@@ -187,22 +169,34 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
image: registry.stella-ops.org/stellaops/scanner-web@sha256:e0dfdb087e330585a5953029fb4757f5abdf7610820a085bd61b457dbead9a11
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- postgres
|
||||||
- concelier
|
- concelier
|
||||||
- rustfs
|
- rustfs
|
||||||
- nats
|
- nats
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
||||||
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
||||||
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
||||||
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
||||||
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
|
SCANNER__QUEUE__BROKER: "nats://nats:4222"
|
||||||
|
SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
|
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
|
||||||
SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}"
|
SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-valkey}"
|
||||||
SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
|
SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-valkey:6379}"
|
||||||
SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
|
SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
|
||||||
SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
|
SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
|
||||||
SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
|
SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
|
||||||
|
SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}"
|
||||||
|
SCANNER__OFFLINEKIT__REQUIREDSSE: "${SCANNER_OFFLINEKIT_REQUIREDSSE:-true}"
|
||||||
|
SCANNER__OFFLINEKIT__REKOROFFLINEMODE: "${SCANNER_OFFLINEKIT_REKOROFFLINEMODE:-true}"
|
||||||
|
SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
|
||||||
|
SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
|
||||||
|
volumes:
|
||||||
|
- ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
|
||||||
|
- ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
|
||||||
ports:
|
ports:
|
||||||
- "${SCANNER_WEB_PORT:-8444}:8444"
|
- "${SCANNER_WEB_PORT:-8444}:8444"
|
||||||
networks:
|
networks:
|
||||||
@@ -217,12 +211,13 @@ services:
|
|||||||
- rustfs
|
- rustfs
|
||||||
- nats
|
- nats
|
||||||
environment:
|
environment:
|
||||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
||||||
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
||||||
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
||||||
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
||||||
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
|
SCANNER__QUEUE__BROKER: "nats://nats:4222"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
@@ -231,17 +226,17 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
|
image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
- nats
|
- nats
|
||||||
- scanner-web
|
- scanner-web
|
||||||
command:
|
command:
|
||||||
- "dotnet"
|
- "dotnet"
|
||||||
- "StellaOps.Scheduler.Worker.Host.dll"
|
- "StellaOps.Scheduler.Worker.Host.dll"
|
||||||
environment:
|
environment:
|
||||||
SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}"
|
SCHEDULER__QUEUE__KIND: "Nats"
|
||||||
SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}"
|
SCHEDULER__QUEUE__NATS__URL: "nats://nats:4222"
|
||||||
SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}"
|
SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
|
SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
@@ -253,8 +248,13 @@ services:
|
|||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
- postgres
|
||||||
- authority
|
- authority
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
DOTNET_ENVIRONMENT: Development
|
DOTNET_ENVIRONMENT: Development
|
||||||
|
NOTIFY__STORAGE__DRIVER: "postgres"
|
||||||
|
NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
NOTIFY__QUEUE__DRIVER: "nats"
|
||||||
|
NOTIFY__QUEUE__NATS__URL: "nats://nats:4222"
|
||||||
volumes:
|
volumes:
|
||||||
- ../../etc/notify.dev.yaml:/app/etc/notify.yaml:ro
|
- ../../etc/notify.dev.yaml:/app/etc/notify.yaml:ro
|
||||||
ports:
|
ports:
|
||||||
@@ -267,10 +267,12 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285
|
image: registry.stella-ops.org/stellaops/excititor@sha256:d9bd5cadf1eab427447ce3df7302c30ded837239771cc6433b9befb895054285
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- postgres
|
||||||
- concelier
|
- concelier
|
||||||
environment:
|
environment:
|
||||||
EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
|
EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
|
||||||
EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
|
EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
@@ -332,41 +334,20 @@ services:
|
|||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
# Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
|
cryptopro-csp:
|
||||||
# WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
|
|
||||||
wine-csp:
|
|
||||||
image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.10.0-edge}
|
|
||||||
build:
|
build:
|
||||||
context: ../..
|
context: ../..
|
||||||
dockerfile: ops/wine-csp/Dockerfile
|
dockerfile: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
args:
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
environment:
|
||||||
WINE_CSP_PORT: "${WINE_CSP_PORT:-5099}"
|
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||||
WINE_CSP_MODE: "${WINE_CSP_MODE:-limited}"
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
WINE_CSP_INSTALLER_PATH: "${WINE_CSP_INSTALLER_PATH:-/opt/cryptopro/csp-installer.msi}"
|
|
||||||
WINE_CSP_LOG_LEVEL: "${WINE_CSP_LOG_LEVEL:-Information}"
|
|
||||||
ASPNETCORE_ENVIRONMENT: "${ASPNETCORE_ENVIRONMENT:-Development}"
|
|
||||||
volumes:
|
volumes:
|
||||||
- wine-csp-prefix:/home/winecsp/.wine
|
- ../../opt/cryptopro/downloads:/opt/cryptopro/downloads:ro
|
||||||
- wine-csp-logs:/var/log/wine-csp
|
|
||||||
# Mount customer-provided CSP installer (optional):
|
|
||||||
# - /path/to/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
|
|
||||||
ports:
|
ports:
|
||||||
- "${WINE_CSP_PORT:-5099}:5099"
|
- "${CRYPTOPRO_PORT:-18080}:8080"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
healthcheck:
|
labels: *release-labels
|
||||||
test: ["/usr/local/bin/healthcheck.sh"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
start_period: 90s
|
|
||||||
retries: 3
|
|
||||||
deploy:
|
|
||||||
resources:
|
|
||||||
limits:
|
|
||||||
memory: 2G
|
|
||||||
labels:
|
|
||||||
<<: *release-labels
|
|
||||||
com.stellaops.component: "wine-csp"
|
|
||||||
com.stellaops.security.production-signing: "false"
|
|
||||||
com.stellaops.security.test-vectors-only: "true"
|
|
||||||
|
|||||||
301
deploy/compose/docker-compose.eu.yml
Normal file
301
deploy/compose/docker-compose.eu.yml
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
# StellaOps Docker Compose - International Profile
|
||||||
|
# Cryptography: eIDAS-compliant qualified trust services (temporarily using NIST)
|
||||||
|
# Provider: offline-verification
|
||||||
|
# Jurisdiction: eu, world
|
||||||
|
|
||||||
|
x-release-labels: &release-labels
|
||||||
|
com.stellaops.release.version: "2025.10.0-edge"
|
||||||
|
com.stellaops.release.channel: "edge"
|
||||||
|
com.stellaops.profile: "eu"
|
||||||
|
com.stellaops.crypto.profile: "eu"
|
||||||
|
com.stellaops.crypto.provider: "offline-verification"
|
||||||
|
|
||||||
|
x-crypto-env: &crypto-env
|
||||||
|
# Crypto configuration
|
||||||
|
STELLAOPS_CRYPTO_PROFILE: "eu"
|
||||||
|
STELLAOPS_CRYPTO_CONFIG_PATH: "/app/etc/appsettings.crypto.yaml"
|
||||||
|
STELLAOPS_CRYPTO_MANIFEST_PATH: "/app/etc/crypto-plugins-manifest.json"
|
||||||
|
|
||||||
|
networks:
|
||||||
|
stellaops:
|
||||||
|
driver: bridge
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
rustfs-data:
|
||||||
|
concelier-jobs:
|
||||||
|
nats-data:
|
||||||
|
valkey-data:
|
||||||
|
advisory-ai-queue:
|
||||||
|
advisory-ai-plans:
|
||||||
|
advisory-ai-outputs:
|
||||||
|
postgres-data:
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: docker.io/library/postgres:16
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
||||||
|
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
||||||
|
PGDATA: /var/lib/postgresql/data/pgdata
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
- ../postgres-partitioning:/docker-entrypoint-initdb.d:ro
|
||||||
|
ports:
|
||||||
|
- "${POSTGRES_PORT:-5432}:5432"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
valkey:
|
||||||
|
image: docker.io/valkey/valkey:8.0
|
||||||
|
restart: unless-stopped
|
||||||
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
|
volumes:
|
||||||
|
- valkey-data:/data
|
||||||
|
ports:
|
||||||
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
rustfs:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: info
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
volumes:
|
||||||
|
- rustfs-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_HTTP_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
nats:
|
||||||
|
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||||
|
command:
|
||||||
|
- "-js"
|
||||||
|
- "-sd"
|
||||||
|
- /data
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "${NATS_CLIENT_PORT:-4222}:4222"
|
||||||
|
volumes:
|
||||||
|
- nats-data:/data
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
authority:
|
||||||
|
image: registry.stella-ops.org/stellaops/authority:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/authority.yaml:/etc/authority.yaml:ro
|
||||||
|
- ../../etc/authority.plugins:/app/etc/authority.plugins:ro
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${AUTHORITY_PORT:-8440}:8440"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
signer:
|
||||||
|
image: registry.stella-ops.org/stellaops/signer:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SIGNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
attestor:
|
||||||
|
image: registry.stella-ops.org/stellaops/attestor:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- signer
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ATTESTOR__SIGNER__BASEURL: "http://signer:8441"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- rustfs
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__RUSTFS__BASEURL: "http://rustfs:8080"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
- concelier-jobs:/app/jobs
|
||||||
|
ports:
|
||||||
|
- "${CONCELIER_PORT:-8443}:8443"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scanner:
|
||||||
|
image: registry.stella-ops.org/stellaops/scanner:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCANNER_PORT:-8444}:8444"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
excititor:
|
||||||
|
image: registry.stella-ops.org/stellaops/excititor:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${EXCITITOR_PORT:-8445}:8445"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
policy:
|
||||||
|
image: registry.stella-ops.org/stellaops/policy:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_POLICY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_POLICY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${POLICY_PORT:-8446}:8446"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scheduler:
|
||||||
|
image: registry.stella-ops.org/stellaops/scheduler:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- nats
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_SCHEDULER__MESSAGING__NATS__URL: "nats://nats:4222"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCHEDULER_PORT:-8447}:8447"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
notify:
|
||||||
|
image: registry.stella-ops.org/stellaops/notify:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${NOTIFY_PORT:-8448}:8448"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
zastava:
|
||||||
|
image: registry.stella-ops.org/stellaops/zastava:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ZASTAVA_PORT:-8449}:8449"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
gateway:
|
||||||
|
image: registry.stella-ops.org/stellaops/gateway:eu
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- authority
|
||||||
|
- concelier
|
||||||
|
- scanner
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_GATEWAY__AUTHORITY__BASEURL: "http://authority:8440"
|
||||||
|
STELLAOPS_GATEWAY__CONCELIER__BASEURL: "http://concelier:8443"
|
||||||
|
STELLAOPS_GATEWAY__SCANNER__BASEURL: "http://scanner:8444"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.eu.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${GATEWAY_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
301
deploy/compose/docker-compose.international.yml
Normal file
301
deploy/compose/docker-compose.international.yml
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
# StellaOps Docker Compose - International Profile
|
||||||
|
# Cryptography: Standard NIST algorithms (ECDSA, RSA, SHA-2)
|
||||||
|
# Provider: offline-verification
|
||||||
|
# Jurisdiction: world
|
||||||
|
|
||||||
|
x-release-labels: &release-labels
|
||||||
|
com.stellaops.release.version: "2025.10.0-edge"
|
||||||
|
com.stellaops.release.channel: "edge"
|
||||||
|
com.stellaops.profile: "international"
|
||||||
|
com.stellaops.crypto.profile: "international"
|
||||||
|
com.stellaops.crypto.provider: "offline-verification"
|
||||||
|
|
||||||
|
x-crypto-env: &crypto-env
|
||||||
|
# Crypto configuration
|
||||||
|
STELLAOPS_CRYPTO_PROFILE: "international"
|
||||||
|
STELLAOPS_CRYPTO_CONFIG_PATH: "/app/etc/appsettings.crypto.yaml"
|
||||||
|
STELLAOPS_CRYPTO_MANIFEST_PATH: "/app/etc/crypto-plugins-manifest.json"
|
||||||
|
|
||||||
|
networks:
|
||||||
|
stellaops:
|
||||||
|
driver: bridge
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
rustfs-data:
|
||||||
|
concelier-jobs:
|
||||||
|
nats-data:
|
||||||
|
valkey-data:
|
||||||
|
advisory-ai-queue:
|
||||||
|
advisory-ai-plans:
|
||||||
|
advisory-ai-outputs:
|
||||||
|
postgres-data:
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: docker.io/library/postgres:16
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
||||||
|
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
||||||
|
PGDATA: /var/lib/postgresql/data/pgdata
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
- ../postgres-partitioning:/docker-entrypoint-initdb.d:ro
|
||||||
|
ports:
|
||||||
|
- "${POSTGRES_PORT:-5432}:5432"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
valkey:
|
||||||
|
image: docker.io/valkey/valkey:8.0
|
||||||
|
restart: unless-stopped
|
||||||
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
|
volumes:
|
||||||
|
- valkey-data:/data
|
||||||
|
ports:
|
||||||
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
rustfs:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: info
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
volumes:
|
||||||
|
- rustfs-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_HTTP_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
nats:
|
||||||
|
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||||
|
command:
|
||||||
|
- "-js"
|
||||||
|
- "-sd"
|
||||||
|
- /data
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "${NATS_CLIENT_PORT:-4222}:4222"
|
||||||
|
volumes:
|
||||||
|
- nats-data:/data
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
authority:
|
||||||
|
image: registry.stella-ops.org/stellaops/authority:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/authority.yaml:/etc/authority.yaml:ro
|
||||||
|
- ../../etc/authority.plugins:/app/etc/authority.plugins:ro
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${AUTHORITY_PORT:-8440}:8440"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
signer:
|
||||||
|
image: registry.stella-ops.org/stellaops/signer:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SIGNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
attestor:
|
||||||
|
image: registry.stella-ops.org/stellaops/attestor:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- signer
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ATTESTOR__SIGNER__BASEURL: "http://signer:8441"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- rustfs
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__RUSTFS__BASEURL: "http://rustfs:8080"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
- concelier-jobs:/app/jobs
|
||||||
|
ports:
|
||||||
|
- "${CONCELIER_PORT:-8443}:8443"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scanner:
|
||||||
|
image: registry.stella-ops.org/stellaops/scanner:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCANNER_PORT:-8444}:8444"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
excititor:
|
||||||
|
image: registry.stella-ops.org/stellaops/excititor:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${EXCITITOR_PORT:-8445}:8445"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
policy:
|
||||||
|
image: registry.stella-ops.org/stellaops/policy:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_POLICY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_POLICY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${POLICY_PORT:-8446}:8446"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scheduler:
|
||||||
|
image: registry.stella-ops.org/stellaops/scheduler:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- nats
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_SCHEDULER__MESSAGING__NATS__URL: "nats://nats:4222"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCHEDULER_PORT:-8447}:8447"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
notify:
|
||||||
|
image: registry.stella-ops.org/stellaops/notify:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${NOTIFY_PORT:-8448}:8448"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
zastava:
|
||||||
|
image: registry.stella-ops.org/stellaops/zastava:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ZASTAVA_PORT:-8449}:8449"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
gateway:
|
||||||
|
image: registry.stella-ops.org/stellaops/gateway:international
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- authority
|
||||||
|
- concelier
|
||||||
|
- scanner
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_GATEWAY__AUTHORITY__BASEURL: "http://authority:8440"
|
||||||
|
STELLAOPS_GATEWAY__CONCELIER__BASEURL: "http://concelier:8443"
|
||||||
|
STELLAOPS_GATEWAY__SCANNER__BASEURL: "http://scanner:8444"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.international.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${GATEWAY_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
@@ -73,13 +73,18 @@ services:
|
|||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
networks: [stellaops]
|
networks: [stellaops]
|
||||||
|
|
||||||
# Wine CSP Service - GOST cryptographic operations via Wine-hosted CryptoPro CSP
|
cryptopro-csp:
|
||||||
# WARNING: For TEST VECTOR GENERATION ONLY - not for production signing
|
build:
|
||||||
wine-csp:
|
context: ../..
|
||||||
image: registry.stella-ops.org/stellaops/wine-csp:${WINE_CSP_VERSION:-2025.09.2-mock}
|
dockerfile: ops/cryptopro/linux-csp-service/Dockerfile
|
||||||
|
args:
|
||||||
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
environment:
|
environment:
|
||||||
WINE_CSP_PORT: "5099"
|
ASPNETCORE_URLS: "http://0.0.0.0:8080"
|
||||||
WINE_CSP_MODE: "limited"
|
CRYPTOPRO_ACCEPT_EULA: "${CRYPTOPRO_ACCEPT_EULA:-0}"
|
||||||
WINE_CSP_LOG_LEVEL: "Debug"
|
volumes:
|
||||||
|
- ../../opt/cryptopro/downloads:/opt/cryptopro/downloads:ro
|
||||||
|
ports:
|
||||||
|
- "${CRYPTOPRO_PORT:-18080}:8080"
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
networks: [stellaops]
|
networks: [stellaops]
|
||||||
|
|||||||
@@ -10,42 +10,26 @@ networks:
|
|||||||
external: true
|
external: true
|
||||||
name: ${FRONTDOOR_NETWORK:-stellaops_frontdoor}
|
name: ${FRONTDOOR_NETWORK:-stellaops_frontdoor}
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
mongo-data:
|
valkey-data:
|
||||||
minio-data:
|
rustfs-data:
|
||||||
rustfs-data:
|
concelier-jobs:
|
||||||
concelier-jobs:
|
nats-data:
|
||||||
nats-data:
|
scanner-surface-cache:
|
||||||
advisory-ai-queue:
|
postgres-data:
|
||||||
advisory-ai-plans:
|
advisory-ai-queue:
|
||||||
advisory-ai-outputs:
|
advisory-ai-plans:
|
||||||
postgres-data:
|
advisory-ai-outputs:
|
||||||
|
|
||||||
services:
|
services:
|
||||||
mongo:
|
valkey:
|
||||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
image: docker.io/valkey/valkey:8.0
|
||||||
command: ["mongod", "--bind_ip_all"]
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
|
|
||||||
MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
|
|
||||||
volumes:
|
volumes:
|
||||||
- mongo-data:/data/db
|
- valkey-data:/data
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
minio:
|
|
||||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
|
||||||
command: ["server", "/data", "--console-address", ":9001"]
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
|
|
||||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
|
|
||||||
volumes:
|
|
||||||
- minio-data:/data
|
|
||||||
ports:
|
ports:
|
||||||
- "${MINIO_CONSOLE_PORT:-9001}:9001"
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
@@ -84,10 +68,13 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_AUTHORITY__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
volumes:
|
volumes:
|
||||||
@@ -104,11 +91,13 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- postgres
|
||||||
- authority
|
- authority
|
||||||
environment:
|
environment:
|
||||||
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
||||||
SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SIGNER__STORAGE__DRIVER: "postgres"
|
||||||
|
SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
ports:
|
ports:
|
||||||
- "${SIGNER_PORT:-8441}:8441"
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
networks:
|
networks:
|
||||||
@@ -116,69 +105,73 @@ services:
|
|||||||
- frontdoor
|
- frontdoor
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
attestor:
|
attestor:
|
||||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
|
||||||
- signer
|
|
||||||
environment:
|
|
||||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
|
||||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
|
||||||
ports:
|
|
||||||
- "${ATTESTOR_PORT:-8442}:8442"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
- frontdoor
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
image: docker.io/library/postgres:16
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
|
||||||
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
|
||||||
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
|
||||||
PGDATA: /var/lib/postgresql/data/pgdata
|
|
||||||
volumes:
|
|
||||||
- postgres-data:/var/lib/postgresql/data
|
|
||||||
ports:
|
|
||||||
- "${POSTGRES_PORT:-5432}:5432"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
issuer-directory:
|
|
||||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
|
||||||
restart: unless-stopped
|
|
||||||
depends_on:
|
|
||||||
- mongo
|
|
||||||
- authority
|
|
||||||
environment:
|
|
||||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
|
||||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
|
||||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
|
||||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
|
||||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
|
||||||
volumes:
|
|
||||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
|
||||||
ports:
|
|
||||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
concelier:
|
|
||||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
|
||||||
restart: unless-stopped
|
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- signer
|
||||||
- minio
|
- postgres
|
||||||
environment:
|
environment:
|
||||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||||
CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
|
ATTESTOR__STORAGE__DRIVER: "postgres"
|
||||||
CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
|
ATTESTOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
- frontdoor
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: docker.io/library/postgres:16
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
||||||
|
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
||||||
|
PGDATA: /var/lib/postgresql/data/pgdata
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
ports:
|
||||||
|
- "${POSTGRES_PORT:-5432}:5432"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
issuer-directory:
|
||||||
|
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- authority
|
||||||
|
environment:
|
||||||
|
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||||
|
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
|
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
|
ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
|
||||||
|
ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||||
|
ports:
|
||||||
|
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- valkey
|
||||||
|
environment:
|
||||||
|
CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
CONCELIER__STORAGE__S3__ENDPOINT: "http://rustfs:8080"
|
||||||
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
|
CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true"
|
||||||
|
CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}"
|
||||||
volumes:
|
volumes:
|
||||||
- concelier-jobs:/var/lib/concelier/jobs
|
- concelier-jobs:/var/lib/concelier/jobs
|
||||||
ports:
|
ports:
|
||||||
@@ -192,22 +185,47 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- valkey
|
||||||
- concelier
|
- concelier
|
||||||
- rustfs
|
- rustfs
|
||||||
- nats
|
|
||||||
environment:
|
environment:
|
||||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
||||||
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
||||||
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
||||||
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
||||||
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
|
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
|
||||||
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-true}"
|
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
|
||||||
SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}"
|
SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-valkey}"
|
||||||
SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
|
SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
|
||||||
SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
|
SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
|
||||||
SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
|
SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
|
||||||
SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
|
SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
|
||||||
|
SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}"
|
||||||
|
SCANNER__OFFLINEKIT__REQUIREDSSE: "${SCANNER_OFFLINEKIT_REQUIREDSSE:-true}"
|
||||||
|
SCANNER__OFFLINEKIT__REKOROFFLINEMODE: "${SCANNER_OFFLINEKIT_REKOROFFLINEMODE:-true}"
|
||||||
|
SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
|
||||||
|
SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
|
||||||
|
SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
|
||||||
|
SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
|
||||||
|
SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
|
||||||
|
SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
|
||||||
|
SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
|
||||||
|
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
||||||
|
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
||||||
|
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
||||||
|
SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
|
||||||
|
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
||||||
|
SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
|
||||||
|
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
||||||
|
volumes:
|
||||||
|
- scanner-surface-cache:/var/lib/stellaops/surface
|
||||||
|
- ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
|
||||||
|
- ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
|
||||||
|
- ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
|
||||||
ports:
|
ports:
|
||||||
- "${SCANNER_WEB_PORT:-8444}:8444"
|
- "${SCANNER_WEB_PORT:-8444}:8444"
|
||||||
networks:
|
networks:
|
||||||
@@ -215,50 +233,68 @@ services:
|
|||||||
- frontdoor
|
- frontdoor
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
scanner-worker:
|
scanner-worker:
|
||||||
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
|
image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- scanner-web
|
- postgres
|
||||||
- rustfs
|
- valkey
|
||||||
- nats
|
- scanner-web
|
||||||
environment:
|
- rustfs
|
||||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
environment:
|
||||||
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
||||||
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
|
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
||||||
networks:
|
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
||||||
- stellaops
|
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
||||||
labels: *release-labels
|
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
|
||||||
|
SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
|
||||||
scheduler-worker:
|
SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
|
||||||
image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
|
SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
|
||||||
restart: unless-stopped
|
SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
|
||||||
depends_on:
|
SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
|
||||||
- mongo
|
SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
|
||||||
- nats
|
SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
|
||||||
- scanner-web
|
SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
|
||||||
command:
|
SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
|
||||||
- "dotnet"
|
SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
|
||||||
- "StellaOps.Scheduler.Worker.Host.dll"
|
SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
|
||||||
environment:
|
SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
|
||||||
SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}"
|
volumes:
|
||||||
SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}"
|
- scanner-surface-cache:/var/lib/stellaops/surface
|
||||||
SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
- ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
|
||||||
SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}"
|
networks:
|
||||||
SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
|
- stellaops
|
||||||
networks:
|
labels: *release-labels
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
notify-web:
|
scheduler-worker:
|
||||||
image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
|
image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
- postgres
|
||||||
- authority
|
- valkey
|
||||||
|
- scanner-web
|
||||||
|
command:
|
||||||
|
- "dotnet"
|
||||||
|
- "StellaOps.Scheduler.Worker.Host.dll"
|
||||||
|
environment:
|
||||||
|
SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
|
SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Valkey}"
|
||||||
|
SCHEDULER__QUEUE__VALKEY__URL: "${SCHEDULER_QUEUE_VALKEY_URL:-valkey:6379}"
|
||||||
|
SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
notify-web:
|
||||||
|
image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- authority
|
||||||
environment:
|
environment:
|
||||||
DOTNET_ENVIRONMENT: Production
|
DOTNET_ENVIRONMENT: Production
|
||||||
volumes:
|
volumes:
|
||||||
@@ -270,64 +306,66 @@ services:
|
|||||||
- frontdoor
|
- frontdoor
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
excititor:
|
excititor:
|
||||||
image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
|
image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- concelier
|
- postgres
|
||||||
environment:
|
- concelier
|
||||||
EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
|
environment:
|
||||||
EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
|
||||||
networks:
|
EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
- stellaops
|
EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
labels: *release-labels
|
networks:
|
||||||
|
- stellaops
|
||||||
advisory-ai-web:
|
labels: *release-labels
|
||||||
image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2
|
|
||||||
restart: unless-stopped
|
advisory-ai-web:
|
||||||
depends_on:
|
image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2
|
||||||
- scanner-web
|
restart: unless-stopped
|
||||||
environment:
|
depends_on:
|
||||||
ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
|
- scanner-web
|
||||||
ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
|
environment:
|
||||||
ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
|
ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
|
||||||
ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
|
ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
|
ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
|
ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
|
ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
|
||||||
ports:
|
ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
|
||||||
- "${ADVISORY_AI_WEB_PORT:-8448}:8448"
|
ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
|
||||||
volumes:
|
ports:
|
||||||
- advisory-ai-queue:/var/lib/advisory-ai/queue
|
- "${ADVISORY_AI_WEB_PORT:-8448}:8448"
|
||||||
- advisory-ai-plans:/var/lib/advisory-ai/plans
|
volumes:
|
||||||
- advisory-ai-outputs:/var/lib/advisory-ai/outputs
|
- advisory-ai-queue:/var/lib/advisory-ai/queue
|
||||||
networks:
|
- advisory-ai-plans:/var/lib/advisory-ai/plans
|
||||||
- stellaops
|
- advisory-ai-outputs:/var/lib/advisory-ai/outputs
|
||||||
- frontdoor
|
networks:
|
||||||
labels: *release-labels
|
- stellaops
|
||||||
|
- frontdoor
|
||||||
advisory-ai-worker:
|
labels: *release-labels
|
||||||
image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2
|
|
||||||
restart: unless-stopped
|
advisory-ai-worker:
|
||||||
depends_on:
|
image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2
|
||||||
- advisory-ai-web
|
restart: unless-stopped
|
||||||
environment:
|
depends_on:
|
||||||
ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
|
- advisory-ai-web
|
||||||
ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
|
environment:
|
||||||
ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
|
ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
|
||||||
ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
|
ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
|
ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
|
ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
|
||||||
ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
|
ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
|
||||||
volumes:
|
ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
|
||||||
- advisory-ai-queue:/var/lib/advisory-ai/queue
|
ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
|
||||||
- advisory-ai-plans:/var/lib/advisory-ai/plans
|
volumes:
|
||||||
- advisory-ai-outputs:/var/lib/advisory-ai/outputs
|
- advisory-ai-queue:/var/lib/advisory-ai/queue
|
||||||
networks:
|
- advisory-ai-plans:/var/lib/advisory-ai/plans
|
||||||
- stellaops
|
- advisory-ai-outputs:/var/lib/advisory-ai/outputs
|
||||||
labels: *release-labels
|
networks:
|
||||||
|
- stellaops
|
||||||
web-ui:
|
labels: *release-labels
|
||||||
|
|
||||||
|
web-ui:
|
||||||
image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
|
image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
|||||||
301
deploy/compose/docker-compose.russia.yml
Normal file
301
deploy/compose/docker-compose.russia.yml
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
# StellaOps Docker Compose - International Profile
|
||||||
|
# Cryptography: GOST R 34.10-2012, GOST R 34.11-2012 (Streebog)
|
||||||
|
# Provider: openssl.gost, pkcs11.gost, cryptopro.gost
|
||||||
|
# Jurisdiction: world
|
||||||
|
|
||||||
|
x-release-labels: &release-labels
|
||||||
|
com.stellaops.release.version: "2025.10.0-edge"
|
||||||
|
com.stellaops.release.channel: "edge"
|
||||||
|
com.stellaops.profile: "russia"
|
||||||
|
com.stellaops.crypto.profile: "russia"
|
||||||
|
com.stellaops.crypto.provider: "openssl.gost, pkcs11.gost, cryptopro.gost"
|
||||||
|
|
||||||
|
x-crypto-env: &crypto-env
|
||||||
|
# Crypto configuration
|
||||||
|
STELLAOPS_CRYPTO_PROFILE: "russia"
|
||||||
|
STELLAOPS_CRYPTO_CONFIG_PATH: "/app/etc/appsettings.crypto.yaml"
|
||||||
|
STELLAOPS_CRYPTO_MANIFEST_PATH: "/app/etc/crypto-plugins-manifest.json"
|
||||||
|
|
||||||
|
networks:
|
||||||
|
stellaops:
|
||||||
|
driver: bridge
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
rustfs-data:
|
||||||
|
concelier-jobs:
|
||||||
|
nats-data:
|
||||||
|
valkey-data:
|
||||||
|
advisory-ai-queue:
|
||||||
|
advisory-ai-plans:
|
||||||
|
advisory-ai-outputs:
|
||||||
|
postgres-data:
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: docker.io/library/postgres:16
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
||||||
|
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
||||||
|
PGDATA: /var/lib/postgresql/data/pgdata
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
- ../postgres-partitioning:/docker-entrypoint-initdb.d:ro
|
||||||
|
ports:
|
||||||
|
- "${POSTGRES_PORT:-5432}:5432"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
valkey:
|
||||||
|
image: docker.io/valkey/valkey:8.0
|
||||||
|
restart: unless-stopped
|
||||||
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
|
volumes:
|
||||||
|
- valkey-data:/data
|
||||||
|
ports:
|
||||||
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
rustfs:
|
||||||
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
RUSTFS__LOG__LEVEL: info
|
||||||
|
RUSTFS__STORAGE__PATH: /data
|
||||||
|
volumes:
|
||||||
|
- rustfs-data:/data
|
||||||
|
ports:
|
||||||
|
- "${RUSTFS_HTTP_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
nats:
|
||||||
|
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||||
|
command:
|
||||||
|
- "-js"
|
||||||
|
- "-sd"
|
||||||
|
- /data
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "${NATS_CLIENT_PORT:-4222}:4222"
|
||||||
|
volumes:
|
||||||
|
- nats-data:/data
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
authority:
|
||||||
|
image: registry.stella-ops.org/stellaops/authority:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/authority.yaml:/etc/authority.yaml:ro
|
||||||
|
- ../../etc/authority.plugins:/app/etc/authority.plugins:ro
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${AUTHORITY_PORT:-8440}:8440"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
signer:
|
||||||
|
image: registry.stella-ops.org/stellaops/signer:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SIGNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
attestor:
|
||||||
|
image: registry.stella-ops.org/stellaops/attestor:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- signer
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ATTESTOR__SIGNER__BASEURL: "http://signer:8441"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- rustfs
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_CONCELIER__STORAGE__RUSTFS__BASEURL: "http://rustfs:8080"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
- concelier-jobs:/app/jobs
|
||||||
|
ports:
|
||||||
|
- "${CONCELIER_PORT:-8443}:8443"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scanner:
|
||||||
|
image: registry.stella-ops.org/stellaops/scanner:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCANNER_PORT:-8444}:8444"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
excititor:
|
||||||
|
image: registry.stella-ops.org/stellaops/excititor:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${EXCITITOR_PORT:-8445}:8445"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
policy:
|
||||||
|
image: registry.stella-ops.org/stellaops/policy:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_POLICY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_POLICY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${POLICY_PORT:-8446}:8446"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
scheduler:
|
||||||
|
image: registry.stella-ops.org/stellaops/scheduler:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- nats
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_SCHEDULER__MESSAGING__NATS__URL: "nats://nats:4222"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${SCHEDULER_PORT:-8447}:8447"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
notify:
|
||||||
|
image: registry.stella-ops.org/stellaops/notify:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_NOTIFY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${NOTIFY_PORT:-8448}:8448"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
zastava:
|
||||||
|
image: registry.stella-ops.org/stellaops/zastava:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_ZASTAVA__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${ZASTAVA_PORT:-8449}:8449"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
gateway:
|
||||||
|
image: registry.stella-ops.org/stellaops/gateway:russia
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- authority
|
||||||
|
- concelier
|
||||||
|
- scanner
|
||||||
|
environment:
|
||||||
|
<<: *crypto-env
|
||||||
|
STELLAOPS_GATEWAY__AUTHORITY__BASEURL: "http://authority:8440"
|
||||||
|
STELLAOPS_GATEWAY__CONCELIER__BASEURL: "http://concelier:8443"
|
||||||
|
STELLAOPS_GATEWAY__SCANNER__BASEURL: "http://scanner:8444"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/appsettings.crypto.russia.yaml:/app/etc/appsettings.crypto.yaml:ro
|
||||||
|
- ../../etc/crypto-plugins-manifest.json:/app/etc/crypto-plugins-manifest.json:ro
|
||||||
|
ports:
|
||||||
|
- "${GATEWAY_PORT:-8080}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
@@ -7,76 +7,60 @@ networks:
|
|||||||
stellaops:
|
stellaops:
|
||||||
driver: bridge
|
driver: bridge
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
mongo-data:
|
valkey-data:
|
||||||
minio-data:
|
rustfs-data:
|
||||||
rustfs-data:
|
concelier-jobs:
|
||||||
concelier-jobs:
|
nats-data:
|
||||||
nats-data:
|
scanner-surface-cache:
|
||||||
advisory-ai-queue:
|
postgres-data:
|
||||||
advisory-ai-plans:
|
advisory-ai-queue:
|
||||||
advisory-ai-outputs:
|
advisory-ai-plans:
|
||||||
postgres-data:
|
advisory-ai-outputs:
|
||||||
|
|
||||||
services:
|
services:
|
||||||
mongo:
|
valkey:
|
||||||
image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49
|
image: docker.io/valkey/valkey:8.0
|
||||||
command: ["mongod", "--bind_ip_all"]
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command: ["valkey-server", "--appendonly", "yes"]
|
||||||
MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}"
|
|
||||||
MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}"
|
|
||||||
volumes:
|
volumes:
|
||||||
- mongo-data:/data/db
|
- valkey-data:/data
|
||||||
|
ports:
|
||||||
|
- "${VALKEY_PORT:-6379}:6379"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
minio:
|
postgres:
|
||||||
image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e
|
image: docker.io/library/postgres:16
|
||||||
command: ["server", "/data", "--console-address", ":9001"]
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
environment:
|
||||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
|
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
||||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
|
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
||||||
|
PGDATA: /var/lib/postgresql/data/pgdata
|
||||||
volumes:
|
volumes:
|
||||||
- minio-data:/data
|
- postgres-data:/var/lib/postgresql/data
|
||||||
ports:
|
ports:
|
||||||
- "${MINIO_CONSOLE_PORT:-9001}:9001"
|
- "${POSTGRES_PORT:-5432}:5432"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
postgres:
|
rustfs:
|
||||||
image: docker.io/library/postgres:16
|
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
||||||
restart: unless-stopped
|
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
||||||
environment:
|
restart: unless-stopped
|
||||||
POSTGRES_USER: "${POSTGRES_USER:-stellaops}"
|
environment:
|
||||||
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD:-stellaops}"
|
RUSTFS__LOG__LEVEL: info
|
||||||
POSTGRES_DB: "${POSTGRES_DB:-stellaops_platform}"
|
RUSTFS__STORAGE__PATH: /data
|
||||||
PGDATA: /var/lib/postgresql/data/pgdata
|
volumes:
|
||||||
volumes:
|
- rustfs-data:/data
|
||||||
- postgres-data:/var/lib/postgresql/data
|
ports:
|
||||||
ports:
|
- "${RUSTFS_HTTP_PORT:-8080}:8080"
|
||||||
- "${POSTGRES_PORT:-5432}:5432"
|
networks:
|
||||||
networks:
|
- stellaops
|
||||||
- stellaops
|
labels: *release-labels
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
rustfs:
|
|
||||||
image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge
|
|
||||||
command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"]
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
RUSTFS__LOG__LEVEL: info
|
|
||||||
RUSTFS__STORAGE__PATH: /data
|
|
||||||
volumes:
|
|
||||||
- rustfs-data:/data
|
|
||||||
ports:
|
|
||||||
- "${RUSTFS_HTTP_PORT:-8080}:8080"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
nats:
|
nats:
|
||||||
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e
|
||||||
@@ -97,10 +81,13 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- postgres
|
||||||
|
- valkey
|
||||||
environment:
|
environment:
|
||||||
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
STELLAOPS_AUTHORITY__STORAGE__DRIVER: "postgres"
|
||||||
|
STELLAOPS_AUTHORITY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
STELLAOPS_AUTHORITY__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins"
|
||||||
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins"
|
||||||
volumes:
|
volumes:
|
||||||
@@ -116,63 +103,69 @@ services:
|
|||||||
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
|
- postgres
|
||||||
- authority
|
- authority
|
||||||
environment:
|
environment:
|
||||||
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
SIGNER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}"
|
||||||
SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
SIGNER__STORAGE__DRIVER: "postgres"
|
||||||
|
SIGNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
ports:
|
ports:
|
||||||
- "${SIGNER_PORT:-8441}:8441"
|
- "${SIGNER_PORT:-8441}:8441"
|
||||||
networks:
|
networks:
|
||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
attestor:
|
attestor:
|
||||||
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f
|
||||||
restart: unless-stopped
|
|
||||||
depends_on:
|
|
||||||
- signer
|
|
||||||
environment:
|
|
||||||
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
|
||||||
ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
|
||||||
ports:
|
|
||||||
- "${ATTESTOR_PORT:-8442}:8442"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
issuer-directory:
|
|
||||||
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
|
||||||
restart: unless-stopped
|
|
||||||
depends_on:
|
|
||||||
- mongo
|
|
||||||
- authority
|
|
||||||
environment:
|
|
||||||
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
|
||||||
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
|
||||||
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
|
||||||
ISSUERDIRECTORY__MONGO__CONNECTIONSTRING: "${ISSUER_DIRECTORY_MONGO_CONNECTION_STRING}"
|
|
||||||
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
|
||||||
volumes:
|
|
||||||
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
|
||||||
ports:
|
|
||||||
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
|
||||||
networks:
|
|
||||||
- stellaops
|
|
||||||
labels: *release-labels
|
|
||||||
|
|
||||||
concelier:
|
|
||||||
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- mongo
|
- signer
|
||||||
- minio
|
- postgres
|
||||||
environment:
|
environment:
|
||||||
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
ATTESTOR__SIGNER__BASEURL: "https://signer:8441"
|
||||||
CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000"
|
ATTESTOR__STORAGE__DRIVER: "postgres"
|
||||||
CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}"
|
ATTESTOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}"
|
ports:
|
||||||
|
- "${ATTESTOR_PORT:-8442}:8442"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
issuer-directory:
|
||||||
|
image: registry.stella-ops.org/stellaops/issuer-directory-web:2025.10.0-edge
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- authority
|
||||||
|
environment:
|
||||||
|
ISSUERDIRECTORY__CONFIG: "/etc/issuer-directory.yaml"
|
||||||
|
ISSUERDIRECTORY__AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}"
|
||||||
|
ISSUERDIRECTORY__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
|
ISSUERDIRECTORY__STORAGE__DRIVER: "postgres"
|
||||||
|
ISSUERDIRECTORY__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
ISSUERDIRECTORY__SEEDCSAFPUBLISHERS: "${ISSUER_DIRECTORY_SEED_CSAF:-true}"
|
||||||
|
volumes:
|
||||||
|
- ../../etc/issuer-directory.yaml:/etc/issuer-directory.yaml:ro
|
||||||
|
ports:
|
||||||
|
- "${ISSUER_DIRECTORY_PORT:-8447}:8080"
|
||||||
|
networks:
|
||||||
|
- stellaops
|
||||||
|
labels: *release-labels
|
||||||
|
|
||||||
|
concelier:
|
||||||
|
image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- valkey
|
||||||
|
environment:
|
||||||
|
CONCELIER__STORAGE__DRIVER: "postgres"
|
||||||
|
CONCELIER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
|
CONCELIER__STORAGE__S3__ENDPOINT: "http://rustfs:8080"
|
||||||
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
CONCELIER__AUTHORITY__BASEURL: "https://authority:8440"
|
||||||
|
CONCELIER__AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK: "true"
|
||||||
|
CONCELIER__AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE: "${AUTHORITY_OFFLINE_CACHE_TOLERANCE:-00:30:00}"
|
||||||
volumes:
|
volumes:
|
||||||
- concelier-jobs:/var/lib/concelier/jobs
|
- concelier-jobs:/var/lib/concelier/jobs
|
||||||
ports:
|
ports:
|
||||||
@@ -181,76 +174,119 @@ services:
|
|||||||
- stellaops
|
- stellaops
|
||||||
labels: *release-labels
|
labels: *release-labels
|
||||||
|
|
||||||
scanner-web:
|
scanner-web:
|
||||||
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
depends_on:
|
depends_on:
|
||||||
- concelier
|
- postgres
|
||||||
- rustfs
|
- valkey
|
||||||
- nats
|
- concelier
|
||||||
environment:
|
- rustfs
|
||||||
SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
|
environment:
|
||||||
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
SCANNER__STORAGE__DRIVER: "postgres"
|
||||||
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
|
||||||
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
|
||||||
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
|
||||||
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
|
SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
|
||||||
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
|
SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
|
||||||
SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}"
|
SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
|
||||||
SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
|
SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
|
||||||
SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
|
SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-false}"
|
||||||
-      SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
-      SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
+      SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-valkey}"
+      SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}"
+      SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}"
+      SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}"
+      SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}"
+      SCANNER__OFFLINEKIT__ENABLED: "${SCANNER_OFFLINEKIT_ENABLED:-false}"
+      SCANNER__OFFLINEKIT__REQUIREDSSE: "${SCANNER_OFFLINEKIT_REQUIREDSSE:-true}"
+      SCANNER__OFFLINEKIT__REKOROFFLINEMODE: "${SCANNER_OFFLINEKIT_REKOROFFLINEMODE:-true}"
+      SCANNER__OFFLINEKIT__TRUSTROOTDIRECTORY: "${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}"
+      SCANNER__OFFLINEKIT__REKORSNAPSHOTDIRECTORY: "${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}"
+      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
+      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
+      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
+      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
+      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
+      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
+      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
+      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
+      SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
+      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
+      SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
+      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
+    volumes:
+      - scanner-surface-cache:/var/lib/stellaops/surface
+      - ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
+      - ${SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH:-./offline/trust-roots}:${SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY:-/etc/stellaops/trust-roots}:ro
+      - ${SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH:-./offline/rekor-snapshot}:${SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY:-/var/lib/stellaops/rekor-snapshot}:ro
    ports:
      - "${SCANNER_WEB_PORT:-8444}:8444"
    networks:
      - stellaops
    labels: *release-labels

  scanner-worker:
    image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab
    restart: unless-stopped
    depends_on:
-      - scanner-web
-      - rustfs
-      - nats
-    environment:
-      SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
-      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
-      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
-      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
-      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
-      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}"
-    networks:
-      - stellaops
-    labels: *release-labels
-
-  scheduler-worker:
-    image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
-    restart: unless-stopped
-    depends_on:
-      - mongo
-      - nats
-      - scanner-web
-    command:
-      - "dotnet"
-      - "StellaOps.Scheduler.Worker.Host.dll"
-    environment:
-      SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Nats}"
-      SCHEDULER__QUEUE__NATS__URL: "${SCHEDULER_QUEUE_NATS_URL:-nats://nats:4222}"
-      SCHEDULER__STORAGE__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
-      SCHEDULER__STORAGE__DATABASE: "${SCHEDULER_STORAGE_DATABASE:-stellaops_scheduler}"
-      SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
-    networks:
-      - stellaops
-    labels: *release-labels
+      - postgres
+      - valkey
+      - scanner-web
+      - rustfs
+    environment:
+      SCANNER__STORAGE__DRIVER: "postgres"
+      SCANNER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
+      SCANNER__CACHE__REDIS__CONNECTIONSTRING: "valkey:6379"
+      SCANNER__ARTIFACTSTORE__DRIVER: "rustfs"
+      SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1"
+      SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts"
+      SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30"
+      SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER:-valkey://valkey:6379}"
+      SCANNER_SURFACE_FS_ENDPOINT: "${SCANNER_SURFACE_FS_ENDPOINT:-http://rustfs:8080}"
+      SCANNER_SURFACE_FS_BUCKET: "${SCANNER_SURFACE_FS_BUCKET:-surface-cache}"
+      SCANNER_SURFACE_CACHE_ROOT: "${SCANNER_SURFACE_CACHE_ROOT:-/var/lib/stellaops/surface}"
+      SCANNER_SURFACE_CACHE_QUOTA_MB: "${SCANNER_SURFACE_CACHE_QUOTA_MB:-4096}"
+      SCANNER_SURFACE_PREFETCH_ENABLED: "${SCANNER_SURFACE_PREFETCH_ENABLED:-false}"
+      SCANNER_SURFACE_TENANT: "${SCANNER_SURFACE_TENANT:-default}"
+      SCANNER_SURFACE_FEATURES: "${SCANNER_SURFACE_FEATURES:-}"
+      SCANNER_SURFACE_SECRETS_PROVIDER: "${SCANNER_SURFACE_SECRETS_PROVIDER:-file}"
+      SCANNER_SURFACE_SECRETS_NAMESPACE: "${SCANNER_SURFACE_SECRETS_NAMESPACE:-}"
+      SCANNER_SURFACE_SECRETS_ROOT: "${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}"
+      SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER: "${SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER:-}"
+      SCANNER_SURFACE_SECRETS_ALLOW_INLINE: "${SCANNER_SURFACE_SECRETS_ALLOW_INLINE:-false}"
+    volumes:
+      - scanner-surface-cache:/var/lib/stellaops/surface
+      - ${SURFACE_SECRETS_HOST_PATH:-./offline/surface-secrets}:${SCANNER_SURFACE_SECRETS_ROOT:-/etc/stellaops/secrets}:ro
+    networks:
+      - stellaops
+    labels: *release-labels

-  notify-web:
-    image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
+  scheduler-worker:
+    image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge
    restart: unless-stopped
    depends_on:
      - postgres
-      - authority
+      - valkey
+      - scanner-web
+    command:
+      - "dotnet"
+      - "StellaOps.Scheduler.Worker.Host.dll"
+    environment:
+      SCHEDULER__STORAGE__DRIVER: "postgres"
+      SCHEDULER__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
+      SCHEDULER__QUEUE__KIND: "${SCHEDULER_QUEUE_KIND:-Valkey}"
+      SCHEDULER__QUEUE__VALKEY__URL: "${SCHEDULER_QUEUE_VALKEY_URL:-valkey:6379}"
+      SCHEDULER__WORKER__RUNNER__SCANNER__BASEADDRESS: "${SCHEDULER_SCANNER_BASEADDRESS:-http://scanner-web:8444}"
+    networks:
+      - stellaops
+    labels: *release-labels
+
+  notify-web:
+    image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2}
+    restart: unless-stopped
+    depends_on:
+      - postgres
+      - authority
    environment:
      DOTNET_ENVIRONMENT: Production
    volumes:
@@ -261,63 +297,65 @@ services:
      - stellaops
    labels: *release-labels

  excititor:
    image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa
    restart: unless-stopped
    depends_on:
-      - concelier
+      - postgres
+      - concelier
    environment:
      EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445"
-      EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017"
+      EXCITITOR__STORAGE__DRIVER: "postgres"
+      EXCITITOR__STORAGE__POSTGRES__CONNECTIONSTRING: "Host=postgres;Port=5432;Database=${POSTGRES_DB:-stellaops_platform};Username=${POSTGRES_USER:-stellaops};Password=${POSTGRES_PASSWORD:-stellaops}"
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-web:
    image: registry.stella-ops.org/stellaops/advisory-ai-web:2025.09.2
    restart: unless-stopped
    depends_on:
      - scanner-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    ports:
      - "${ADVISORY_AI_WEB_PORT:-8448}:8448"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  advisory-ai-worker:
    image: registry.stella-ops.org/stellaops/advisory-ai-worker:2025.09.2
    restart: unless-stopped
    depends_on:
      - advisory-ai-web
    environment:
      ADVISORYAI__AdvisoryAI__SbomBaseAddress: "${ADVISORY_AI_SBOM_BASEADDRESS:-http://scanner-web:8444}"
      ADVISORYAI__AdvisoryAI__Queue__DirectoryPath: "/var/lib/advisory-ai/queue"
      ADVISORYAI__AdvisoryAI__Storage__PlanCacheDirectory: "/var/lib/advisory-ai/plans"
      ADVISORYAI__AdvisoryAI__Storage__OutputDirectory: "/var/lib/advisory-ai/outputs"
      ADVISORYAI__AdvisoryAI__Inference__Mode: "${ADVISORY_AI_INFERENCE_MODE:-Local}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__BaseAddress: "${ADVISORY_AI_REMOTE_BASEADDRESS:-}"
      ADVISORYAI__AdvisoryAI__Inference__Remote__ApiKey: "${ADVISORY_AI_REMOTE_APIKEY:-}"
    volumes:
      - advisory-ai-queue:/var/lib/advisory-ai/queue
      - advisory-ai-plans:/var/lib/advisory-ai/plans
      - advisory-ai-outputs:/var/lib/advisory-ai/outputs
    networks:
      - stellaops
    labels: *release-labels

  web-ui:
    image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23
    restart: unless-stopped
    depends_on:
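Before rolling this compose change out, it can help to render the merged configuration and surface unresolved substitutions early. A minimal sketch, assuming the compose file and env example live under deploy/compose/ as the file headers below suggest:

    # Render the fully substituted configuration without starting any containers
    docker compose -f deploy/compose/docker-compose.airgap.yaml \
      --env-file deploy/compose/env/airgap.env.example config >/dev/null && echo "compose config OK"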
deploy/compose/env/airgap.env.example (vendored, 100 changed lines)
@@ -1,45 +1,91 @@
# Substitutions for docker-compose.airgap.yaml
-MONGO_INITDB_ROOT_USERNAME=stellaops
-MONGO_INITDB_ROOT_PASSWORD=airgap-password
-MINIO_ROOT_USER=stellaops-offline
-MINIO_ROOT_PASSWORD=airgap-minio-secret
-MINIO_CONSOLE_PORT=29001
+# PostgreSQL Database
+POSTGRES_USER=stellaops
+POSTGRES_PASSWORD=airgap-postgres-password
+POSTGRES_DB=stellaops_platform
+POSTGRES_PORT=25432

+# Valkey (Redis-compatible cache and messaging)
+VALKEY_PORT=26379

+# RustFS Object Storage
RUSTFS_HTTP_PORT=8080

+# Authority (OAuth2/OIDC)
AUTHORITY_ISSUER=https://authority.airgap.local
AUTHORITY_PORT=8440
-SIGNER_POE_INTROSPECT_URL=file:///offline/poe/introspect.json
-SIGNER_PORT=8441
-ATTESTOR_PORT=8442
-# Secrets for Issuer Directory are provided via issuer-directory.mongo.env (see etc/secrets/issuer-directory.mongo.secret.example).
-ISSUER_DIRECTORY_PORT=8447
-ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
-ISSUER_DIRECTORY_SEED_CSAF=true
-CONCELIER_PORT=8445
-SCANNER_WEB_PORT=8444
-UI_PORT=9443
-NATS_CLIENT_PORT=24222
-SCANNER_QUEUE_BROKER=nats://nats:4222
AUTHORITY_OFFLINE_CACHE_TOLERANCE=00:45:00

+# Signer
+SIGNER_POE_INTROSPECT_URL=file:///offline/poe/introspect.json
+SIGNER_PORT=8441

+# Attestor
+ATTESTOR_PORT=8442

+# Issuer Directory
+ISSUER_DIRECTORY_PORT=8447
+ISSUER_DIRECTORY_SEED_CSAF=true

+# Concelier
+CONCELIER_PORT=8445

+# Scanner
+SCANNER_WEB_PORT=8444
+SCANNER_QUEUE_BROKER=valkey://valkey:6379
SCANNER_EVENTS_ENABLED=false
-SCANNER_EVENTS_DRIVER=redis
-# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://.
+SCANNER_EVENTS_DRIVER=valkey
SCANNER_EVENTS_DSN=
SCANNER_EVENTS_STREAM=stella.events
SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5
SCANNER_EVENTS_MAX_STREAM_LENGTH=10000
-SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080/api/v1
+# Surface.Env configuration
+SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080
+SCANNER_SURFACE_FS_BUCKET=surface-cache
SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
+SCANNER_SURFACE_CACHE_QUOTA_MB=4096
+SCANNER_SURFACE_PREFETCH_ENABLED=false
+SCANNER_SURFACE_TENANT=default
+SCANNER_SURFACE_FEATURES=
+SCANNER_SURFACE_SECRETS_PROVIDER=file
+SCANNER_SURFACE_SECRETS_NAMESPACE=
+SCANNER_SURFACE_SECRETS_ROOT=/etc/stellaops/secrets
+SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER=
+SCANNER_SURFACE_SECRETS_ALLOW_INLINE=false
+SURFACE_SECRETS_HOST_PATH=./offline/surface-secrets

+# Offline Kit configuration
+SCANNER_OFFLINEKIT_ENABLED=false
+SCANNER_OFFLINEKIT_REQUIREDSSE=true
+SCANNER_OFFLINEKIT_REKOROFFLINEMODE=true
+SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY=/etc/stellaops/trust-roots
+SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY=/var/lib/stellaops/rekor-snapshot
+SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH=./offline/trust-roots
+SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH=./offline/rekor-snapshot

# Zastava inherits Scanner defaults; override if Observer/Webhook diverge
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
-SCANNER_SURFACE_SECRETS_PROVIDER=file
-SCANNER_SURFACE_SECRETS_ROOT=/etc/stellaops/secrets
-SCHEDULER_QUEUE_KIND=Nats
-SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
-SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
+# Scheduler
+SCHEDULER_QUEUE_KIND=Valkey
+SCHEDULER_QUEUE_VALKEY_URL=valkey:6379
SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444

+# Notify
+NOTIFY_WEB_PORT=9446

+# Advisory AI
ADVISORY_AI_WEB_PORT=8448
ADVISORY_AI_SBOM_BASEADDRESS=http://scanner-web:8444
ADVISORY_AI_INFERENCE_MODE=Local
ADVISORY_AI_REMOTE_BASEADDRESS=
ADVISORY_AI_REMOTE_APIKEY=

+# Web UI
+UI_PORT=9443

+# NATS
+NATS_CLIENT_PORT=24222
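The offline-kit and surface-secrets defaults above are bind-mounted read-only by the scanner services, so the host directories need to exist before the stack comes up. A minimal sketch using the default relative paths from this env example:

    # Create the host directories expected by the read-only mounts
    mkdir -p ./offline/surface-secrets ./offline/trust-roots ./offline/rekor-snapshot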
deploy/compose/env/dev.env.example (vendored, 65 changed lines)
@@ -1,47 +1,78 @@
# Substitutions for docker-compose.dev.yaml
-MONGO_INITDB_ROOT_USERNAME=stellaops
-MONGO_INITDB_ROOT_PASSWORD=dev-password
-MINIO_ROOT_USER=stellaops
-MINIO_ROOT_PASSWORD=dev-minio-secret
-MINIO_CONSOLE_PORT=9001
+# PostgreSQL Database
+POSTGRES_USER=stellaops
+POSTGRES_PASSWORD=dev-postgres-password
+POSTGRES_DB=stellaops_platform
+POSTGRES_PORT=5432

+# Valkey (Redis-compatible cache and messaging)
+VALKEY_PORT=6379

+# RustFS Object Storage
RUSTFS_HTTP_PORT=8080

+# Authority (OAuth2/OIDC)
AUTHORITY_ISSUER=https://authority.localtest.me
AUTHORITY_PORT=8440
-SIGNER_POE_INTROSPECT_URL=https://licensing.svc.local/introspect
+# Signer
+SIGNER_POE_INTROSPECT_URL=https://licensing.svc.local/introspect
SIGNER_PORT=8441

+# Attestor
ATTESTOR_PORT=8442
-# Secrets for Issuer Directory are provided via issuer-directory.mongo.env (see etc/secrets/issuer-directory.mongo.secret.example).
+# Issuer Directory
ISSUER_DIRECTORY_PORT=8447
-ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
ISSUER_DIRECTORY_SEED_CSAF=true

+# Concelier
CONCELIER_PORT=8445

+# Scanner
SCANNER_WEB_PORT=8444
-UI_PORT=8443
-NATS_CLIENT_PORT=4222
SCANNER_QUEUE_BROKER=nats://nats:4222
SCANNER_EVENTS_ENABLED=false
-SCANNER_EVENTS_DRIVER=redis
-# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://.
-SCANNER_EVENTS_DSN=
+SCANNER_EVENTS_DRIVER=valkey
+SCANNER_EVENTS_DSN=valkey:6379
SCANNER_EVENTS_STREAM=stella.events
SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5
SCANNER_EVENTS_MAX_STREAM_LENGTH=10000
-# Surface.Env defaults keep worker/web service aligned with local RustFS and inline secrets.
+# Surface.Env defaults keep worker/web service aligned with local RustFS and inline secrets
SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080/api/v1
SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
SCANNER_SURFACE_SECRETS_PROVIDER=inline
SCANNER_SURFACE_SECRETS_ROOT=

# Zastava inherits Scanner defaults; override if Observer/Webhook diverge
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
ZASTAVA_SURFACE_SECRETS_PROVIDER=${SCANNER_SURFACE_SECRETS_PROVIDER}
ZASTAVA_SURFACE_SECRETS_ROOT=${SCANNER_SURFACE_SECRETS_ROOT}

+# Scheduler
SCHEDULER_QUEUE_KIND=Nats
SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
-SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444

+# Notify
+NOTIFY_WEB_PORT=8446

+# Advisory AI
ADVISORY_AI_WEB_PORT=8448
ADVISORY_AI_SBOM_BASEADDRESS=http://scanner-web:8444
ADVISORY_AI_INFERENCE_MODE=Local
ADVISORY_AI_REMOTE_BASEADDRESS=
ADVISORY_AI_REMOTE_APIKEY=

+# Web UI
+UI_PORT=8443

+# NATS
+NATS_CLIENT_PORT=4222

+# CryptoPro (optional)
+CRYPTOPRO_PORT=18080
+CRYPTOPRO_ACCEPT_EULA=0
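The dev profile now points SCANNER_EVENTS_DSN at Valkey while keeping NATS as the queue broker. A quick, hedged smoke test of that DSN, assuming the cache service is named valkey and uses the stock image that ships valkey-cli:

    # Expect "PONG" if the events DSN target is reachable
    docker compose exec valkey valkey-cli ping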
deploy/compose/env/prod.env.example (vendored, 107 changed lines)
@@ -1,49 +1,96 @@
# Substitutions for docker-compose.prod.yaml
-# ⚠️ Replace all placeholder secrets with values sourced from your secret manager.
-MONGO_INITDB_ROOT_USERNAME=stellaops-prod
-MONGO_INITDB_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD
-MINIO_ROOT_USER=stellaops-prod
-MINIO_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD
-# Expose the MinIO console only to trusted operator networks.
-MINIO_CONSOLE_PORT=39001
-RUSTFS_HTTP_PORT=8080
-AUTHORITY_ISSUER=https://authority.prod.stella-ops.org
-AUTHORITY_PORT=8440
-SIGNER_POE_INTROSPECT_URL=https://licensing.prod.stella-ops.org/introspect
+# WARNING: Replace all placeholder secrets with values sourced from your secret manager.

+# PostgreSQL Database
+POSTGRES_USER=stellaops-prod
+POSTGRES_PASSWORD=REPLACE_WITH_STRONG_PASSWORD
+POSTGRES_DB=stellaops_platform
+POSTGRES_PORT=5432

+# Valkey (Redis-compatible cache and messaging)
+VALKEY_PORT=6379

+# RustFS Object Storage
+RUSTFS_HTTP_PORT=8080

+# Authority (OAuth2/OIDC)
+AUTHORITY_ISSUER=https://authority.prod.stella-ops.org
+AUTHORITY_PORT=8440
+AUTHORITY_OFFLINE_CACHE_TOLERANCE=00:30:00

+# Signer
+SIGNER_POE_INTROSPECT_URL=https://licensing.prod.stella-ops.org/introspect
SIGNER_PORT=8441

+# Attestor
ATTESTOR_PORT=8442
-# Secrets for Issuer Directory are provided via issuer-directory.mongo.env (see etc/secrets/issuer-directory.mongo.secret.example).
+# Issuer Directory
ISSUER_DIRECTORY_PORT=8447
-ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
ISSUER_DIRECTORY_SEED_CSAF=true

+# Concelier
CONCELIER_PORT=8445
-SCANNER_WEB_PORT=8444
-UI_PORT=8443
-NATS_CLIENT_PORT=4222
-SCANNER_QUEUE_BROKER=nats://nats:4222
+# Scanner
+SCANNER_WEB_PORT=8444
+SCANNER_QUEUE_BROKER=valkey://valkey:6379
# `true` enables signed scanner events for Notify ingestion.
SCANNER_EVENTS_ENABLED=true
-SCANNER_EVENTS_DRIVER=redis
-# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://.
-SCANNER_EVENTS_DSN=
+SCANNER_EVENTS_DRIVER=valkey
+SCANNER_EVENTS_DSN=
SCANNER_EVENTS_STREAM=stella.events
SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5
SCANNER_EVENTS_MAX_STREAM_LENGTH=10000

+# Surface.Env configuration
SCANNER_SURFACE_FS_ENDPOINT=https://surfacefs.prod.stella-ops.org/api/v1
+SCANNER_SURFACE_FS_BUCKET=surface-cache
SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
+SCANNER_SURFACE_CACHE_QUOTA_MB=4096
+SCANNER_SURFACE_PREFETCH_ENABLED=false
+SCANNER_SURFACE_TENANT=default
+SCANNER_SURFACE_FEATURES=
+SCANNER_SURFACE_SECRETS_PROVIDER=kubernetes
+SCANNER_SURFACE_SECRETS_NAMESPACE=
+SCANNER_SURFACE_SECRETS_ROOT=stellaops/scanner
+SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER=
+SCANNER_SURFACE_SECRETS_ALLOW_INLINE=false
+SURFACE_SECRETS_HOST_PATH=./offline/surface-secrets

+# Offline Kit configuration
+SCANNER_OFFLINEKIT_ENABLED=false
+SCANNER_OFFLINEKIT_REQUIREDSSE=true
+SCANNER_OFFLINEKIT_REKOROFFLINEMODE=true
+SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY=/etc/stellaops/trust-roots
+SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY=/var/lib/stellaops/rekor-snapshot
+SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH=./offline/trust-roots
+SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH=./offline/rekor-snapshot

# Zastava inherits Scanner defaults; override if Observer/Webhook diverge
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
-SCANNER_SURFACE_SECRETS_PROVIDER=kubernetes
-SCANNER_SURFACE_SECRETS_ROOT=stellaops/scanner
-SCHEDULER_QUEUE_KIND=Nats
-SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
-SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
+# Scheduler
+SCHEDULER_QUEUE_KIND=Valkey
+SCHEDULER_QUEUE_VALKEY_URL=valkey:6379
SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444

+# Notify
+NOTIFY_WEB_PORT=8446

+# Advisory AI
ADVISORY_AI_WEB_PORT=8448
ADVISORY_AI_SBOM_BASEADDRESS=https://scanner-web:8444
ADVISORY_AI_INFERENCE_MODE=Local
ADVISORY_AI_REMOTE_BASEADDRESS=
ADVISORY_AI_REMOTE_APIKEY=
-# External reverse proxy (Traefik, Envoy, etc.) that terminates TLS.
-FRONTDOOR_NETWORK=stellaops_frontdoor
+# Web UI
+UI_PORT=8443

+# NATS
+NATS_CLIENT_PORT=4222

+# External reverse proxy (Traefik, Envoy, etc.) that terminates TLS.
+FRONTDOOR_NETWORK=stellaops_frontdoor
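FRONTDOOR_NETWORK refers to an external network owned by the TLS-terminating proxy, so compose will not create it on its own. A minimal sketch, using the default name above:

    # Create the front-door network once per host before bringing the stack up
    docker network create stellaops_frontdoor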
deploy/compose/env/stage.env.example (vendored, 89 changed lines)
@@ -1,44 +1,91 @@
# Substitutions for docker-compose.stage.yaml
-MONGO_INITDB_ROOT_USERNAME=stellaops
-MONGO_INITDB_ROOT_PASSWORD=stage-password
-MINIO_ROOT_USER=stellaops-stage
-MINIO_ROOT_PASSWORD=stage-minio-secret
-MINIO_CONSOLE_PORT=19001
+# PostgreSQL Database
+POSTGRES_USER=stellaops
+POSTGRES_PASSWORD=stage-postgres-password
+POSTGRES_DB=stellaops_platform
+POSTGRES_PORT=5432

+# Valkey (Redis-compatible cache and messaging)
+VALKEY_PORT=6379

+# RustFS Object Storage
RUSTFS_HTTP_PORT=8080

+# Authority (OAuth2/OIDC)
AUTHORITY_ISSUER=https://authority.stage.stella-ops.internal
AUTHORITY_PORT=8440
-SIGNER_POE_INTROSPECT_URL=https://licensing.stage.stella-ops.internal/introspect
+AUTHORITY_OFFLINE_CACHE_TOLERANCE=00:30:00

+# Signer
+SIGNER_POE_INTROSPECT_URL=https://licensing.stage.stella-ops.internal/introspect
SIGNER_PORT=8441

+# Attestor
ATTESTOR_PORT=8442
-# Secrets for Issuer Directory are provided via issuer-directory.mongo.env (see etc/secrets/issuer-directory.mongo.secret.example).
+# Issuer Directory
ISSUER_DIRECTORY_PORT=8447
-ISSUER_DIRECTORY_MONGO_CONNECTION_STRING=mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017
ISSUER_DIRECTORY_SEED_CSAF=true

+# Concelier
CONCELIER_PORT=8445

+# Scanner
SCANNER_WEB_PORT=8444
-UI_PORT=8443
-NATS_CLIENT_PORT=4222
-SCANNER_QUEUE_BROKER=nats://nats:4222
+SCANNER_QUEUE_BROKER=valkey://valkey:6379
SCANNER_EVENTS_ENABLED=false
-SCANNER_EVENTS_DRIVER=redis
-# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://.
+SCANNER_EVENTS_DRIVER=valkey
SCANNER_EVENTS_DSN=
SCANNER_EVENTS_STREAM=stella.events
SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5
SCANNER_EVENTS_MAX_STREAM_LENGTH=10000
-SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080/api/v1
+# Surface.Env configuration
+SCANNER_SURFACE_FS_ENDPOINT=http://rustfs:8080
+SCANNER_SURFACE_FS_BUCKET=surface-cache
SCANNER_SURFACE_CACHE_ROOT=/var/lib/stellaops/surface
+SCANNER_SURFACE_CACHE_QUOTA_MB=4096
+SCANNER_SURFACE_PREFETCH_ENABLED=false
+SCANNER_SURFACE_TENANT=default
+SCANNER_SURFACE_FEATURES=
+SCANNER_SURFACE_SECRETS_PROVIDER=kubernetes
+SCANNER_SURFACE_SECRETS_NAMESPACE=
+SCANNER_SURFACE_SECRETS_ROOT=stellaops/scanner
+SCANNER_SURFACE_SECRETS_FALLBACK_PROVIDER=
+SCANNER_SURFACE_SECRETS_ALLOW_INLINE=false
+SURFACE_SECRETS_HOST_PATH=./offline/surface-secrets

+# Offline Kit configuration
+SCANNER_OFFLINEKIT_ENABLED=false
+SCANNER_OFFLINEKIT_REQUIREDSSE=true
+SCANNER_OFFLINEKIT_REKOROFFLINEMODE=true
+SCANNER_OFFLINEKIT_TRUSTROOTDIRECTORY=/etc/stellaops/trust-roots
+SCANNER_OFFLINEKIT_REKORSNAPSHOTDIRECTORY=/var/lib/stellaops/rekor-snapshot
+SCANNER_OFFLINEKIT_TRUSTROOTS_HOST_PATH=./offline/trust-roots
+SCANNER_OFFLINEKIT_REKOR_SNAPSHOT_HOST_PATH=./offline/rekor-snapshot

# Zastava inherits Scanner defaults; override if Observer/Webhook diverge
ZASTAVA_SURFACE_FS_ENDPOINT=${SCANNER_SURFACE_FS_ENDPOINT}
ZASTAVA_SURFACE_CACHE_ROOT=${SCANNER_SURFACE_CACHE_ROOT}
-SCANNER_SURFACE_SECRETS_PROVIDER=kubernetes
-SCANNER_SURFACE_SECRETS_ROOT=stellaops/scanner
-SCHEDULER_QUEUE_KIND=Nats
-SCHEDULER_QUEUE_NATS_URL=nats://nats:4222
-SCHEDULER_STORAGE_DATABASE=stellaops_scheduler
+# Scheduler
+SCHEDULER_QUEUE_KIND=Valkey
+SCHEDULER_QUEUE_VALKEY_URL=valkey:6379
SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444

+# Notify
+NOTIFY_WEB_PORT=8446

+# Advisory AI
ADVISORY_AI_WEB_PORT=8448
ADVISORY_AI_SBOM_BASEADDRESS=http://scanner-web:8444
ADVISORY_AI_INFERENCE_MODE=Local
ADVISORY_AI_REMOTE_BASEADDRESS=
ADVISORY_AI_REMOTE_APIKEY=

+# Web UI
+UI_PORT=8443

+# NATS
+NATS_CLIENT_PORT=4222
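Stage now mirrors prod apart from hostnames, passwords, and the surface endpoint; a quick way to keep the two examples from drifting is to diff them directly (paths assumed from the file headers in this compare view):

    diff -u deploy/compose/env/stage.env.example deploy/compose/env/prod.env.example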
deploy/compose/env/wine-csp.env.example (vendored, 49 lines removed)
@@ -1,49 +0,0 @@
-# Wine CSP Service Environment Configuration
-# ===========================================================================
-#
-# WARNING: This service is for TEST VECTOR GENERATION ONLY.
-# It MUST NOT be used for production cryptographic signing operations.
-#
-# ===========================================================================
-
-# Service port (default: 5099)
-WINE_CSP_PORT=5099
-
-# Operation mode:
-# - limited: Works without CryptoPro CSP (basic GostCryptography only)
-# - full: Requires CryptoPro CSP installer to be mounted at WINE_CSP_INSTALLER_PATH
-WINE_CSP_MODE=limited
-
-# Path to CryptoPro CSP installer MSI (customer-provided)
-# Mount your licensed CSP installer to /opt/cryptopro/csp-installer.msi
-WINE_CSP_INSTALLER_PATH=/opt/cryptopro/csp-installer.msi
-
-# Logging level: Trace, Debug, Information, Warning, Error, Critical
-WINE_CSP_LOG_LEVEL=Information
-
-# Image version tag
-WINE_CSP_VERSION=2025.10.0-edge
-
-# ASP.NET Core environment (Development, Staging, Production)
-ASPNETCORE_ENVIRONMENT=Production
-
-# ===========================================================================
-# Advanced Configuration (typically not changed)
-# ===========================================================================
-
-# Wine debug output (set to "warn+all" for troubleshooting)
-# WINEDEBUG=-all
-
-# Wine architecture (must be win64 for CryptoPro CSP)
-# WINEARCH=win64
-
-# ===========================================================================
-# Volume Mounts (configure in docker-compose, not here)
-# ===========================================================================
-# - Wine prefix: /home/winecsp/.wine (persistent storage)
-# - CSP installer: /opt/cryptopro (read-only mount)
-# - Logs: /var/log/wine-csp (log output)
-#
-# Example mount for CSP installer:
-# volumes:
-#   - /path/to/your/csp-5.0.msi:/opt/cryptopro/csp-installer.msi:ro
deploy/compose/postgres-init/01-extensions.sql (new file, 33 lines)
@@ -0,0 +1,33 @@
+-- PostgreSQL initialization for StellaOps air-gap deployment
+-- This script runs automatically on first container start
+
+-- Enable pg_stat_statements extension for query performance analysis
+CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
+
+-- Enable other useful extensions
+CREATE EXTENSION IF NOT EXISTS pg_trgm;      -- Fuzzy text search
+CREATE EXTENSION IF NOT EXISTS btree_gin;    -- GIN indexes for scalar types
+CREATE EXTENSION IF NOT EXISTS pgcrypto;     -- Cryptographic functions
+
+-- Create schemas for all modules
+-- Migrations will create tables within these schemas
+CREATE SCHEMA IF NOT EXISTS authority;
+CREATE SCHEMA IF NOT EXISTS vuln;
+CREATE SCHEMA IF NOT EXISTS vex;
+CREATE SCHEMA IF NOT EXISTS scheduler;
+CREATE SCHEMA IF NOT EXISTS notify;
+CREATE SCHEMA IF NOT EXISTS policy;
+CREATE SCHEMA IF NOT EXISTS concelier;
+CREATE SCHEMA IF NOT EXISTS audit;
+CREATE SCHEMA IF NOT EXISTS unknowns;
+
+-- Grant usage to application user (assumes POSTGRES_USER is the app user)
+GRANT USAGE ON SCHEMA authority TO PUBLIC;
+GRANT USAGE ON SCHEMA vuln TO PUBLIC;
+GRANT USAGE ON SCHEMA vex TO PUBLIC;
+GRANT USAGE ON SCHEMA scheduler TO PUBLIC;
+GRANT USAGE ON SCHEMA notify TO PUBLIC;
+GRANT USAGE ON SCHEMA policy TO PUBLIC;
+GRANT USAGE ON SCHEMA concelier TO PUBLIC;
+GRANT USAGE ON SCHEMA audit TO PUBLIC;
+GRANT USAGE ON SCHEMA unknowns TO PUBLIC;
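Since 01-extensions.sql only runs on first container start, it is worth confirming the extensions and module schemas actually exist after the initial boot. A hedged sketch, using the service, user, and database names from the env examples above:

    docker compose exec postgres psql -U stellaops -d stellaops_platform -c '\dx'   # list extensions
    docker compose exec postgres psql -U stellaops -d stellaops_platform -c '\dn'   # list schemas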
deploy/docker/Dockerfile.crypto-profile (new file, 172 lines)
@@ -0,0 +1,172 @@
+# syntax=docker/dockerfile:1.4
+# StellaOps Regional Crypto Profile
+# Selects regional cryptographic configuration at build time
+
+# ============================================================================
+# Build Arguments
+# ============================================================================
+ARG CRYPTO_PROFILE=international
+ARG BASE_IMAGE=stellaops/platform:latest
+ARG SERVICE_NAME=authority
+
+# ============================================================================
+# Regional Crypto Profile Layer
+# ============================================================================
+FROM ${BASE_IMAGE} AS regional-profile
+
+# Copy regional cryptographic configuration
+ARG CRYPTO_PROFILE
+COPY etc/appsettings.crypto.${CRYPTO_PROFILE}.yaml /app/etc/appsettings.crypto.yaml
+COPY etc/crypto-plugins-manifest.json /app/etc/crypto-plugins-manifest.json
+
+# Set environment variable for runtime verification
+ENV STELLAOPS_CRYPTO_PROFILE=${CRYPTO_PROFILE}
+ENV STELLAOPS_CRYPTO_CONFIG_PATH=/app/etc/appsettings.crypto.yaml
+ENV STELLAOPS_CRYPTO_MANIFEST_PATH=/app/etc/crypto-plugins-manifest.json
+
+# Add labels for metadata
+LABEL com.stellaops.crypto.profile="${CRYPTO_PROFILE}"
+LABEL com.stellaops.crypto.config="/app/etc/appsettings.crypto.${CRYPTO_PROFILE}.yaml"
+LABEL com.stellaops.crypto.runtime-selection="true"
+
+# ============================================================================
+# Service-Specific Regional Images
+# ============================================================================
+
+# Authority with Regional Crypto
+FROM regional-profile AS authority
+WORKDIR /app/authority
+ENTRYPOINT ["dotnet", "StellaOps.Authority.WebService.dll"]
+
+# Signer with Regional Crypto
+FROM regional-profile AS signer
+WORKDIR /app/signer
+ENTRYPOINT ["dotnet", "StellaOps.Signer.WebService.dll"]
+
+# Attestor with Regional Crypto
+FROM regional-profile AS attestor
+WORKDIR /app/attestor
+ENTRYPOINT ["dotnet", "StellaOps.Attestor.WebService.dll"]
+
+# Concelier with Regional Crypto
+FROM regional-profile AS concelier
+WORKDIR /app/concelier
+ENTRYPOINT ["dotnet", "StellaOps.Concelier.WebService.dll"]
+
+# Scanner with Regional Crypto
+FROM regional-profile AS scanner
+WORKDIR /app/scanner
+ENTRYPOINT ["dotnet", "StellaOps.Scanner.WebService.dll"]
+
+# Excititor with Regional Crypto
+FROM regional-profile AS excititor
+WORKDIR /app/excititor
+ENTRYPOINT ["dotnet", "StellaOps.Excititor.WebService.dll"]
+
+# Policy with Regional Crypto
+FROM regional-profile AS policy
+WORKDIR /app/policy
+ENTRYPOINT ["dotnet", "StellaOps.Policy.WebService.dll"]
+
+# Scheduler with Regional Crypto
+FROM regional-profile AS scheduler
+WORKDIR /app/scheduler
+ENTRYPOINT ["dotnet", "StellaOps.Scheduler.WebService.dll"]
+
+# Notify with Regional Crypto
+FROM regional-profile AS notify
+WORKDIR /app/notify
+ENTRYPOINT ["dotnet", "StellaOps.Notify.WebService.dll"]
+
+# Zastava with Regional Crypto
+FROM regional-profile AS zastava
+WORKDIR /app/zastava
+ENTRYPOINT ["dotnet", "StellaOps.Zastava.WebService.dll"]
+
+# Gateway with Regional Crypto
+FROM regional-profile AS gateway
+WORKDIR /app/gateway
+ENTRYPOINT ["dotnet", "StellaOps.Gateway.WebService.dll"]
+
+# AirGap Importer with Regional Crypto
+FROM regional-profile AS airgap-importer
+WORKDIR /app/airgap-importer
+ENTRYPOINT ["dotnet", "StellaOps.AirGap.Importer.dll"]
+
+# AirGap Exporter with Regional Crypto
+FROM regional-profile AS airgap-exporter
+WORKDIR /app/airgap-exporter
+ENTRYPOINT ["dotnet", "StellaOps.AirGap.Exporter.dll"]
+
+# CLI with Regional Crypto
+FROM regional-profile AS cli
+WORKDIR /app/cli
+ENTRYPOINT ["dotnet", "StellaOps.Cli.dll"]
+
+# ============================================================================
+# Build Instructions
+# ============================================================================
+# Build international profile (default):
+#   docker build -f deploy/docker/Dockerfile.crypto-profile \
+#     --build-arg CRYPTO_PROFILE=international \
+#     --target authority \
+#     -t stellaops/authority:international .
+#
+# Build Russia (GOST) profile:
+#   docker build -f deploy/docker/Dockerfile.crypto-profile \
+#     --build-arg CRYPTO_PROFILE=russia \
+#     --target scanner \
+#     -t stellaops/scanner:russia .
+#
+# Build EU (eIDAS) profile:
+#   docker build -f deploy/docker/Dockerfile.crypto-profile \
+#     --build-arg CRYPTO_PROFILE=eu \
+#     --target signer \
+#     -t stellaops/signer:eu .
+#
+# Build China (SM) profile:
+#   docker build -f deploy/docker/Dockerfile.crypto-profile \
+#     --build-arg CRYPTO_PROFILE=china \
+#     --target attestor \
+#     -t stellaops/attestor:china .
+#
+# ============================================================================
+# Regional Profile Descriptions
+# ============================================================================
+# international: Default NIST algorithms (ES256, RS256, SHA-256)
+#                Uses offline-verification plugin
+#                Jurisdiction: world
+#
+# russia:        GOST R 34.10-2012, GOST R 34.11-2012
+#                Uses CryptoPro CSP plugin
+#                Jurisdiction: russia
+#                Requires: CryptoPro CSP SDK
+#
+# eu:            eIDAS-compliant qualified trust services
+#                Uses eIDAS plugin with qualified certificates
+#                Jurisdiction: eu
+#                Requires: eIDAS trust service provider integration
+#
+# china:         SM2, SM3, SM4 algorithms
+#                Uses SM crypto plugin
+#                Jurisdiction: china
+#                Requires: GmSSL or BouncyCastle SM extensions
+#
+# ============================================================================
+# Runtime Configuration
+# ============================================================================
+# The crypto provider is selected at runtime based on:
+# 1. STELLAOPS_CRYPTO_PROFILE environment variable
+# 2. /app/etc/appsettings.crypto.yaml configuration file
+# 3. /app/etc/crypto-plugins-manifest.json plugin metadata
+#
+# Plugin loading sequence:
+# 1. Application starts
+# 2. CryptoPluginLoader reads /app/etc/appsettings.crypto.yaml
+# 3. Loads enabled plugins from manifest
+# 4. Validates platform compatibility
+# 5. Validates jurisdiction compliance
+# 6. Registers providers with DI container
+# 7. Application uses ICryptoProvider abstraction
+#
+# No cryptographic code is executed until runtime plugin selection completes.
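The in-file build instructions can be condensed into a single check that the profile really is baked into the image label. A minimal sketch for the default international/authority combination:

    docker build -f deploy/docker/Dockerfile.crypto-profile \
      --build-arg CRYPTO_PROFILE=international --target authority \
      -t stellaops/authority:international .
    docker inspect --format '{{ index .Config.Labels "com.stellaops.crypto.profile" }}' \
      stellaops/authority:international   # expect: international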
deploy/docker/Dockerfile.platform (new file, 212 lines)
@@ -0,0 +1,212 @@
+# syntax=docker/dockerfile:1.4
+# StellaOps Platform Image - Build Once, Deploy Everywhere
+# Builds ALL crypto plugins unconditionally for runtime selection
+
+# ============================================================================
+# Stage 1: SDK Build - Build ALL Projects and Crypto Plugins
+# ============================================================================
+FROM mcr.microsoft.com/dotnet/sdk:10.0-preview AS build
+WORKDIR /src
+
+# Copy solution and project files for dependency restore
+COPY Directory.Build.props Directory.Build.targets nuget.config ./
+COPY src/StellaOps.sln ./src/
+
+# Copy all crypto plugin projects
+COPY src/__Libraries/StellaOps.Cryptography/ ./src/__Libraries/StellaOps.Cryptography/
+COPY src/__Libraries/StellaOps.Cryptography.DependencyInjection/ ./src/__Libraries/StellaOps.Cryptography.DependencyInjection/
+COPY src/__Libraries/StellaOps.Cryptography.PluginLoader/ ./src/__Libraries/StellaOps.Cryptography.PluginLoader/
+
+# Crypto plugins - ALL built unconditionally
+COPY src/__Libraries/StellaOps.Cryptography.Plugin.OfflineVerification/ ./src/__Libraries/StellaOps.Cryptography.Plugin.OfflineVerification/
+# Note: Additional crypto plugins can be added here when available:
+# COPY src/__Libraries/StellaOps.Cryptography.Plugin.eIDAS/ ./src/__Libraries/StellaOps.Cryptography.Plugin.eIDAS/
+# COPY src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/ ./src/__Libraries/StellaOps.Cryptography.Plugin.CryptoPro/
+# COPY src/__Libraries/StellaOps.Cryptography.Plugin.SM/ ./src/__Libraries/StellaOps.Cryptography.Plugin.SM/
+
+# Copy all module projects
+COPY src/Authority/ ./src/Authority/
+COPY src/Signer/ ./src/Signer/
+COPY src/Attestor/ ./src/Attestor/
+COPY src/Concelier/ ./src/Concelier/
+COPY src/Scanner/ ./src/Scanner/
+COPY src/AirGap/ ./src/AirGap/
+COPY src/Excititor/ ./src/Excititor/
+COPY src/Policy/ ./src/Policy/
+COPY src/Scheduler/ ./src/Scheduler/
+COPY src/Notify/ ./src/Notify/
+COPY src/Zastava/ ./src/Zastava/
+COPY src/Gateway/ ./src/Gateway/
+COPY src/Cli/ ./src/Cli/
+
+# Copy shared libraries
+COPY src/__Libraries/ ./src/__Libraries/
+
+# Restore dependencies
+RUN dotnet restore src/StellaOps.sln
+
+# Build entire solution (Release configuration)
+RUN dotnet build src/StellaOps.sln --configuration Release --no-restore
+
+# Publish all web services and libraries
+# This creates /app/publish with all assemblies including crypto plugins
+RUN dotnet publish src/Authority/StellaOps.Authority.WebService/StellaOps.Authority.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/authority
+
+RUN dotnet publish src/Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/signer
+
+RUN dotnet publish src/Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/attestor
+
+RUN dotnet publish src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/concelier
+
+RUN dotnet publish src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/scanner
+
+RUN dotnet publish src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/excititor
+
+RUN dotnet publish src/Policy/StellaOps.Policy.WebService/StellaOps.Policy.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/policy
+
+RUN dotnet publish src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/scheduler
+
+RUN dotnet publish src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/notify
+
+RUN dotnet publish src/Zastava/StellaOps.Zastava.WebService/StellaOps.Zastava.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/zastava
+
+RUN dotnet publish src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj \
+    --configuration Release --no-build --output /app/publish/gateway
+
+RUN dotnet publish src/AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj \
+    --configuration Release --no-build --output /app/publish/airgap-importer
+
+RUN dotnet publish src/AirGap/StellaOps.AirGap.Exporter/StellaOps.AirGap.Exporter.csproj \
+    --configuration Release --no-build --output /app/publish/airgap-exporter
+
+RUN dotnet publish src/Cli/StellaOps.Cli/StellaOps.Cli.csproj \
+    --configuration Release --no-build --output /app/publish/cli
+
+# Copy crypto plugin manifest
+COPY etc/crypto-plugins-manifest.json /app/publish/etc/
+
+# ============================================================================
+# Stage 2: Runtime Base - Contains ALL Crypto Plugins
+# ============================================================================
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-preview AS runtime-base
+WORKDIR /app
+
+# Install dependencies for crypto providers
+# PostgreSQL client for Authority/Concelier/etc
+RUN apt-get update && apt-get install -y \
+    postgresql-client \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy all published assemblies (includes all crypto plugins)
+COPY --from=build /app/publish /app/
+
+# Expose common ports (these can be overridden by docker-compose)
+EXPOSE 8080 8443
+
+# Labels
+LABEL com.stellaops.image.type="platform"
+LABEL com.stellaops.image.variant="all-plugins"
+LABEL com.stellaops.crypto.plugins="offline-verification"
+# Additional plugins will be added as they become available:
+# LABEL com.stellaops.crypto.plugins="offline-verification,eidas,cryptopro,sm"
+
+# Health check placeholder (can be overridden per service)
+HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8080/health || exit 1
+
+# ============================================================================
+# Service-Specific Final Stages
+# ============================================================================
+
+# Authority Service
+FROM runtime-base AS authority
+WORKDIR /app/authority
+ENTRYPOINT ["dotnet", "StellaOps.Authority.WebService.dll"]
+
+# Signer Service
+FROM runtime-base AS signer
+WORKDIR /app/signer
+ENTRYPOINT ["dotnet", "StellaOps.Signer.WebService.dll"]
+
+# Attestor Service
+FROM runtime-base AS attestor
+WORKDIR /app/attestor
+ENTRYPOINT ["dotnet", "StellaOps.Attestor.WebService.dll"]
+
+# Concelier Service
+FROM runtime-base AS concelier
+WORKDIR /app/concelier
+ENTRYPOINT ["dotnet", "StellaOps.Concelier.WebService.dll"]
+
+# Scanner Service
+FROM runtime-base AS scanner
+WORKDIR /app/scanner
+ENTRYPOINT ["dotnet", "StellaOps.Scanner.WebService.dll"]
+
+# Excititor Service
+FROM runtime-base AS excititor
+WORKDIR /app/excititor
+ENTRYPOINT ["dotnet", "StellaOps.Excititor.WebService.dll"]
+
+# Policy Service
+FROM runtime-base AS policy
+WORKDIR /app/policy
+ENTRYPOINT ["dotnet", "StellaOps.Policy.WebService.dll"]
+
+# Scheduler Service
+FROM runtime-base AS scheduler
+WORKDIR /app/scheduler
+ENTRYPOINT ["dotnet", "StellaOps.Scheduler.WebService.dll"]
+
+# Notify Service
+FROM runtime-base AS notify
+WORKDIR /app/notify
+ENTRYPOINT ["dotnet", "StellaOps.Notify.WebService.dll"]
+
+# Zastava Service
+FROM runtime-base AS zastava
+WORKDIR /app/zastava
+ENTRYPOINT ["dotnet", "StellaOps.Zastava.WebService.dll"]
+
+# Gateway Service
+FROM runtime-base AS gateway
+WORKDIR /app/gateway
+ENTRYPOINT ["dotnet", "StellaOps.Gateway.WebService.dll"]
+
+# AirGap Importer (CLI tool)
+FROM runtime-base AS airgap-importer
+WORKDIR /app/airgap-importer
+ENTRYPOINT ["dotnet", "StellaOps.AirGap.Importer.dll"]
+
+# AirGap Exporter (CLI tool)
+FROM runtime-base AS airgap-exporter
+WORKDIR /app/airgap-exporter
+ENTRYPOINT ["dotnet", "StellaOps.AirGap.Exporter.dll"]
+
+# CLI Tool
+FROM runtime-base AS cli
+WORKDIR /app/cli
+ENTRYPOINT ["dotnet", "StellaOps.Cli.dll"]
+
+# ============================================================================
+# Build Instructions
+# ============================================================================
+# Build platform image:
+#   docker build -f deploy/docker/Dockerfile.platform --target runtime-base -t stellaops/platform:latest .
+#
+# Build specific service:
+#   docker build -f deploy/docker/Dockerfile.platform --target authority -t stellaops/authority:latest .
+#   docker build -f deploy/docker/Dockerfile.platform --target scanner -t stellaops/scanner:latest .
+#
+# The platform image contains ALL crypto plugins.
+# Regional selection happens at runtime via configuration (see Dockerfile.crypto-profile).
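The two Dockerfiles are meant to chain: Dockerfile.platform produces the all-plugins base that Dockerfile.crypto-profile consumes through BASE_IMAGE (whose default already points at stellaops/platform:latest). A hedged sketch of that two-step build for a GOST-profiled scanner:

    docker build -f deploy/docker/Dockerfile.platform --target runtime-base -t stellaops/platform:latest .
    docker build -f deploy/docker/Dockerfile.crypto-profile \
      --build-arg BASE_IMAGE=stellaops/platform:latest \
      --build-arg CRYPTO_PROFILE=russia --target scanner \
      -t stellaops/scanner:russia .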
555
deploy/grafana/dashboards/attestation-metrics.json
Normal file
555
deploy/grafana/dashboards/attestation-metrics.json
Normal file
@@ -0,0 +1,555 @@
|
|||||||
|
{
  "annotations": {
    "list": [
      {
        "builtIn": 1,
        "datasource": {
          "type": "grafana",
          "uid": "-- Grafana --"
        },
        "enable": true,
        "hide": true,
        "iconColor": "rgba(0, 211, 255, 1)",
        "name": "Annotations & Alerts",
        "type": "dashboard"
      }
    ]
  },
  "editable": true,
  "fiscalYearStartMonth": 0,
  "graphTooltip": 0,
  "id": null,
  "links": [],
  "liveNow": false,
  "panels": [
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "thresholds"
          },
          "mappings": [],
          "max": 1,
          "min": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              {
                "color": "red",
                "value": null
              },
              {
                "color": "yellow",
                "value": 0.9
              },
              {
                "color": "green",
                "value": 0.95
              }
            ]
          },
          "unit": "percentunit"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 6,
        "x": 0,
        "y": 0
      },
      "id": 1,
      "options": {
        "orientation": "auto",
        "reduceOptions": {
          "calcs": [
            "lastNotNull"
          ],
          "fields": "",
          "values": false
        },
        "showThresholdLabels": true,
        "showThresholdMarkers": true
      },
      "pluginVersion": "10.0.0",
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "sum(stella_attestations_created_total) / (sum(stella_attestations_created_total) + sum(stella_attestations_failed_total))",
          "refId": "A"
        }
      ],
      "title": "Attestation Completeness (Target: ≥95%)",
      "type": "gauge"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "palette-classic"
          },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "bars",
            "fillOpacity": 80,
            "gradientMode": "none",
            "hideFrom": {
              "tooltip": false,
              "viz": false,
              "legend": false
            },
            "lineInterpolation": "linear",
            "lineWidth": 1,
            "pointSize": 5,
            "scaleDistribution": {
              "type": "linear"
            },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": {
              "group": "A",
              "mode": "none"
            },
            "thresholdsStyle": {
              "mode": "line"
            }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              {
                "color": "green",
                "value": null
              },
              {
                "color": "red",
                "value": 30
              }
            ]
          },
          "unit": "s"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 9,
        "x": 6,
        "y": 0
      },
      "id": 2,
      "options": {
        "legend": {
          "calcs": ["mean", "max"],
          "displayMode": "table",
          "placement": "right",
          "showLegend": true
        },
        "tooltip": {
          "mode": "single",
          "sort": "none"
        }
      },
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "histogram_quantile(0.95, rate(stella_ttfe_seconds_bucket[5m]))",
          "legendFormat": "p95",
          "refId": "A"
        },
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "histogram_quantile(0.50, rate(stella_ttfe_seconds_bucket[5m]))",
          "legendFormat": "p50",
          "refId": "B"
        }
      ],
      "title": "TTFE Distribution (Target: ≤30s)",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "palette-classic"
          },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 20,
            "gradientMode": "none",
            "hideFrom": {
              "tooltip": false,
              "viz": false,
              "legend": false
            },
            "lineInterpolation": "smooth",
            "lineWidth": 2,
            "pointSize": 5,
            "scaleDistribution": {
              "type": "linear"
            },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": {
              "group": "A",
              "mode": "none"
            },
            "thresholdsStyle": {
              "mode": "off"
            }
          },
          "mappings": [],
          "max": 1,
          "min": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              {
                "color": "green",
                "value": null
              }
            ]
          },
          "unit": "percentunit"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 9,
        "x": 15,
        "y": 0
      },
      "id": 3,
      "options": {
        "legend": {
          "calcs": ["mean", "last"],
          "displayMode": "table",
          "placement": "right",
          "showLegend": true
        },
        "tooltip": {
          "mode": "single",
          "sort": "none"
        }
      },
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "sum(rate(stella_attestations_verified_total[5m])) / (sum(rate(stella_attestations_verified_total[5m])) + sum(rate(stella_attestations_failed_total[5m])))",
          "legendFormat": "Success Rate",
          "refId": "A"
        }
      ],
      "title": "Verification Success Rate",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "palette-classic"
          },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 20,
            "gradientMode": "none",
            "hideFrom": {
              "tooltip": false,
              "viz": false,
              "legend": false
            },
            "lineInterpolation": "smooth",
            "lineWidth": 2,
            "pointSize": 5,
            "scaleDistribution": {
              "type": "linear"
            },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": {
              "group": "A",
              "mode": "normal"
            },
            "thresholdsStyle": {
              "mode": "line"
            }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              {
                "color": "green",
                "value": null
              },
              {
                "color": "red",
                "value": 1
              }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 0,
        "y": 8
      },
      "id": 4,
      "options": {
        "legend": {
          "calcs": ["sum"],
          "displayMode": "table",
          "placement": "right",
          "showLegend": true
        },
        "tooltip": {
          "mode": "multi",
          "sort": "none"
        }
      },
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "sum by (environment, reason) (rate(stella_post_deploy_reversions_total[5m]))",
          "legendFormat": "{{environment}}: {{reason}}",
          "refId": "A"
        }
      ],
      "title": "Post-Deploy Reversions (Trend to Zero)",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "palette-classic"
          },
          "custom": {
            "hideFrom": {
              "tooltip": false,
              "viz": false,
              "legend": false
            }
          },
          "mappings": []
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 6,
        "x": 12,
        "y": 8
      },
      "id": 5,
      "options": {
        "legend": {
          "displayMode": "table",
          "placement": "right",
          "showLegend": true,
          "values": ["value"]
        },
        "pieType": "pie",
        "tooltip": {
          "mode": "single",
          "sort": "none"
        }
      },
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "sum by (predicate_type) (stella_attestations_created_total)",
          "legendFormat": "{{predicate_type}}",
          "refId": "A"
        }
      ],
      "title": "Attestations by Type",
      "type": "piechart"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "${DS_PROMETHEUS}"
      },
      "fieldConfig": {
        "defaults": {
          "color": {
            "mode": "palette-classic"
          },
          "custom": {
            "axisCenteredZero": false,
            "axisColorMode": "text",
            "axisLabel": "",
            "axisPlacement": "auto",
            "barAlignment": 0,
            "drawStyle": "line",
            "fillOpacity": 20,
            "gradientMode": "none",
            "hideFrom": {
              "tooltip": false,
              "viz": false,
              "legend": false
            },
            "lineInterpolation": "smooth",
            "lineWidth": 2,
            "pointSize": 5,
            "scaleDistribution": {
              "type": "linear"
            },
            "showPoints": "auto",
            "spanNulls": false,
            "stacking": {
              "group": "A",
              "mode": "none"
            },
            "thresholdsStyle": {
              "mode": "off"
            }
          },
          "mappings": [],
          "thresholds": {
            "mode": "absolute",
            "steps": [
              {
                "color": "green",
                "value": null
              },
              {
                "color": "red",
                "value": 80
              }
            ]
          },
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 6,
        "x": 18,
        "y": 8
      },
      "id": 6,
      "options": {
        "legend": {
          "calcs": [],
          "displayMode": "list",
          "placement": "bottom",
          "showLegend": true
        },
        "tooltip": {
          "mode": "single",
          "sort": "none"
        }
      },
      "targets": [
        {
          "datasource": {
            "type": "prometheus",
            "uid": "${DS_PROMETHEUS}"
          },
          "expr": "sum(stella_attestations_failed_total{reason=\"stale_evidence\"})",
          "legendFormat": "Stale Evidence Alerts",
          "refId": "A"
        }
      ],
      "title": "Stale Evidence Alerts",
      "type": "timeseries"
    }
  ],
  "refresh": "30s",
  "schemaVersion": 38,
  "style": "dark",
  "tags": ["stellaops", "attestations", "security"],
  "templating": {
    "list": [
      {
        "current": {
          "selected": false,
          "text": "Prometheus",
          "value": "Prometheus"
        },
        "hide": 0,
        "includeAll": false,
        "label": "Data Source",
        "multi": false,
        "name": "DS_PROMETHEUS",
        "options": [],
        "query": "prometheus",
        "refresh": 1,
        "regex": "",
        "skipUrlSync": false,
        "type": "datasource"
      }
    ]
  },
  "time": {
    "from": "now-6h",
    "to": "now"
  },
  "timepicker": {},
  "timezone": "",
  "title": "StellaOps - Attestation Metrics",
  "uid": "stellaops-attestations",
  "version": 1,
  "weekStart": ""
}
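The completeness gauge in this dashboard divides stella_attestations_created_total by created plus failed and turns green at 0.95, matching the ≥95% target in the panel title. A hedged sketch of checking that same ratio outside Grafana through the standard Prometheus HTTP query API (the http://prometheus:9090 address is an assumption, not something defined in this repository):

#!/usr/bin/env python3
"""Check the attestation-completeness target outside Grafana.

Sketch only: PROM_URL is an assumed address; the expression and the 0.95
threshold are taken from the dashboard panel above.
"""
import json
import urllib.parse
import urllib.request

PROM_URL = "http://prometheus:9090"  # assumption: adjust to your deployment
EXPR = (
    "sum(stella_attestations_created_total) / "
    "(sum(stella_attestations_created_total) + sum(stella_attestations_failed_total))"
)

# Instant query against the Prometheus HTTP API.
params = urllib.parse.urlencode({"query": EXPR})
with urllib.request.urlopen(f"{PROM_URL}/api/v1/query?{params}") as resp:
    payload = json.load(resp)

# Instant queries return a vector; value[1] holds the sample as a string.
result = payload["data"]["result"]
completeness = float(result[0]["value"][1]) if result else 0.0
status = "OK" if completeness >= 0.95 else "BELOW TARGET"
print(f"attestation completeness = {completeness:.2%} ({status})")

The same expression could also back an alerting rule, so the dashboard and any paging on the 95% target stay driven by one definition of completeness.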
1016 deploy/grafana/dashboards/provcache-overview.json (new file)
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.