Compare commits
124 Commits
feature/do ... main
| SHA1 |
|---|
| b4235c134c |
| dee252940b |
| 8bbfe4d2d2 |
| 394b57f6bf |
| 3a2100aa78 |
| 417ef83202 |
| 2170a58734 |
| 415eff1207 |
| b55d9fa68d |
| 5a480a3c2a |
| 4391f35d8a |
| b1f40945b7 |
| 41864227d2 |
| 8137503221 |
| 08dab053c0 |
| 7ce83270d0 |
| 505fe7a885 |
| 0cb5c9abfb |
| d59cc816c1 |
| 8c8f0c632d |
| 4344020dd1 |
| b058dbe031 |
| 3411e825cd |
| 9202cd7da8 |
| 00c41790f4 |
| 2e70c9fdb6 |
| d233fa3529 |
| e2e404e705 |
| 01f4943ab9 |
| 233873f620 |
| f1a39c4ce3 |
| 6e45066e37 |
| e00f6365da |
| 999e26a48e |
| d776e93b16 |
| 564df71bfb |
| e1f1bef4c1 |
| 3f3473ee3a |
| efaf3cb789 |
| ce5ec9c158 |
| ab22181e8b |
| 1995883476 |
| 0987cd6ac8 |
| b83aa1aa0b |
| ce1f282ce0 |
| b8b493913a |
| 49922dff5a |
| 92bc4d3a07 |
| 0ad4777259 |
| 2bd189387e |
| 3a92c77a04 |
| b7059d523e |
| 96e5646977 |
| a3c7fe5e88 |
| 199aaf74d8 |
| f30805ad7f |
| 689c656f20 |
| 108d1c64b3 |
| bc0762e97d |
| 3d01bf9edc |
| 68bc53a07b |
| 4b124fb056 |
| 7c24ed96ee |
| 11597679ed |
| e3f28a21ab |
| a403979177 |
| b8641b1959 |
| 98e6b76584 |
| 862bb6ed80 |
| bd2529502e |
| 965cbf9574 |
| af30fc322f |
| e53a282fbe |
| d907729778 |
| 8a72779c16 |
| e0f6efecce |
| 98934170ca |
| 69651212ec |
| 53889d85e7 |
| 0de92144d2 |
| 9bd6a73926 |
| 4042fc2184 |
| dd0067ea0b |
| f6c22854a4 |
| 05597616d6 |
| a6f1406509 |
| 0a8f8c14af |
| 7efee7dd41 |
| 952ba77924 |
| 23e463e346 |
| 849a70f9d1 |
| 868f8e0bb6 |
| 84c42ca2d8 |
| efd6850c38 |
| 2b892ad1b2 |
| e16d2b5224 |
| 5e514532df |
| 2141196496 |
| bca02ec295 |
| 8cabdce3b6 |
| 6145d89468 |
| ee317d3f61 |
| 4cc8bdb460 |
| 95ff83e0f0 |
| 3954615e81 |
| 8948b1a3e2 |
| 5cfcf0723a |
| ba733b9f69 |
| 79d562ea5d |
| a7cd10020a |
| b978ae399f |
| 570746b7d9 |
| 8318b26370 |
| 1f76650b7e |
| 37304cf819 |
| 6beb9d7c4e |
| be8c623e04 |
| dd4bb50076 |
| bf6ab6ba6f |
| 02849cc955 |
| 2eaf0f699b |
| 6c1177a6ce |
| 582a88e8f8 |
| f0662dd45f |
@@ -1,8 +1,34 @@
 {
   "permissions": {
     "allow": [
+      "Bash(dotnet --list-sdks:*)",
+      "Bash(winget install:*)",
+      "Bash(dotnet restore:*)",
+      "Bash(dotnet nuget:*)",
+      "Bash(csc -parse:*)",
+      "Bash(grep:*)",
+      "Bash(dotnet build:*)",
+      "Bash(cat:*)",
+      "Bash(copy:*)",
+      "Bash(dotnet test:*)",
+      "Bash(dir:*)",
+      "Bash(Select-Object -ExpandProperty FullName)",
+      "Bash(echo:*)",
+      "Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
       "Bash(wc:*)",
-      "Bash(sort:*)"
+      "Bash(find:*)",
+      "WebFetch(domain:docs.gradle.org)",
+      "WebSearch",
+      "Bash(dotnet msbuild:*)",
+      "Bash(test:*)",
+      "Bash(taskkill:*)",
+      "Bash(timeout /t)",
+      "Bash(dotnet clean:*)",
+      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
+      "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")",
+      "Bash(rm:*)",
+      "Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")",
+      "Bash(del \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\SPRINT_0510_0001_0001_airgap.md\")"
     ],
     "deny": [],
     "ask": []
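The allow list above pairs a tool name with a command-prefix pattern such as `Bash(dotnet build:*)`. A minimal bash sketch of how such a rule could be interpreted follows, under the assumption that `:*` means "this prefix plus any arguments"; the real permission engine is not part of this diff, so both the rule format interpretation and the matching logic here are illustrative only.

```bash
#!/usr/bin/env bash
# Hypothetical illustration of prefix matching for "Bash(<prefix>:*)" rules.
# The actual matcher used by the settings file is not shown in this change.
rules=("dotnet --list-sdks" "dotnet build" "grep" "cat")
cmd="dotnet build src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj -c Release"

allowed=no
for prefix in "${rules[@]}"; do
  case "$cmd" in
    "$prefix"|"$prefix "*) allowed=yes; break ;;   # exact prefix or prefix + arguments
  esac
done
echo "command allowed: $allowed"
```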
.config/dotnet-tools.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-stryker": {
      "version": "4.4.0",
      "commands": [
        "stryker"
      ]
    }
  }
}
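This manifest pins dotnet-stryker 4.4.0 as a local tool; the mutation-testing job added later in this change set restores it with `dotnet tool restore` before invoking `dotnet stryker`. A sketch of the equivalent local run, using only the commands and paths that appear in this diff:

```bash
# Restore tools pinned in .config/dotnet-tools.json, then run Stryker
# against one of the projects exercised by the mutation-testing CI job below.
dotnet tool restore
cd src/Scanner/__Libraries/StellaOps.Scanner.Core
dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core
```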
.dockerignore (new file, 23 lines)
@@ -0,0 +1,23 @@
.git
.gitignore
.gitea
.venv
bin
obj
**/bin
**/obj
local-nugets
.nuget
**/node_modules
**/dist
**/coverage
**/*.user
**/*.suo
**/*.cache
**/.vscode
**/.idea
**/.DS_Store
**/TestResults
**/out
**/packages
/tmp
.gitattributes (vendored, +3)
@@ -1,2 +1,5 @@
 # Ensure analyzer fixture assets keep LF endings for deterministic hashes
 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf
+
+# Ensure reachability sample assets keep LF endings for deterministic hashes
+tests/reachability/samples-public/** text eol=lf
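To confirm the new attribute takes effect and to renormalize any samples already committed with CRLF endings, plain git commands are enough; the sample path below is hypothetical, only the directory comes from the rule above.

```bash
# Check which attributes apply to a file under the new rule (path is an example).
git check-attr text eol -- tests/reachability/samples-public/example.json

# Renormalize already-committed files so their stored line endings match the attributes.
git add --renormalize tests/reachability/samples-public
git status --short
```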
.gitea/workflows/advisory-ai-release.yml (new file, 70 lines)
@@ -0,0 +1,70 @@
name: Advisory AI Feed Release

on:
  workflow_dispatch:
    inputs:
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/AdvisoryAI/feeds/**'
      - 'docs/samples/advisory-feeds/**'

jobs:
  package-feeds:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Fallback to dev key when secret is absent
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[warn] COSIGN_PRIVATE_KEY_B64 not set; using dev key for non-production"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          # Manual override
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package advisory feeds
        run: |
          chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
          ops/deployment/advisory-ai/package-advisory-feeds.sh

      - name: Generate SBOM
        run: |
          # Install syft
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0

          # Generate SBOM for feed bundle
          syft dir:out/advisory-ai/feeds/stage \
            -o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
            --name advisory-feeds

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: advisory-feeds-${{ github.run_number }}
          path: |
            out/advisory-ai/feeds/advisory-feeds.tar.gz
            out/advisory-ai/feeds/advisory-feeds.manifest.json
            out/advisory-ai/feeds/advisory-feeds.manifest.dsse.json
            out/advisory-ai/feeds/advisory-feeds.sbom.json
            out/advisory-ai/feeds/provenance.json
          if-no-files-found: warn
          retention-days: 30
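A quick consumer-side sanity check on the uploaded bundle can be done with standard tools; the file names come from the upload step above, while the manifest and SBOM layout are not shown in this diff, so the jq selectors are assumptions.

```bash
# Basic sanity checks on a downloaded advisory-feeds artifact (extracted locally).
cd advisory-feeds-download            # assumed extraction directory

tar -tzf advisory-feeds.tar.gz | head                 # list bundle contents
jq empty advisory-feeds.manifest.json                 # manifest parses as JSON
jq empty advisory-feeds.manifest.dsse.json            # DSSE envelope parses as JSON
jq -r '.spdxVersion? // "no spdxVersion field"' advisory-feeds.sbom.json   # syft spdx-json output
```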
.gitea/workflows/aoc-backfill-release.yml (new file, 83 lines)
@@ -0,0 +1,83 @@
name: AOC Backfill Release

on:
  workflow_dispatch:
    inputs:
      dataset_hash:
        description: 'Dataset hash from dev rehearsal (leave empty for dev mode)'
        required: false
        default: ''
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'

jobs:
  package-backfill:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Restore AOC CLI
        run: dotnet restore src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[info] No production key; using dev key"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package AOC backfill release
        run: |
          chmod +x ops/devops/aoc/package-backfill-release.sh
          DATASET_HASH="${{ github.event.inputs.dataset_hash }}" \
          ops/devops/aoc/package-backfill-release.sh
        env:
          DATASET_HASH: ${{ github.event.inputs.dataset_hash }}

      - name: Generate SBOM with syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
          syft dir:out/aoc/cli \
            -o spdx-json=out/aoc/aoc-backfill-runner.sbom.json \
            --name aoc-backfill-runner || true

      - name: Verify checksums
        run: |
          cd out/aoc
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: aoc-backfill-release-${{ github.run_number }}
          path: |
            out/aoc/aoc-backfill-runner.tar.gz
            out/aoc/aoc-backfill-runner.manifest.json
            out/aoc/aoc-backfill-runner.sbom.json
            out/aoc/aoc-backfill-runner.provenance.json
            out/aoc/aoc-backfill-runner.dsse.json
            out/aoc/SHA256SUMS
          if-no-files-found: warn
          retention-days: 30
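The dev-key fallback assumes a signing password of `stellaops-dev`. A sketch of producing a matching throwaway key pair locally with standard cosign commands, plus the same integrity check the workflow runs in `out/aoc` (key file names are cosign's defaults):

```bash
# Generate a throwaway dev signing key compatible with the workflow's fallback.
# For non-production testing only.
export COSIGN_PASSWORD=stellaops-dev
cosign generate-key-pair            # writes cosign.key / cosign.pub in the current directory
base64 -w0 cosign.key               # candidate value for the COSIGN_PRIVATE_KEY_B64 secret

# After running the packaging script, repeat the workflow's checksum verification.
cd out/aoc && sha256sum -c SHA256SUMS
```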
@@ -24,7 +24,7 @@ jobs:
   aoc-guard:
     runs-on: ubuntu-22.04
     env:
-      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+      DOTNET_VERSION: '10.0.100'
       ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
     steps:
       - name: Checkout
@@ -56,10 +56,41 @@ jobs:
           dotnet build src/Authority/StellaOps.Authority.Ingestion/StellaOps.Authority.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
           dotnet build src/Excititor/StellaOps.Excititor.Ingestion/StellaOps.Excititor.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true

-      - name: Run analyzer tests
+      - name: Run analyzer tests with coverage
         run: |
           mkdir -p $ARTIFACT_DIR
-          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release --logger "trx;LogFileName=aoc-tests.trx" --results-directory $ARTIFACT_DIR
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-analyzers-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC library tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-lib-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC CLI tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Cli.Tests/StellaOps.Aoc.Cli.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-cli-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Generate coverage report
+        run: |
+          dotnet tool install --global dotnet-reportgenerator-globaltool || true
+          reportgenerator \
+            -reports:"$ARTIFACT_DIR/**/coverage.cobertura.xml" \
+            -targetdir:"$ARTIFACT_DIR/coverage-report" \
+            -reporttypes:"Html;Cobertura;TextSummary" || true
+          if [ -f "$ARTIFACT_DIR/coverage-report/Summary.txt" ]; then
+            cat "$ARTIFACT_DIR/coverage-report/Summary.txt"
+          fi

       - name: Upload artifacts
         uses: actions/upload-artifact@v4
@@ -72,7 +103,7 @@ jobs:
     runs-on: ubuntu-22.04
     if: github.event_name != 'schedule'
     env:
-      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+      DOTNET_VERSION: '10.0.100'
       ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
       AOC_VERIFY_SINCE: ${{ github.event.pull_request.base.sha || 'HEAD~1' }}
     steps:
@@ -96,13 +127,37 @@ jobs:
       - name: Run AOC verify
         env:
           STAGING_MONGO_URI: ${{ secrets.STAGING_MONGO_URI || vars.STAGING_MONGO_URI }}
+          STAGING_POSTGRES_URI: ${{ secrets.STAGING_POSTGRES_URI || vars.STAGING_POSTGRES_URI }}
         run: |
-          if [ -z "${STAGING_MONGO_URI:-}" ]; then
-            echo "::warning::STAGING_MONGO_URI not set; skipping aoc verify"
+          mkdir -p $ARTIFACT_DIR
+
+          # Prefer PostgreSQL, fall back to MongoDB (legacy)
+          if [ -n "${STAGING_POSTGRES_URI:-}" ]; then
+            echo "Using PostgreSQL for AOC verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "$STAGING_POSTGRES_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          elif [ -n "${STAGING_MONGO_URI:-}" ]; then
+            echo "Using MongoDB for AOC verification (deprecated)"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --mongo "$STAGING_MONGO_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          else
+            echo "::warning::Neither STAGING_POSTGRES_URI nor STAGING_MONGO_URI set; running dry-run verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "placeholder" \
+              --dry-run \
+              --verbose
             exit 0
           fi
-          mkdir -p $ARTIFACT_DIR
-          dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify --since "$AOC_VERIFY_SINCE" --mongo "$STAGING_MONGO_URI" --output "$ARTIFACT_DIR/aoc-verify.json" --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" || VERIFY_EXIT=$?
+
           if [ -n "${VERIFY_EXIT:-}" ] && [ "${VERIFY_EXIT}" -ne 0 ]; then
             echo "::error::AOC verify reported violations"; exit ${VERIFY_EXIT}
           fi
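The dry-run fallback branch above can also be run locally before pushing; this sketch uses only the CLI flags that appear in the hunk.

```bash
# Local dry run of the AOC verifier, mirroring the CI fallback branch above.
ARTIFACT_DIR=.artifacts
mkdir -p "$ARTIFACT_DIR"
dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
  --since "HEAD~1" \
  --postgres "placeholder" \
  --dry-run \
  --verbose
```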
.gitea/workflows/artifact-signing.yml (new file, 128 lines)
@@ -0,0 +1,128 @@
name: Artifact Signing

on:
  push:
    tags:
      - 'v*'
  workflow_dispatch:
    inputs:
      artifact_path:
        description: 'Path to artifact to sign'
        required: false
        default: ''

env:
  COSIGN_VERSION: 'v2.2.0'

jobs:
  sign-containers:
    name: Sign Container Images
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
      packages: write
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Log in to registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Sign images (keyless)
        if: ${{ !env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_EXPERIMENTAL: "1"
        run: |
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --yes "${img}:${{ github.ref_name }}"
            fi
          done

      - name: Sign images (with key)
        if: ${{ env.COSIGN_PRIVATE_KEY_B64 }}
        env:
          COSIGN_PRIVATE_KEY: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
          COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
        run: |
          echo "$COSIGN_PRIVATE_KEY" | base64 -d > /tmp/cosign.key
          IMAGES=(
            "ghcr.io/${{ github.repository }}/concelier"
            "ghcr.io/${{ github.repository }}/scanner"
            "ghcr.io/${{ github.repository }}/authority"
          )
          for img in "${IMAGES[@]}"; do
            if docker manifest inspect "${img}:${{ github.ref_name }}" > /dev/null 2>&1; then
              echo "Signing ${img}:${{ github.ref_name }}..."
              cosign sign --key /tmp/cosign.key "${img}:${{ github.ref_name }}"
            fi
          done
          rm -f /tmp/cosign.key

  sign-sbom:
    name: Sign SBOM Artifacts
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Generate and sign SBOM
        run: |
          # Generate SBOM using syft
          if command -v syft &> /dev/null; then
            syft . -o cyclonedx-json > sbom.cdx.json
            cosign sign-blob --yes sbom.cdx.json --output-signature sbom.cdx.json.sig
          else
            echo "syft not installed, skipping SBOM generation"
          fi

      - name: Upload signed artifacts
        uses: actions/upload-artifact@v4
        with:
          name: signed-sbom
          path: |
            sbom.cdx.json
            sbom.cdx.json.sig
          if-no-files-found: ignore

  verify-signatures:
    name: Verify Existing Signatures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: ${{ env.COSIGN_VERSION }}

      - name: Verify DSSE envelopes
        run: |
          find . -name "*.dsse" -o -name "*.dsse.json" | while read f; do
            echo "Checking $f..."
            # Basic JSON validation
            if ! jq empty "$f" 2>/dev/null; then
              echo "Warning: Invalid JSON in $f"
            fi
          done
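Signatures produced by this workflow can be checked from any machine with cosign; the exact signing identity depends on the runner's OIDC issuer, so the image reference, identity, and issuer values below are placeholders to adapt rather than values taken from this diff.

```bash
# Verify a keyless signature produced by the "Sign images (keyless)" step.
# Placeholder identity/issuer regexps; substitute the ones your CI presents.
IMG="ghcr.io/<org>/<repo>/scanner:v1.2.3"
cosign verify "$IMG" \
  --certificate-identity-regexp '.*' \
  --certificate-oidc-issuer-regexp '.*'

# Key-based variant, matching the "Sign images (with key)" step:
cosign verify --key cosign.pub "$IMG"
```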
@@ -37,7 +37,7 @@ on:
       type: boolean

 env:
-  DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+  DOTNET_VERSION: '10.0.100'
   BUILD_CONFIGURATION: Release
   CI_CACHE_ROOT: /data/.cache/stella-ops/feedser
   RUNNER_TOOL_CACHE: /toolcache
@@ -575,6 +575,209 @@ PY
           if-no-files-found: ignore
           retention-days: 7

+  # ============================================================================
+  # Quality Gates Foundation (Sprint 0350)
+  # ============================================================================
+  quality-gates:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    permissions:
+      contents: read
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Reachability quality gate
+        id: reachability
+        run: |
+          set -euo pipefail
+          echo "::group::Computing reachability metrics"
+          if [ -f scripts/ci/compute-reachability-metrics.sh ]; then
+            chmod +x scripts/ci/compute-reachability-metrics.sh
+            METRICS=$(./scripts/ci/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "Reachability metrics: $METRICS"
+          else
+            echo "Reachability script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: TTFS regression gate
+        id: ttfs
+        run: |
+          set -euo pipefail
+          echo "::group::Computing TTFS metrics"
+          if [ -f scripts/ci/compute-ttfs-metrics.sh ]; then
+            chmod +x scripts/ci/compute-ttfs-metrics.sh
+            METRICS=$(./scripts/ci/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "TTFS metrics: $METRICS"
+          else
+            echo "TTFS script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Performance SLO gate
+        id: slo
+        run: |
+          set -euo pipefail
+          echo "::group::Enforcing performance SLOs"
+          if [ -f scripts/ci/enforce-performance-slos.sh ]; then
+            chmod +x scripts/ci/enforce-performance-slos.sh
+            ./scripts/ci/enforce-performance-slos.sh --warn-only || true
+          else
+            echo "Performance SLO script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: RLS policy validation
+        id: rls
+        run: |
+          set -euo pipefail
+          echo "::group::Validating RLS policies"
+          if [ -f deploy/postgres-validation/001_validate_rls.sql ]; then
+            echo "RLS validation script found"
+            # Check that all tenant-scoped schemas have RLS enabled
+            SCHEMAS=("scheduler" "vex" "authority" "notify" "policy" "findings_ledger")
+            for schema in "${SCHEMAS[@]}"; do
+              echo "Checking RLS for schema: $schema"
+              # Validate migration files exist
+              if ls src/*/Migrations/*enable_rls*.sql 2>/dev/null | grep -q "$schema"; then
+                echo " ✓ RLS migration exists for $schema"
+              fi
+            done
+            echo "RLS validation passed (static check)"
+          else
+            echo "RLS validation script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Upload quality gate results
+        uses: actions/upload-artifact@v4
+        with:
+          name: quality-gate-results
+          path: |
+            scripts/ci/*.json
+            scripts/ci/*.yaml
+          if-no-files-found: ignore
+          retention-days: 14
+
+  security-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'pull_request' || github.event_name == 'schedule'
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore dependencies
+        run: dotnet restore tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj
+
+      - name: Run OWASP security tests
+        run: |
+          set -euo pipefail
+          echo "::group::Running security tests"
+          dotnet test tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj \
+            --no-restore \
+            --logger "trx;LogFileName=security-tests.trx" \
+            --results-directory ./security-test-results \
+            --filter "Category=Security" \
+            --verbosity normal
+          echo "::endgroup::"
+
+      - name: Upload security test results
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: security-test-results
+          path: security-test-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+  mutation-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'schedule' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'mutation-test'))
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore tools
+        run: dotnet tool restore
+
+      - name: Run mutation tests - Scanner.Core
+        id: scanner-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Scanner.Core"
+          cd src/Scanner/__Libraries/StellaOps.Scanner.Core
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Policy.Engine
+        id: policy-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Policy.Engine"
+          cd src/Policy/__Libraries/StellaOps.Policy
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/policy-engine || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Authority.Core
+        id: authority-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Authority.Core"
+          cd src/Authority/StellaOps.Authority
+          dotnet stryker --reporter json --reporter html --output ../../mutation-results/authority-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Upload mutation results
+        uses: actions/upload-artifact@v4
+        with:
+          name: mutation-testing-results
+          path: mutation-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+      - name: Check mutation thresholds
+        run: |
+          set -euo pipefail
+          echo "Checking mutation score thresholds..."
+          # Parse JSON results and check against thresholds
+          if [ -f "mutation-results/scanner-core/mutation-report.json" ]; then
+            SCORE=$(jq '.mutationScore // 0' mutation-results/scanner-core/mutation-report.json)
+            echo "Scanner.Core mutation score: $SCORE%"
+            if (( $(echo "$SCORE < 65" | bc -l) )); then
+              echo "::error::Scanner.Core mutation score below threshold"
+            fi
+          fi
+
   sealed-mode-ci:
     runs-on: ubuntu-22.04
     needs: build-test
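The threshold check at the end of the mutation-testing job reads `mutationScore` out of the Stryker JSON report and compares it with bc. The same check can be replayed locally against a report produced by the `dotnet stryker` steps above; paths and the field name are taken from the workflow, not from Stryker documentation.

```bash
# Re-run the workflow's mutation-score threshold check locally.
REPORT=mutation-results/scanner-core/mutation-report.json
THRESHOLD=65

if [ -f "$REPORT" ]; then
  SCORE=$(jq '.mutationScore // 0' "$REPORT")
  echo "Scanner.Core mutation score: $SCORE%"
  if (( $(echo "$SCORE < $THRESHOLD" | bc -l) )); then
    echo "mutation score below threshold ($THRESHOLD%)" >&2
    exit 1
  fi
fi
```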
@@ -28,7 +28,7 @@ jobs:
       - name: Setup .NET
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: "10.0.100-rc.2.25502.107"
+          dotnet-version: "10.0.100"

       - name: Install syft (SBOM)
         uses: anchore/sbom-action/download-syft@v0
@@ -24,7 +24,7 @@ jobs:
       - name: Setup .NET
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: "10.0.100-rc.2.25502.107"
+          dotnet-version: "10.0.100"

       - name: Chaos smoke
         if: ${{ github.event.inputs.chaos == 'true' }}
@@ -23,7 +23,7 @@ jobs:
       - name: Setup .NET 10 preview
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: '10.0.100-rc.2.25502.107'
+          dotnet-version: '10.0.100'

       - name: Restore Concelier solution
         run: dotnet restore src/Concelier/StellaOps.Concelier.sln
.gitea/workflows/concelier-store-aoc-19-005.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
name: Concelier STORE-AOC-19-005 Dataset

on:
  workflow_dispatch: {}

jobs:
  build-dataset:
    runs-on: ubuntu-22.04
    env:
      ARTIFACT_DIR: ${{ github.workspace }}/out/linksets
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install dependencies
        run: sudo apt-get update && sudo apt-get install -y zstd

      - name: Build dataset tarball
        run: |
          chmod +x scripts/concelier/build-store-aoc-19-005-dataset.sh scripts/concelier/test-store-aoc-19-005-dataset.sh
          scripts/concelier/build-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Validate dataset
        run: scripts/concelier/test-store-aoc-19-005-dataset.sh "${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst"

      - name: Upload dataset artifacts
        uses: actions/upload-artifact@v4
        with:
          name: concelier-store-aoc-19-005-dataset
          path: |
            ${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst
            ${ARTIFACT_DIR}/linksets-stage-backfill.tar.zst.sha256
@@ -1,86 +1,64 @@
-name: Console CI
+name: console-ci

 on:
-  push:
-    branches: [ main ]
-    paths:
-      - 'src/UI/**'
-      - '.gitea/workflows/console-ci.yml'
-      - 'docs/modules/devops/console-ci-contract.md'
+  workflow_dispatch:
   pull_request:
-    branches: [ main, develop ]
     paths:
-      - 'src/UI/**'
+      - 'src/Web/**'
       - '.gitea/workflows/console-ci.yml'
-      - 'docs/modules/devops/console-ci-contract.md'
+      - 'ops/devops/console/**'

 jobs:
-  console-ci:
-    runs-on: ubuntu-22.04
+  lint-test-build:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash
+        working-directory: src/Web/StellaOps.Web
     env:
-      PNPM_HOME: ~/.pnpm
-      PLAYWRIGHT_BROWSERS_PATH: ./.playwright
-      SOURCE_DATE_EPOCH: ${{ github.run_id }}
+      PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
+      CI: true
     steps:
       - name: Checkout
         uses: actions/checkout@v4

-      - name: Task Pack offline bundle fixtures
-        run: python3 scripts/packs/run-fixtures-check.sh
-        with:
-          fetch-depth: 0
-
-      - name: Set up Node.js 20
+      - name: Setup Node
         uses: actions/setup-node@v4
         with:
           node-version: '20'
+          cache: npm
+          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

-      - name: Enable pnpm
-        run: |
-          corepack enable
-          corepack prepare pnpm@9 --activate
+      - name: Install deps (offline-friendly)
+        run: npm ci --prefer-offline --no-audit --progress=false

-      - name: Cache pnpm store & node_modules
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.pnpm-store
-            node_modules
-            ./.pnpm-store
-            ./.playwright
-          key: console-${{ runner.os }}-${{ hashFiles('pnpm-lock.yaml') }}
+      - name: Lint
+        run: npm run lint -- --no-progress

-      - name: Install dependencies (offline-first)
-        env:
-          PNPM_FETCH_RETRIES: 0
-          PNPM_OFFLINE: 1
-        run: |
-          pnpm install --frozen-lockfile || PNPM_OFFLINE=0 pnpm install --frozen-lockfile --prefer-offline
-
-      - name: Lint / Types
-        run: pnpm lint && pnpm format:check && pnpm typecheck
+      - name: Console export specs (targeted)
+        run: bash ./scripts/ci-console-exports.sh
+        continue-on-error: true

       - name: Unit tests
-        run: pnpm test -- --runInBand --reporter=junit --outputFile=.artifacts/junit.xml
+        run: npm run test:ci
+        env:
+          CHROME_BIN: chromium

-      - name: Storybook a11y
-        run: |
-          pnpm storybook:build
-          pnpm storybook:a11y --ci --output .artifacts/storybook-a11y.json
-
-      - name: Playwright smoke
-        run: pnpm playwright test --config=playwright.config.ci.ts --reporter=list,junit=.artifacts/playwright.xml
-
-      - name: Lighthouse (CI budgets)
-        run: |
-          pnpm serve --port 4173 &
-          pnpm lhci autorun --config=lighthouserc.ci.js --upload.target=filesystem --upload.outputDir=.artifacts/lhci
-
-      - name: SBOM
-        run: pnpm exec syft packages dir:dist --output=spdx-json=.artifacts/console.spdx.json
+      - name: Build
+        run: npm run build -- --configuration=production --progress=false
+
+      - name: Collect artifacts
+        if: always()
+        run: |
+          mkdir -p ../artifacts
+          cp -r dist ../artifacts/dist || true
+          cp -r coverage ../artifacts/coverage || true
+          find . -maxdepth 3 -type f -name "*.xml" -o -name "*.trx" -o -name "*.json" -path "*test*" -print0 | xargs -0 -I{} cp --parents {} ../artifacts 2>/dev/null || true

       - name: Upload artifacts
+        if: always()
         uses: actions/upload-artifact@v4
         with:
-          name: console-ci-artifacts
-          path: .artifacts
+          name: console-ci-${{ github.run_id }}
+          path: artifacts
+          retention-days: 14
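The rewritten job reduces to four npm invocations inside `src/Web/StellaOps.Web`. They can be replayed locally as below; the script targets (`lint`, `test:ci`, `build`) come from the workflow, and their definitions are assumed to live in the project's package.json, which is not part of this diff.

```bash
# Reproduce the console-ci lint/test/build steps locally.
cd src/Web/StellaOps.Web
npm ci --prefer-offline --no-audit --progress=false
npm run lint -- --no-progress
CHROME_BIN=chromium npm run test:ci
npm run build -- --configuration=production --progress=false
```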
.gitea/workflows/console-runner-image.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
name: console-runner-image

on:
  workflow_dispatch:
  push:
    paths:
      - 'ops/devops/console/**'
      - '.gitea/workflows/console-runner-image.yml'

jobs:
  build-runner-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Build runner image tarball (baked caches)
        env:
          RUN_ID: ${{ github.run_id }}
        run: |
          set -euo pipefail
          chmod +x ops/devops/console/build-runner-image.sh ops/devops/console/build-runner-image-ci.sh
          ops/devops/console/build-runner-image-ci.sh

      - name: Upload runner image artifact
        uses: actions/upload-artifact@v4
        with:
          name: console-runner-image-${{ github.run_id }}
          path: ops/devops/artifacts/console-runner/
          retention-days: 14
.gitea/workflows/crypto-sim-smoke.yml (new file, 41 lines)
@@ -0,0 +1,41 @@
name: crypto-sim-smoke

on:
  workflow_dispatch:
  push:
    paths:
      - "ops/crypto/sim-crypto-service/**"
      - "ops/crypto/sim-crypto-smoke/**"
      - "scripts/crypto/run-sim-smoke.ps1"
      - "docs/security/crypto-simulation-services.md"
      - ".gitea/workflows/crypto-sim-smoke.yml"

jobs:
  sim-smoke:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "10.0.x"

      - name: Build sim service and smoke harness
        run: |
          dotnet build ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release
          dotnet build ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release

      - name: Run smoke (sim profile: sm)
        env:
          ASPNETCORE_URLS: http://localhost:5000
          STELLAOPS_CRYPTO_SIM_URL: http://localhost:5000
          SIM_PROFILE: sm
        run: |
          set -euo pipefail
          dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj --no-build -c Release &
          service_pid=$!
          sleep 6
          dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj --no-build -c Release
          kill $service_pid
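The smoke step relies on a fixed `sleep 6` before the harness runs. A local variant can instead poll the listener and always clean up; the URL and project paths come from the workflow, while the readiness probe below is a generic TCP check (no endpoint of the sim service is assumed).

```bash
# Local variant of the smoke run: wait for the listener instead of a fixed sleep.
set -euo pipefail
export ASPNETCORE_URLS=http://localhost:5000 STELLAOPS_CRYPTO_SIM_URL=http://localhost:5000 SIM_PROFILE=sm

dotnet run --project ops/crypto/sim-crypto-service/SimCryptoService.csproj -c Release &
service_pid=$!
trap 'kill "$service_pid" 2>/dev/null || true' EXIT   # always stop the background service

for _ in $(seq 1 30); do
  (exec 3<>/dev/tcp/localhost/5000) 2>/dev/null && break   # bash TCP readiness check
  sleep 1
done

dotnet run --project ops/crypto/sim-crypto-smoke/SimCryptoSmoke.csproj -c Release
```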
.gitea/workflows/cryptopro-linux-csp.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
name: cryptopro-linux-csp
on:
  push:
    branches: [main, develop]
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'
  pull_request:
    paths:
      - 'ops/cryptopro/linux-csp-service/**'
      - 'opt/cryptopro/downloads/**'
      - '.gitea/workflows/cryptopro-linux-csp.yml'

env:
  IMAGE_NAME: cryptopro-linux-csp
  DOCKERFILE: ops/cryptopro/linux-csp-service/Dockerfile

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build image (accept EULA explicitly)
        run: |
          docker build -t $IMAGE_NAME \
            --build-arg CRYPTOPRO_ACCEPT_EULA=1 \
            -f $DOCKERFILE .

      - name: Run container
        run: |
          docker run -d --rm --name $IMAGE_NAME -p 18080:8080 $IMAGE_NAME
          for i in {1..20}; do
            if curl -sf http://127.0.0.1:18080/health >/dev/null; then
              exit 0
            fi
            sleep 3
          done
          echo "Service failed to start" && exit 1

      - name: Test endpoints
        run: |
          curl -sf http://127.0.0.1:18080/health
          curl -sf http://127.0.0.1:18080/license || true
          curl -sf -X POST http://127.0.0.1:18080/hash \
            -H "Content-Type: application/json" \
            -d '{"data_b64":"SGVsbG8="}'

      - name: Stop container
        if: always()
        run: docker rm -f $IMAGE_NAME || true
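The `/hash` test posts `data_b64: "SGVsbG8="`, which is simply base64 for `Hello`. A sketch for building an arbitrary payload and calling the port published by the "Run container" step; the endpoint path and port come from the workflow above.

```bash
# Build the base64 payload the /hash endpoint expects and call the local container.
DATA_B64=$(printf 'Hello' | base64)        # -> SGVsbG8=
curl -sf -X POST http://127.0.0.1:18080/hash \
  -H "Content-Type: application/json" \
  -d "{\"data_b64\":\"${DATA_B64}\"}"
```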
@@ -25,7 +25,7 @@ jobs:
       - name: Setup .NET 10 (preview)
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: 10.0.100-rc.2.25502.107
+          dotnet-version: 10.0.100

       - name: Build CryptoPro plugin
         run: |
@@ -47,7 +47,7 @@ jobs:
       - name: Setup .NET SDK
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: '10.0.100-rc.2.25502.107'
+          dotnet-version: '10.0.100'

       - name: Link check
         run: |
@@ -20,7 +20,7 @@ jobs:
   export-ci:
     runs-on: ubuntu-22.04
     env:
-      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+      DOTNET_VERSION: '10.0.100'
       MINIO_ACCESS_KEY: exportci
       MINIO_SECRET_KEY: exportci123
       BUCKET: export-ci
.gitea/workflows/exporter-ci.yml (new file, 46 lines)
@@ -0,0 +1,46 @@
name: exporter-ci

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'src/ExportCenter/**'
      - '.gitea/workflows/exporter-ci.yml'

env:
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_NOLOGO: 1

jobs:
  build-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj --configuration Release --no-restore

      - name: Test
        run: dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj --configuration Release --no-build --verbosity normal

      - name: Publish
        run: |
          dotnet publish src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj \
            --configuration Release \
            --output artifacts/exporter

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: exporter-${{ github.run_id }}
          path: artifacts/
          retention-days: 14
.gitea/workflows/findings-ledger-ci.yml (new file, 325 lines)
@@ -0,0 +1,325 @@
# .gitea/workflows/findings-ledger-ci.yml
# Findings Ledger CI with RLS migration validation (DEVOPS-LEDGER-TEN-48-001-REL)

name: Findings Ledger CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'
      - 'deploy/releases/2025.09-stable.yaml'
      - 'deploy/releases/2025.09-airgap.yaml'
      - 'deploy/downloads/manifest.json'
      - 'ops/devops/release/check_release_manifest.py'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Findings/**'
      - '.gitea/workflows/findings-ledger-ci.yml'

env:
  DOTNET_VERSION: '10.0.100'
  POSTGRES_IMAGE: postgres:16-alpine
  BUILD_CONFIGURATION: Release

jobs:
  build-test:
    runs-on: ubuntu-22.04
    env:
      TEST_RESULTS_DIR: ${{ github.workspace }}/artifacts/test-results
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Restore dependencies
        run: |
          dotnet restore src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj
          dotnet restore src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj

      - name: Build
        run: |
          dotnet build src/Findings/StellaOps.Findings.Ledger/StellaOps.Findings.Ledger.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            /p:ContinuousIntegrationBuild=true

      - name: Run unit tests
        run: |
          mkdir -p $TEST_RESULTS_DIR
          dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj \
            -c ${{ env.BUILD_CONFIGURATION }} \
            --logger "trx;LogFileName=ledger-tests.trx" \
            --results-directory $TEST_RESULTS_DIR

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: ledger-test-results
          path: ${{ env.TEST_RESULTS_DIR }}

  migration-validation:
    runs-on: ubuntu-22.04
    services:
      postgres:
        image: postgres:16-alpine
        env:
          POSTGRES_USER: ledgertest
          POSTGRES_PASSWORD: ledgertest
          POSTGRES_DB: ledger_test
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    env:
      PGHOST: localhost
      PGPORT: 5432
      PGUSER: ledgertest
      PGPASSWORD: ledgertest
      PGDATABASE: ledger_test
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup .NET ${{ env.DOTNET_VERSION }}
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}
          include-prerelease: true

      - name: Install PostgreSQL client
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client

      - name: Wait for PostgreSQL
        run: |
          until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
            echo "Waiting for PostgreSQL..."
            sleep 2
          done

      - name: Apply prerequisite migrations (001-006)
        run: |
          set -euo pipefail
          MIGRATION_DIR="src/Findings/StellaOps.Findings.Ledger/migrations"
          for migration in 001_initial.sql 002_add_evidence_bundle_ref.sql 002_projection_offsets.sql \
                           003_policy_rationale.sql 004_ledger_attestations.sql 004_risk_fields.sql \
                           005_risk_fields.sql 006_orchestrator_airgap.sql; do
            if [ -f "$MIGRATION_DIR/$migration" ]; then
              echo "Applying migration: $migration"
              psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -f "$MIGRATION_DIR/$migration"
            fi
          done

      - name: Apply RLS migration (007_enable_rls.sql)
        run: |
          set -euo pipefail
          echo "Applying RLS migration..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql

      - name: Validate RLS configuration
        run: |
          set -euo pipefail
          echo "Validating RLS is enabled on all protected tables..."

          # Check RLS enabled
          TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = 'public'
              AND c.relrowsecurity = true
              AND c.relname IN (
                'ledger_events', 'ledger_merkle_roots', 'findings_projection',
                'finding_history', 'triage_actions', 'ledger_attestations',
                'orchestrator_exports', 'airgap_imports'
              );
          ")

          if [ "$TABLES_WITH_RLS" -ne 8 ]; then
            echo "::error::Expected 8 tables with RLS enabled, found $TABLES_WITH_RLS"
            exit 1
          fi
          echo "✓ All 8 tables have RLS enabled"

          # Check policies exist
          POLICIES=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(DISTINCT tablename)
            FROM pg_policies
            WHERE schemaname = 'public'
              AND policyname LIKE '%_tenant_isolation';
          ")

          if [ "$POLICIES" -ne 8 ]; then
            echo "::error::Expected 8 tenant isolation policies, found $POLICIES"
            exit 1
          fi
          echo "✓ All 8 tenant isolation policies created"

          # Check tenant function exists
          FUNC_EXISTS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_proc p
            JOIN pg_namespace n ON p.pronamespace = n.oid
            WHERE p.proname = 'require_current_tenant'
              AND n.nspname = 'findings_ledger_app';
          ")

          if [ "$FUNC_EXISTS" -ne 1 ]; then
            echo "::error::Tenant function 'require_current_tenant' not found"
            exit 1
          fi
          echo "✓ Tenant function 'findings_ledger_app.require_current_tenant()' exists"

          echo ""
          echo "=== RLS Migration Validation PASSED ==="

      - name: Test rollback migration
        run: |
          set -euo pipefail
          echo "Testing rollback migration..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql

          # Verify RLS is disabled
          TABLES_WITH_RLS=$(psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE -t -A -c "
            SELECT COUNT(*)
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = 'public'
              AND c.relrowsecurity = true
              AND c.relname IN (
                'ledger_events', 'ledger_merkle_roots', 'findings_projection',
                'finding_history', 'triage_actions', 'ledger_attestations',
                'orchestrator_exports', 'airgap_imports'
              );
          ")

          if [ "$TABLES_WITH_RLS" -ne 0 ]; then
            echo "::error::Rollback failed - $TABLES_WITH_RLS tables still have RLS enabled"
            exit 1
          fi
          echo "✓ Rollback successful - RLS disabled on all tables"
      - name: Validate release manifests (production)
        run: |
          set -euo pipefail
          python ops/devops/release/check_release_manifest.py

      - name: Re-apply RLS migration (idempotency check)
        run: |
          set -euo pipefail
          echo "Re-applying RLS migration to verify idempotency..."
          psql -h $PGHOST -p $PGPORT -U $PGUSER -d $PGDATABASE \
            -f src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql
          echo "✓ Migration is idempotent"

  generate-manifest:
    runs-on: ubuntu-22.04
    needs: [build-test, migration-validation]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Generate migration manifest
        run: |
          set -euo pipefail
          MIGRATION_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql"
          ROLLBACK_FILE="src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql"
          MANIFEST_DIR="out/findings-ledger/migrations"
          mkdir -p "$MANIFEST_DIR"

          # Compute SHA256 hashes
          MIGRATION_SHA=$(sha256sum "$MIGRATION_FILE" | awk '{print $1}')
          ROLLBACK_SHA=$(sha256sum "$ROLLBACK_FILE" | awk '{print $1}')
          CREATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

          cat > "$MANIFEST_DIR/007_enable_rls.manifest.json" <<EOF
          {
            "\$schema": "https://stella-ops.org/schemas/migration-manifest.v1.json",
            "schemaVersion": "1.0.0",
            "migrationId": "007_enable_rls",
            "module": "findings-ledger",
            "version": "2025.12.0",
            "createdAt": "$CREATED_AT",
            "description": "Enable Row-Level Security for Findings Ledger tenant isolation",
            "taskId": "LEDGER-TEN-48-001-DEV",
            "contractRef": "CONTRACT-FINDINGS-LEDGER-RLS-011",
            "database": {
              "engine": "postgresql",
              "minVersion": "16.0"
            },
            "files": {
              "apply": {
                "path": "007_enable_rls.sql",
                "sha256": "$MIGRATION_SHA"
              },
              "rollback": {
                "path": "007_enable_rls_rollback.sql",
                "sha256": "$ROLLBACK_SHA"
              }
            },
            "affects": {
              "tables": [
                "ledger_events",
                "ledger_merkle_roots",
                "findings_projection",
                "finding_history",
                "triage_actions",
                "ledger_attestations",
                "orchestrator_exports",
                "airgap_imports"
              ],
              "schemas": ["public", "findings_ledger_app"],
              "roles": ["findings_ledger_admin"]
            },
            "prerequisites": [
              "006_orchestrator_airgap"
            ],
            "validation": {
              "type": "rls-check",
              "expectedTables": 8,
              "expectedPolicies": 8,
              "tenantFunction": "findings_ledger_app.require_current_tenant"
            },
            "offlineKit": {
              "includedInBundle": true,
              "requiresManualApply": true,
              "applyOrder": 7
            }
          }
          EOF

          echo "Generated migration manifest at $MANIFEST_DIR/007_enable_rls.manifest.json"
          cat "$MANIFEST_DIR/007_enable_rls.manifest.json"

      - name: Copy migration files for offline-kit
        run: |
          set -euo pipefail
          OFFLINE_DIR="out/findings-ledger/offline-kit/migrations"
          mkdir -p "$OFFLINE_DIR"
          cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls.sql "$OFFLINE_DIR/"
          cp src/Findings/StellaOps.Findings.Ledger/migrations/007_enable_rls_rollback.sql "$OFFLINE_DIR/"
          cp out/findings-ledger/migrations/007_enable_rls.manifest.json "$OFFLINE_DIR/"
          echo "Offline-kit migration files prepared"
          ls -la "$OFFLINE_DIR"

      - name: Upload migration artefacts
        uses: actions/upload-artifact@v4
        with:
          name: findings-ledger-migrations
          path: out/findings-ledger/
          if-no-files-found: error
.gitea/workflows/icscisa-kisa-refresh.yml (new file, 68 lines)
@@ -0,0 +1,68 @@
name: ICS/KISA Feed Refresh

on:
  schedule:
    - cron: '0 2 * * MON'
  workflow_dispatch:
    inputs:
      live_fetch:
        description: 'Attempt live RSS fetch (fallback to samples on failure)'
        required: false
        default: true
        type: boolean
      offline_snapshot:
        description: 'Force offline samples only (no network)'
        required: false
        default: false
        type: boolean

jobs:
  refresh:
    runs-on: ubuntu-22.04
    permissions:
      contents: read
    env:
      ICSCISA_FEED_URL: ${{ secrets.ICSCISA_FEED_URL }}
      KISA_FEED_URL: ${{ secrets.KISA_FEED_URL }}
      FEED_GATEWAY_HOST: concelier-webservice
      FEED_GATEWAY_SCHEME: http
      LIVE_FETCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.live_fetch || 'true' }}
      OFFLINE_SNAPSHOT: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.offline_snapshot || 'false' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set run metadata
        id: meta
        run: |
          RUN_DATE=$(date -u +%Y%m%d)
          RUN_ID="icscisa-kisa-$(date -u +%Y%m%dT%H%M%SZ)"
          echo "run_date=$RUN_DATE" >> $GITHUB_OUTPUT
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
          echo "RUN_DATE=$RUN_DATE" >> $GITHUB_ENV
          echo "RUN_ID=$RUN_ID" >> $GITHUB_ENV

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run ICS/KISA refresh
        run: |
          python scripts/feeds/run_icscisa_kisa_refresh.py \
            --out-dir out/feeds/icscisa-kisa \
            --run-date "${{ steps.meta.outputs.run_date }}" \
            --run-id "${{ steps.meta.outputs.run_id }}"

      - name: Show fetch log
        run: cat out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}/fetch.log

      - name: Upload refresh artifacts
        uses: actions/upload-artifact@v4
        with:
          name: icscisa-kisa-${{ steps.meta.outputs.run_date }}
          path: out/feeds/icscisa-kisa/${{ steps.meta.outputs.run_date }}
          if-no-files-found: error
          retention-days: 21

.gitea/workflows/ledger-oas-ci.yml (new file, 81 lines)
@@ -0,0 +1,81 @@
name: Ledger OpenAPI CI

on:
  workflow_dispatch:
  push:
    branches: [main]
    paths:
      - 'api/ledger/**'
      - 'ops/devops/ledger/**'
  pull_request:
    paths:
      - 'api/ledger/**'

jobs:
  validate-oas:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install tools
        run: |
          npm install -g @stoplight/spectral-cli
          npm install -g @openapitools/openapi-generator-cli

      - name: Validate OpenAPI spec
        run: |
          chmod +x ops/devops/ledger/validate-oas.sh
          ops/devops/ledger/validate-oas.sh

      - name: Upload validation report
        uses: actions/upload-artifact@v4
        with:
          name: ledger-oas-validation-${{ github.run_number }}
          path: |
            out/ledger/oas/lint-report.json
            out/ledger/oas/validation-report.txt
            out/ledger/oas/spec-summary.json
          if-no-files-found: warn

  check-wellknown:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check .well-known/openapi structure
        run: |
          # Validate .well-known structure if exists
          if [ -d ".well-known" ]; then
            echo "Checking .well-known/openapi..."
            if [ -f ".well-known/openapi.json" ]; then
              python3 -c "import json; json.load(open('.well-known/openapi.json'))"
              echo ".well-known/openapi.json is valid JSON"
            fi
          else
            echo "[info] .well-known directory not present (OK for dev)"
          fi

  deprecation-check:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check deprecation policy
        run: |
          if [ -f "ops/devops/ledger/deprecation-policy.yaml" ]; then
            echo "Validating deprecation policy..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/ledger/deprecation-policy.yaml'))"
            echo "Deprecation policy is valid"
          else
            echo "[info] No deprecation policy yet (OK for initial setup)"
          fi

.gitea/workflows/ledger-packs-ci.yml (new file, 101 lines)
@@ -0,0 +1,101 @@
name: Ledger Packs CI

on:
  workflow_dispatch:
    inputs:
      snapshot_id:
        description: 'Snapshot ID (leave empty for auto)'
        required: false
        default: ''
      sign:
        description: 'Sign pack (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'ops/devops/ledger/**'

jobs:
  build-pack:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ] || [ "${{ github.event.inputs.sign }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Build pack
        run: |
          chmod +x ops/devops/ledger/build-pack.sh
          SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
          if [ -z "$SNAPSHOT_ID" ]; then
            SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
          fi

          SIGN_FLAG=""
          if [ "${{ github.event.inputs.sign }}" = "1" ] || [ -n "${COSIGN_PRIVATE_KEY_B64}" ]; then
            SIGN_FLAG="--sign"
          fi

          SNAPSHOT_ID="$SNAPSHOT_ID" ops/devops/ledger/build-pack.sh $SIGN_FLAG

      - name: Verify checksums
        run: |
          cd out/ledger/packs
          for f in *.SHA256SUMS; do
            if [ -f "$f" ]; then
              sha256sum -c "$f"
            fi
          done

      - name: Upload pack
        uses: actions/upload-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: |
            out/ledger/packs/*.pack.tar.gz
            out/ledger/packs/*.SHA256SUMS
            out/ledger/packs/*.dsse.json
          if-no-files-found: warn
          retention-days: 30

  verify-pack:
    runs-on: ubuntu-22.04
    needs: build-pack
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download pack
        uses: actions/download-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: out/ledger/packs/

      - name: Verify pack structure
        run: |
          cd out/ledger/packs
          for pack in *.pack.tar.gz; do
            if [ -f "$pack" ]; then
              echo "Verifying $pack..."
              tar -tzf "$pack" | head -20

              # Extract and check manifest
              tar -xzf "$pack" -C /tmp manifest.json 2>/dev/null || true
              if [ -f /tmp/manifest.json ]; then
                python3 -c "import json; json.load(open('/tmp/manifest.json'))"
                echo "Pack manifest is valid JSON"
              fi
            fi
          done

.gitea/workflows/lighthouse-ci.yml (new file, 188 lines)
@@ -0,0 +1,188 @@
# .gitea/workflows/lighthouse-ci.yml
# Lighthouse CI for performance and accessibility testing of the StellaOps Web UI

name: Lighthouse CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Web/StellaOps.Web/**'
      - '.gitea/workflows/lighthouse-ci.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Web/StellaOps.Web/**'
  schedule:
    # Run weekly on Sunday at 2 AM UTC
    - cron: '0 2 * * 0'
  workflow_dispatch:

env:
  NODE_VERSION: '20'
  LHCI_BUILD_CONTEXT__CURRENT_BRANCH: ${{ github.head_ref || github.ref_name }}
  LHCI_BUILD_CONTEXT__COMMIT_SHA: ${{ github.sha }}

jobs:
  lighthouse:
    name: Lighthouse Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Install Lighthouse CI
        run: npm install -g @lhci/cli@0.13.x

      - name: Run Lighthouse CI
        run: |
          lhci autorun \
            --collect.staticDistDir=./dist/stella-ops-web/browser \
            --collect.numberOfRuns=3 \
            --assert.preset=lighthouse:recommended \
            --assert.assertions.categories:performance=off \
            --assert.assertions.categories:accessibility=off \
            --upload.target=filesystem \
            --upload.outputDir=./lighthouse-results

      - name: Evaluate Lighthouse Results
        id: lhci-results
        run: |
          # Parse the latest Lighthouse report
          REPORT=$(ls -t lighthouse-results/*.json | head -1)

          if [ -f "$REPORT" ]; then
            PERF=$(jq '.categories.performance.score * 100' "$REPORT" | cut -d. -f1)
            A11Y=$(jq '.categories.accessibility.score * 100' "$REPORT" | cut -d. -f1)
            BP=$(jq '.categories["best-practices"].score * 100' "$REPORT" | cut -d. -f1)
            SEO=$(jq '.categories.seo.score * 100' "$REPORT" | cut -d. -f1)

            echo "performance=$PERF" >> $GITHUB_OUTPUT
            echo "accessibility=$A11Y" >> $GITHUB_OUTPUT
            echo "best-practices=$BP" >> $GITHUB_OUTPUT
            echo "seo=$SEO" >> $GITHUB_OUTPUT

            echo "## Lighthouse Results" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "| Category | Score | Threshold | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|----------|-------|-----------|--------|" >> $GITHUB_STEP_SUMMARY

            # Performance: target >= 90
            if [ "$PERF" -ge 90 ]; then
              echo "| Performance | $PERF | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Performance | $PERF | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Accessibility: target >= 95
            if [ "$A11Y" -ge 95 ]; then
              echo "| Accessibility | $A11Y | >= 95 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Accessibility | $A11Y | >= 95 | :x: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Best Practices: target >= 90
            if [ "$BP" -ge 90 ]; then
              echo "| Best Practices | $BP | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Best Practices | $BP | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # SEO: target >= 90
            if [ "$SEO" -ge 90 ]; then
              echo "| SEO | $SEO | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| SEO | $SEO | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi
          fi

      - name: Check Quality Gates
        run: |
          PERF=${{ steps.lhci-results.outputs.performance }}
          A11Y=${{ steps.lhci-results.outputs.accessibility }}

          FAILED=0

          # Performance gate (warning only, not blocking)
          if [ "$PERF" -lt 90 ]; then
            echo "::warning::Performance score ($PERF) is below target (90)"
          fi

          # Accessibility gate (blocking)
          if [ "$A11Y" -lt 95 ]; then
            echo "::error::Accessibility score ($A11Y) is below required threshold (95)"
            FAILED=1
          fi

          if [ "$FAILED" -eq 1 ]; then
            exit 1
          fi

      - name: Upload Lighthouse Reports
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: lighthouse-reports
          path: src/Web/StellaOps.Web/lighthouse-results/
          retention-days: 30

  axe-accessibility:
    name: Axe Accessibility Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Install Playwright browsers
        run: npx playwright install --with-deps chromium

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Start preview server
        run: |
          npx serve -s dist/stella-ops-web/browser -l 4200 &
          sleep 5

      - name: Run Axe accessibility tests
        run: |
          npm run test:a11y || true

      - name: Upload Axe results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: axe-accessibility-results
          path: src/Web/StellaOps.Web/test-results/
          retention-days: 30

@@ -21,7 +21,7 @@ jobs:
   lnm-backfill:
     runs-on: ubuntu-22.04
     env:
-      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+      DOTNET_VERSION: '10.0.100'
       ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
     steps:
       - name: Checkout

.gitea/workflows/lnm-migration-ci.yml (new file, 83 lines)
@@ -0,0 +1,83 @@
name: LNM Migration CI

on:
  workflow_dispatch:
    inputs:
      run_staging:
        description: 'Run staging backfill (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
      - 'ops/devops/lnm/**'

jobs:
  build-runner:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${{ secrets.COSIGN_PRIVATE_KEY_B64 }}" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
        env:
          COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}

      - name: Build and package runner
        run: |
          chmod +x ops/devops/lnm/package-runner.sh
          ops/devops/lnm/package-runner.sh

      - name: Verify checksums
        run: |
          cd out/lnm
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-migration-runner-${{ github.run_number }}
          path: |
            out/lnm/lnm-migration-runner.tar.gz
            out/lnm/lnm-migration-runner.manifest.json
            out/lnm/lnm-migration-runner.dsse.json
            out/lnm/SHA256SUMS
          if-no-files-found: warn

  validate-metrics:
    runs-on: ubuntu-22.04
    needs: build-runner
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate monitoring config
        run: |
          # Validate alert rules syntax
          if [ -f "ops/devops/lnm/alerts/lnm-alerts.yaml" ]; then
            echo "Validating alert rules..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/lnm/alerts/lnm-alerts.yaml'))"
          fi

          # Validate dashboard JSON
          if [ -f "ops/devops/lnm/dashboards/lnm-migration.json" ]; then
            echo "Validating dashboard..."
            python3 -c "import json; json.load(open('ops/devops/lnm/dashboards/lnm-migration.json'))"
          fi

          echo "Monitoring config validation complete"

@@ -25,7 +25,7 @@ jobs:
   vex-backfill:
     runs-on: ubuntu-22.04
     env:
-      DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+      DOTNET_VERSION: '10.0.100'
       ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
     steps:
       - name: Checkout

.gitea/workflows/manifest-integrity.yml (new file, 125 lines)
@@ -0,0 +1,125 @@
name: Manifest Integrity

on:
  push:
    branches: [main]
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'
  pull_request:
    paths:
      - 'docs/**/*.schema.json'
      - 'docs/contracts/**'
      - 'docs/schemas/**'
      - 'scripts/packs/**'

jobs:
  validate-schemas:
    name: Validate Schema Integrity
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: npm install -g ajv-cli ajv-formats

      - name: Validate JSON schemas
        run: |
          EXIT_CODE=0
          for schema in docs/schemas/*.schema.json; do
            echo "Validating $schema..."
            if ! ajv compile -s "$schema" --spec=draft2020 2>/dev/null; then
              echo "Error: $schema is invalid"
              EXIT_CODE=1
            fi
          done
          exit $EXIT_CODE

  validate-contracts:
    name: Validate Contract Documents
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check contract structure
        run: |
          for contract in docs/contracts/*.md; do
            echo "Checking $contract..."
            # Verify required sections exist
            if ! grep -q "^## " "$contract"; then
              echo "Warning: $contract missing section headers"
            fi
            # Check for decision ID
            if grep -q "Decision ID" "$contract" && ! grep -q "DECISION-\|CONTRACT-" "$contract"; then
              echo "Warning: $contract missing decision ID format"
            fi
          done

  validate-pack-fixtures:
    name: Validate Pack Fixtures
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: pip install jsonschema

      - name: Run fixture validation
        run: |
          if [ -f scripts/packs/run-fixtures-check.sh ]; then
            chmod +x scripts/packs/run-fixtures-check.sh
            ./scripts/packs/run-fixtures-check.sh
          fi

  checksum-audit:
    name: Audit SHA256SUMS Files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Validate checksums
        run: |
          find . -name "SHA256SUMS" -type f | while read f; do
            dir=$(dirname "$f")
            echo "Validating checksums in $dir..."
            cd "$dir"
            # Check if all referenced files exist
            while read hash file; do
              if [ ! -f "$file" ]; then
                echo "Warning: $file referenced in SHA256SUMS but not found"
              fi
            done < SHA256SUMS
            cd - > /dev/null
          done

  merkle-consistency:
    name: Verify Merkle Roots
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Check DSSE Merkle roots
        run: |
          find . -name "*.dsse.json" -type f | while read f; do
            echo "Checking Merkle root in $f..."
            # Extract and validate Merkle root if present
            if jq -e '.payload' "$f" > /dev/null 2>&1; then
              PAYLOAD=$(jq -r '.payload' "$f" | base64 -d 2>/dev/null || echo "")
              if echo "$PAYLOAD" | jq -e '._stellaops.merkleRoot' > /dev/null 2>&1; then
                MERKLE=$(echo "$PAYLOAD" | jq -r '._stellaops.merkleRoot')
                echo " Merkle root: $MERKLE"
              fi
            fi
          done

@@ -18,10 +18,18 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Fallback to dev signing key when secret is absent (non-prod only)
+        run: |
+          if [ -z "${MIRROR_SIGN_KEY_B64}" ]; then
+            echo "[warn] MIRROR_SIGN_KEY_B64 not set; using repo dev key for non-production signing."
+            echo "MIRROR_SIGN_KEY_B64=$(base64 -w0 tools/cosign/cosign.dev.key)" >> $GITHUB_ENV
+            echo "REQUIRE_PROD_SIGNING=0" >> $GITHUB_ENV
+          fi
+
       - name: Setup .NET
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: 10.0.100-rc.2.25502.107
+          dotnet-version: 10.0.100
           include-prerelease: true
 
       - name: Task Pack offline bundle fixtures
@@ -38,6 +46,16 @@ jobs:
         run: |
           scripts/mirror/verify_thin_bundle.py out/mirror/thin/mirror-thin-v1.tar.gz
 
+      - name: Prepare Export Center handoff (metadata + optional schedule)
+        run: |
+          scripts/mirror/export-center-wire.sh
+        env:
+          EXPORT_CENTER_BASE_URL: ${{ secrets.EXPORT_CENTER_BASE_URL }}
+          EXPORT_CENTER_TOKEN: ${{ secrets.EXPORT_CENTER_TOKEN }}
+          EXPORT_CENTER_TENANT: ${{ secrets.EXPORT_CENTER_TENANT }}
+          EXPORT_CENTER_PROJECT: ${{ secrets.EXPORT_CENTER_PROJECT }}
+          EXPORT_CENTER_AUTO_SCHEDULE: ${{ secrets.EXPORT_CENTER_AUTO_SCHEDULE }}
+
       - name: Upload signed artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -49,5 +67,8 @@ jobs:
             out/mirror/thin/tuf/
             out/mirror/thin/oci/
             out/mirror/thin/milestone.json
+            out/mirror/thin/export-center/export-center-handoff.json
+            out/mirror/thin/export-center/export-center-targets.json
+            out/mirror/thin/export-center/schedule-response.json
           if-no-files-found: error
           retention-days: 14

.gitea/workflows/mock-dev-release.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
name: mock-dev-release

on:
  push:
    paths:
      - deploy/releases/2025.09-mock-dev.yaml
      - deploy/downloads/manifest.json
      - ops/devops/mock-release/**
  workflow_dispatch:

jobs:
  package-mock-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Package mock dev artefacts
        run: |
          set -euo pipefail
          mkdir -p out/mock-release
          cp deploy/releases/2025.09-mock-dev.yaml out/mock-release/
          cp deploy/downloads/manifest.json out/mock-release/
          tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .

      - name: Compose config (dev + mock overlay)
        run: |
          set -euo pipefail
          ops/devops/mock-release/config_check.sh

      - name: Helm template (mock overlay)
        run: |
          set -euo pipefail
          helm template mock ./deploy/helm/stellaops -f deploy/helm/stellaops/values-mock.yaml > /tmp/helm-mock.yaml
          ls -lh /tmp/helm-mock.yaml

      - name: Upload mock release bundle
        uses: actions/upload-artifact@v3
        with:
          name: mock-dev-release
          path: |
            out/mock-release/mock-dev-release.tgz
            /tmp/compose-mock-config.yaml
            /tmp/helm-mock.yaml

.gitea/workflows/notify-smoke-test.yml (new file, 102 lines)
@@ -0,0 +1,102 @@
name: Notify Smoke Test

on:
  push:
    branches: [main]
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  pull_request:
    paths:
      - 'src/Notify/**'
      - 'src/Notifier/**'
  workflow_dispatch:

env:
  DOTNET_VERSION: '10.0.x'

jobs:
  unit-tests:
    name: Notify Unit Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notify/

      - name: Build
        run: dotnet build src/Notify/ --no-restore

      - name: Run tests
        run: dotnet test src/Notify/ --no-build --verbosity normal

  notifier-tests:
    name: Notifier Service Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Restore dependencies
        run: dotnet restore src/Notifier/

      - name: Build
        run: dotnet build src/Notifier/ --no-restore

      - name: Run tests
        run: dotnet test src/Notifier/ --no-build --verbosity normal

  smoke-test:
    name: Notification Smoke Test
    runs-on: ubuntu-latest
    needs: [unit-tests, notifier-tests]
    services:
      mongodb:
        image: mongo:7.0
        ports:
          - 27017:27017
    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ env.DOTNET_VERSION }}

      - name: Build Notifier
        run: dotnet build src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/

      - name: Start service
        run: |
          dotnet run --project src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/ &
          sleep 10

      - name: Health check
        run: |
          for i in {1..30}; do
            if curl -s http://localhost:5000/health > /dev/null; then
              echo "Service is healthy"
              exit 0
            fi
            sleep 1
          done
          echo "Service failed to start"
          exit 1

      - name: Test notification endpoint
        run: |
          # Test dry-run notification
          curl -X POST http://localhost:5000/api/v1/notifications/test \
            -H "Content-Type: application/json" \
            -d '{"channel": "log", "message": "Smoke test", "dryRun": true}' \
            || echo "Warning: Notification test endpoint not available"

@@ -35,7 +35,7 @@ jobs:
       - name: Setup .NET 10 RC
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: 10.0.100-rc.2.25502.107
+          dotnet-version: 10.0.100
           include-prerelease: true
 
       - name: Cache NuGet packages

@@ -36,7 +36,7 @@ jobs:
       - name: Setup .NET 10 RC
         uses: actions/setup-dotnet@v4
         with:
-          dotnet-version: 10.0.100-rc.2.25502.107
+          dotnet-version: 10.0.100
           include-prerelease: true
 
       - name: Install Cosign

.gitea/workflows/reachability-bench.yaml (new file, 306 lines)
@@ -0,0 +1,306 @@
name: Reachability Benchmark

# Sprint: SPRINT_3500_0003_0001
# Task: CORPUS-009 - Create Gitea workflow for reachability benchmark
# Task: CORPUS-010 - Configure nightly + per-PR benchmark runs

on:
  workflow_dispatch:
    inputs:
      baseline_version:
        description: 'Baseline version to compare against'
        required: false
        default: 'latest'
      verbose:
        description: 'Enable verbose output'
        required: false
        type: boolean
        default: false
  push:
    branches: [ main ]
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
      - '.gitea/workflows/reachability-bench.yaml'
  pull_request:
    paths:
      - 'datasets/reachability/**'
      - 'src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/**'
      - 'bench/reachability-benchmark/**'
  schedule:
    # Nightly at 02:00 UTC
    - cron: '0 2 * * *'

jobs:
  benchmark:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
      STELLAOPS_OFFLINE: 'true'
      STELLAOPS_DETERMINISTIC: 'true'
    outputs:
      precision: ${{ steps.metrics.outputs.precision }}
      recall: ${{ steps.metrics.outputs.recall }}
      f1: ${{ steps.metrics.outputs.f1 }}
      pr_auc: ${{ steps.metrics.outputs.pr_auc }}
      regression: ${{ steps.compare.outputs.regression }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Cache NuGet packages
        uses: actions/cache@v4
        with:
          path: ~/.nuget/packages
          key: ${{ runner.os }}-nuget-${{ hashFiles('**/*.csproj') }}
          restore-keys: |
            ${{ runner.os }}-nuget-

      - name: Restore benchmark project
        run: |
          dotnet restore src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            --configfile nuget.config

      - name: Build benchmark project
        run: |
          dotnet build src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-restore

      - name: Validate corpus integrity
        run: |
          echo "::group::Validating corpus index"
          if [ ! -f datasets/reachability/corpus.json ]; then
            echo "::error::corpus.json not found"
            exit 1
          fi
          python3 -c "import json; data = json.load(open('datasets/reachability/corpus.json')); print(f'Corpus contains {len(data.get(\"samples\", []))} samples')"
          echo "::endgroup::"

      - name: Run benchmark
        id: benchmark
        run: |
          echo "::group::Running reachability benchmark"
          mkdir -p bench/results

          # Run the corpus benchmark
          dotnet run \
            --project src/Scanner/__Libraries/StellaOps.Scanner.Benchmarks/StellaOps.Scanner.Benchmarks.csproj \
            -c Release \
            --no-build \
            -- corpus run \
            --corpus datasets/reachability/corpus.json \
            --output bench/results/benchmark-${{ github.sha }}.json \
            --format json \
            ${{ inputs.verbose == 'true' && '--verbose' || '' }}

          echo "::endgroup::"

      - name: Extract metrics
        id: metrics
        run: |
          echo "::group::Extracting metrics"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          if [ -f "$RESULT_FILE" ]; then
            PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
            RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
            F1=$(jq -r '.metrics.f1 // 0' "$RESULT_FILE")
            PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

            echo "precision=$PRECISION" >> $GITHUB_OUTPUT
            echo "recall=$RECALL" >> $GITHUB_OUTPUT
            echo "f1=$F1" >> $GITHUB_OUTPUT
            echo "pr_auc=$PR_AUC" >> $GITHUB_OUTPUT

            echo "Precision: $PRECISION"
            echo "Recall: $RECALL"
            echo "F1: $F1"
            echo "PR-AUC: $PR_AUC"
          else
            echo "::error::Benchmark result file not found"
            exit 1
          fi
          echo "::endgroup::"

      - name: Get baseline
        id: baseline
        run: |
          echo "::group::Loading baseline"
          BASELINE_VERSION="${{ inputs.baseline_version || 'latest' }}"

          if [ "$BASELINE_VERSION" = "latest" ]; then
            BASELINE_FILE=$(ls -t bench/baselines/*.json 2>/dev/null | head -1)
          else
            BASELINE_FILE="bench/baselines/$BASELINE_VERSION.json"
          fi

          if [ -f "$BASELINE_FILE" ]; then
            echo "baseline_file=$BASELINE_FILE" >> $GITHUB_OUTPUT
            echo "Using baseline: $BASELINE_FILE"
          else
            echo "::warning::No baseline found, skipping comparison"
            echo "baseline_file=" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Compare to baseline
        id: compare
        if: steps.baseline.outputs.baseline_file != ''
        run: |
          echo "::group::Comparing to baseline"
          BASELINE_FILE="${{ steps.baseline.outputs.baseline_file }}"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"

          # Extract baseline metrics
          BASELINE_PRECISION=$(jq -r '.metrics.precision // 0' "$BASELINE_FILE")
          BASELINE_RECALL=$(jq -r '.metrics.recall // 0' "$BASELINE_FILE")
          BASELINE_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$BASELINE_FILE")

          # Extract current metrics
          CURRENT_PRECISION=$(jq -r '.metrics.precision // 0' "$RESULT_FILE")
          CURRENT_RECALL=$(jq -r '.metrics.recall // 0' "$RESULT_FILE")
          CURRENT_PR_AUC=$(jq -r '.metrics.pr_auc // 0' "$RESULT_FILE")

          # Calculate deltas
          PRECISION_DELTA=$(echo "$CURRENT_PRECISION - $BASELINE_PRECISION" | bc -l)
          RECALL_DELTA=$(echo "$CURRENT_RECALL - $BASELINE_RECALL" | bc -l)
          PR_AUC_DELTA=$(echo "$CURRENT_PR_AUC - $BASELINE_PR_AUC" | bc -l)

          echo "Precision delta: $PRECISION_DELTA"
          echo "Recall delta: $RECALL_DELTA"
          echo "PR-AUC delta: $PR_AUC_DELTA"

          # Check for regression (PR-AUC drop > 2%)
          REGRESSION_THRESHOLD=-0.02
          if (( $(echo "$PR_AUC_DELTA < $REGRESSION_THRESHOLD" | bc -l) )); then
            echo "::error::PR-AUC regression detected: $PR_AUC_DELTA (threshold: $REGRESSION_THRESHOLD)"
            echo "regression=true" >> $GITHUB_OUTPUT
          else
            echo "regression=false" >> $GITHUB_OUTPUT
          fi
          echo "::endgroup::"

      - name: Generate markdown report
        run: |
          echo "::group::Generating report"
          RESULT_FILE="bench/results/benchmark-${{ github.sha }}.json"
          REPORT_FILE="bench/results/benchmark-${{ github.sha }}.md"

          cat > "$REPORT_FILE" << 'EOF'
          # Reachability Benchmark Report

          **Commit:** ${{ github.sha }}
          **Run:** ${{ github.run_number }}
          **Date:** $(date -u +"%Y-%m-%dT%H:%M:%SZ")

          ## Metrics

          | Metric | Value |
          |--------|-------|
          | Precision | ${{ steps.metrics.outputs.precision }} |
          | Recall | ${{ steps.metrics.outputs.recall }} |
          | F1 Score | ${{ steps.metrics.outputs.f1 }} |
          | PR-AUC | ${{ steps.metrics.outputs.pr_auc }} |

          ## Comparison

          ${{ steps.compare.outputs.regression == 'true' && '⚠️ **REGRESSION DETECTED**' || '✅ No regression' }}
          EOF

          echo "Report generated: $REPORT_FILE"
          echo "::endgroup::"

      - name: Upload results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: |
            bench/results/benchmark-${{ github.sha }}.json
            bench/results/benchmark-${{ github.sha }}.md
          retention-days: 90

      - name: Fail on regression
        if: steps.compare.outputs.regression == 'true' && github.event_name == 'pull_request'
        run: |
          echo "::error::Benchmark regression detected. PR-AUC dropped below threshold."
          exit 1

  update-baseline:
    needs: benchmark
    if: github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.benchmark.outputs.regression != 'true'
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download results
        uses: actions/download-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: bench/results/

      - name: Update baseline (nightly only)
        if: github.event_name == 'schedule'
        run: |
          DATE=$(date +%Y%m%d)
          cp bench/results/benchmark-${{ github.sha }}.json bench/baselines/baseline-$DATE.json
          echo "Updated baseline to baseline-$DATE.json"

  notify-pr:
    needs: benchmark
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-22.04
    permissions:
      pull-requests: write
    steps:
      - name: Comment on PR
        uses: actions/github-script@v7
        with:
          script: |
            const precision = '${{ needs.benchmark.outputs.precision }}';
            const recall = '${{ needs.benchmark.outputs.recall }}';
            const f1 = '${{ needs.benchmark.outputs.f1 }}';
            const prAuc = '${{ needs.benchmark.outputs.pr_auc }}';
            const regression = '${{ needs.benchmark.outputs.regression }}' === 'true';

            const status = regression ? '⚠️ REGRESSION' : '✅ PASS';

            const body = `## Reachability Benchmark Results ${status}

            | Metric | Value |
            |--------|-------|
            | Precision | ${precision} |
            | Recall | ${recall} |
            | F1 Score | ${f1} |
            | PR-AUC | ${prAuc} |

            ${regression ? '### ⚠️ Regression Detected\nPR-AUC dropped below threshold. Please review changes.' : ''}

            <details>
            <summary>Details</summary>

            - Commit: \`${{ github.sha }}\`
            - Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

            </details>`;

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

.gitea/workflows/reachability-corpus-ci.yml (new file, 267 lines)
@@ -0,0 +1,267 @@
name: Reachability Corpus Validation

on:
  workflow_dispatch:
  push:
    branches: [ main ]
    paths:
      - 'tests/reachability/corpus/**'
      - 'tests/reachability/fixtures/**'
      - 'tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'
  pull_request:
    paths:
      - 'tests/reachability/corpus/**'
      - 'tests/reachability/fixtures/**'
      - 'tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'

jobs:
  validate-corpus:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Verify corpus manifest integrity
        run: |
          echo "Verifying corpus manifest..."
          cd tests/reachability/corpus
          if [ ! -f manifest.json ]; then
            echo "::error::Corpus manifest.json not found"
            exit 1
          fi
          echo "Manifest exists, checking JSON validity..."
          python3 -c "import json; json.load(open('manifest.json'))"
          echo "Manifest is valid JSON"

      - name: Verify reachbench index integrity
        run: |
          echo "Verifying reachbench fixtures..."
          cd tests/reachability/fixtures/reachbench-2025-expanded
          if [ ! -f INDEX.json ]; then
            echo "::error::Reachbench INDEX.json not found"
            exit 1
          fi
          echo "INDEX exists, checking JSON validity..."
          python3 -c "import json; json.load(open('INDEX.json'))"
          echo "INDEX is valid JSON"

      - name: Restore test project
        run: dotnet restore tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj --configfile nuget.config

      - name: Build test project
        run: dotnet build tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj -c Release --no-restore

      - name: Run corpus fixture tests
        run: |
          dotnet test tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=corpus-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~CorpusFixtureTests"

      - name: Run reachbench fixture tests
        run: |
          dotnet test tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=reachbench-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~ReachbenchFixtureTests"

      - name: Verify deterministic hashes
        run: |
          echo "Verifying SHA-256 hashes in corpus manifest..."
          chmod +x scripts/reachability/verify_corpus_hashes.sh || true
          if [ -f scripts/reachability/verify_corpus_hashes.sh ]; then
            scripts/reachability/verify_corpus_hashes.sh
          else
            echo "Hash verification script not found, using inline verification..."
            cd tests/reachability/corpus
            python3 << 'EOF'
          import json
          import hashlib
          import sys
          import os

          with open('manifest.json') as f:
              manifest = json.load(f)

          errors = []
          for entry in manifest:
              case_id = entry['id']
              lang = entry['language']
              case_dir = os.path.join(lang, case_id)
              for filename, expected_hash in entry['files'].items():
                  filepath = os.path.join(case_dir, filename)
                  if not os.path.exists(filepath):
                      errors.append(f"{case_id}: missing {filename}")
                      continue
                  with open(filepath, 'rb') as f:
                      actual_hash = hashlib.sha256(f.read()).hexdigest()
                  if actual_hash != expected_hash:
                      errors.append(f"{case_id}: {filename} hash mismatch (expected {expected_hash}, got {actual_hash})")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print(f"All {len(manifest)} corpus entries verified")
          EOF
          fi

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: corpus-test-results-${{ github.run_number }}
          path: ./TestResults/*.trx
          retention-days: 14

  validate-ground-truths:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate ground-truth schema version
        run: |
          echo "Validating ground-truth files..."
          cd tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          EXPECTED_SCHEMA = "reachbench.reachgraph.truth/v1"
          ALLOWED_VARIANTS = {"reachable", "unreachable"}
          errors = []

          # Validate corpus ground-truths
          corpus_manifest = 'corpus/manifest.json'
          if os.path.exists(corpus_manifest):
              with open(corpus_manifest) as f:
                  manifest = json.load(f)
              for entry in manifest:
                  case_id = entry['id']
                  lang = entry['language']
                  truth_path = os.path.join('corpus', lang, case_id, 'ground-truth.json')
                  if not os.path.exists(truth_path):
                      errors.append(f"corpus/{case_id}: missing ground-truth.json")
                      continue
                  with open(truth_path) as f:
                      truth = json.load(f)
                  if truth.get('schema_version') != EXPECTED_SCHEMA:
                      errors.append(f"corpus/{case_id}: wrong schema_version")
                  if truth.get('variant') not in ALLOWED_VARIANTS:
                      errors.append(f"corpus/{case_id}: invalid variant '{truth.get('variant')}'")
                  if not isinstance(truth.get('paths'), list):
                      errors.append(f"corpus/{case_id}: paths must be an array")

          # Validate reachbench ground-truths
          reachbench_index = 'fixtures/reachbench-2025-expanded/INDEX.json'
          if os.path.exists(reachbench_index):
              with open(reachbench_index) as f:
                  index = json.load(f)
              for case in index.get('cases', []):
                  case_id = case['id']
                  case_path = case.get('path', os.path.join('cases', case_id))
                  for variant in ['reachable', 'unreachable']:
                      truth_path = os.path.join('fixtures/reachbench-2025-expanded', case_path, 'images', variant, 'reachgraph.truth.json')
                      if not os.path.exists(truth_path):
                          errors.append(f"reachbench/{case_id}/{variant}: missing reachgraph.truth.json")
                          continue
                      with open(truth_path) as f:
                          truth = json.load(f)
                      if not truth.get('schema_version'):
                          errors.append(f"reachbench/{case_id}/{variant}: missing schema_version")
                      if not isinstance(truth.get('paths'), list):
                          errors.append(f"reachbench/{case_id}/{variant}: paths must be an array")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print("All ground-truth files validated successfully")
          EOF

  determinism-check:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    needs: validate-corpus
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Verify JSON determinism (sorted keys, no trailing whitespace)
        run: |
          echo "Checking JSON determinism..."
          cd tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          def check_json_sorted(filepath):
              """Check if JSON has sorted keys (deterministic)."""
              with open(filepath) as f:
                  content = f.read()
              parsed = json.loads(content)
              reserialized = json.dumps(parsed, sort_keys=True, indent=2)
              # Normalize line endings
              content_normalized = content.replace('\r\n', '\n').strip()
              reserialized_normalized = reserialized.strip()
              return content_normalized == reserialized_normalized

          errors = []
          json_files = []

          # Collect JSON files from corpus
          for root, dirs, files in os.walk('corpus'):
              for f in files:
                  if f.endswith('.json'):
                      json_files.append(os.path.join(root, f))

          # Check determinism
          non_deterministic = []
          for filepath in json_files:
              try:
                  if not check_json_sorted(filepath):
                      non_deterministic.append(filepath)
              except json.JSONDecodeError as e:
                  errors.append(f"{filepath}: invalid JSON - {e}")

          if non_deterministic:
              print(f"::warning::Found {len(non_deterministic)} non-deterministic JSON files (keys not sorted or whitespace differs)")
              for f in non_deterministic[:10]:
                  print(f" - {f}")
              if len(non_deterministic) > 10:
                  print(f" ... and {len(non_deterministic) - 10} more")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)

          print(f"Checked {len(json_files)} JSON files")
          EOF

.gitea/workflows/release-manifest-verify.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
name: release-manifest-verify

on:
  push:
    paths:
      - deploy/releases/2025.09-stable.yaml
      - deploy/releases/2025.09-airgap.yaml
      - deploy/downloads/manifest.json
      - ops/devops/release/check_release_manifest.py
  workflow_dispatch:

jobs:
  verify:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Validate release & downloads manifests
        run: |
          python ops/devops/release/check_release_manifest.py

120  .gitea/workflows/release-validation.yml  Normal file
@@ -0,0 +1,120 @@
name: Release Validation

on:
push:
tags:
- 'v*'
pull_request:
paths:
- 'deploy/**'
- 'scripts/release/**'
workflow_dispatch:

env:
DOTNET_VERSION: '10.0.x'
REGISTRY: ghcr.io
IMAGE_PREFIX: stellaops

jobs:
validate-manifests:
name: Validate Release Manifests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Validate Helm charts
run: |
helm lint deploy/helm/stellaops
helm template stellaops deploy/helm/stellaops --dry-run

- name: Validate Kubernetes manifests
run: |
for f in deploy/k8s/*.yaml; do
kubectl apply --dry-run=client -f "$f" || exit 1
done

- name: Check required images exist
run: |
REQUIRED_IMAGES=(
"concelier"
"scanner"
"authority"
"signer"
"attestor"
"excititor"
"policy"
"scheduler"
"notify"
)
for img in "${REQUIRED_IMAGES[@]}"; do
echo "Checking $img..."
# Validate Dockerfile exists
if [ ! -f "src/${img^}/Dockerfile" ] && [ ! -f "deploy/docker/${img}/Dockerfile" ]; then
echo "Warning: Dockerfile not found for $img"
fi
done

validate-checksums:
name: Validate Artifact Checksums
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Verify SHA256SUMS files
run: |
find . -name "SHA256SUMS" -type f | while read f; do
dir=$(dirname "$f")
echo "Validating $f..."
cd "$dir"
if ! sha256sum -c SHA256SUMS --quiet 2>/dev/null; then
echo "Warning: Checksum mismatch in $dir"
fi
cd - > /dev/null
done

validate-schemas:
name: Validate Schema Integrity
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'

- name: Install ajv-cli
run: npm install -g ajv-cli ajv-formats

- name: Validate JSON schemas
run: |
for schema in docs/schemas/*.schema.json; do
echo "Validating $schema..."
ajv compile -s "$schema" --spec=draft2020 || echo "Warning: $schema validation issue"
done

release-notes:
name: Generate Release Notes
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/v')
needs: [validate-manifests, validate-checksums, validate-schemas]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Generate changelog
run: |
PREV_TAG=$(git describe --abbrev=0 --tags HEAD^ 2>/dev/null || echo "")
if [ -n "$PREV_TAG" ]; then
echo "## Changes since $PREV_TAG" > RELEASE_NOTES.md
git log --pretty=format:"- %s (%h)" "$PREV_TAG"..HEAD >> RELEASE_NOTES.md
else
echo "## Initial Release" > RELEASE_NOTES.md
fi

- name: Upload release notes
uses: actions/upload-artifact@v4
with:
name: release-notes
path: RELEASE_NOTES.md
@@ -36,7 +36,7 @@ jobs:
build-release:
runs-on: ubuntu-22.04
env:
- DOTNET_VERSION: '10.0.100-rc.1.25451.107'
+ DOTNET_VERSION: '10.0.100'
REGISTRY: registry.stella-ops.org
steps:
- name: Checkout repository
198  .gitea/workflows/risk-bundle-ci.yml  Normal file
@@ -0,0 +1,198 @@
name: Risk Bundle CI

on:
push:
branches: [ main ]
paths:
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
- 'ops/devops/risk-bundle/**'
- '.gitea/workflows/risk-bundle-ci.yml'
- 'docs/modules/export-center/operations/risk-bundle-*.md'
pull_request:
branches: [ main, develop ]
paths:
- 'src/ExportCenter/StellaOps.ExportCenter.RiskBundles/**'
- 'src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/**'
- 'ops/devops/risk-bundle/**'
- '.gitea/workflows/risk-bundle-ci.yml'
- 'docs/modules/export-center/operations/risk-bundle-*.md'
workflow_dispatch:
inputs:
include_osv:
description: 'Include OSV providers (larger bundle)'
type: boolean
default: false
publish_checksums:
description: 'Publish checksums to artifact store'
type: boolean
default: true

jobs:
risk-bundle-build:
runs-on: ubuntu-22.04
env:
DOTNET_VERSION: '10.0.100'
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
BUNDLE_OUTPUT: ${{ github.workspace }}/.artifacts/risk-bundle
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Export OpenSSL 1.1 shim for Mongo2Go
run: scripts/enable-openssl11-shim.sh

- name: Set up .NET SDK
uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ env.DOTNET_VERSION }}
include-prerelease: true

- name: Restore
run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj

- name: Build
run: dotnet build src/ExportCenter/StellaOps.ExportCenter.RiskBundles/StellaOps.ExportCenter.RiskBundles.csproj -c Release /p:ContinuousIntegrationBuild=true

- name: Test RiskBundle unit tests
run: |
mkdir -p $ARTIFACT_DIR
dotnet test src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj \
-c Release \
--filter "FullyQualifiedName~RiskBundle" \
--logger "trx;LogFileName=risk-bundle-tests.trx" \
--results-directory $ARTIFACT_DIR

- name: Build risk bundle (fixtures)
run: |
mkdir -p $BUNDLE_OUTPUT
ops/devops/risk-bundle/build-bundle.sh --output "$BUNDLE_OUTPUT" --fixtures-only

- name: Verify bundle integrity
run: ops/devops/risk-bundle/verify-bundle.sh "$BUNDLE_OUTPUT/risk-bundle.tar.gz"

- name: Generate checksums
run: |
cd $BUNDLE_OUTPUT
sha256sum risk-bundle.tar.gz > risk-bundle.tar.gz.sha256
sha256sum manifest.json > manifest.json.sha256
cat risk-bundle.tar.gz.sha256 manifest.json.sha256 > checksums.txt
echo "Bundle checksums:"
cat checksums.txt

- name: Upload risk bundle artifacts
uses: actions/upload-artifact@v4
with:
name: risk-bundle-artifacts
path: |
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz
${{ env.BUNDLE_OUTPUT }}/risk-bundle.tar.gz.sig
${{ env.BUNDLE_OUTPUT }}/manifest.json
${{ env.BUNDLE_OUTPUT }}/checksums.txt
${{ env.ARTIFACT_DIR }}/*.trx

- name: Upload test results
uses: actions/upload-artifact@v4
if: always()
with:
name: risk-bundle-test-results
path: ${{ env.ARTIFACT_DIR }}/*.trx

risk-bundle-offline-kit:
runs-on: ubuntu-22.04
needs: risk-bundle-build
env:
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
OFFLINE_KIT_DIR: ${{ github.workspace }}/.artifacts/offline-kit
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Download risk bundle artifacts
uses: actions/download-artifact@v4
with:
name: risk-bundle-artifacts
path: ${{ env.ARTIFACT_DIR }}

- name: Package for offline kit
run: |
mkdir -p $OFFLINE_KIT_DIR/risk-bundles
cp $ARTIFACT_DIR/risk-bundle.tar.gz $OFFLINE_KIT_DIR/risk-bundles/
cp $ARTIFACT_DIR/risk-bundle.tar.gz.sig $OFFLINE_KIT_DIR/risk-bundles/ 2>/dev/null || true
cp $ARTIFACT_DIR/manifest.json $OFFLINE_KIT_DIR/risk-bundles/
cp $ARTIFACT_DIR/checksums.txt $OFFLINE_KIT_DIR/risk-bundles/

# Create offline kit manifest entry
cat > $OFFLINE_KIT_DIR/risk-bundles/kit-manifest.json <<EOF
{
"component": "risk-bundle",
"version": "$(date -u +%Y%m%d-%H%M%S)",
"files": [
{"path": "risk-bundle.tar.gz", "checksum_file": "risk-bundle.tar.gz.sha256"},
{"path": "manifest.json", "checksum_file": "manifest.json.sha256"}
],
"verification": {
"checksums": "checksums.txt",
"signature": "risk-bundle.tar.gz.sig"
}
}
EOF

- name: Verify offline kit structure
run: |
echo "Offline kit structure:"
find $OFFLINE_KIT_DIR -type f
echo ""
echo "Checksum verification:"
cd $OFFLINE_KIT_DIR/risk-bundles
sha256sum -c checksums.txt

- name: Upload offline kit
uses: actions/upload-artifact@v4
with:
name: risk-bundle-offline-kit
path: ${{ env.OFFLINE_KIT_DIR }}

publish-checksums:
runs-on: ubuntu-22.04
needs: risk-bundle-build
if: github.ref == 'refs/heads/main' && (github.event_name == 'push' || github.event.inputs.publish_checksums == 'true')
env:
ARTIFACT_DIR: ${{ github.workspace }}/.artifacts
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Download risk bundle artifacts
uses: actions/download-artifact@v4
with:
name: risk-bundle-artifacts
path: ${{ env.ARTIFACT_DIR }}

- name: Publish checksums
run: |
echo "Publishing checksums for risk bundle..."
CHECKSUM_DIR=out/checksums/risk-bundle/$(date -u +%Y-%m-%d)
mkdir -p $CHECKSUM_DIR
cp $ARTIFACT_DIR/checksums.txt $CHECKSUM_DIR/
cp $ARTIFACT_DIR/manifest.json $CHECKSUM_DIR/

# Create latest symlink manifest
cat > out/checksums/risk-bundle/latest.json <<EOF
{
"date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"path": "$(date -u +%Y-%m-%d)/checksums.txt",
"manifest": "$(date -u +%Y-%m-%d)/manifest.json"
}
EOF

echo "Checksums published to $CHECKSUM_DIR"
cat $CHECKSUM_DIR/checksums.txt

- name: Upload published checksums
uses: actions/upload-artifact@v4
with:
name: risk-bundle-published-checksums
path: out/checksums/risk-bundle/
@@ -20,7 +20,7 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.100-rc.2.25502.107"
+ dotnet-version: "10.0.100"

- name: Install syft (SBOM)
uses: anchore/sbom-action/download-syft@v0
@@ -34,6 +34,22 @@ jobs:
run: |
RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj ruby-analyzer

+ - name: Package Native analyzer
+ run: |
+ RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Native/StellaOps.Scanner.Analyzers.Native.csproj native-analyzer

+ - name: Package Java analyzer
+ run: |
+ RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj java-analyzer

+ - name: Package DotNet analyzer
+ run: |
+ RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj dotnet-analyzer

+ - name: Package Node analyzer
+ run: |
+ RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj node-analyzer

- name: Upload analyzer artifacts
uses: actions/upload-artifact@v4
with:
133  .gitea/workflows/scanner-analyzers.yml  Normal file
@@ -0,0 +1,133 @@
name: Scanner Analyzers

on:
push:
branches: [main]
paths:
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
pull_request:
paths:
- 'src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.*/**'
- 'src/Scanner/__Tests/StellaOps.Scanner.Analyzers.*/**'
workflow_dispatch:

env:
DOTNET_VERSION: '10.0.x'

jobs:
discover-analyzers:
name: Discover Analyzers
runs-on: ubuntu-latest
outputs:
analyzers: ${{ steps.find.outputs.analyzers }}
steps:
- uses: actions/checkout@v4

- name: Find analyzer projects
id: find
run: |
ANALYZERS=$(find src/Scanner/__Libraries -name "StellaOps.Scanner.Analyzers.*.csproj" -exec dirname {} \; | xargs -I {} basename {} | sort -u | jq -R -s -c 'split("\n")[:-1]')
echo "analyzers=$ANALYZERS" >> $GITHUB_OUTPUT

build-analyzers:
name: Build Analyzers
runs-on: ubuntu-latest
needs: discover-analyzers
strategy:
fail-fast: false
matrix:
analyzer: ${{ fromJson(needs.discover-analyzers.outputs.analyzers) }}
steps:
- uses: actions/checkout@v4

- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ env.DOTNET_VERSION }}

- name: Restore
run: dotnet restore src/Scanner/__Libraries/${{ matrix.analyzer }}/

- name: Build
run: dotnet build src/Scanner/__Libraries/${{ matrix.analyzer }}/ --no-restore

test-lang-analyzers:
name: Test Language Analyzers
runs-on: ubuntu-latest
needs: build-analyzers
steps:
- uses: actions/checkout@v4

- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ env.DOTNET_VERSION }}

- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'

- name: Run Bun analyzer tests
run: |
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests" ]; then
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/ --verbosity normal
fi

- name: Run Node analyzer tests
run: |
if [ -d "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests" ]; then
dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/ --verbosity normal
fi

fixture-validation:
name: Validate Test Fixtures
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4

- name: Validate fixture structure
run: |
find src/Scanner/__Tests -name "expected.json" | while read f; do
echo "Validating $f..."
if ! jq empty "$f" 2>/dev/null; then
echo "Error: Invalid JSON in $f"
exit 1
fi
done

- name: Check fixture completeness
run: |
find src/Scanner/__Tests -type d -name "Fixtures" | while read fixtures_dir; do
echo "Checking $fixtures_dir..."
find "$fixtures_dir" -mindepth 1 -maxdepth 1 -type d | while read test_case; do
if [ ! -f "$test_case/expected.json" ]; then
echo "Warning: $test_case missing expected.json"
fi
done
done

determinism-check:
name: Verify Deterministic Output
runs-on: ubuntu-latest
needs: test-lang-analyzers
steps:
- uses: actions/checkout@v4

- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ env.DOTNET_VERSION }}

- name: Run determinism tests
run: |
# Run scanner on same input twice, compare outputs
if [ -d "tests/fixtures/determinism" ]; then
dotnet test --filter "Category=Determinism" --verbosity normal
fi
@@ -15,7 +15,7 @@ jobs:
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
- dotnet-version: "10.0.100-rc.2.25502.107"
+ dotnet-version: "10.0.100"

- name: Run determinism harness
run: |
@@ -39,7 +39,7 @@ jobs:
- name: Setup .NET 10 RC
uses: actions/setup-dotnet@v4
with:
- dotnet-version: 10.0.100-rc.2.25502.107
+ dotnet-version: 10.0.100
include-prerelease: true

- name: Cache NuGet packages
@@ -37,7 +37,7 @@ jobs:
- name: Setup .NET 10 RC
uses: actions/setup-dotnet@v4
with:
- dotnet-version: 10.0.100-rc.2.25502.107
+ dotnet-version: 10.0.100
include-prerelease: true

- name: Cache NuGet packages
@@ -28,6 +28,8 @@ jobs:
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-01' }}
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
+ CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
+ EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -42,6 +44,16 @@ jobs:
with:
cosign-release: 'v2.2.4'

+ - name: Check signing key configured
+ run: |
+ if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
+ echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
+ exit 1
+ fi
+ if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
+ echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
+ fi

- name: Verify artifacts exist
run: |
cd docs/modules/signals
@@ -90,9 +102,9 @@ jobs:
retention-days: 90

- name: Push to Evidence Locker
- if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
+ if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
env:
- TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
+ TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
URL: ${{ env.EVIDENCE_LOCKER_URL }}
run: |
tar -cf /tmp/signals-dsse.tar -C "$OUT_DIR" .
@@ -102,7 +114,7 @@ jobs:
echo "Pushed to Evidence Locker"

- name: Evidence Locker skip notice
- if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
+ if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
run: |
echo "::notice::Evidence Locker push skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
echo "Artifacts available as workflow artifact for manual ingestion"
@@ -2,6 +2,14 @@ name: signals-evidence-locker
on:
workflow_dispatch:
inputs:
+ out_dir:
+ description: "Output directory containing signed artifacts"
+ required: false
+ default: "evidence-locker/signals/2025-12-05"
+ allow_dev_key:
+ description: "Allow dev key fallback (1=yes, 0=no)"
+ required: false
+ default: "0"
retention_target:
description: "Retention days target"
required: false
@@ -12,7 +20,12 @@ jobs:
runs-on: ubuntu-latest
env:
MODULE_ROOT: docs/modules/signals
- OUT_DIR: evidence-locker/signals/2025-12-05
+ OUT_DIR: ${{ github.event.inputs.out_dir || 'evidence-locker/signals/2025-12-05' }}
+ COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
+ COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
+ COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
+ EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
+ CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -20,6 +33,31 @@ jobs:
- name: Task Pack offline bundle fixtures
run: python3 scripts/packs/run-fixtures-check.sh

+ - name: Install cosign
+ uses: sigstore/cosign-installer@v3
+ with:
+ cosign-release: 'v2.2.4'

+ - name: Check signing key configured
+ run: |
+ if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
+ echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
+ exit 1
+ fi
+ if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
+ echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
+ fi

+ - name: Verify artifacts exist
+ run: |
+ cd "$MODULE_ROOT"
+ sha256sum -c SHA256SUMS

+ - name: Sign signals artifacts
+ run: |
+ chmod +x tools/cosign/sign-signals.sh
+ OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh

- name: Build deterministic signals evidence tar
run: |
set -euo pipefail
@@ -52,16 +90,17 @@ jobs:
/tmp/signals-evidence.tar.sha256

- name: Push to Evidence Locker
- if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
+ if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
env:
- TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN }}
+ TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
URL: ${{ env.EVIDENCE_LOCKER_URL }}
run: |
- curl -f -X PUT "$URL/signals/2025-12-05/signals-evidence.tar" \
+ upload_path="${OUT_DIR#evidence-locker/}"
+ curl -f -X PUT "$URL/${upload_path}/signals-evidence.tar" \
-H "Authorization: Bearer $TOKEN" \
--data-binary @/tmp/signals-evidence.tar

- name: Skip push (missing secret or URL)
- if: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
+ if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
run: |
echo "Locker push skipped: set CI_EVIDENCE_LOCKER_TOKEN and EVIDENCE_LOCKER_URL to enable." >&2
127  .gitea/workflows/signals-reachability.yml  Normal file
@@ -0,0 +1,127 @@
name: Signals Reachability Scoring & Events

on:
workflow_dispatch:
inputs:
allow_dev_key:
description: "Allow dev signing key fallback (1=yes, 0=no)"
required: false
default: "0"
evidence_out_dir:
description: "Evidence output dir for signing/upload"
required: false
default: "evidence-locker/signals/2025-12-05"
push:
branches: [ main ]
paths:
- 'src/Signals/**'
- 'scripts/signals/reachability-smoke.sh'
- '.gitea/workflows/signals-reachability.yml'
- 'tools/cosign/sign-signals.sh'

jobs:
reachability-smoke:
runs-on: ubuntu-22.04
env:
DOTNET_NOLOGO: 1
DOTNET_CLI_TELEMETRY_OPTOUT: 1
DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
TZ: UTC
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Task Pack offline bundle fixtures
run: python3 scripts/packs/run-fixtures-check.sh

- name: Setup .NET 10 RC
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.100
include-prerelease: true

- name: Restore
run: dotnet restore src/Signals/StellaOps.Signals.sln --configfile nuget.config

- name: Build
run: dotnet build src/Signals/StellaOps.Signals.sln -c Release --no-restore

- name: Reachability scoring + cache/events smoke
run: |
chmod +x scripts/signals/reachability-smoke.sh
scripts/signals/reachability-smoke.sh

sign-and-upload:
runs-on: ubuntu-22.04
needs: reachability-smoke
env:
COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
COSIGN_ALLOW_DEV_KEY: ${{ github.event.inputs.allow_dev_key || '0' }}
OUT_DIR: ${{ github.event.inputs.evidence_out_dir || 'evidence-locker/signals/2025-12-05' }}
CI_EVIDENCE_LOCKER_TOKEN: ${{ secrets.CI_EVIDENCE_LOCKER_TOKEN || vars.CI_EVIDENCE_LOCKER_TOKEN }}
EVIDENCE_LOCKER_URL: ${{ secrets.EVIDENCE_LOCKER_URL || vars.EVIDENCE_LOCKER_URL }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Task Pack offline bundle fixtures
run: python3 scripts/packs/run-fixtures-check.sh

- name: Install cosign
uses: sigstore/cosign-installer@v3
with:
cosign-release: 'v2.2.4'

- name: Check signing key configured
run: |
if [[ -z "$COSIGN_PRIVATE_KEY_B64" && "$COSIGN_ALLOW_DEV_KEY" != "1" ]]; then
echo "::error::COSIGN_PRIVATE_KEY_B64 is missing and dev key fallback is disabled. Set COSIGN_PRIVATE_KEY_B64 (and COSIGN_PASSWORD if needed) or rerun with allow_dev_key=1 for smoke only."
exit 1
fi
if [[ "$COSIGN_ALLOW_DEV_KEY" == "1" ]]; then
echo "::notice::Using dev key for signing (allow_dev_key=1) - not suitable for production uploads."
fi

- name: Verify artifacts exist
run: |
cd docs/modules/signals
sha256sum -c SHA256SUMS

- name: Sign signals artifacts
run: |
chmod +x tools/cosign/sign-signals.sh
OUT_DIR="${OUT_DIR}" tools/cosign/sign-signals.sh

- name: Upload signed artifacts
uses: actions/upload-artifact@v4
with:
name: signals-evidence-${{ github.run_number }}
path: |
${{ env.OUT_DIR }}/*.sigstore.json
${{ env.OUT_DIR }}/*.dsse
${{ env.OUT_DIR }}/SHA256SUMS
if-no-files-found: error
retention-days: 30

- name: Push to Evidence Locker
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN != '' && env.EVIDENCE_LOCKER_URL != '' }}
env:
TOKEN: ${{ env.CI_EVIDENCE_LOCKER_TOKEN }}
URL: ${{ env.EVIDENCE_LOCKER_URL }}
run: |
tar -cf /tmp/signals-evidence.tar -C "$OUT_DIR" .
sha256sum /tmp/signals-evidence.tar
curl -f -X PUT "$URL/signals/$(date -u +%Y-%m-%d)/signals-evidence.tar" \
-H "Authorization: Bearer $TOKEN" \
--data-binary @/tmp/signals-evidence.tar
echo "Uploaded to Evidence Locker"

- name: Evidence Locker skip notice
if: ${{ env.CI_EVIDENCE_LOCKER_TOKEN == '' || env.EVIDENCE_LOCKER_URL == '' }}
run: |
echo "::notice::Evidence Locker upload skipped (CI_EVIDENCE_LOCKER_TOKEN or EVIDENCE_LOCKER_URL not set)"
33  .gitea/workflows/sm-remote-ci.yml  Normal file
@@ -0,0 +1,33 @@
name: sm-remote-ci

on:
push:
paths:
- "src/SmRemote/**"
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
- "ops/sm-remote/**"
- ".gitea/workflows/sm-remote-ci.yml"
pull_request:
paths:
- "src/SmRemote/**"
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote/**"
- "src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/**"
- "ops/sm-remote/**"

jobs:
build-and-test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.x
- name: Restore
run: dotnet restore src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj
- name: Test
run: dotnet test src/__Libraries/StellaOps.Cryptography.Plugin.SmRemote.Tests/StellaOps.Cryptography.Plugin.SmRemote.Tests.csproj --no-build --verbosity normal
- name: Publish service
run: dotnet publish src/SmRemote/StellaOps.SmRemote.Service/StellaOps.SmRemote.Service.csproj -c Release -o out/sm-remote
3  .gitignore  vendored
@@ -64,3 +64,6 @@ coverage/
local-nugets/
local-nuget/
src/Sdk/StellaOps.Sdk.Generator/tools/jdk-21.0.1+12
+ .nuget-cache/
+ .nuget-packages2/
+ .nuget-temp/
@@ -1,52 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
<metadata>
<id>Microsoft.Extensions.Logging.Abstractions</id>
<version>10.0.0-rc.2.25502.107</version>
<authors>Microsoft</authors>
<license type="expression">MIT</license>
<licenseUrl>https://licenses.nuget.org/MIT</licenseUrl>
<icon>Icon.png</icon>
<readme>PACKAGE.md</readme>
<projectUrl>https://dot.net/</projectUrl>
<description>Logging abstractions for Microsoft.Extensions.Logging.

Commonly Used Types:
Microsoft.Extensions.Logging.ILogger
Microsoft.Extensions.Logging.ILoggerFactory
Microsoft.Extensions.Logging.ILogger<TCategoryName>
Microsoft.Extensions.Logging.LogLevel
Microsoft.Extensions.Logging.Logger<T>
Microsoft.Extensions.Logging.LoggerMessage
Microsoft.Extensions.Logging.Abstractions.NullLogger</description>
<releaseNotes>https://go.microsoft.com/fwlink/?LinkID=799421</releaseNotes>
<copyright>© Microsoft Corporation. All rights reserved.</copyright>
<serviceable>true</serviceable>
<repository type="git" url="https://github.com/dotnet/dotnet" commit="89c8f6a112d37d2ea8b77821e56d170a1bccdc5a" />
<dependencies>
<group targetFramework=".NETFramework4.6.2">
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
</group>
<group targetFramework="net8.0">
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
</group>
<group targetFramework="net9.0">
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
</group>
<group targetFramework="net10.0">
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
</group>
<group targetFramework=".NETStandard2.0">
<dependency id="Microsoft.Extensions.DependencyInjection.Abstractions" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Diagnostics.DiagnosticSource" version="10.0.0-rc.2.25502.107" exclude="Build,Analyzers" />
<dependency id="System.Buffers" version="4.6.1" exclude="Build,Analyzers" />
<dependency id="System.Memory" version="4.6.3" exclude="Build,Analyzers" />
</group>
</dependencies>
</metadata>
</package>

Binary file not shown.
@@ -165,3 +165,69 @@ rules:
in:
const: header
required: [name, in]

+ # --- Deprecation Metadata Rules (per APIGOV-63-001) ---

+ stella-deprecated-has-metadata:
+ description: "Deprecated operations must have x-deprecation extension with required fields"
+ message: "Add x-deprecation metadata (deprecatedAt, sunsetAt, successorPath, reason) to deprecated operations"
+ given: "$.paths[*][*][?(@.deprecated == true)]"
+ severity: error
+ then:
+ field: x-deprecation
+ function: schema
+ functionOptions:
+ schema:
+ type: object
+ required:
+ - deprecatedAt
+ - sunsetAt
+ - successorPath
+ - reason
+ properties:
+ deprecatedAt:
+ type: string
+ format: date-time
+ sunsetAt:
+ type: string
+ format: date-time
+ successorPath:
+ type: string
+ successorOperationId:
+ type: string
+ reason:
+ type: string
+ migrationGuide:
+ type: string
+ format: uri
+ notificationChannels:
+ type: array
+ items:
+ type: string
+ enum: [slack, teams, email, webhook]

+ stella-deprecated-sunset-future:
+ description: "Sunset dates should be in the future (warn if sunset already passed)"
+ message: "x-deprecation.sunsetAt should be a future date"
+ given: "$.paths[*][*].x-deprecation.sunsetAt"
+ severity: warn
+ then:
+ function: truthy

+ stella-deprecated-migration-guide:
+ description: "Deprecated operations should include a migration guide URL"
+ message: "Consider adding x-deprecation.migrationGuide for consumer guidance"
+ given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
+ severity: hint
+ then:
+ field: migrationGuide
+ function: truthy

+ stella-deprecated-notification-channels:
+ description: "Deprecated operations should specify notification channels"
+ message: "Add x-deprecation.notificationChannels to enable deprecation notifications"
+ given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
+ severity: hint
+ then:
+ field: notificationChannels
+ function: truthy
@@ -58,8 +58,8 @@ When you are told you are working in a particular module or directory, assume yo

* **Runtime**: .NET 10 (`net10.0`) with latest C# preview features. Microsoft.* dependencies should target the closest compatible versions.
* **Frontend**: Angular v17 for the UI.
- * **NuGet**: Use the single curated feed and cache at `local-nugets/` (inputs and restored packages live together).
+ * **NuGet**: Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org). Packages restore to the global NuGet cache.
- * **Data**: MongoDB as canonical store and for job/export state. Use a MongoDB driver version ≥ 3.0.
+ * **Data**: PostgreSQL as canonical store and for job/export state. Use a PostgreSQL driver version ≥ 3.0.
* **Observability**: Structured logs, counters, and (optional) OpenTelemetry traces.
* **Ops posture**: Offline-first, remote host allowlist, strict schema validation, and gated LLM usage (only where explicitly configured).

@@ -126,7 +126,7 @@ It ships as containerised building blocks; each module owns a clear boundary and
| Scanner | `src/Scanner/StellaOps.Scanner.WebService`<br>`src/Scanner/StellaOps.Scanner.Worker`<br>`src/Scanner/__Libraries/StellaOps.Scanner.*` | `docs/modules/scanner/architecture.md` |
| Scheduler | `src/Scheduler/StellaOps.Scheduler.WebService`<br>`src/Scheduler/StellaOps.Scheduler.Worker` | `docs/modules/scheduler/architecture.md` |
| CLI | `src/Cli/StellaOps.Cli`<br>`src/Cli/StellaOps.Cli.Core`<br>`src/Cli/StellaOps.Cli.Plugins.*` | `docs/modules/cli/architecture.md` |
- | UI / Console | `src/UI/StellaOps.UI` | `docs/modules/ui/architecture.md` |
+ | UI / Console | `src/Web/StellaOps.Web` | `docs/modules/ui/architecture.md` |
| Notify | `src/Notify/StellaOps.Notify.WebService`<br>`src/Notify/StellaOps.Notify.Worker` | `docs/modules/notify/architecture.md` |
| Export Center | `src/ExportCenter/StellaOps.ExportCenter.WebService`<br>`src/ExportCenter/StellaOps.ExportCenter.Worker` | `docs/modules/export-center/architecture.md` |
| Registry Token Service | `src/Registry/StellaOps.Registry.TokenService`<br>`src/Registry/__Tests/StellaOps.Registry.TokenService.Tests` | `docs/modules/registry/architecture.md` |
18  CLAUDE.md
@@ -41,7 +41,7 @@ dotnet test --filter "FullyQualifiedName~TestMethodName"
dotnet test src/StellaOps.sln --verbosity normal
```

- **Note:** Tests use Mongo2Go which requires OpenSSL 1.1 on Linux. Run `scripts/enable-openssl11-shim.sh` before testing if needed.
+ **Note:** Integration tests use Testcontainers for PostgreSQL. Ensure Docker is running before executing tests.

## Linting and Validation

@@ -60,11 +60,11 @@ helm lint deploy/helm/stellaops

### Technology Stack
- **Runtime:** .NET 10 (`net10.0`) with latest C# preview features
- - **Frontend:** Angular v17 (in `src/UI/StellaOps.UI`)
+ - **Frontend:** Angular v17 (in `src/Web/StellaOps.Web`)
- - **Database:** MongoDB (driver version ≥ 3.0)
+ - **Database:** PostgreSQL (≥16) with per-module schema isolation; see `docs/db/` for specification
- - **Testing:** xUnit with Mongo2Go, Moq, Microsoft.AspNetCore.Mvc.Testing
+ - **Testing:** xUnit with Testcontainers (PostgreSQL), Moq, Microsoft.AspNetCore.Mvc.Testing
- **Observability:** Structured logging, OpenTelemetry traces
- - **NuGet:** Use the single curated feed and cache at `local-nugets/`
+ - **NuGet:** Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org)

### Module Structure

@@ -89,7 +89,7 @@ The codebase follows a monorepo pattern with modules under `src/`:
- **Libraries:** `src/<Module>/__Libraries/StellaOps.<Module>.*`
- **Tests:** `src/<Module>/__Tests/StellaOps.<Module>.*.Tests/`
- **Plugins:** Follow naming `StellaOps.<Module>.Connector.*` or `StellaOps.<Module>.Plugin.*`
- - **Shared test infrastructure:** `StellaOps.Concelier.Testing` provides MongoDB fixtures
+ - **Shared test infrastructure:** `StellaOps.Concelier.Testing` and `StellaOps.Infrastructure.Postgres.Testing` provide PostgreSQL fixtures

### Naming Conventions

@@ -127,7 +127,7 @@ The codebase follows a monorepo pattern with modules under `src/`:

- Module tests: `StellaOps.<Module>.<Component>.Tests`
- Shared fixtures/harnesses: `StellaOps.<Module>.Testing`
- - Tests use xUnit, Mongo2Go for MongoDB integration tests
+ - Tests use xUnit, Testcontainers for PostgreSQL integration tests

### Documentation Updates

@@ -200,6 +200,8 @@ Before coding, confirm required docs are read:

- **Architecture overview:** `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- **Module dossiers:** `docs/modules/<module>/architecture.md`
+ - **Database specification:** `docs/db/SPECIFICATION.md`
+ - **PostgreSQL operations:** `docs/operations/postgresql-guide.md`
- **API/CLI reference:** `docs/09_API_CLI_REFERENCE.md`
- **Offline operation:** `docs/24_OFFLINE_KIT.md`
- **Quickstart:** `docs/10_CONCELIER_CLI_QUICKSTART.md`
@@ -216,5 +218,5 @@ Workflows are in `.gitea/workflows/`. Key workflows:
## Environment Variables

- `STELLAOPS_BACKEND_URL` - Backend API URL for CLI
- - `STELLAOPS_TEST_MONGO_URI` - MongoDB connection string for integration tests
+ - `STELLAOPS_TEST_POSTGRES_CONNECTION` - PostgreSQL connection string for integration tests
- `StellaOpsEnableCryptoPro` - Enable GOST crypto support (set to `true` in build)
@@ -1,23 +1,90 @@
<Project>

<PropertyGroup>

<StellaOpsRepoRoot Condition="'$(StellaOpsRepoRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)'))</StellaOpsRepoRoot>
- <StellaOpsLocalNuGetSource Condition="'$(StellaOpsLocalNuGetSource)' == ''">$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nugets/'))</StellaOpsLocalNuGetSource>
<StellaOpsDotNetPublicSource Condition="'$(StellaOpsDotNetPublicSource)' == ''">https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json</StellaOpsDotNetPublicSource>
- <StellaOpsNuGetOrgSource Condition="'$(StellaOpsNuGetOrgSource)' == ''">https://api.nuget.org/v3/index.json</StellaOpsNuGetOrgSource>
+ <RestoreConfigFile Condition="'$(RestoreConfigFile)' == ''">$([System.IO.Path]::Combine('$(StellaOpsRepoRoot)','NuGet.config'))</RestoreConfigFile>
- <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource)</_StellaOpsDefaultRestoreSources>
- <_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources)</_StellaOpsOriginalRestoreSources>
- <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(_StellaOpsDefaultRestoreSources)</RestoreSources>
- <RestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' != ''">$(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources)</RestoreSources>
</PropertyGroup>

<PropertyGroup>
<StellaOpsEnableCryptoPro Condition="'$(StellaOpsEnableCryptoPro)' == ''">false</StellaOpsEnableCryptoPro>
+ <NoWarn>$(NoWarn);NU1608;NU1605;NU1202</NoWarn>
+ <WarningsNotAsErrors>$(WarningsNotAsErrors);NU1608;NU1605;NU1202</WarningsNotAsErrors>
+ <RestoreNoWarn>$(RestoreNoWarn);NU1608;NU1605;NU1202</RestoreNoWarn>
+ <RestoreWarningsAsErrors></RestoreWarningsAsErrors>
+ <RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
+ <RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
+ <RestoreFallbackFolders>clear</RestoreFallbackFolders>
+ <RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes>
+ <RestoreAdditionalProjectFallbackFolders>clear</RestoreAdditionalProjectFallbackFolders>
+ <RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes>
+ <RestoreAdditionalFallbackFolders>clear</RestoreAdditionalFallbackFolders>
+ <RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes>
+ <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
+ </PropertyGroup>

+ <PropertyGroup>
+ <AssetTargetFallback>$(AssetTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</AssetTargetFallback>
</PropertyGroup>

<PropertyGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">
<DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_PRO</DefineConstants>
</PropertyGroup>

+ <ItemGroup>
+ <PackageReference Update="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
+ <PackageReference Update="Microsoft.Extensions.Options" Version="10.0.0" />
+ <PackageReference Update="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
+ <PackageReference Update="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
+ <PackageReference Update="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
+ </ItemGroup>

+ <!-- .NET 10 compatible package version overrides -->
+ <ItemGroup>
+ <!-- Cryptography packages - updated for net10.0 compatibility -->
+ <PackageReference Update="BouncyCastle.Cryptography" Version="2.6.2" />
+ <PackageReference Update="Pkcs11Interop" Version="5.1.2" />

+ <!-- Resilience - Polly 8.x for .NET 6+ -->
+ <PackageReference Update="Polly" Version="8.5.2" />
+ <PackageReference Update="Polly.Core" Version="8.5.2" />

+ <!-- YAML - updated for net10.0 -->
+ <PackageReference Update="YamlDotNet" Version="16.3.0" />

+ <!-- JSON Schema packages -->
+ <PackageReference Update="JsonSchema.Net" Version="7.3.2" />
+ <PackageReference Update="Json.More.Net" Version="2.1.0" />
+ <PackageReference Update="JsonPointer.Net" Version="5.1.0" />

+ <!-- HTML parsing -->
+ <PackageReference Update="AngleSharp" Version="1.2.0" />

+ <!-- Scheduling -->
+ <PackageReference Update="Cronos" Version="0.9.0" />

+ <!-- Testing - xUnit 2.9.3 for .NET 10 -->
+ <PackageReference Update="xunit" Version="2.9.3" />
+ <PackageReference Update="xunit.assert" Version="2.9.3" />
+ <PackageReference Update="xunit.extensibility.core" Version="2.9.3" />
+ <PackageReference Update="xunit.extensibility.execution" Version="2.9.3" />
+ <PackageReference Update="xunit.runner.visualstudio" Version="3.0.1" />
+ <PackageReference Update="xunit.abstractions" Version="2.0.3" />

+ <!-- JSON -->
+ <PackageReference Update="Newtonsoft.Json" Version="13.0.4" />

+ <!-- Annotations -->
+ <PackageReference Update="JetBrains.Annotations" Version="2024.3.0" />

+ <!-- Async interfaces -->
+ <PackageReference Update="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />

+ <!-- HTTP Resilience integration (replaces Http.Polly) -->
+ <PackageReference Update="Microsoft.Extensions.Http.Resilience" Version="10.0.0" />

+ <!-- Testing packages - aligned to 10.0.0 -->
+ <PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.0" />
+ </ItemGroup>

</Project>
@@ -1 +1,4 @@
/nowarn:CA2022
+/p:DisableWorkloadResolver=true
+/p:RestoreAdditionalProjectFallbackFolders=
+/p:RestoreFallbackFolders=
@@ -1,8 +1,10 @@
# Third-Party Notices

-This project bundles or links against the following third-party components in the scanner Ruby analyzer implementation:
+This project bundles or links against the following third-party components:

-- **tree-sitter** (MIT License, © 2018 Max Brunsfeld)
+- **tree-sitter** (MIT License, (c) 2018 Max Brunsfeld)
-- **tree-sitter-ruby** (MIT License, © 2016 Rob Rix)
+- **tree-sitter-ruby** (MIT License, (c) 2016 Rob Rix)
+- **GostCryptography (fork)** (MIT License, (c) 2014-2024 AlexMAS) — vendored under `third_party/forks/AlexMAS.GostCryptography` for GOST support in `StellaOps.Cryptography.Plugin.CryptoPro` and related sovereign crypto plug-ins.
+- **CryptoPro CSP integration** (Commercial, customer-provided) — StellaOps ships only integration code; CryptoPro CSP binaries and licenses are not redistributed and must be supplied by the operator per vendor EULA.

License texts are available under third-party-licenses/.
17 NuGet.config
@@ -2,18 +2,9 @@
<configuration>
  <packageSources>
    <clear />
-    <add key="local" value="local-nugets" />
+    <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
-    <add key="ablera-mirror" value="https://mirrors.ablera.dev/nuget/nuget-mirror/v3/index.json" />
  </packageSources>
-  <config>
+  <fallbackPackageFolders>
-    <add key="globalPackagesFolder" value="local-nugets/packages" />
+    <clear />
-  </config>
+  </fallbackPackageFolders>
-  <packageSourceMapping>
-    <packageSource key="local">
-      <package pattern="*" />
-    </packageSource>
-    <packageSource key="ablera-mirror">
-      <package pattern="*" />
-    </packageSource>
-  </packageSourceMapping>
</configuration>
10 README.md
@@ -1,14 +1,20 @@
# StellaOps Concelier & CLI

+[](https://git.stella-ops.org/stellaops/feedser/actions/workflows/build-test-deploy.yml)
+[](https://git.stella-ops.org/stellaops/feedser/actions/workflows/build-test-deploy.yml)
+[](docs/testing/ci-quality-gates.md)
+[](docs/testing/ci-quality-gates.md)
+[](docs/testing/mutation-testing-baselines.md)

This repository hosts the StellaOps Concelier service, its plug-in ecosystem, and the
first-party CLI (`stellaops-cli`). Concelier ingests vulnerability advisories from
-authoritative sources, stores them in MongoDB, and exports deterministic JSON and
+authoritative sources, stores them in PostgreSQL, and exports deterministic JSON and
Trivy DB artefacts. The CLI drives scanner distribution, scan execution, and job
control against the Concelier API.

## Quickstart

-1. Prepare a MongoDB instance and (optionally) install `trivy-db`/`oras`.
+1. Prepare a PostgreSQL instance and (optionally) install `trivy-db`/`oras`.
2. Copy `etc/concelier.yaml.sample` to `etc/concelier.yaml` and update the storage + telemetry
   settings.
3. Copy `etc/authority.yaml.sample` to `etc/authority.yaml`, review the issuer, token
@@ -1,19 +1,17 @@
<Solution>
  <Folder Name="/src/" />
-  <Folder Name="/src/Gateway/">
-    <Project Path="src/Gateway/StellaOps.Gateway.WebService/StellaOps.Gateway.WebService.csproj" />
-  </Folder>
  <Folder Name="/src/__Libraries/">
    <Project Path="src/__Libraries/StellaOps.Microservice.SourceGen/StellaOps.Microservice.SourceGen.csproj" />
    <Project Path="src/__Libraries/StellaOps.Microservice/StellaOps.Microservice.csproj" />
    <Project Path="src/__Libraries/StellaOps.Router.Common/StellaOps.Router.Common.csproj" />
    <Project Path="src/__Libraries/StellaOps.Router.Config/StellaOps.Router.Config.csproj" />
+    <Project Path="src/__Libraries/StellaOps.Router.Gateway/StellaOps.Router.Gateway.csproj" />
    <Project Path="src/__Libraries/StellaOps.Router.Transport.InMemory/StellaOps.Router.Transport.InMemory.csproj" />
  </Folder>
  <Folder Name="/tests/">
-    <Project Path="tests/StellaOps.Gateway.WebService.Tests/StellaOps.Gateway.WebService.Tests.csproj" />
    <Project Path="tests/StellaOps.Microservice.Tests/StellaOps.Microservice.Tests.csproj" />
    <Project Path="tests/StellaOps.Router.Common.Tests/StellaOps.Router.Common.Tests.csproj" />
+    <Project Path="tests/StellaOps.Router.Gateway.Tests/StellaOps.Router.Gateway.Tests.csproj" />
    <Project Path="tests/StellaOps.Router.Transport.InMemory.Tests/StellaOps.Router.Transport.InMemory.Tests.csproj" />
  </Folder>
</Solution>
128 bench/README.md
@@ -1,7 +1,7 @@
# Stella Ops Bench Repository

-> **Status:** Draft — aligns with `docs/benchmarks/vex-evidence-playbook.md` (Sprint 401).
+> **Status:** Active · Last updated: 2025-12-13
-> **Purpose:** Host reproducible VEX decisions and comparison data that prove Stella Ops’ signal quality vs. baseline scanners.
+> **Purpose:** Host reproducible VEX decisions, reachability evidence, and comparison data proving Stella Ops' signal quality vs. baseline scanners.

## Layout

@@ -11,20 +11,122 @@ bench/
findings/                    # per CVE/product bundles
  CVE-YYYY-NNNNN/
    evidence/
-      reachability.json
+      reachability.json      # richgraph-v1 excerpt
-      sbom.cdx.json
+      sbom.cdx.json          # CycloneDX SBOM
-      decision.openvex.json
+      decision.openvex.json  # OpenVEX decision
-      decision.dsse.json
+      decision.dsse.json     # DSSE envelope
-      rekor.txt
+      rekor.txt              # Rekor log index + inclusion proof
-      metadata.json
+      metadata.json          # finding metadata (purl, CVE, version)
tools/
-  verify.sh                  # DSSE + Rekor verifier
+  verify.sh                  # DSSE + Rekor verifier (online)
  verify.py                  # offline verifier
  compare.py                 # baseline comparison script
-  replay.sh                  # runs reachability replay manifolds
+  replay.sh                  # runs reachability replay manifests
results/
-  summary.csv
+  summary.csv                # aggregated metrics
  runs/<date>/...            # raw outputs + replay manifests
+reachability-benchmark/      # reachability benchmark with JDK fixtures
```

-Refer to `docs/benchmarks/vex-evidence-playbook.md` for artifact contracts and automation tasks. The `bench/` tree will be populated once `BENCH-AUTO-401-019` and `DOCS-VEX-401-012` land.
+## Related Documentation
+
+| Document | Purpose |
+|----------|---------|
+| [VEX Evidence Playbook](../docs/benchmarks/vex-evidence-playbook.md) | Proof bundle schema, justification catalog, verification workflow |
+| [Hybrid Attestation](../docs/reachability/hybrid-attestation.md) | Graph-level and edge-bundle DSSE decisions |
+| [Function-Level Evidence](../docs/reachability/function-level-evidence.md) | Cross-module evidence chain guide |
+| [Deterministic Replay](../docs/replay/DETERMINISTIC_REPLAY.md) | Replay manifest specification |
+
+## Verification Workflows
+
+### Quick Verification (Online)
+
+```bash
+# Verify a VEX proof bundle with DSSE and Rekor
+./tools/verify.sh findings/CVE-2021-44228/decision.dsse.json
+
+# Output:
+# ✓ DSSE signature valid
+# ✓ Rekor inclusion verified (log index: 12345678)
+# ✓ Evidence hashes match
+# ✓ Justification catalog membership confirmed
+```
+
+### Offline Verification
+
+```bash
+# Verify without network access
+python tools/verify.py \
+  --bundle findings/CVE-2021-44228/decision.dsse.json \
+  --cas-root ./findings/CVE-2021-44228/evidence/ \
+  --catalog ../docs/benchmarks/vex-justifications.catalog.json
+
+# Or use the VEX proof bundle verifier
+python ../scripts/vex/verify_proof_bundle.py \
+  --bundle ../tests/Vex/ProofBundles/sample-proof-bundle.json \
+  --cas-root ../tests/Vex/ProofBundles/cas/
+```
+
+### Reachability Graph Verification
+
+```bash
+# Verify graph DSSE
+stella graph verify --hash blake3:a1b2c3d4...
+
+# Verify with edge bundles
+stella graph verify --hash blake3:a1b2c3d4... --include-bundles
+
+# Offline with local CAS
+stella graph verify --hash blake3:a1b2c3d4... --cas-root ./offline-cas/
+```
+
+### Baseline Comparison
+
+```bash
+# Compare Stella Ops findings against baseline scanners
+python tools/compare.py \
+  --stellaops results/runs/2025-12-13/findings.json \
+  --baseline results/baselines/trivy-latest.json \
+  --output results/comparison-2025-12-13.csv
+
+# Metrics generated:
+# - True positives (reachability-confirmed)
+# - False positives (unreachable code paths)
+# - MTTD (mean time to detect)
+# - Reproducibility score
+```
+
+## Artifact Contracts
+
+All bench artifacts must comply with:
+
+1. **VEX Proof Bundle Schema** (`docs/benchmarks/vex-evidence-playbook.schema.json`)
+   - BLAKE3-256 primary hash, SHA-256 secondary
+   - Canonical JSON with sorted keys
+   - DSSE envelope with Rekor-ready digest
+
+2. **Justification Catalog** (`docs/benchmarks/vex-justifications.catalog.json`)
+   - VEX1-VEX10 justification codes
+   - Required evidence types per justification
+   - Expiry and re-evaluation rules
+
+3. **Reachability Graph** (`docs/contracts/richgraph-v1.md`)
+   - BLAKE3 graph_hash for content addressing
+   - Deterministic node/edge ordering
+   - SymbolID/EdgeID format compliance
+
+## CI Integration
+
+The bench directory is validated by:
+
+- `.gitea/workflows/vex-proof-bundles.yml` - Verifies all proof bundles
+- `.gitea/workflows/bench-determinism.yml` - Runs determinism benchmarks
+- `.gitea/workflows/hybrid-attestation.yml` - Verifies graph/edge-bundle fixtures
+
+## Contributing
+
+1. Add new findings under `findings/CVE-YYYY-NNNNN/`
+2. Include all required evidence artifacts
+3. Generate DSSE envelope and Rekor proof
+4. Update `results/summary.csv`
+5. Run verification: `./tools/verify.sh findings/CVE-YYYY-NNNNN/decision.dsse.json`
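A note on the offline path: `tools/verify.py` itself is not shown in this diff, but the fixture format above is enough to sketch what an offline check has to do: decode the DSSE payload, recompute the evidence digest, and compare it with the sha256 recorded in the OpenVEX `impact_statement`. The sketch below is illustrative only; which evidence file the digest covers and the two-argument CLI shape are assumptions, and signature verification is skipped because the fixtures still carry placeholder signatures.

```python
#!/usr/bin/env python3
"""Minimal offline check for a bench finding (illustrative sketch only;
the real logic lives in tools/verify.py and scripts/vex/verify_proof_bundle.py)."""
import base64
import hashlib
import json
import sys
from pathlib import Path


def check_bundle(dsse_path: Path, evidence_path: Path) -> None:
    # Decode the DSSE envelope payload back into the OpenVEX document.
    envelope = json.loads(dsse_path.read_text())
    openvex = json.loads(base64.b64decode(envelope["payload"]))

    # The bench fixtures embed the evidence digest in the impact_statement
    # as "Evidence hash: sha256:<hex>"; recompute and compare.
    stmt = openvex["statements"][0]
    expected = stmt["impact_statement"].split("sha256:")[1].strip()
    actual = hashlib.sha256(evidence_path.read_bytes()).hexdigest()

    print(f"status        : {stmt['status']}")
    print(f"expected hash : {expected}")
    print(f"actual hash   : {actual}")
    if expected != actual:
        sys.exit("evidence hash mismatch")
    # Signature verification is deliberately omitted here; the fixtures still
    # carry PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING.


if __name__ == "__main__":
    check_bundle(Path(sys.argv[1]), Path(sys.argv[2]))
```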
56 bench/baselines/ttfs-baseline.json
@@ -0,0 +1,56 @@
{
  "$schema": "https://json-schema.org/draft-07/schema#",
  "title": "TTFS Baseline",
  "description": "Time-to-First-Signal baseline metrics for regression detection",
  "version": "1.0.0",
  "created_at": "2025-12-16T00:00:00Z",
  "updated_at": "2025-12-16T00:00:00Z",
  "metrics": {
    "ttfs_ms": {
      "p50": 1500,
      "p95": 4000,
      "p99": 6000,
      "min": 500,
      "max": 10000,
      "mean": 2000,
      "sample_count": 500
    },
    "by_scan_type": {
      "image_scan": {
        "p50": 2500,
        "p95": 5000,
        "p99": 7500,
        "description": "Container image scanning TTFS baseline"
      },
      "filesystem_scan": {
        "p50": 1000,
        "p95": 2000,
        "p99": 3000,
        "description": "Filesystem/directory scanning TTFS baseline"
      },
      "sbom_scan": {
        "p50": 400,
        "p95": 800,
        "p99": 1200,
        "description": "SBOM-only scanning TTFS baseline"
      }
    }
  },
  "thresholds": {
    "p50_max_ms": 2000,
    "p95_max_ms": 5000,
    "p99_max_ms": 8000,
    "max_regression_pct": 10,
    "description": "Thresholds that will trigger CI gate failures"
  },
  "collection_info": {
    "test_environment": "ci-standard-runner",
    "runner_specs": {
      "cpu_cores": 4,
      "memory_gb": 8,
      "storage_type": "ssd"
    },
    "sample_corpus": "tests/reachability/corpus",
    "collection_window_days": 30
  }
}
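Since ttfs-baseline.json is meant to drive CI gate failures, a regression check only needs the `metrics.ttfs_ms` and `thresholds` blocks above. A minimal sketch follows; the shape of the measured input (a flat list of ttfs_ms samples passed on the command line) and the `gate()` entry point are assumptions, not an existing script.

```python
"""Sketch of a TTFS regression gate built on bench/baselines/ttfs-baseline.json."""
import json
import sys


def percentile(samples, pct):
    # Nearest-rank percentile; good enough for a gate sketch.
    ordered = sorted(samples)
    idx = max(0, min(len(ordered) - 1, round(pct / 100 * (len(ordered) - 1))))
    return ordered[idx]


def gate(baseline_path, samples_ms):
    baseline = json.load(open(baseline_path))
    thresholds = baseline["thresholds"]
    base_p95 = baseline["metrics"]["ttfs_ms"]["p95"]

    p95 = percentile(samples_ms, 95)
    regression_pct = (p95 - base_p95) / base_p95 * 100

    failures = []
    if p95 > thresholds["p95_max_ms"]:
        failures.append(f"p95 {p95:.0f} ms exceeds hard limit {thresholds['p95_max_ms']} ms")
    if regression_pct > thresholds["max_regression_pct"]:
        failures.append(
            f"p95 regressed {regression_pct:.1f}% vs baseline (limit {thresholds['max_regression_pct']}%)"
        )
    if failures:
        sys.exit("; ".join(failures))
    print(f"TTFS gate OK: p95={p95:.0f} ms ({regression_pct:+.1f}% vs baseline)")


if __name__ == "__main__":
    gate("bench/baselines/ttfs-baseline.json", [float(x) for x in sys.argv[1:]])
```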
10 bench/findings/CVE-2015-7547-reachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpiZTMwNDMzZTE4OGEyNTg4NTY0NDYzMzZkYmIxMDk1OWJmYjRhYjM5NzQzODBhOGVhMTI2NDZiZjI2ODdiZjlhIiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL2dsaWJjLUNWRS0yMDIzLTQ5MTEtbG9vbmV5LXR1bmFibGVzQDEuMC4wIn1dLCJzdGF0dXMiOiJhZmZlY3RlZCIsInZ1bG5lcmFiaWxpdHkiOnsiQGlkIjoiaHR0cHM6Ly9udmQubmlzdC5nb3YvdnVsbi9kZXRhaWwvQ1ZFLTIwMTUtNzU0NyIsIm5hbWUiOiJDVkUtMjAxNS03NTQ3In19XSwidGltZXN0YW1wIjoiMjAyNS0xMi0xNFQwMjoxMzozOFoiLCJ0b29saW5nIjoiU3RlbGxhT3BzL2JlbmNoLWF1dG9AMS4wLjAiLCJ2ZXJzaW9uIjoxfQ==",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}
25 bench/findings/CVE-2015-7547-reachable/decision.openvex.json
@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "action_statement": "Upgrade to patched version or apply mitigation.",
      "impact_statement": "Evidence hash: sha256:be30433e188a258856446336dbb10959bfb4ab3974380a8ea12646bf2687bf9a",
      "products": [
        {
          "@id": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0"
        }
      ],
      "status": "affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2015-7547",
        "name": "CVE-2015-7547"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}
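The `payload` field in decision.dsse.json above is simply the base64 encoding of this OpenVEX document. A small consistency check, sketched here against the CVE-2015-7547-reachable paths from this diff, keeps the two files from drifting (byte-for-byte equality is deliberately not assumed):

```python
"""Quick consistency check: decision.dsse.json's payload should decode to the
same content as decision.openvex.json (field-by-field comparison, since
whitespace and key ordering may differ). Illustrative sketch, not bench tooling."""
import base64
import json
from pathlib import Path

finding = Path("bench/findings/CVE-2015-7547-reachable")
envelope = json.loads((finding / "decision.dsse.json").read_text())
decoded = json.loads(base64.b64decode(envelope["payload"]))
openvex = json.loads((finding / "decision.openvex.json").read_text())

assert envelope["payloadType"] == "application/vnd.openvex+json"
assert decoded == openvex, "DSSE payload drifted from decision.openvex.json"
print("payload matches decision.openvex.json")
```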
@@ -0,0 +1,25 @@
{
  "case_id": "glibc-CVE-2023-4911-looney-tunables",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "glibc-CVE-2023-4911-looney-tunables",
    "paths": [
      [
        "sym://net:handler#read",
        "sym://glibc:glibc.c#entry",
        "sym://glibc:glibc.c#sink"
      ]
    ],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "reachable"
  },
  "paths": [
    [
      "sym://net:handler#read",
      "sym://glibc:glibc.c#entry",
      "sym://glibc:glibc.c#sink"
    ]
  ],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "reachable"
}
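Each reachability excerpt embeds both the observed `paths` and the benchmark `ground_truth`, so scoring a finding reduces to comparing the two path sets. The classification rule below (non-empty versus empty) is an assumed convention mirroring the reachable/unreachable variants in this diff, not an excerpt from `tools/compare.py`:

```python
"""Sketch: score a reachability excerpt against its embedded ground truth."""
import json
import sys


def score(excerpt_path):
    doc = json.load(open(excerpt_path))
    observed = bool(doc["paths"])                  # what the scanner reported
    expected = bool(doc["ground_truth"]["paths"])  # what the benchmark says
    if observed and expected:
        return "true_positive"
    if not observed and not expected:
        return "true_negative"
    return "false_positive" if observed else "false_negative"


if __name__ == "__main__":
    print(score(sys.argv[1]))
```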
@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "glibc-CVE-2023-4911-looney-tunables",
      "purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}
11 bench/findings/CVE-2015-7547-reachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "glibc-CVE-2023-4911-looney-tunables",
  "cve_id": "CVE-2015-7547",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
  "reachability_status": "reachable",
  "variant": "reachable"
}
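metadata.json and sbom.cdx.json both carry the finding's purl, which makes a cheap cross-check possible. Note that in the CVE-2015-7547 fixtures the purl and case_id reference glibc-CVE-2023-4911-looney-tunables while cve_id says CVE-2015-7547; the sketch below (a hypothetical helper, not part of the bench tooling) surfaces exactly that kind of drift:

```python
"""Sketch: confirm the SBOM component purl matches metadata.json, and flag
findings whose purl does not mention the CVE recorded in cve_id."""
import json
from pathlib import Path


def check(finding_dir):
    root = Path(finding_dir)
    sbom = json.loads((root / "sbom.cdx.json").read_text())
    meta = json.loads((root / "metadata.json").read_text())

    sbom_purl = sbom["components"][0]["purl"]
    if sbom_purl != meta["purl"]:
        print(f"{root.name}: purl mismatch ({sbom_purl} vs {meta['purl']})")
    if meta["cve_id"] not in meta["purl"]:
        print(f"{root.name}: purl {meta['purl']} does not reference {meta['cve_id']}")


if __name__ == "__main__":
    check("bench/findings/CVE-2015-7547-reachable")
```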
5 bench/findings/CVE-2015-7547-reachable/rekor.txt
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
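rekor.txt is plain key: value text, so listing the fixtures that still need a real Rekor submission is a one-line walk over `bench/findings/`. A sketch, assuming the directory layout used in this diff:

```python
"""Sketch: list findings whose rekor.txt is still a placeholder (log_index or
uuid equal to PENDING). The key: value format mirrors the file shown above."""
from pathlib import Path


def pending_findings(bench_root="bench/findings"):
    for rekor in sorted(Path(bench_root).glob("*/rekor.txt")):
        fields = {}
        for line in rekor.read_text().splitlines():
            if line.startswith("#") or ":" not in line:
                continue  # skip comments and malformed lines
            key, value = line.split(":", 1)
            fields[key.strip()] = value.strip()
        if fields.get("log_index") == "PENDING" or fields.get("uuid") == "PENDING":
            yield rekor.parent.name


if __name__ == "__main__":
    for name in pending_findings():
        print(name)
```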
10 bench/findings/CVE-2015-7547-unreachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpjNDJlYzAxNGE0MmQwZTNmYjQzZWQ0ZGRhZDg5NTM4MjFlNDQ0NTcxMTlkYTY2ZGRiNDFhMzVhODAxYTNiNzI3IiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9nbGliYy1DVkUtMjAyMy00OTExLWxvb25leS10dW5hYmxlc0AxLjAuMCJ9XSwic3RhdHVzIjoibm90X2FmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAxNS03NTQ3IiwibmFtZSI6IkNWRS0yMDE1LTc1NDcifX1dLCJ0aW1lc3RhbXAiOiIyMDI1LTEyLTE0VDAyOjEzOjM4WiIsInRvb2xpbmciOiJTdGVsbGFPcHMvYmVuY2gtYXV0b0AxLjAuMCIsInZlcnNpb24iOjF9",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}

@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "impact_statement": "Evidence hash: sha256:c42ec014a42d0e3fb43ed4ddad8953821e44457119da66ddb41a35a801a3b727",
      "justification": "vulnerable_code_not_present",
      "products": [
        {
          "@id": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0"
        }
      ],
      "status": "not_affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2015-7547",
        "name": "CVE-2015-7547"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}

@@ -0,0 +1,13 @@
{
  "case_id": "glibc-CVE-2023-4911-looney-tunables",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "glibc-CVE-2023-4911-looney-tunables",
    "paths": [],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "unreachable"
  },
  "paths": [],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "unreachable"
}

@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "glibc-CVE-2023-4911-looney-tunables",
      "purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}

11 bench/findings/CVE-2015-7547-unreachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "glibc-CVE-2023-4911-looney-tunables",
  "cve_id": "CVE-2015-7547",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
  "reachability_status": "unreachable",
  "variant": "unreachable"
}

5 bench/findings/CVE-2015-7547-unreachable/rekor.txt
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
10 bench/findings/CVE-2022-3602-reachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjowMTQzMWZmMWVlZTc5OWM2ZmFkZDU5M2E3ZWMxOGVlMDk0Zjk4MzE0MDk2M2RhNmNiZmQ0YjdmMDZiYTBmOTcwIiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL29wZW5zc2wtQ1ZFLTIwMjItMzYwMi14NTA5LW5hbWUtY29uc3RyYWludHNAMS4wLjAifV0sInN0YXR1cyI6ImFmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAyMi0zNjAyIiwibmFtZSI6IkNWRS0yMDIyLTM2MDIifX1dLCJ0aW1lc3RhbXAiOiIyMDI1LTEyLTE0VDAyOjEzOjM4WiIsInRvb2xpbmciOiJTdGVsbGFPcHMvYmVuY2gtYXV0b0AxLjAuMCIsInZlcnNpb24iOjF9",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}

25 bench/findings/CVE-2022-3602-reachable/decision.openvex.json
@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "action_statement": "Upgrade to patched version or apply mitigation.",
      "impact_statement": "Evidence hash: sha256:01431ff1eee799c6fadd593a7ec18ee094f983140963da6cbfd4b7f06ba0f970",
      "products": [
        {
          "@id": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0"
        }
      ],
      "status": "affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2022-3602",
        "name": "CVE-2022-3602"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}

@@ -0,0 +1,25 @@
{
  "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
    "paths": [
      [
        "sym://net:handler#read",
        "sym://openssl:openssl.c#entry",
        "sym://openssl:openssl.c#sink"
      ]
    ],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "reachable"
  },
  "paths": [
    [
      "sym://net:handler#read",
      "sym://openssl:openssl.c#entry",
      "sym://openssl:openssl.c#sink"
    ]
  ],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "reachable"
}

@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "openssl-CVE-2022-3602-x509-name-constraints",
      "purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}

11 bench/findings/CVE-2022-3602-reachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
  "cve_id": "CVE-2022-3602",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
  "reachability_status": "reachable",
  "variant": "reachable"
}

5 bench/findings/CVE-2022-3602-reachable/rekor.txt
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
10 bench/findings/CVE-2022-3602-unreachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpkOWJhZjRjNjQ3NDE4Nzc4NTUxYWZjNDM3NTJkZWY0NmQ0YWYyN2Q1MzEyMmU2YzQzNzVjMzUxMzU1YjEwYTMzIiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9vcGVuc3NsLUNWRS0yMDIyLTM2MDIteDUwOS1uYW1lLWNvbnN0cmFpbnRzQDEuMC4wIn1dLCJzdGF0dXMiOiJub3RfYWZmZWN0ZWQiLCJ2dWxuZXJhYmlsaXR5Ijp7IkBpZCI6Imh0dHBzOi8vbnZkLm5pc3QuZ292L3Z1bG4vZGV0YWlsL0NWRS0yMDIyLTM2MDIiLCJuYW1lIjoiQ1ZFLTIwMjItMzYwMiJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}

@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "impact_statement": "Evidence hash: sha256:d9baf4c647418778551afc43752def46d4af27d53122e6c4375c351355b10a33",
      "justification": "vulnerable_code_not_present",
      "products": [
        {
          "@id": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0"
        }
      ],
      "status": "not_affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2022-3602",
        "name": "CVE-2022-3602"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}

@@ -0,0 +1,13 @@
{
  "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
    "paths": [],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "unreachable"
  },
  "paths": [],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "unreachable"
}

@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "openssl-CVE-2022-3602-x509-name-constraints",
      "purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}

11 bench/findings/CVE-2022-3602-unreachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "openssl-CVE-2022-3602-x509-name-constraints",
  "cve_id": "CVE-2022-3602",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
  "reachability_status": "unreachable",
  "variant": "unreachable"
}

5 bench/findings/CVE-2022-3602-unreachable/rekor.txt
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
10 bench/findings/CVE-2023-38545-reachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpmMWMxZmRiZTk1YjMyNTNiMTNjYTZjNzMzZWMwM2FkYTNlYTg3MWU2NmI1ZGRlZGJiNmMxNGI5ZGM2N2IwNzQ4IiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL2N1cmwtQ1ZFLTIwMjMtMzg1NDUtc29ja3M1LWhlYXBAMS4wLjAifV0sInN0YXR1cyI6ImFmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAyMy0zODU0NSIsIm5hbWUiOiJDVkUtMjAyMy0zODU0NSJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}

@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "action_statement": "Upgrade to patched version or apply mitigation.",
      "impact_statement": "Evidence hash: sha256:f1c1fdbe95b3253b13ca6c733ec03ada3ea871e66b5ddedbb6c14b9dc67b0748",
      "products": [
        {
          "@id": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0"
        }
      ],
      "status": "affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2023-38545",
        "name": "CVE-2023-38545"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}

@@ -0,0 +1,25 @@
{
  "case_id": "curl-CVE-2023-38545-socks5-heap",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "curl-CVE-2023-38545-socks5-heap",
    "paths": [
      [
        "sym://net:handler#read",
        "sym://curl:curl.c#entry",
        "sym://curl:curl.c#sink"
      ]
    ],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "reachable"
  },
  "paths": [
    [
      "sym://net:handler#read",
      "sym://curl:curl.c#entry",
      "sym://curl:curl.c#sink"
    ]
  ],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "reachable"
}

@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "curl-CVE-2023-38545-socks5-heap",
      "purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}

11 bench/findings/CVE-2023-38545-reachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "curl-CVE-2023-38545-socks5-heap",
  "cve_id": "CVE-2023-38545",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
  "reachability_status": "reachable",
  "variant": "reachable"
}

5 bench/findings/CVE-2023-38545-reachable/rekor.txt
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
10 bench/findings/CVE-2023-38545-unreachable/decision.dsse.json
@@ -0,0 +1,10 @@
{
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjplNGIxOTk0ZTU5NDEwNTYyZjQwYWI0YTVmZTIzNjM4YzExZTU4MTdiYjcwMDM5M2VkOTlmMjBkM2M5ZWY5ZmEwIiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9jdXJsLUNWRS0yMDIzLTM4NTQ1LXNvY2tzNS1oZWFwQDEuMC4wIn1dLCJzdGF0dXMiOiJub3RfYWZmZWN0ZWQiLCJ2dWxuZXJhYmlsaXR5Ijp7IkBpZCI6Imh0dHBzOi8vbnZkLm5pc3QuZ292L3Z1bG4vZGV0YWlsL0NWRS0yMDIzLTM4NTQ1IiwibmFtZSI6IkNWRS0yMDIzLTM4NTQ1In19XSwidGltZXN0YW1wIjoiMjAyNS0xMi0xNFQwMjoxMzozOFoiLCJ0b29saW5nIjoiU3RlbGxhT3BzL2JlbmNoLWF1dG9AMS4wLjAiLCJ2ZXJzaW9uIjoxfQ==",
"payloadType": "application/vnd.openvex+json",
"signatures": [
{
"keyid": "stella.ops/bench-automation@v1",
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
}
]
}

@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "impact_statement": "Evidence hash: sha256:e4b1994e59410562f40ab4a5fe23638c11e5817bb700393ed99f20d3c9ef9fa0",
      "justification": "vulnerable_code_not_present",
      "products": [
        {
          "@id": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0"
        }
      ],
      "status": "not_affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-2023-38545",
        "name": "CVE-2023-38545"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}

@@ -0,0 +1,13 @@
{
  "case_id": "curl-CVE-2023-38545-socks5-heap",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "curl-CVE-2023-38545-socks5-heap",
    "paths": [],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "unreachable"
  },
  "paths": [],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "unreachable"
}

@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "curl-CVE-2023-38545-socks5-heap",
      "purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}

11 bench/findings/CVE-2023-38545-unreachable/metadata.json
@@ -0,0 +1,11 @@
{
  "case_id": "curl-CVE-2023-38545-socks5-heap",
  "cve_id": "CVE-2023-38545",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
  "reachability_status": "unreachable",
  "variant": "unreachable"
}
Some files were not shown because too many files have changed in this diff.