Compare commits
0987cd6ac8...main
39 Commits

| SHA1 |
|---|
| 394b57f6bf |
| 3a2100aa78 |
| 417ef83202 |
| 2170a58734 |
| 415eff1207 |
| b55d9fa68d |
| 5a480a3c2a |
| 4391f35d8a |
| b1f40945b7 |
| 41864227d2 |
| 8137503221 |
| 08dab053c0 |
| 7ce83270d0 |
| 505fe7a885 |
| 0cb5c9abfb |
| d59cc816c1 |
| 8c8f0c632d |
| 4344020dd1 |
| b058dbe031 |
| 3411e825cd |
| 9202cd7da8 |
| 00c41790f4 |
| 2e70c9fdb6 |
| d233fa3529 |
| e2e404e705 |
| 01f4943ab9 |
| 233873f620 |
| f1a39c4ce3 |
| 6e45066e37 |
| e00f6365da |
| 999e26a48e |
| d776e93b16 |
| 564df71bfb |
| e1f1bef4c1 |
| 3f3473ee3a |
| efaf3cb789 |
| ce5ec9c158 |
| ab22181e8b |
| 1995883476 |
@@ -27,7 +27,8 @@
       "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
       "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")",
       "Bash(rm:*)",
-      "Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")"
+      "Bash(if not exist \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\" mkdir \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\archived\")",
+      "Bash(del \"C:\\dev\\New folder\\git.stella-ops.org\\docs\\implplan\\SPRINT_0510_0001_0001_airgap.md\")"
     ],
     "deny": [],
     "ask": []
.config/dotnet-tools.json (new file)
@@ -0,0 +1,12 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "dotnet-stryker": {
      "version": "4.4.0",
      "commands": [
        "stryker"
      ]
    }
  }
}
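The tool manifest above is what the mutation-testing job later activates with `dotnet tool restore`. A minimal local sketch of using it, mirroring that job (the project path is taken from the mutation-testing step further down and is only an example):

    # Restore local tools declared in .config/dotnet-tools.json
    dotnet tool restore
    # Run Stryker against one library, as the CI mutation-testing job does
    cd src/Scanner/__Libraries/StellaOps.Scanner.Core
    dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core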
.gitattributes (vendored)
@@ -1,2 +1,5 @@
 # Ensure analyzer fixture assets keep LF endings for deterministic hashes
 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf
+
+# Ensure reachability sample assets keep LF endings for deterministic hashes
+tests/reachability/samples-public/** text eol=lf
.gitea/workflows/advisory-ai-release.yml (new file)
@@ -0,0 +1,70 @@
name: Advisory AI Feed Release

on:
  workflow_dispatch:
    inputs:
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/AdvisoryAI/feeds/**'
      - 'docs/samples/advisory-feeds/**'

jobs:
  package-feeds:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Fallback to dev key when secret is absent
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[warn] COSIGN_PRIVATE_KEY_B64 not set; using dev key for non-production"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          # Manual override
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package advisory feeds
        run: |
          chmod +x ops/deployment/advisory-ai/package-advisory-feeds.sh
          ops/deployment/advisory-ai/package-advisory-feeds.sh

      - name: Generate SBOM
        run: |
          # Install syft
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0

          # Generate SBOM for feed bundle
          syft dir:out/advisory-ai/feeds/stage \
            -o spdx-json=out/advisory-ai/feeds/advisory-feeds.sbom.json \
            --name advisory-feeds

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: advisory-feeds-${{ github.run_number }}
          path: |
            out/advisory-ai/feeds/advisory-feeds.tar.gz
            out/advisory-ai/feeds/advisory-feeds.manifest.json
            out/advisory-ai/feeds/advisory-feeds.manifest.dsse.json
            out/advisory-ai/feeds/advisory-feeds.sbom.json
            out/advisory-ai/feeds/provenance.json
          if-no-files-found: warn
          retention-days: 30
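The workflow only exports COSIGN_PRIVATE_KEY_B64, COSIGN_PASSWORD, and COSIGN_ALLOW_DEV_KEY; how package-advisory-feeds.sh turns them into the manifest signature artifacts is not part of this diff. A hedged sketch of the signing step it implies (key handling and output names are assumptions, not the script's actual contents):

    # Assumed behaviour of the packaging script, not shown in this diff:
    # decode the CI-provided key; otherwise the script presumably falls back to
    # a bundled dev key when COSIGN_ALLOW_DEV_KEY=1.
    if [ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]; then
      echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > cosign.key
    fi
    # COSIGN_PASSWORD is read from the environment; --yes skips interactive prompts.
    cosign sign-blob --yes \
      --key cosign.key \
      --output-signature out/advisory-ai/feeds/advisory-feeds.manifest.sig \
      out/advisory-ai/feeds/advisory-feeds.manifest.json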
.gitea/workflows/aoc-backfill-release.yml (new file)
@@ -0,0 +1,83 @@
name: AOC Backfill Release

on:
  workflow_dispatch:
    inputs:
      dataset_hash:
        description: 'Dataset hash from dev rehearsal (leave empty for dev mode)'
        required: false
        default: ''
      allow_dev_key:
        description: 'Allow dev key for testing (1=yes)'
        required: false
        default: '0'

jobs:
  package-backfill:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
      COSIGN_PASSWORD: ${{ secrets.COSIGN_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3
        with:
          cosign-release: 'v2.6.0'

      - name: Restore AOC CLI
        run: dotnet restore src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ]; then
            echo "[info] No production key; using dev key"
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
          if [ "${{ github.event.inputs.allow_dev_key }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Package AOC backfill release
        run: |
          chmod +x ops/devops/aoc/package-backfill-release.sh
          DATASET_HASH="${{ github.event.inputs.dataset_hash }}" \
          ops/devops/aoc/package-backfill-release.sh
        env:
          DATASET_HASH: ${{ github.event.inputs.dataset_hash }}

      - name: Generate SBOM with syft
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin v1.0.0
          syft dir:out/aoc/cli \
            -o spdx-json=out/aoc/aoc-backfill-runner.sbom.json \
            --name aoc-backfill-runner || true

      - name: Verify checksums
        run: |
          cd out/aoc
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: aoc-backfill-release-${{ github.run_number }}
          path: |
            out/aoc/aoc-backfill-runner.tar.gz
            out/aoc/aoc-backfill-runner.manifest.json
            out/aoc/aoc-backfill-runner.sbom.json
            out/aoc/aoc-backfill-runner.provenance.json
            out/aoc/aoc-backfill-runner.dsse.json
            out/aoc/SHA256SUMS
          if-no-files-found: warn
          retention-days: 30
@@ -56,10 +56,41 @@ jobs:
          dotnet build src/Authority/StellaOps.Authority.Ingestion/StellaOps.Authority.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true
          dotnet build src/Excititor/StellaOps.Excititor.Ingestion/StellaOps.Excititor.Ingestion.csproj -c Release /p:RunAnalyzers=true /p:TreatWarningsAsErrors=true

-      - name: Run analyzer tests
+      - name: Run analyzer tests with coverage
         run: |
           mkdir -p $ARTIFACT_DIR
-          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release --logger "trx;LogFileName=aoc-tests.trx" --results-directory $ARTIFACT_DIR
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Analyzers.Tests/StellaOps.Aoc.Analyzers.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-analyzers-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC library tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-lib-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Run AOC CLI tests with coverage
+        run: |
+          dotnet test src/Aoc/__Tests/StellaOps.Aoc.Cli.Tests/StellaOps.Aoc.Cli.Tests.csproj -c Release \
+            --settings src/Aoc/aoc.runsettings \
+            --collect:"XPlat Code Coverage" \
+            --logger "trx;LogFileName=aoc-cli-tests.trx" \
+            --results-directory $ARTIFACT_DIR
+
+      - name: Generate coverage report
+        run: |
+          dotnet tool install --global dotnet-reportgenerator-globaltool || true
+          reportgenerator \
+            -reports:"$ARTIFACT_DIR/**/coverage.cobertura.xml" \
+            -targetdir:"$ARTIFACT_DIR/coverage-report" \
+            -reporttypes:"Html;Cobertura;TextSummary" || true
+          if [ -f "$ARTIFACT_DIR/coverage-report/Summary.txt" ]; then
+            cat "$ARTIFACT_DIR/coverage-report/Summary.txt"
+          fi
+
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
@@ -96,13 +127,37 @@ jobs:
       - name: Run AOC verify
         env:
           STAGING_MONGO_URI: ${{ secrets.STAGING_MONGO_URI || vars.STAGING_MONGO_URI }}
+          STAGING_POSTGRES_URI: ${{ secrets.STAGING_POSTGRES_URI || vars.STAGING_POSTGRES_URI }}
         run: |
-          if [ -z "${STAGING_MONGO_URI:-}" ]; then
-            echo "::warning::STAGING_MONGO_URI not set; skipping aoc verify"
+          mkdir -p $ARTIFACT_DIR
+
+          # Prefer PostgreSQL, fall back to MongoDB (legacy)
+          if [ -n "${STAGING_POSTGRES_URI:-}" ]; then
+            echo "Using PostgreSQL for AOC verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "$STAGING_POSTGRES_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          elif [ -n "${STAGING_MONGO_URI:-}" ]; then
+            echo "Using MongoDB for AOC verification (deprecated)"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --mongo "$STAGING_MONGO_URI" \
+              --output "$ARTIFACT_DIR/aoc-verify.json" \
+              --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" \
+              --verbose || VERIFY_EXIT=$?
+          else
+            echo "::warning::Neither STAGING_POSTGRES_URI nor STAGING_MONGO_URI set; running dry-run verification"
+            dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
+              --since "$AOC_VERIFY_SINCE" \
+              --postgres "placeholder" \
+              --dry-run \
+              --verbose
             exit 0
           fi
-          mkdir -p $ARTIFACT_DIR
-          dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify --since "$AOC_VERIFY_SINCE" --mongo "$STAGING_MONGO_URI" --output "$ARTIFACT_DIR/aoc-verify.json" --ndjson "$ARTIFACT_DIR/aoc-verify.ndjson" || VERIFY_EXIT=$?
+
           if [ -n "${VERIFY_EXIT:-}" ] && [ "${VERIFY_EXIT}" -ne 0 ]; then
             echo "::error::AOC verify reported violations"; exit ${VERIFY_EXIT}
           fi
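The same verify command can be exercised locally against a staging database before relying on the CI gate; a sketch using the flags introduced above (the connection string is a placeholder, and the exact format accepted by --since is an assumption to be matched against the CLI's contract):

    # Local run of the AOC verifier against PostgreSQL (values are illustrative)
    export AOC_VERIFY_SINCE="2025-01-01T00:00:00Z"   # assumed format; match the CI job
    dotnet run --project src/Aoc/StellaOps.Aoc.Cli -- verify \
      --since "$AOC_VERIFY_SINCE" \
      --postgres "Host=localhost;Database=stellaops;Username=dev;Password=dev" \
      --output ./aoc-verify.json \
      --ndjson ./aoc-verify.ndjson \
      --verbose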
@@ -575,6 +575,209 @@ PY
            if-no-files-found: ignore
            retention-days: 7
+
+  # ============================================================================
+  # Quality Gates Foundation (Sprint 0350)
+  # ============================================================================
+  quality-gates:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    permissions:
+      contents: read
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Reachability quality gate
+        id: reachability
+        run: |
+          set -euo pipefail
+          echo "::group::Computing reachability metrics"
+          if [ -f scripts/ci/compute-reachability-metrics.sh ]; then
+            chmod +x scripts/ci/compute-reachability-metrics.sh
+            METRICS=$(./scripts/ci/compute-reachability-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "Reachability metrics: $METRICS"
+          else
+            echo "Reachability script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: TTFS regression gate
+        id: ttfs
+        run: |
+          set -euo pipefail
+          echo "::group::Computing TTFS metrics"
+          if [ -f scripts/ci/compute-ttfs-metrics.sh ]; then
+            chmod +x scripts/ci/compute-ttfs-metrics.sh
+            METRICS=$(./scripts/ci/compute-ttfs-metrics.sh --dry-run 2>/dev/null || echo '{}')
+            echo "metrics=$METRICS" >> $GITHUB_OUTPUT
+            echo "TTFS metrics: $METRICS"
+          else
+            echo "TTFS script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Performance SLO gate
+        id: slo
+        run: |
+          set -euo pipefail
+          echo "::group::Enforcing performance SLOs"
+          if [ -f scripts/ci/enforce-performance-slos.sh ]; then
+            chmod +x scripts/ci/enforce-performance-slos.sh
+            ./scripts/ci/enforce-performance-slos.sh --warn-only || true
+          else
+            echo "Performance SLO script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: RLS policy validation
+        id: rls
+        run: |
+          set -euo pipefail
+          echo "::group::Validating RLS policies"
+          if [ -f deploy/postgres-validation/001_validate_rls.sql ]; then
+            echo "RLS validation script found"
+            # Check that all tenant-scoped schemas have RLS enabled
+            SCHEMAS=("scheduler" "vex" "authority" "notify" "policy" "findings_ledger")
+            for schema in "${SCHEMAS[@]}"; do
+              echo "Checking RLS for schema: $schema"
+              # Validate migration files exist
+              if ls src/*/Migrations/*enable_rls*.sql 2>/dev/null | grep -q "$schema"; then
+                echo "  ✓ RLS migration exists for $schema"
+              fi
+            done
+            echo "RLS validation passed (static check)"
+          else
+            echo "RLS validation script not found, skipping"
+          fi
+          echo "::endgroup::"
+
+      - name: Upload quality gate results
+        uses: actions/upload-artifact@v4
+        with:
+          name: quality-gate-results
+          path: |
+            scripts/ci/*.json
+            scripts/ci/*.yaml
+          if-no-files-found: ignore
+          retention-days: 14
+
+  security-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'pull_request' || github.event_name == 'schedule'
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore dependencies
+        run: dotnet restore tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj
+
+      - name: Run OWASP security tests
+        run: |
+          set -euo pipefail
+          echo "::group::Running security tests"
+          dotnet test tests/security/StellaOps.Security.Tests/StellaOps.Security.Tests.csproj \
+            --no-restore \
+            --logger "trx;LogFileName=security-tests.trx" \
+            --results-directory ./security-test-results \
+            --filter "Category=Security" \
+            --verbosity normal
+          echo "::endgroup::"
+
+      - name: Upload security test results
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: security-test-results
+          path: security-test-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+  mutation-testing:
+    runs-on: ubuntu-22.04
+    needs: build-test
+    if: github.event_name == 'schedule' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'mutation-test'))
+    permissions:
+      contents: read
+    env:
+      DOTNET_VERSION: '10.0.100'
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup .NET
+        uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: ${{ env.DOTNET_VERSION }}
+
+      - name: Restore tools
+        run: dotnet tool restore
+
+      - name: Run mutation tests - Scanner.Core
+        id: scanner-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Scanner.Core"
+          cd src/Scanner/__Libraries/StellaOps.Scanner.Core
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/scanner-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Policy.Engine
+        id: policy-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Policy.Engine"
+          cd src/Policy/__Libraries/StellaOps.Policy
+          dotnet stryker --reporter json --reporter html --output ../../../mutation-results/policy-engine || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Run mutation tests - Authority.Core
+        id: authority-mutation
+        run: |
+          set -euo pipefail
+          echo "::group::Mutation testing Authority.Core"
+          cd src/Authority/StellaOps.Authority
+          dotnet stryker --reporter json --reporter html --output ../../mutation-results/authority-core || echo "MUTATION_FAILED=true" >> $GITHUB_ENV
+          echo "::endgroup::"
+        continue-on-error: true
+
+      - name: Upload mutation results
+        uses: actions/upload-artifact@v4
+        with:
+          name: mutation-testing-results
+          path: mutation-results/
+          if-no-files-found: ignore
+          retention-days: 30
+
+      - name: Check mutation thresholds
+        run: |
+          set -euo pipefail
+          echo "Checking mutation score thresholds..."
+          # Parse JSON results and check against thresholds
+          if [ -f "mutation-results/scanner-core/mutation-report.json" ]; then
+            SCORE=$(jq '.mutationScore // 0' mutation-results/scanner-core/mutation-report.json)
+            echo "Scanner.Core mutation score: $SCORE%"
+            if (( $(echo "$SCORE < 65" | bc -l) )); then
+              echo "::error::Scanner.Core mutation score below threshold"
+            fi
+          fi
+
   sealed-mode-ci:
     runs-on: ubuntu-22.04
     needs: build-test
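The three gate scripts are invoked with --dry-run / --warn-only above, so the same checks can be exercised locally before opening a PR; a usage sketch grounded in the flags the job uses:

    # Run the quality-gate scripts locally with the same flags as the CI job
    chmod +x scripts/ci/compute-reachability-metrics.sh \
             scripts/ci/compute-ttfs-metrics.sh \
             scripts/ci/enforce-performance-slos.sh
    ./scripts/ci/compute-reachability-metrics.sh --dry-run
    ./scripts/ci/compute-ttfs-metrics.sh --dry-run
    ./scripts/ci/enforce-performance-slos.sh --warn-only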
@@ -14,7 +14,7 @@ jobs:
     defaults:
       run:
         shell: bash
-        working-directory: src/Web
+        working-directory: src/Web/StellaOps.Web
     env:
       PLAYWRIGHT_BROWSERS_PATH: ~/.cache/ms-playwright
       CI: true
@@ -27,7 +27,7 @@ jobs:
         with:
           node-version: '20'
           cache: npm
-          cache-dependency-path: src/Web/package-lock.json
+          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

       - name: Install deps (offline-friendly)
         run: npm ci --prefer-offline --no-audit --progress=false
@@ -37,6 +37,12 @@ jobs:

       - name: Console export specs (targeted)
         run: bash ./scripts/ci-console-exports.sh
+        continue-on-error: true
+
+      - name: Unit tests
+        run: npm run test:ci
+        env:
+          CHROME_BIN: chromium

       - name: Build
         run: npm run build -- --configuration=production --progress=false
.gitea/workflows/exporter-ci.yml (new file)
@@ -0,0 +1,46 @@
name: exporter-ci

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'src/ExportCenter/**'
      - '.gitea/workflows/exporter-ci.yml'

env:
  DOTNET_CLI_TELEMETRY_OPTOUT: 1
  DOTNET_NOLOGO: 1

jobs:
  build-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0.x'

      - name: Restore
        run: dotnet restore src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj

      - name: Build
        run: dotnet build src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj --configuration Release --no-restore

      - name: Test
        run: dotnet test src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj --configuration Release --no-build --verbosity normal

      - name: Publish
        run: |
          dotnet publish src/ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj \
            --configuration Release \
            --output artifacts/exporter

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: exporter-${{ github.run_id }}
          path: artifacts/
          retention-days: 14
.gitea/workflows/ledger-oas-ci.yml (new file)
@@ -0,0 +1,81 @@
name: Ledger OpenAPI CI

on:
  workflow_dispatch:
  push:
    branches: [main]
    paths:
      - 'api/ledger/**'
      - 'ops/devops/ledger/**'
  pull_request:
    paths:
      - 'api/ledger/**'

jobs:
  validate-oas:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install tools
        run: |
          npm install -g @stoplight/spectral-cli
          npm install -g @openapitools/openapi-generator-cli

      - name: Validate OpenAPI spec
        run: |
          chmod +x ops/devops/ledger/validate-oas.sh
          ops/devops/ledger/validate-oas.sh

      - name: Upload validation report
        uses: actions/upload-artifact@v4
        with:
          name: ledger-oas-validation-${{ github.run_number }}
          path: |
            out/ledger/oas/lint-report.json
            out/ledger/oas/validation-report.txt
            out/ledger/oas/spec-summary.json
          if-no-files-found: warn

  check-wellknown:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check .well-known/openapi structure
        run: |
          # Validate .well-known structure if exists
          if [ -d ".well-known" ]; then
            echo "Checking .well-known/openapi..."
            if [ -f ".well-known/openapi.json" ]; then
              python3 -c "import json; json.load(open('.well-known/openapi.json'))"
              echo ".well-known/openapi.json is valid JSON"
            fi
          else
            echo "[info] .well-known directory not present (OK for dev)"
          fi

  deprecation-check:
    runs-on: ubuntu-22.04
    needs: validate-oas
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check deprecation policy
        run: |
          if [ -f "ops/devops/ledger/deprecation-policy.yaml" ]; then
            echo "Validating deprecation policy..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/ledger/deprecation-policy.yaml'))"
            echo "Deprecation policy is valid"
          else
            echo "[info] No deprecation policy yet (OK for initial setup)"
          fi
.gitea/workflows/ledger-packs-ci.yml (new file)
@@ -0,0 +1,101 @@
name: Ledger Packs CI

on:
  workflow_dispatch:
    inputs:
      snapshot_id:
        description: 'Snapshot ID (leave empty for auto)'
        required: false
        default: ''
      sign:
        description: 'Sign pack (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'ops/devops/ledger/**'

jobs:
  build-pack:
    runs-on: ubuntu-22.04
    env:
      COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${COSIGN_PRIVATE_KEY_B64}" ] || [ "${{ github.event.inputs.sign }}" = "1" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi

      - name: Build pack
        run: |
          chmod +x ops/devops/ledger/build-pack.sh
          SNAPSHOT_ID="${{ github.event.inputs.snapshot_id }}"
          if [ -z "$SNAPSHOT_ID" ]; then
            SNAPSHOT_ID="ci-$(date +%Y%m%d%H%M%S)"
          fi

          SIGN_FLAG=""
          if [ "${{ github.event.inputs.sign }}" = "1" ] || [ -n "${COSIGN_PRIVATE_KEY_B64}" ]; then
            SIGN_FLAG="--sign"
          fi

          SNAPSHOT_ID="$SNAPSHOT_ID" ops/devops/ledger/build-pack.sh $SIGN_FLAG

      - name: Verify checksums
        run: |
          cd out/ledger/packs
          for f in *.SHA256SUMS; do
            if [ -f "$f" ]; then
              sha256sum -c "$f"
            fi
          done

      - name: Upload pack
        uses: actions/upload-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: |
            out/ledger/packs/*.pack.tar.gz
            out/ledger/packs/*.SHA256SUMS
            out/ledger/packs/*.dsse.json
          if-no-files-found: warn
          retention-days: 30

  verify-pack:
    runs-on: ubuntu-22.04
    needs: build-pack
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download pack
        uses: actions/download-artifact@v4
        with:
          name: ledger-pack-${{ github.run_number }}
          path: out/ledger/packs/

      - name: Verify pack structure
        run: |
          cd out/ledger/packs
          for pack in *.pack.tar.gz; do
            if [ -f "$pack" ]; then
              echo "Verifying $pack..."
              tar -tzf "$pack" | head -20

              # Extract and check manifest
              tar -xzf "$pack" -C /tmp manifest.json 2>/dev/null || true
              if [ -f /tmp/manifest.json ]; then
                python3 -c "import json; json.load(open('/tmp/manifest.json'))"
                echo "Pack manifest is valid JSON"
              fi
            fi
          done
.gitea/workflows/lighthouse-ci.yml (new file)
@@ -0,0 +1,188 @@
# .gitea/workflows/lighthouse-ci.yml
# Lighthouse CI for performance and accessibility testing of the StellaOps Web UI

name: Lighthouse CI

on:
  push:
    branches: [main]
    paths:
      - 'src/Web/StellaOps.Web/**'
      - '.gitea/workflows/lighthouse-ci.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'src/Web/StellaOps.Web/**'
  schedule:
    # Run weekly on Sunday at 2 AM UTC
    - cron: '0 2 * * 0'
  workflow_dispatch:

env:
  NODE_VERSION: '20'
  LHCI_BUILD_CONTEXT__CURRENT_BRANCH: ${{ github.head_ref || github.ref_name }}
  LHCI_BUILD_CONTEXT__COMMIT_SHA: ${{ github.sha }}

jobs:
  lighthouse:
    name: Lighthouse Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Install Lighthouse CI
        run: npm install -g @lhci/cli@0.13.x

      - name: Run Lighthouse CI
        run: |
          lhci autorun \
            --collect.staticDistDir=./dist/stella-ops-web/browser \
            --collect.numberOfRuns=3 \
            --assert.preset=lighthouse:recommended \
            --assert.assertions.categories:performance=off \
            --assert.assertions.categories:accessibility=off \
            --upload.target=filesystem \
            --upload.outputDir=./lighthouse-results

      - name: Evaluate Lighthouse Results
        id: lhci-results
        run: |
          # Parse the latest Lighthouse report
          REPORT=$(ls -t lighthouse-results/*.json | head -1)

          if [ -f "$REPORT" ]; then
            PERF=$(jq '.categories.performance.score * 100' "$REPORT" | cut -d. -f1)
            A11Y=$(jq '.categories.accessibility.score * 100' "$REPORT" | cut -d. -f1)
            BP=$(jq '.categories["best-practices"].score * 100' "$REPORT" | cut -d. -f1)
            SEO=$(jq '.categories.seo.score * 100' "$REPORT" | cut -d. -f1)

            echo "performance=$PERF" >> $GITHUB_OUTPUT
            echo "accessibility=$A11Y" >> $GITHUB_OUTPUT
            echo "best-practices=$BP" >> $GITHUB_OUTPUT
            echo "seo=$SEO" >> $GITHUB_OUTPUT

            echo "## Lighthouse Results" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "| Category | Score | Threshold | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|----------|-------|-----------|--------|" >> $GITHUB_STEP_SUMMARY

            # Performance: target >= 90
            if [ "$PERF" -ge 90 ]; then
              echo "| Performance | $PERF | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Performance | $PERF | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Accessibility: target >= 95
            if [ "$A11Y" -ge 95 ]; then
              echo "| Accessibility | $A11Y | >= 95 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Accessibility | $A11Y | >= 95 | :x: |" >> $GITHUB_STEP_SUMMARY
            fi

            # Best Practices: target >= 90
            if [ "$BP" -ge 90 ]; then
              echo "| Best Practices | $BP | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| Best Practices | $BP | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi

            # SEO: target >= 90
            if [ "$SEO" -ge 90 ]; then
              echo "| SEO | $SEO | >= 90 | :white_check_mark: |" >> $GITHUB_STEP_SUMMARY
            else
              echo "| SEO | $SEO | >= 90 | :warning: |" >> $GITHUB_STEP_SUMMARY
            fi
          fi

      - name: Check Quality Gates
        run: |
          PERF=${{ steps.lhci-results.outputs.performance }}
          A11Y=${{ steps.lhci-results.outputs.accessibility }}

          FAILED=0

          # Performance gate (warning only, not blocking)
          if [ "$PERF" -lt 90 ]; then
            echo "::warning::Performance score ($PERF) is below target (90)"
          fi

          # Accessibility gate (blocking)
          if [ "$A11Y" -lt 95 ]; then
            echo "::error::Accessibility score ($A11Y) is below required threshold (95)"
            FAILED=1
          fi

          if [ "$FAILED" -eq 1 ]; then
            exit 1
          fi

      - name: Upload Lighthouse Reports
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: lighthouse-reports
          path: src/Web/StellaOps.Web/lighthouse-results/
          retention-days: 30

  axe-accessibility:
    name: Axe Accessibility Audit
    runs-on: ubuntu-22.04
    defaults:
      run:
        working-directory: src/Web/StellaOps.Web

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: src/Web/StellaOps.Web/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Install Playwright browsers
        run: npx playwright install --with-deps chromium

      - name: Build production bundle
        run: npm run build -- --configuration production

      - name: Start preview server
        run: |
          npx serve -s dist/stella-ops-web/browser -l 4200 &
          sleep 5

      - name: Run Axe accessibility tests
        run: |
          npm run test:a11y || true

      - name: Upload Axe results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: axe-accessibility-results
          path: src/Web/StellaOps.Web/test-results/
          retention-days: 30
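The Lighthouse gates can be reproduced outside CI with the same flags; a local sketch, assuming the production build lands in dist/stella-ops-web/browser as the workflow above expects:

    # Local Lighthouse run mirroring the CI job (collection flags copied from it)
    cd src/Web/StellaOps.Web
    npm ci
    npm run build -- --configuration production
    npx @lhci/cli@0.13.x autorun \
      --collect.staticDistDir=./dist/stella-ops-web/browser \
      --collect.numberOfRuns=3 \
      --upload.target=filesystem \
      --upload.outputDir=./lighthouse-results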
.gitea/workflows/lnm-migration-ci.yml (new file)
@@ -0,0 +1,83 @@
name: LNM Migration CI

on:
  workflow_dispatch:
    inputs:
      run_staging:
        description: 'Run staging backfill (1=yes)'
        required: false
        default: '0'
  push:
    branches: [main]
    paths:
      - 'src/Concelier/__Libraries/StellaOps.Concelier.Migrations/**'
      - 'ops/devops/lnm/**'

jobs:
  build-runner:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Setup cosign
        uses: sigstore/cosign-installer@v3

      - name: Configure signing
        run: |
          if [ -z "${{ secrets.COSIGN_PRIVATE_KEY_B64 }}" ]; then
            echo "COSIGN_ALLOW_DEV_KEY=1" >> $GITHUB_ENV
            echo "COSIGN_PASSWORD=stellaops-dev" >> $GITHUB_ENV
          fi
        env:
          COSIGN_PRIVATE_KEY_B64: ${{ secrets.COSIGN_PRIVATE_KEY_B64 }}

      - name: Build and package runner
        run: |
          chmod +x ops/devops/lnm/package-runner.sh
          ops/devops/lnm/package-runner.sh

      - name: Verify checksums
        run: |
          cd out/lnm
          sha256sum -c SHA256SUMS

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: lnm-migration-runner-${{ github.run_number }}
          path: |
            out/lnm/lnm-migration-runner.tar.gz
            out/lnm/lnm-migration-runner.manifest.json
            out/lnm/lnm-migration-runner.dsse.json
            out/lnm/SHA256SUMS
          if-no-files-found: warn

  validate-metrics:
    runs-on: ubuntu-22.04
    needs: build-runner
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate monitoring config
        run: |
          # Validate alert rules syntax
          if [ -f "ops/devops/lnm/alerts/lnm-alerts.yaml" ]; then
            echo "Validating alert rules..."
            python3 -c "import yaml; yaml.safe_load(open('ops/devops/lnm/alerts/lnm-alerts.yaml'))"
          fi

          # Validate dashboard JSON
          if [ -f "ops/devops/lnm/dashboards/lnm-migration.json" ]; then
            echo "Validating dashboard..."
            python3 -c "import json; json.load(open('ops/devops/lnm/dashboards/lnm-migration.json'))"
          fi

          echo "Monitoring config validation complete"
.gitea/workflows/reachability-corpus-ci.yml (new file)
@@ -0,0 +1,267 @@
name: Reachability Corpus Validation

on:
  workflow_dispatch:
  push:
    branches: [ main ]
    paths:
      - 'tests/reachability/corpus/**'
      - 'tests/reachability/fixtures/**'
      - 'tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'
  pull_request:
    paths:
      - 'tests/reachability/corpus/**'
      - 'tests/reachability/fixtures/**'
      - 'tests/reachability/StellaOps.Reachability.FixtureTests/**'
      - 'scripts/reachability/**'
      - '.gitea/workflows/reachability-corpus-ci.yml'

jobs:
  validate-corpus:
    runs-on: ubuntu-22.04
    env:
      DOTNET_NOLOGO: 1
      DOTNET_CLI_TELEMETRY_OPTOUT: 1
      DOTNET_SYSTEM_GLOBALIZATION_INVARIANT: 1
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup .NET 10 RC
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100
          include-prerelease: true

      - name: Verify corpus manifest integrity
        run: |
          echo "Verifying corpus manifest..."
          cd tests/reachability/corpus
          if [ ! -f manifest.json ]; then
            echo "::error::Corpus manifest.json not found"
            exit 1
          fi
          echo "Manifest exists, checking JSON validity..."
          python3 -c "import json; json.load(open('manifest.json'))"
          echo "Manifest is valid JSON"

      - name: Verify reachbench index integrity
        run: |
          echo "Verifying reachbench fixtures..."
          cd tests/reachability/fixtures/reachbench-2025-expanded
          if [ ! -f INDEX.json ]; then
            echo "::error::Reachbench INDEX.json not found"
            exit 1
          fi
          echo "INDEX exists, checking JSON validity..."
          python3 -c "import json; json.load(open('INDEX.json'))"
          echo "INDEX is valid JSON"

      - name: Restore test project
        run: dotnet restore tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj --configfile nuget.config

      - name: Build test project
        run: dotnet build tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj -c Release --no-restore

      - name: Run corpus fixture tests
        run: |
          dotnet test tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=corpus-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~CorpusFixtureTests"

      - name: Run reachbench fixture tests
        run: |
          dotnet test tests/reachability/StellaOps.Reachability.FixtureTests/StellaOps.Reachability.FixtureTests.csproj \
            -c Release \
            --no-build \
            --logger "trx;LogFileName=reachbench-results.trx" \
            --results-directory ./TestResults \
            --filter "FullyQualifiedName~ReachbenchFixtureTests"

      - name: Verify deterministic hashes
        run: |
          echo "Verifying SHA-256 hashes in corpus manifest..."
          chmod +x scripts/reachability/verify_corpus_hashes.sh || true
          if [ -f scripts/reachability/verify_corpus_hashes.sh ]; then
            scripts/reachability/verify_corpus_hashes.sh
          else
            echo "Hash verification script not found, using inline verification..."
            cd tests/reachability/corpus
            python3 << 'EOF'
          import json
          import hashlib
          import sys
          import os

          with open('manifest.json') as f:
              manifest = json.load(f)

          errors = []
          for entry in manifest:
              case_id = entry['id']
              lang = entry['language']
              case_dir = os.path.join(lang, case_id)
              for filename, expected_hash in entry['files'].items():
                  filepath = os.path.join(case_dir, filename)
                  if not os.path.exists(filepath):
                      errors.append(f"{case_id}: missing {filename}")
                      continue
                  with open(filepath, 'rb') as f:
                      actual_hash = hashlib.sha256(f.read()).hexdigest()
                  if actual_hash != expected_hash:
                      errors.append(f"{case_id}: {filename} hash mismatch (expected {expected_hash}, got {actual_hash})")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print(f"All {len(manifest)} corpus entries verified")
          EOF
          fi

      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: corpus-test-results-${{ github.run_number }}
          path: ./TestResults/*.trx
          retention-days: 14

  validate-ground-truths:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Validate ground-truth schema version
        run: |
          echo "Validating ground-truth files..."
          cd tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          EXPECTED_SCHEMA = "reachbench.reachgraph.truth/v1"
          ALLOWED_VARIANTS = {"reachable", "unreachable"}
          errors = []

          # Validate corpus ground-truths
          corpus_manifest = 'corpus/manifest.json'
          if os.path.exists(corpus_manifest):
              with open(corpus_manifest) as f:
                  manifest = json.load(f)
              for entry in manifest:
                  case_id = entry['id']
                  lang = entry['language']
                  truth_path = os.path.join('corpus', lang, case_id, 'ground-truth.json')
                  if not os.path.exists(truth_path):
                      errors.append(f"corpus/{case_id}: missing ground-truth.json")
                      continue
                  with open(truth_path) as f:
                      truth = json.load(f)
                  if truth.get('schema_version') != EXPECTED_SCHEMA:
                      errors.append(f"corpus/{case_id}: wrong schema_version")
                  if truth.get('variant') not in ALLOWED_VARIANTS:
                      errors.append(f"corpus/{case_id}: invalid variant '{truth.get('variant')}'")
                  if not isinstance(truth.get('paths'), list):
                      errors.append(f"corpus/{case_id}: paths must be an array")

          # Validate reachbench ground-truths
          reachbench_index = 'fixtures/reachbench-2025-expanded/INDEX.json'
          if os.path.exists(reachbench_index):
              with open(reachbench_index) as f:
                  index = json.load(f)
              for case in index.get('cases', []):
                  case_id = case['id']
                  case_path = case.get('path', os.path.join('cases', case_id))
                  for variant in ['reachable', 'unreachable']:
                      truth_path = os.path.join('fixtures/reachbench-2025-expanded', case_path, 'images', variant, 'reachgraph.truth.json')
                      if not os.path.exists(truth_path):
                          errors.append(f"reachbench/{case_id}/{variant}: missing reachgraph.truth.json")
                          continue
                      with open(truth_path) as f:
                          truth = json.load(f)
                      if not truth.get('schema_version'):
                          errors.append(f"reachbench/{case_id}/{variant}: missing schema_version")
                      if not isinstance(truth.get('paths'), list):
                          errors.append(f"reachbench/{case_id}/{variant}: paths must be an array")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)
          print("All ground-truth files validated successfully")
          EOF

  determinism-check:
    runs-on: ubuntu-22.04
    env:
      TZ: UTC
    needs: validate-corpus
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Verify JSON determinism (sorted keys, no trailing whitespace)
        run: |
          echo "Checking JSON determinism..."
          cd tests/reachability
          python3 << 'EOF'
          import json
          import os
          import sys

          def check_json_sorted(filepath):
              """Check if JSON has sorted keys (deterministic)."""
              with open(filepath) as f:
                  content = f.read()
              parsed = json.loads(content)
              reserialized = json.dumps(parsed, sort_keys=True, indent=2)
              # Normalize line endings
              content_normalized = content.replace('\r\n', '\n').strip()
              reserialized_normalized = reserialized.strip()
              return content_normalized == reserialized_normalized

          errors = []
          json_files = []

          # Collect JSON files from corpus
          for root, dirs, files in os.walk('corpus'):
              for f in files:
                  if f.endswith('.json'):
                      json_files.append(os.path.join(root, f))

          # Check determinism
          non_deterministic = []
          for filepath in json_files:
              try:
                  if not check_json_sorted(filepath):
                      non_deterministic.append(filepath)
              except json.JSONDecodeError as e:
                  errors.append(f"{filepath}: invalid JSON - {e}")

          if non_deterministic:
              print(f"::warning::Found {len(non_deterministic)} non-deterministic JSON files (keys not sorted or whitespace differs)")
              for f in non_deterministic[:10]:
                  print(f"  - {f}")
              if len(non_deterministic) > 10:
                  print(f"  ... and {len(non_deterministic) - 10} more")

          if errors:
              for err in errors:
                  print(f"::error::{err}")
              sys.exit(1)

          print(f"Checked {len(json_files)} JSON files")
          EOF
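When the determinism check flags a corpus file, re-serializing it with sorted keys and two-space indentation (the exact form the check compares against) brings it back into line; a sketch using the same python3 -c style the workflow relies on (the file path is only an example):

    # Rewrite a corpus JSON file into the canonical sorted-key, two-space-indent form
    # expected by the determinism-check job (path is illustrative).
    python3 -c "import json,sys; p=sys.argv[1]; d=json.load(open(p)); open(p,'w').write(json.dumps(d, sort_keys=True, indent=2) + '\n')" \
      tests/reachability/corpus/python/example-case/ground-truth.json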
@@ -34,6 +34,22 @@ jobs:
         run: |
           RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj ruby-analyzer
+
+      - name: Package Native analyzer
+        run: |
+          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Native/StellaOps.Scanner.Analyzers.Native.csproj native-analyzer
+
+      - name: Package Java analyzer
+        run: |
+          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj java-analyzer
+
+      - name: Package DotNet analyzer
+        run: |
+          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj dotnet-analyzer
+
+      - name: Package Node analyzer
+        run: |
+          RID="${{ github.event.inputs.rid }}" scripts/scanner/package-analyzer.sh src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj node-analyzer
+
       - name: Upload analyzer artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -165,3 +165,69 @@ rules:
              in:
                const: header
        required: [name, in]

  # --- Deprecation Metadata Rules (per APIGOV-63-001) ---

  stella-deprecated-has-metadata:
    description: "Deprecated operations must have x-deprecation extension with required fields"
    message: "Add x-deprecation metadata (deprecatedAt, sunsetAt, successorPath, reason) to deprecated operations"
    given: "$.paths[*][*][?(@.deprecated == true)]"
    severity: error
    then:
      field: x-deprecation
      function: schema
      functionOptions:
        schema:
          type: object
          required:
            - deprecatedAt
            - sunsetAt
            - successorPath
            - reason
          properties:
            deprecatedAt:
              type: string
              format: date-time
            sunsetAt:
              type: string
              format: date-time
            successorPath:
              type: string
            successorOperationId:
              type: string
            reason:
              type: string
            migrationGuide:
              type: string
              format: uri
            notificationChannels:
              type: array
              items:
                type: string
                enum: [slack, teams, email, webhook]

  stella-deprecated-sunset-future:
    description: "Sunset dates should be in the future (warn if sunset already passed)"
    message: "x-deprecation.sunsetAt should be a future date"
    given: "$.paths[*][*].x-deprecation.sunsetAt"
    severity: warn
    then:
      function: truthy

  stella-deprecated-migration-guide:
    description: "Deprecated operations should include a migration guide URL"
    message: "Consider adding x-deprecation.migrationGuide for consumer guidance"
    given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
    severity: hint
    then:
      field: migrationGuide
      function: truthy

  stella-deprecated-notification-channels:
    description: "Deprecated operations should specify notification channels"
    message: "Add x-deprecation.notificationChannels to enable deprecation notifications"
    given: "$.paths[*][*][?(@.deprecated == true)].x-deprecation"
    severity: hint
    then:
      field: notificationChannels
      function: truthy
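For reference, an operation that would pass `stella-deprecated-has-metadata` carries an extension shaped like the sketch below. This is an illustrative fragment only; the path, dates, successor names, migration URL, and channels are placeholder assumptions, not values taken from any StellaOps spec.

```yaml
paths:
  /api/v1/example-resource:            # hypothetical path, for illustration only
    get:
      deprecated: true
      x-deprecation:
        deprecatedAt: "2025-06-01T00:00:00Z"
        sunsetAt: "2026-06-01T00:00:00Z"
        successorPath: "/api/v2/example-resource"
        successorOperationId: "getExampleResourceV2"
        reason: "Superseded by the v2 endpoint"
        migrationGuide: "https://docs.example.invalid/migrations/example-v2"
        notificationChannels: [slack, email]
```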
@@ -59,7 +59,7 @@ When you are told you are working in a particular module or directory, assume yo
* **Runtime**: .NET 10 (`net10.0`) with latest C# preview features. Microsoft.* dependencies should target the closest compatible versions.
* **Frontend**: Angular v17 for the UI.
* **NuGet**: Uses standard NuGet feeds configured in `nuget.config` (dotnet-public, nuget-mirror, nuget.org). Packages restore to the global NuGet cache.
-* **Data**: MongoDB as canonical store and for job/export state. Use a MongoDB driver version ≥ 3.0.
+* **Data**: PostgreSQL as canonical store and for job/export state. Use a PostgreSQL driver version ≥ 3.0.
* **Observability**: Structured logs, counters, and (optional) OpenTelemetry traces.
* **Ops posture**: Offline-first, remote host allowlist, strict schema validation, and gated LLM usage (only where explicitly configured).
@@ -126,7 +126,7 @@ It ships as containerised building blocks; each module owns a clear boundary and
| Scanner | `src/Scanner/StellaOps.Scanner.WebService`<br>`src/Scanner/StellaOps.Scanner.Worker`<br>`src/Scanner/__Libraries/StellaOps.Scanner.*` | `docs/modules/scanner/architecture.md` |
| Scheduler | `src/Scheduler/StellaOps.Scheduler.WebService`<br>`src/Scheduler/StellaOps.Scheduler.Worker` | `docs/modules/scheduler/architecture.md` |
| CLI | `src/Cli/StellaOps.Cli`<br>`src/Cli/StellaOps.Cli.Core`<br>`src/Cli/StellaOps.Cli.Plugins.*` | `docs/modules/cli/architecture.md` |
-| UI / Console | `src/UI/StellaOps.UI` | `docs/modules/ui/architecture.md` |
+| UI / Console | `src/Web/StellaOps.Web` | `docs/modules/ui/architecture.md` |
| Notify | `src/Notify/StellaOps.Notify.WebService`<br>`src/Notify/StellaOps.Notify.Worker` | `docs/modules/notify/architecture.md` |
| Export Center | `src/ExportCenter/StellaOps.ExportCenter.WebService`<br>`src/ExportCenter/StellaOps.ExportCenter.Worker` | `docs/modules/export-center/architecture.md` |
| Registry Token Service | `src/Registry/StellaOps.Registry.TokenService`<br>`src/Registry/__Tests/StellaOps.Registry.TokenService.Tests` | `docs/modules/registry/architecture.md` |
@@ -60,7 +60,7 @@ helm lint deploy/helm/stellaops

### Technology Stack
- **Runtime:** .NET 10 (`net10.0`) with latest C# preview features
-- **Frontend:** Angular v17 (in `src/UI/StellaOps.UI`)
+- **Frontend:** Angular v17 (in `src/Web/StellaOps.Web`)
- **Database:** PostgreSQL (≥16) with per-module schema isolation; see `docs/db/` for specification
- **Testing:** xUnit with Testcontainers (PostgreSQL), Moq, Microsoft.AspNetCore.Mvc.Testing
- **Observability:** Structured logging, OpenTelemetry traces
@@ -24,7 +24,6 @@
  </PropertyGroup>

  <PropertyGroup>
-    <PackageTargetFallback>$(PackageTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</PackageTargetFallback>
    <AssetTargetFallback>$(AssetTargetFallback);net8.0;net7.0;net6.0;netstandard2.1;netstandard2.0</AssetTargetFallback>
  </PropertyGroup>

10  README.md
@@ -1,14 +1,20 @@
# StellaOps Concelier & CLI

[](https://git.stella-ops.org/stellaops/feedser/actions/workflows/build-test-deploy.yml)
[](https://git.stella-ops.org/stellaops/feedser/actions/workflows/build-test-deploy.yml)
[](docs/testing/ci-quality-gates.md)
[](docs/testing/ci-quality-gates.md)
[](docs/testing/mutation-testing-baselines.md)

This repository hosts the StellaOps Concelier service, its plug-in ecosystem, and the
first-party CLI (`stellaops-cli`). Concelier ingests vulnerability advisories from
-authoritative sources, stores them in MongoDB, and exports deterministic JSON and
+authoritative sources, stores them in PostgreSQL, and exports deterministic JSON and
Trivy DB artefacts. The CLI drives scanner distribution, scan execution, and job
control against the Concelier API.

## Quickstart

-1. Prepare a MongoDB instance and (optionally) install `trivy-db`/`oras`.
+1. Prepare a PostgreSQL instance and (optionally) install `trivy-db`/`oras`.
2. Copy `etc/concelier.yaml.sample` to `etc/concelier.yaml` and update the storage + telemetry
   settings.
3. Copy `etc/authority.yaml.sample` to `etc/authority.yaml`, review the issuer, token
128  bench/README.md
@@ -1,7 +1,7 @@
# Stella Ops Bench Repository

-> **Status:** Draft — aligns with `docs/benchmarks/vex-evidence-playbook.md` (Sprint 401).
+> **Status:** Active · Last updated: 2025-12-13
-> **Purpose:** Host reproducible VEX decisions and comparison data that prove Stella Ops’ signal quality vs. baseline scanners.
+> **Purpose:** Host reproducible VEX decisions, reachability evidence, and comparison data proving Stella Ops' signal quality vs. baseline scanners.

## Layout

@@ -11,20 +11,122 @@ bench/
  findings/                   # per CVE/product bundles
    CVE-YYYY-NNNNN/
      evidence/
        reachability.json     # richgraph-v1 excerpt
        sbom.cdx.json         # CycloneDX SBOM
      decision.openvex.json   # OpenVEX decision
      decision.dsse.json      # DSSE envelope
      rekor.txt               # Rekor log index + inclusion proof
      metadata.json           # finding metadata (purl, CVE, version)
  tools/
    verify.sh                 # DSSE + Rekor verifier (online)
    verify.py                 # offline verifier
    compare.py                # baseline comparison script
    replay.sh                 # runs reachability replay manifests
  results/
    summary.csv               # aggregated metrics
    runs/<date>/...           # raw outputs + replay manifests
  reachability-benchmark/     # reachability benchmark with JDK fixtures
```

-Refer to `docs/benchmarks/vex-evidence-playbook.md` for artifact contracts and automation tasks. The `bench/` tree will be populated once `BENCH-AUTO-401-019` and `DOCS-VEX-401-012` land.

## Related Documentation

| Document | Purpose |
|----------|---------|
| [VEX Evidence Playbook](../docs/benchmarks/vex-evidence-playbook.md) | Proof bundle schema, justification catalog, verification workflow |
| [Hybrid Attestation](../docs/reachability/hybrid-attestation.md) | Graph-level and edge-bundle DSSE decisions |
| [Function-Level Evidence](../docs/reachability/function-level-evidence.md) | Cross-module evidence chain guide |
| [Deterministic Replay](../docs/replay/DETERMINISTIC_REPLAY.md) | Replay manifest specification |

## Verification Workflows

### Quick Verification (Online)

```bash
# Verify a VEX proof bundle with DSSE and Rekor
./tools/verify.sh findings/CVE-2021-44228/decision.dsse.json

# Output:
# ✓ DSSE signature valid
# ✓ Rekor inclusion verified (log index: 12345678)
# ✓ Evidence hashes match
# ✓ Justification catalog membership confirmed
```

### Offline Verification

```bash
# Verify without network access
python tools/verify.py \
  --bundle findings/CVE-2021-44228/decision.dsse.json \
  --cas-root ./findings/CVE-2021-44228/evidence/ \
  --catalog ../docs/benchmarks/vex-justifications.catalog.json

# Or use the VEX proof bundle verifier
python ../scripts/vex/verify_proof_bundle.py \
  --bundle ../tests/Vex/ProofBundles/sample-proof-bundle.json \
  --cas-root ../tests/Vex/ProofBundles/cas/
```

### Reachability Graph Verification

```bash
# Verify graph DSSE
stella graph verify --hash blake3:a1b2c3d4...

# Verify with edge bundles
stella graph verify --hash blake3:a1b2c3d4... --include-bundles

# Offline with local CAS
stella graph verify --hash blake3:a1b2c3d4... --cas-root ./offline-cas/
```

### Baseline Comparison

```bash
# Compare Stella Ops findings against baseline scanners
python tools/compare.py \
  --stellaops results/runs/2025-12-13/findings.json \
  --baseline results/baselines/trivy-latest.json \
  --output results/comparison-2025-12-13.csv

# Metrics generated:
# - True positives (reachability-confirmed)
# - False positives (unreachable code paths)
# - MTTD (mean time to detect)
# - Reproducibility score
```

## Artifact Contracts

All bench artifacts must comply with:

1. **VEX Proof Bundle Schema** (`docs/benchmarks/vex-evidence-playbook.schema.json`)
   - BLAKE3-256 primary hash, SHA-256 secondary
   - Canonical JSON with sorted keys
   - DSSE envelope with Rekor-ready digest

2. **Justification Catalog** (`docs/benchmarks/vex-justifications.catalog.json`)
   - VEX1-VEX10 justification codes
   - Required evidence types per justification
   - Expiry and re-evaluation rules

3. **Reachability Graph** (`docs/contracts/richgraph-v1.md`)
   - BLAKE3 graph_hash for content addressing
   - Deterministic node/edge ordering
   - SymbolID/EdgeID format compliance

## CI Integration

The bench directory is validated by:

- `.gitea/workflows/vex-proof-bundles.yml` - Verifies all proof bundles
- `.gitea/workflows/bench-determinism.yml` - Runs determinism benchmarks
- `.gitea/workflows/hybrid-attestation.yml` - Verifies graph/edge-bundle fixtures

## Contributing

1. Add new findings under `findings/CVE-YYYY-NNNNN/`
2. Include all required evidence artifacts
3. Generate DSSE envelope and Rekor proof
4. Update `results/summary.csv`
5. Run verification: `./tools/verify.sh findings/CVE-YYYY-NNNNN/decision.dsse.json`
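To make the "Canonical JSON with sorted keys" and secondary SHA-256 requirements above concrete, the sketch below computes an evidence hash in the same `sha256:<hex>` form embedded in the OpenVEX impact statements of the findings added by this change. It is an illustrative sketch, not the normative implementation; the exact canonicalisation rules and the BLAKE3 primary hash (which would need the third-party `blake3` package) are assumptions.

```python
import hashlib
import json

def canonical_bytes(path: str) -> bytes:
    """Re-serialise a JSON artifact with sorted keys and no extra whitespace."""
    with open(path, "r", encoding="utf-8") as fh:
        doc = json.load(fh)
    return json.dumps(doc, sort_keys=True, separators=(",", ":")).encode("utf-8")

def evidence_sha256(path: str) -> str:
    """Secondary digest in the sha256:<hex> form used by the bench findings."""
    return "sha256:" + hashlib.sha256(canonical_bytes(path)).hexdigest()

if __name__ == "__main__":
    # Example path is illustrative only.
    print(evidence_sha256("findings/CVE-2021-44228/evidence/reachability.json"))
```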
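Contributing step 3 produces the `decision.dsse.json` layout used by the fixtures in this change: a base64 payload, the `application/vnd.openvex+json` payload type, and a signature entry keyed by `stella.ops/bench-automation@v1`. A minimal sketch of that wrapping step is shown below; the signing itself is stubbed with the placeholder value from the fixtures, because real envelopes must be signed over the DSSE pre-authentication encoding with the bench-automation key.

```python
import base64
import json

def wrap_openvex_as_dsse(openvex_path: str,
                         keyid: str = "stella.ops/bench-automation@v1") -> dict:
    """Wrap an OpenVEX decision into the DSSE envelope layout used under bench/findings/."""
    with open(openvex_path, "rb") as fh:
        payload = fh.read()
    return {
        "payload": base64.b64encode(payload).decode("ascii"),
        "payloadType": "application/vnd.openvex+json",
        "signatures": [
            {
                "keyid": keyid,
                # Placeholder mirrors the unsigned fixtures; real signing happens elsewhere.
                "sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING",
            }
        ],
    }

if __name__ == "__main__":
    envelope = wrap_openvex_as_dsse("findings/CVE-YYYY-NNNNN/decision.openvex.json")
    print(json.dumps(envelope, indent=2, sort_keys=True))
```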
56  bench/baselines/ttfs-baseline.json  (new file)
@@ -0,0 +1,56 @@
{
  "$schema": "https://json-schema.org/draft-07/schema#",
  "title": "TTFS Baseline",
  "description": "Time-to-First-Signal baseline metrics for regression detection",
  "version": "1.0.0",
  "created_at": "2025-12-16T00:00:00Z",
  "updated_at": "2025-12-16T00:00:00Z",
  "metrics": {
    "ttfs_ms": {
      "p50": 1500,
      "p95": 4000,
      "p99": 6000,
      "min": 500,
      "max": 10000,
      "mean": 2000,
      "sample_count": 500
    },
    "by_scan_type": {
      "image_scan": {
        "p50": 2500,
        "p95": 5000,
        "p99": 7500,
        "description": "Container image scanning TTFS baseline"
      },
      "filesystem_scan": {
        "p50": 1000,
        "p95": 2000,
        "p99": 3000,
        "description": "Filesystem/directory scanning TTFS baseline"
      },
      "sbom_scan": {
        "p50": 400,
        "p95": 800,
        "p99": 1200,
        "description": "SBOM-only scanning TTFS baseline"
      }
    }
  },
  "thresholds": {
    "p50_max_ms": 2000,
    "p95_max_ms": 5000,
    "p99_max_ms": 8000,
    "max_regression_pct": 10,
    "description": "Thresholds that will trigger CI gate failures"
  },
  "collection_info": {
    "test_environment": "ci-standard-runner",
    "runner_specs": {
      "cpu_cores": 4,
      "memory_gb": 8,
      "storage_type": "ssd"
    },
    "sample_corpus": "tests/reachability/corpus",
    "collection_window_days": 30
  }
}
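As a rough illustration of how the `thresholds` block above could be consumed, the sketch below fails a gate when measured TTFS percentiles exceed the absolute `pNN_max_ms` limits or regress by more than `max_regression_pct` against the baseline `ttfs_ms` values. Only the baseline field names come from the file above; the measured-values input and the gate's placement in CI are assumptions.

```python
import json
import sys

def check_ttfs(baseline_path: str, measured: dict) -> list[str]:
    """Return a list of violations for measured TTFS percentiles (milliseconds)."""
    with open(baseline_path, "r", encoding="utf-8") as fh:
        baseline = json.load(fh)
    thresholds = baseline["thresholds"]
    base = baseline["metrics"]["ttfs_ms"]
    max_regression = thresholds["max_regression_pct"]

    violations = []
    for pct in ("p50", "p95", "p99"):
        value = measured[pct]
        absolute_limit = thresholds[f"{pct}_max_ms"]
        if value > absolute_limit:
            violations.append(f"{pct}={value}ms exceeds absolute limit {absolute_limit}ms")
        allowed = base[pct] * (1 + max_regression / 100)
        if value > allowed:
            violations.append(f"{pct}={value}ms regresses >{max_regression}% vs baseline {base[pct]}ms")
    return violations

if __name__ == "__main__":
    # Hypothetical measured values produced by a benchmark run.
    problems = check_ttfs("bench/baselines/ttfs-baseline.json",
                          {"p50": 1600, "p95": 4100, "p99": 6200})
    for p in problems:
        print("TTFS gate violation:", p)
    sys.exit(1 if problems else 0)
```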
10
bench/findings/CVE-2015-7547-reachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpiZTMwNDMzZTE4OGEyNTg4NTY0NDYzMzZkYmIxMDk1OWJmYjRhYjM5NzQzODBhOGVhMTI2NDZiZjI2ODdiZjlhIiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL2dsaWJjLUNWRS0yMDIzLTQ5MTEtbG9vbmV5LXR1bmFibGVzQDEuMC4wIn1dLCJzdGF0dXMiOiJhZmZlY3RlZCIsInZ1bG5lcmFiaWxpdHkiOnsiQGlkIjoiaHR0cHM6Ly9udmQubmlzdC5nb3YvdnVsbi9kZXRhaWwvQ1ZFLTIwMTUtNzU0NyIsIm5hbWUiOiJDVkUtMjAxNS03NTQ3In19XSwidGltZXN0YW1wIjoiMjAyNS0xMi0xNFQwMjoxMzozOFoiLCJ0b29saW5nIjoiU3RlbGxhT3BzL2JlbmNoLWF1dG9AMS4wLjAiLCJ2ZXJzaW9uIjoxfQ==",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
25
bench/findings/CVE-2015-7547-reachable/decision.openvex.json
Normal file
25
bench/findings/CVE-2015-7547-reachable/decision.openvex.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"action_statement": "Upgrade to patched version or apply mitigation.",
|
||||||
|
"impact_statement": "Evidence hash: sha256:be30433e188a258856446336dbb10959bfb4ab3974380a8ea12646bf2687bf9a",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2015-7547",
|
||||||
|
"name": "CVE-2015-7547"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://glibc:glibc.c#entry",
|
||||||
|
"sym://glibc:glibc.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
},
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://glibc:glibc.c#entry",
|
||||||
|
"sym://glibc:glibc.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2015-7547-reachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"cve_id": "CVE-2015-7547",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
|
||||||
|
"reachability_status": "reachable",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2015-7547-reachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
10
bench/findings/CVE-2015-7547-unreachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpjNDJlYzAxNGE0MmQwZTNmYjQzZWQ0ZGRhZDg5NTM4MjFlNDQ0NTcxMTlkYTY2ZGRiNDFhMzVhODAxYTNiNzI3IiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9nbGliYy1DVkUtMjAyMy00OTExLWxvb25leS10dW5hYmxlc0AxLjAuMCJ9XSwic3RhdHVzIjoibm90X2FmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAxNS03NTQ3IiwibmFtZSI6IkNWRS0yMDE1LTc1NDcifX1dLCJ0aW1lc3RhbXAiOiIyMDI1LTEyLTE0VDAyOjEzOjM4WiIsInRvb2xpbmciOiJTdGVsbGFPcHMvYmVuY2gtYXV0b0AxLjAuMCIsInZlcnNpb24iOjF9",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"impact_statement": "Evidence hash: sha256:c42ec014a42d0e3fb43ed4ddad8953821e44457119da66ddb41a35a801a3b727",
|
||||||
|
"justification": "vulnerable_code_not_present",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "not_affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2015-7547",
|
||||||
|
"name": "CVE-2015-7547"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
},
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2015-7547-unreachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "glibc-CVE-2023-4911-looney-tunables",
|
||||||
|
"cve_id": "CVE-2015-7547",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/glibc-CVE-2023-4911-looney-tunables@1.0.0",
|
||||||
|
"reachability_status": "unreachable",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2015-7547-unreachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
10
bench/findings/CVE-2022-3602-reachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjowMTQzMWZmMWVlZTc5OWM2ZmFkZDU5M2E3ZWMxOGVlMDk0Zjk4MzE0MDk2M2RhNmNiZmQ0YjdmMDZiYTBmOTcwIiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL29wZW5zc2wtQ1ZFLTIwMjItMzYwMi14NTA5LW5hbWUtY29uc3RyYWludHNAMS4wLjAifV0sInN0YXR1cyI6ImFmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAyMi0zNjAyIiwibmFtZSI6IkNWRS0yMDIyLTM2MDIifX1dLCJ0aW1lc3RhbXAiOiIyMDI1LTEyLTE0VDAyOjEzOjM4WiIsInRvb2xpbmciOiJTdGVsbGFPcHMvYmVuY2gtYXV0b0AxLjAuMCIsInZlcnNpb24iOjF9",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
25
bench/findings/CVE-2022-3602-reachable/decision.openvex.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"action_statement": "Upgrade to patched version or apply mitigation.",
|
||||||
|
"impact_statement": "Evidence hash: sha256:01431ff1eee799c6fadd593a7ec18ee094f983140963da6cbfd4b7f06ba0f970",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2022-3602",
|
||||||
|
"name": "CVE-2022-3602"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://openssl:openssl.c#entry",
|
||||||
|
"sym://openssl:openssl.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
},
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://openssl:openssl.c#entry",
|
||||||
|
"sym://openssl:openssl.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2022-3602-reachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"cve_id": "CVE-2022-3602",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
|
||||||
|
"reachability_status": "reachable",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2022-3602-reachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
10
bench/findings/CVE-2022-3602-unreachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpkOWJhZjRjNjQ3NDE4Nzc4NTUxYWZjNDM3NTJkZWY0NmQ0YWYyN2Q1MzEyMmU2YzQzNzVjMzUxMzU1YjEwYTMzIiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9vcGVuc3NsLUNWRS0yMDIyLTM2MDIteDUwOS1uYW1lLWNvbnN0cmFpbnRzQDEuMC4wIn1dLCJzdGF0dXMiOiJub3RfYWZmZWN0ZWQiLCJ2dWxuZXJhYmlsaXR5Ijp7IkBpZCI6Imh0dHBzOi8vbnZkLm5pc3QuZ292L3Z1bG4vZGV0YWlsL0NWRS0yMDIyLTM2MDIiLCJuYW1lIjoiQ1ZFLTIwMjItMzYwMiJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"impact_statement": "Evidence hash: sha256:d9baf4c647418778551afc43752def46d4af27d53122e6c4375c351355b10a33",
|
||||||
|
"justification": "vulnerable_code_not_present",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "not_affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2022-3602",
|
||||||
|
"name": "CVE-2022-3602"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
},
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2022-3602-unreachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "openssl-CVE-2022-3602-x509-name-constraints",
|
||||||
|
"cve_id": "CVE-2022-3602",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/openssl-CVE-2022-3602-x509-name-constraints@1.0.0",
|
||||||
|
"reachability_status": "unreachable",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2022-3602-unreachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
10
bench/findings/CVE-2023-38545-reachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpmMWMxZmRiZTk1YjMyNTNiMTNjYTZjNzMzZWMwM2FkYTNlYTg3MWU2NmI1ZGRlZGJiNmMxNGI5ZGM2N2IwNzQ4IiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL2N1cmwtQ1ZFLTIwMjMtMzg1NDUtc29ja3M1LWhlYXBAMS4wLjAifV0sInN0YXR1cyI6ImFmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtMjAyMy0zODU0NSIsIm5hbWUiOiJDVkUtMjAyMy0zODU0NSJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"action_statement": "Upgrade to patched version or apply mitigation.",
|
||||||
|
"impact_statement": "Evidence hash: sha256:f1c1fdbe95b3253b13ca6c733ec03ada3ea871e66b5ddedbb6c14b9dc67b0748",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2023-38545",
|
||||||
|
"name": "CVE-2023-38545"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://curl:curl.c#entry",
|
||||||
|
"sym://curl:curl.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
},
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://curl:curl.c#entry",
|
||||||
|
"sym://curl:curl.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2023-38545-reachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"cve_id": "CVE-2023-38545",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
|
||||||
|
"reachability_status": "reachable",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2023-38545-reachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
10
bench/findings/CVE-2023-38545-unreachable/decision.dsse.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjplNGIxOTk0ZTU5NDEwNTYyZjQwYWI0YTVmZTIzNjM4YzExZTU4MTdiYjcwMDM5M2VkOTlmMjBkM2M5ZWY5ZmEwIiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9jdXJsLUNWRS0yMDIzLTM4NTQ1LXNvY2tzNS1oZWFwQDEuMC4wIn1dLCJzdGF0dXMiOiJub3RfYWZmZWN0ZWQiLCJ2dWxuZXJhYmlsaXR5Ijp7IkBpZCI6Imh0dHBzOi8vbnZkLm5pc3QuZ292L3Z1bG4vZGV0YWlsL0NWRS0yMDIzLTM4NTQ1IiwibmFtZSI6IkNWRS0yMDIzLTM4NTQ1In19XSwidGltZXN0YW1wIjoiMjAyNS0xMi0xNFQwMjoxMzozOFoiLCJ0b29saW5nIjoiU3RlbGxhT3BzL2JlbmNoLWF1dG9AMS4wLjAiLCJ2ZXJzaW9uIjoxfQ==",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"impact_statement": "Evidence hash: sha256:e4b1994e59410562f40ab4a5fe23638c11e5817bb700393ed99f20d3c9ef9fa0",
|
||||||
|
"justification": "vulnerable_code_not_present",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "not_affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-2023-38545",
|
||||||
|
"name": "CVE-2023-38545"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
},
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-2023-38545-unreachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "curl-CVE-2023-38545-socks5-heap",
|
||||||
|
"cve_id": "CVE-2023-38545",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/curl-CVE-2023-38545-socks5-heap@1.0.0",
|
||||||
|
"reachability_status": "unreachable",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-2023-38545-unreachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjoxNTRiYTZlMzU5YzA5NTQ1NzhhOTU2MDM2N2YxY2JhYzFjMTUzZTVkNWRmOTNjMmI5MjljZDM4NzkyYTIxN2JiIiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL2xpbnV4LWNncm91cHMtQ1ZFLTIwMjItMDQ5Mi1yZWxlYXNlX2FnZW50QDEuMC4wIn1dLCJzdGF0dXMiOiJhZmZlY3RlZCIsInZ1bG5lcmFiaWxpdHkiOnsiQGlkIjoiaHR0cHM6Ly9udmQubmlzdC5nb3YvdnVsbi9kZXRhaWwvQ1ZFLUJFTkNILUxJTlVYLUNHIiwibmFtZSI6IkNWRS1CRU5DSC1MSU5VWC1DRyJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"action_statement": "Upgrade to patched version or apply mitigation.",
|
||||||
|
"impact_statement": "Evidence hash: sha256:154ba6e359c0954578a9560367f1cbac1c153e5d5df93c2b929cd38792a217bb",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-BENCH-LINUX-CG",
|
||||||
|
"name": "CVE-BENCH-LINUX-CG"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://linux:linux.c#entry",
|
||||||
|
"sym://linux:linux.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
},
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://linux:linux.c#entry",
|
||||||
|
"sym://linux:linux.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"purl": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-BENCH-LINUX-CG-reachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"cve_id": "CVE-BENCH-LINUX-CG",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0",
|
||||||
|
"reachability_status": "reachable",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-BENCH-LINUX-CG-reachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpjOTUwNmRhMjc0YTdkNmJmZGJiZmE0NmVjMjZkZWNmNWQ2YjcxZmFhNDA0MjY5MzZkM2NjYmFlNjQxNjJkMWE2IiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9saW51eC1jZ3JvdXBzLUNWRS0yMDIyLTA0OTItcmVsZWFzZV9hZ2VudEAxLjAuMCJ9XSwic3RhdHVzIjoibm90X2FmZmVjdGVkIiwidnVsbmVyYWJpbGl0eSI6eyJAaWQiOiJodHRwczovL252ZC5uaXN0Lmdvdi92dWxuL2RldGFpbC9DVkUtQkVOQ0gtTElOVVgtQ0ciLCJuYW1lIjoiQ1ZFLUJFTkNILUxJTlVYLUNHIn19XSwidGltZXN0YW1wIjoiMjAyNS0xMi0xNFQwMjoxMzozOFoiLCJ0b29saW5nIjoiU3RlbGxhT3BzL2JlbmNoLWF1dG9AMS4wLjAiLCJ2ZXJzaW9uIjoxfQ==",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"impact_statement": "Evidence hash: sha256:c9506da274a7d6bfdbbfa46ec26decf5d6b71faa40426936d3ccbae64162d1a6",
|
||||||
|
"justification": "vulnerable_code_not_present",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "not_affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-BENCH-LINUX-CG",
|
||||||
|
"name": "CVE-BENCH-LINUX-CG"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
},
|
||||||
|
"paths": [],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"purl": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-BENCH-LINUX-CG-unreachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "linux-cgroups-CVE-2022-0492-release_agent",
|
||||||
|
"cve_id": "CVE-BENCH-LINUX-CG",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/linux-cgroups-CVE-2022-0492-release_agent@1.0.0",
|
||||||
|
"reachability_status": "unreachable",
|
||||||
|
"variant": "unreachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-BENCH-LINUX-CG-unreachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siYWN0aW9uX3N0YXRlbWVudCI6IlVwZ3JhZGUgdG8gcGF0Y2hlZCB2ZXJzaW9uIG9yIGFwcGx5IG1pdGlnYXRpb24uIiwiaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1NjpjNDRmYjJlMmVmYjc5Yzc4YmJhYTZhOGUyYzZiYjM4MzE3ODJhMmQ1MzU4ZGU4N2ZjN2QxNzEwMmU4YzJlMzA1IiwicHJvZHVjdHMiOlt7IkBpZCI6InBrZzpnZW5lcmljL3J1bmMtQ1ZFLTIwMjQtMjE2MjYtc3ltbGluay1icmVha291dEAxLjAuMCJ9XSwic3RhdHVzIjoiYWZmZWN0ZWQiLCJ2dWxuZXJhYmlsaXR5Ijp7IkBpZCI6Imh0dHBzOi8vbnZkLm5pc3QuZ292L3Z1bG4vZGV0YWlsL0NWRS1CRU5DSC1SVU5DLUNWRSIsIm5hbWUiOiJDVkUtQkVOQ0gtUlVOQy1DVkUifX1dLCJ0aW1lc3RhbXAiOiIyMDI1LTEyLTE0VDAyOjEzOjM4WiIsInRvb2xpbmciOiJTdGVsbGFPcHMvYmVuY2gtYXV0b0AxLjAuMCIsInZlcnNpb24iOjF9",
|
||||||
|
"payloadType": "application/vnd.openvex+json",
|
||||||
|
"signatures": [
|
||||||
|
{
|
||||||
|
"keyid": "stella.ops/bench-automation@v1",
|
||||||
|
"sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||||
|
"@type": "VEX",
|
||||||
|
"author": "StellaOps Bench Automation",
|
||||||
|
"role": "security_team",
|
||||||
|
"statements": [
|
||||||
|
{
|
||||||
|
"action_statement": "Upgrade to patched version or apply mitigation.",
|
||||||
|
"impact_statement": "Evidence hash: sha256:c44fb2e2efb79c78bbaa6a8e2c6bb3831782a2d5358de87fc7d17102e8c2e305",
|
||||||
|
"products": [
|
||||||
|
{
|
||||||
|
"@id": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"status": "affected",
|
||||||
|
"vulnerability": {
|
||||||
|
"@id": "https://nvd.nist.gov/vuln/detail/CVE-BENCH-RUNC-CVE",
|
||||||
|
"name": "CVE-BENCH-RUNC-CVE"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tooling": "StellaOps/bench-auto@1.0.0",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"case_id": "runc-CVE-2024-21626-symlink-breakout",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"ground_truth": {
|
||||||
|
"case_id": "runc-CVE-2024-21626-symlink-breakout",
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://runc:runc.c#entry",
|
||||||
|
"sym://runc:runc.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "reachbench.reachgraph.truth/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
},
|
||||||
|
"paths": [
|
||||||
|
[
|
||||||
|
"sym://net:handler#read",
|
||||||
|
"sym://runc:runc.c#entry",
|
||||||
|
"sym://runc:runc.c#sink"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"schema_version": "richgraph-excerpt/v1",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"bomFormat": "CycloneDX",
|
||||||
|
"components": [
|
||||||
|
{
|
||||||
|
"name": "runc-CVE-2024-21626-symlink-breakout",
|
||||||
|
"purl": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0",
|
||||||
|
"type": "library",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"timestamp": "2025-12-14T02:13:38Z",
|
||||||
|
"tools": [
|
||||||
|
{
|
||||||
|
"name": "bench-auto",
|
||||||
|
"vendor": "StellaOps",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"specVersion": "1.6",
|
||||||
|
"version": 1
|
||||||
|
}
|
||||||
11
bench/findings/CVE-BENCH-RUNC-CVE-reachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"case_id": "runc-CVE-2024-21626-symlink-breakout",
|
||||||
|
"cve_id": "CVE-BENCH-RUNC-CVE",
|
||||||
|
"generated_at": "2025-12-14T02:13:38Z",
|
||||||
|
"generator": "scripts/bench/populate-findings.py",
|
||||||
|
"generator_version": "1.0.0",
|
||||||
|
"ground_truth_schema": "reachbench.reachgraph.truth/v1",
|
||||||
|
"purl": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0",
|
||||||
|
"reachability_status": "reachable",
|
||||||
|
"variant": "reachable"
|
||||||
|
}
|
||||||
5
bench/findings/CVE-BENCH-RUNC-CVE-reachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Rekor log entry placeholder
|
||||||
|
# Submit DSSE envelope to Rekor to populate this file
|
||||||
|
log_index: PENDING
|
||||||
|
uuid: PENDING
|
||||||
|
timestamp: 2025-12-14T02:13:38Z
|
||||||
@@ -0,0 +1,10 @@
{
  "payload": "eyJAY29udGV4dCI6Imh0dHBzOi8vb3BlbnZleC5kZXYvbnMvdjAuMi4wIiwiQHR5cGUiOiJWRVgiLCJhdXRob3IiOiJTdGVsbGFPcHMgQmVuY2ggQXV0b21hdGlvbiIsInJvbGUiOiJzZWN1cml0eV90ZWFtIiwic3RhdGVtZW50cyI6W3siaW1wYWN0X3N0YXRlbWVudCI6IkV2aWRlbmNlIGhhc2g6IHNoYTI1Njo5ZmU0MDUxMTlmYWY4MDFmYjZkYzFhZDA0Nzk2MWE3OTBjOGQwZWY1NDQ5ZTQ4MTJiYzhkYzU5YTY2MTFiNjljIiwianVzdGlmaWNhdGlvbiI6InZ1bG5lcmFibGVfY29kZV9ub3RfcHJlc2VudCIsInByb2R1Y3RzIjpbeyJAaWQiOiJwa2c6Z2VuZXJpYy9ydW5jLUNWRS0yMDI0LTIxNjI2LXN5bWxpbmstYnJlYWtvdXRAMS4wLjAifV0sInN0YXR1cyI6Im5vdF9hZmZlY3RlZCIsInZ1bG5lcmFiaWxpdHkiOnsiQGlkIjoiaHR0cHM6Ly9udmQubmlzdC5nb3YvdnVsbi9kZXRhaWwvQ1ZFLUJFTkNILVJVTkMtQ1ZFIiwibmFtZSI6IkNWRS1CRU5DSC1SVU5DLUNWRSJ9fV0sInRpbWVzdGFtcCI6IjIwMjUtMTItMTRUMDI6MTM6MzhaIiwidG9vbGluZyI6IlN0ZWxsYU9wcy9iZW5jaC1hdXRvQDEuMC4wIiwidmVyc2lvbiI6MX0=",
  "payloadType": "application/vnd.openvex+json",
  "signatures": [
    {
      "keyid": "stella.ops/bench-automation@v1",
      "sig": "PLACEHOLDER_SIGNATURE_REQUIRES_ACTUAL_SIGNING"
    }
  ]
}
@@ -0,0 +1,25 @@
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@type": "VEX",
  "author": "StellaOps Bench Automation",
  "role": "security_team",
  "statements": [
    {
      "impact_statement": "Evidence hash: sha256:9fe405119faf801fb6dc1ad047961a790c8d0ef5449e4812bc8dc59a6611b69c",
      "justification": "vulnerable_code_not_present",
      "products": [
        {
          "@id": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0"
        }
      ],
      "status": "not_affected",
      "vulnerability": {
        "@id": "https://nvd.nist.gov/vuln/detail/CVE-BENCH-RUNC-CVE",
        "name": "CVE-BENCH-RUNC-CVE"
      }
    }
  ],
  "timestamp": "2025-12-14T02:13:38Z",
  "tooling": "StellaOps/bench-auto@1.0.0",
  "version": 1
}
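The DSSE envelope above is just the OpenVEX document base64-encoded into its `payload`, and the `impact_statement` carries the evidence hash that the `bench/tools/verify.py` script later in this change compares against the CAS copy of the reachability evidence. A minimal sketch of that round trip, assuming the envelope and evidence sit side by side in a finding directory (both file names here are illustrative):

```python
import base64
import hashlib
import json

# Decode the DSSE payload back into the OpenVEX document it wraps.
with open("vex.dsse.json", encoding="utf-8") as f:          # illustrative path
    envelope = json.load(f)
openvex = json.loads(base64.b64decode(envelope["payload"]))

# Recompute the evidence hash the way verify.py checks it: a digest of the
# raw reachability evidence file, prefixed with the algorithm name.
with open("reachability.json", "rb") as f:                   # illustrative path
    evidence_hash = "sha256:" + hashlib.sha256(f.read()).hexdigest()

impact = openvex["statements"][0]["impact_statement"]
recorded = impact.split("Evidence hash:")[1].strip()
print("match" if recorded == evidence_hash else "mismatch", recorded)
```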
@@ -0,0 +1,13 @@
{
  "case_id": "runc-CVE-2024-21626-symlink-breakout",
  "generated_at": "2025-12-14T02:13:38Z",
  "ground_truth": {
    "case_id": "runc-CVE-2024-21626-symlink-breakout",
    "paths": [],
    "schema_version": "reachbench.reachgraph.truth/v1",
    "variant": "unreachable"
  },
  "paths": [],
  "schema_version": "richgraph-excerpt/v1",
  "variant": "unreachable"
}
@@ -0,0 +1,23 @@
{
  "bomFormat": "CycloneDX",
  "components": [
    {
      "name": "runc-CVE-2024-21626-symlink-breakout",
      "purl": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0",
      "type": "library",
      "version": "1.0.0"
    }
  ],
  "metadata": {
    "timestamp": "2025-12-14T02:13:38Z",
    "tools": [
      {
        "name": "bench-auto",
        "vendor": "StellaOps",
        "version": "1.0.0"
      }
    ]
  },
  "specVersion": "1.6",
  "version": 1
}
11
bench/findings/CVE-BENCH-RUNC-CVE-unreachable/metadata.json
Normal file
@@ -0,0 +1,11 @@
{
  "case_id": "runc-CVE-2024-21626-symlink-breakout",
  "cve_id": "CVE-BENCH-RUNC-CVE",
  "generated_at": "2025-12-14T02:13:38Z",
  "generator": "scripts/bench/populate-findings.py",
  "generator_version": "1.0.0",
  "ground_truth_schema": "reachbench.reachgraph.truth/v1",
  "purl": "pkg:generic/runc-CVE-2024-21626-symlink-breakout@1.0.0",
  "reachability_status": "unreachable",
  "variant": "unreachable"
}
5
bench/findings/CVE-BENCH-RUNC-CVE-unreachable/rekor.txt
Normal file
@@ -0,0 +1,5 @@
# Rekor log entry placeholder
# Submit DSSE envelope to Rekor to populate this file
log_index: PENDING
uuid: PENDING
timestamp: 2025-12-14T02:13:38Z
107
bench/results/metrics.json
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
{
|
||||||
|
"comparison": {
|
||||||
|
"stellaops": {
|
||||||
|
"accuracy": 1.0,
|
||||||
|
"f1_score": 1.0,
|
||||||
|
"false_positive_rate": 0.0,
|
||||||
|
"precision": 1.0,
|
||||||
|
"recall": 1.0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"findings": [
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2015-7547",
|
||||||
|
"evidence_hash": "sha256:be30433e188a258856446336dbb10959bfb4ab3974380a8ea12646bf2687bf9a",
|
||||||
|
"finding_id": "CVE-2015-7547-reachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "reachable",
|
||||||
|
"vex_status": "affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2015-7547",
|
||||||
|
"evidence_hash": "sha256:c42ec014a42d0e3fb43ed4ddad8953821e44457119da66ddb41a35a801a3b727",
|
||||||
|
"finding_id": "CVE-2015-7547-unreachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "unreachable",
|
||||||
|
"vex_status": "not_affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2022-3602",
|
||||||
|
"evidence_hash": "sha256:01431ff1eee799c6fadd593a7ec18ee094f983140963da6cbfd4b7f06ba0f970",
|
||||||
|
"finding_id": "CVE-2022-3602-reachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "reachable",
|
||||||
|
"vex_status": "affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2022-3602",
|
||||||
|
"evidence_hash": "sha256:d9baf4c647418778551afc43752def46d4af27d53122e6c4375c351355b10a33",
|
||||||
|
"finding_id": "CVE-2022-3602-unreachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "unreachable",
|
||||||
|
"vex_status": "not_affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2023-38545",
|
||||||
|
"evidence_hash": "sha256:f1c1fdbe95b3253b13ca6c733ec03ada3ea871e66b5ddedbb6c14b9dc67b0748",
|
||||||
|
"finding_id": "CVE-2023-38545-reachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "reachable",
|
||||||
|
"vex_status": "affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-2023-38545",
|
||||||
|
"evidence_hash": "sha256:e4b1994e59410562f40ab4a5fe23638c11e5817bb700393ed99f20d3c9ef9fa0",
|
||||||
|
"finding_id": "CVE-2023-38545-unreachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "unreachable",
|
||||||
|
"vex_status": "not_affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-BENCH-LINUX-CG",
|
||||||
|
"evidence_hash": "sha256:154ba6e359c0954578a9560367f1cbac1c153e5d5df93c2b929cd38792a217bb",
|
||||||
|
"finding_id": "CVE-BENCH-LINUX-CG-reachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "reachable",
|
||||||
|
"vex_status": "affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-BENCH-LINUX-CG",
|
||||||
|
"evidence_hash": "sha256:c9506da274a7d6bfdbbfa46ec26decf5d6b71faa40426936d3ccbae64162d1a6",
|
||||||
|
"finding_id": "CVE-BENCH-LINUX-CG-unreachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "unreachable",
|
||||||
|
"vex_status": "not_affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-BENCH-RUNC-CVE",
|
||||||
|
"evidence_hash": "sha256:c44fb2e2efb79c78bbaa6a8e2c6bb3831782a2d5358de87fc7d17102e8c2e305",
|
||||||
|
"finding_id": "CVE-BENCH-RUNC-CVE-reachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "reachable",
|
||||||
|
"vex_status": "affected"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cve_id": "CVE-BENCH-RUNC-CVE",
|
||||||
|
"evidence_hash": "sha256:9fe405119faf801fb6dc1ad047961a790c8d0ef5449e4812bc8dc59a6611b69c",
|
||||||
|
"finding_id": "CVE-BENCH-RUNC-CVE-unreachable",
|
||||||
|
"is_correct": true,
|
||||||
|
"variant": "unreachable",
|
||||||
|
"vex_status": "not_affected"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"generated_at": "2025-12-14T02:13:46Z",
|
||||||
|
"summary": {
|
||||||
|
"accuracy": 1.0,
|
||||||
|
"f1_score": 1.0,
|
||||||
|
"false_negatives": 0,
|
||||||
|
"false_positives": 0,
|
||||||
|
"mttd_ms": 0.0,
|
||||||
|
"precision": 1.0,
|
||||||
|
"recall": 1.0,
|
||||||
|
"reproducibility": 1.0,
|
||||||
|
"total_findings": 10,
|
||||||
|
"true_negatives": 5,
|
||||||
|
"true_positives": 5
|
||||||
|
}
|
||||||
|
}
|
||||||
2
bench/results/summary.csv
Normal file
@@ -0,0 +1,2 @@
timestamp,total_findings,true_positives,false_positives,true_negatives,false_negatives,precision,recall,f1_score,accuracy,mttd_ms,reproducibility
2025-12-14T02:13:46Z,10,5,0,5,0,1.0000,1.0000,1.0000,1.0000,0.00,1.0000
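For reference, the headline ratios in summary.csv follow directly from its own count columns (TP=5, FP=0, TN=5, FN=0); a quick arithmetic check using the usual definitions of precision, recall, F1, and accuracy:

```python
tp, fp, tn, fn = 5, 0, 5, 0

precision = tp / (tp + fp)                           # 1.0
recall = tp / (tp + fn)                              # 1.0
f1 = 2 * precision * recall / (precision + recall)   # 1.0
accuracy = (tp + tn) / (tp + fp + tn + fn)           # 1.0

print(f"{precision:.4f},{recall:.4f},{f1:.4f},{accuracy:.4f}")
```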
|
||||||
|
338
bench/tools/compare.py
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
# BENCH-AUTO-401-019: Baseline scanner comparison script
|
||||||
|
|
||||||
|
"""
|
||||||
|
Compare StellaOps findings against baseline scanner results.
|
||||||
|
|
||||||
|
Generates comparison metrics:
|
||||||
|
- True positives (reachability-confirmed)
|
||||||
|
- False positives (unreachable code paths)
|
||||||
|
- MTTD (mean time to detect)
|
||||||
|
- Reproducibility score
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python bench/tools/compare.py --stellaops PATH --baseline PATH --output PATH
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import csv
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Finding:
|
||||||
|
"""A vulnerability finding."""
|
||||||
|
cve_id: str
|
||||||
|
purl: str
|
||||||
|
status: str # affected, not_affected
|
||||||
|
reachability: str # reachable, unreachable, unknown
|
||||||
|
source: str # stellaops, baseline
|
||||||
|
detected_at: str = ""
|
||||||
|
evidence_hash: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ComparisonResult:
|
||||||
|
"""Result of comparing two findings."""
|
||||||
|
cve_id: str
|
||||||
|
purl: str
|
||||||
|
stellaops_status: str
|
||||||
|
baseline_status: str
|
||||||
|
agreement: bool
|
||||||
|
stellaops_reachability: str
|
||||||
|
notes: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
def load_stellaops_findings(findings_dir: Path) -> list[Finding]:
|
||||||
|
"""Load StellaOps findings from bench/findings directory."""
|
||||||
|
findings = []
|
||||||
|
|
||||||
|
if not findings_dir.exists():
|
||||||
|
return findings
|
||||||
|
|
||||||
|
for finding_dir in sorted(findings_dir.iterdir()):
|
||||||
|
if not finding_dir.is_dir():
|
||||||
|
continue
|
||||||
|
|
||||||
|
metadata_path = finding_dir / "metadata.json"
|
||||||
|
openvex_path = finding_dir / "decision.openvex.json"
|
||||||
|
|
||||||
|
if not metadata_path.exists() or not openvex_path.exists():
|
||||||
|
continue
|
||||||
|
|
||||||
|
with open(metadata_path, 'r', encoding='utf-8') as f:
|
||||||
|
metadata = json.load(f)
|
||||||
|
|
||||||
|
with open(openvex_path, 'r', encoding='utf-8') as f:
|
||||||
|
openvex = json.load(f)
|
||||||
|
|
||||||
|
statements = openvex.get("statements", [])
|
||||||
|
if not statements:
|
||||||
|
continue
|
||||||
|
|
||||||
|
stmt = statements[0]
|
||||||
|
products = stmt.get("products", [])
|
||||||
|
purl = products[0].get("@id", "") if products else ""
|
||||||
|
|
||||||
|
findings.append(Finding(
|
||||||
|
cve_id=metadata.get("cve_id", ""),
|
||||||
|
purl=purl,
|
||||||
|
status=stmt.get("status", "unknown"),
|
||||||
|
reachability=metadata.get("variant", "unknown"),
|
||||||
|
source="stellaops",
|
||||||
|
detected_at=openvex.get("timestamp", ""),
|
||||||
|
evidence_hash=metadata.get("evidence_hash", "")
|
||||||
|
))
|
||||||
|
|
||||||
|
return findings
|
||||||
|
|
||||||
|
|
||||||
|
def load_baseline_findings(baseline_path: Path) -> list[Finding]:
|
||||||
|
"""Load baseline scanner findings from JSON file."""
|
||||||
|
findings = []
|
||||||
|
|
||||||
|
if not baseline_path.exists():
|
||||||
|
return findings
|
||||||
|
|
||||||
|
with open(baseline_path, 'r', encoding='utf-8') as f:
|
||||||
|
data = json.load(f)
|
||||||
|
|
||||||
|
# Support multiple baseline formats
|
||||||
|
vulns = data.get("vulnerabilities", data.get("findings", data.get("results", [])))
|
||||||
|
|
||||||
|
for vuln in vulns:
|
||||||
|
cve_id = vuln.get("cve_id", vuln.get("id", vuln.get("vulnerability_id", "")))
|
||||||
|
purl = vuln.get("purl", vuln.get("package_url", ""))
|
||||||
|
|
||||||
|
# Map baseline status to our normalized form
|
||||||
|
raw_status = vuln.get("status", vuln.get("severity", ""))
|
||||||
|
if raw_status.lower() in ["affected", "vulnerable", "high", "critical", "medium"]:
|
||||||
|
status = "affected"
|
||||||
|
elif raw_status.lower() in ["not_affected", "fixed", "not_vulnerable"]:
|
||||||
|
status = "not_affected"
|
||||||
|
else:
|
||||||
|
status = "unknown"
|
||||||
|
|
||||||
|
findings.append(Finding(
|
||||||
|
cve_id=cve_id,
|
||||||
|
purl=purl,
|
||||||
|
status=status,
|
||||||
|
reachability="unknown", # Baseline scanners typically don't have reachability
|
||||||
|
source="baseline"
|
||||||
|
))
|
||||||
|
|
||||||
|
return findings
|
||||||
|
|
||||||
|
|
||||||
|
def compare_findings(
|
||||||
|
stellaops: list[Finding],
|
||||||
|
baseline: list[Finding]
|
||||||
|
) -> list[ComparisonResult]:
|
||||||
|
"""Compare StellaOps findings with baseline."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
# Index baseline by CVE+purl
|
||||||
|
baseline_index = {}
|
||||||
|
for f in baseline:
|
||||||
|
key = (f.cve_id, f.purl)
|
||||||
|
baseline_index[key] = f
|
||||||
|
|
||||||
|
# Compare each StellaOps finding
|
||||||
|
for sf in stellaops:
|
||||||
|
key = (sf.cve_id, sf.purl)
|
||||||
|
bf = baseline_index.get(key)
|
||||||
|
|
||||||
|
if bf:
|
||||||
|
agreement = sf.status == bf.status
|
||||||
|
notes = ""
|
||||||
|
|
||||||
|
if agreement and sf.status == "not_affected":
|
||||||
|
notes = "Both agree: not affected"
|
||||||
|
elif agreement and sf.status == "affected":
|
||||||
|
notes = "Both agree: affected"
|
||||||
|
elif sf.status == "not_affected" and bf.status == "affected":
|
||||||
|
if sf.reachability == "unreachable":
|
||||||
|
notes = "FP reduction: StellaOps correctly identified unreachable code"
|
||||||
|
else:
|
||||||
|
notes = "Disagreement: investigate"
|
||||||
|
elif sf.status == "affected" and bf.status == "not_affected":
|
||||||
|
notes = "StellaOps detected, baseline missed"
|
||||||
|
|
||||||
|
results.append(ComparisonResult(
|
||||||
|
cve_id=sf.cve_id,
|
||||||
|
purl=sf.purl,
|
||||||
|
stellaops_status=sf.status,
|
||||||
|
baseline_status=bf.status,
|
||||||
|
agreement=agreement,
|
||||||
|
stellaops_reachability=sf.reachability,
|
||||||
|
notes=notes
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
# StellaOps found something baseline didn't
|
||||||
|
results.append(ComparisonResult(
|
||||||
|
cve_id=sf.cve_id,
|
||||||
|
purl=sf.purl,
|
||||||
|
stellaops_status=sf.status,
|
||||||
|
baseline_status="not_found",
|
||||||
|
agreement=False,
|
||||||
|
stellaops_reachability=sf.reachability,
|
||||||
|
notes="Only found by StellaOps"
|
||||||
|
))
|
||||||
|
|
||||||
|
# Find baseline-only findings
|
||||||
|
stellaops_keys = {(f.cve_id, f.purl) for f in stellaops}
|
||||||
|
for bf in baseline:
|
||||||
|
key = (bf.cve_id, bf.purl)
|
||||||
|
if key not in stellaops_keys:
|
||||||
|
results.append(ComparisonResult(
|
||||||
|
cve_id=bf.cve_id,
|
||||||
|
purl=bf.purl,
|
||||||
|
stellaops_status="not_found",
|
||||||
|
baseline_status=bf.status,
|
||||||
|
agreement=False,
|
||||||
|
stellaops_reachability="unknown",
|
||||||
|
notes="Only found by baseline"
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def compute_comparison_metrics(results: list[ComparisonResult]) -> dict:
|
||||||
|
"""Compute comparison metrics."""
|
||||||
|
total = len(results)
|
||||||
|
agreements = sum(1 for r in results if r.agreement)
|
||||||
|
fp_reductions = sum(1 for r in results if r.notes and "FP reduction" in r.notes)
|
||||||
|
stellaops_only = sum(1 for r in results if "Only found by StellaOps" in r.notes)
|
||||||
|
baseline_only = sum(1 for r in results if "Only found by baseline" in r.notes)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total_comparisons": total,
|
||||||
|
"agreements": agreements,
|
||||||
|
"agreement_rate": agreements / total if total > 0 else 0,
|
||||||
|
"fp_reductions": fp_reductions,
|
||||||
|
"stellaops_unique": stellaops_only,
|
||||||
|
"baseline_unique": baseline_only,
|
||||||
|
"generated_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def write_comparison_csv(results: list[ComparisonResult], output_path: Path):
|
||||||
|
"""Write comparison results to CSV."""
|
||||||
|
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
with open(output_path, 'w', newline='', encoding='utf-8') as f:
|
||||||
|
writer = csv.writer(f)
|
||||||
|
writer.writerow([
|
||||||
|
"cve_id",
|
||||||
|
"purl",
|
||||||
|
"stellaops_status",
|
||||||
|
"baseline_status",
|
||||||
|
"agreement",
|
||||||
|
"reachability",
|
||||||
|
"notes"
|
||||||
|
])
|
||||||
|
|
||||||
|
for r in results:
|
||||||
|
writer.writerow([
|
||||||
|
r.cve_id,
|
||||||
|
r.purl,
|
||||||
|
r.stellaops_status,
|
||||||
|
r.baseline_status,
|
||||||
|
"yes" if r.agreement else "no",
|
||||||
|
r.stellaops_reachability,
|
||||||
|
r.notes
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Compare StellaOps findings against baseline scanner"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--stellaops",
|
||||||
|
type=Path,
|
||||||
|
default=Path("bench/findings"),
|
||||||
|
help="Path to StellaOps findings directory"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--baseline",
|
||||||
|
type=Path,
|
||||||
|
required=True,
|
||||||
|
help="Path to baseline scanner results JSON"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--output",
|
||||||
|
type=Path,
|
||||||
|
default=Path("bench/results/comparison.csv"),
|
||||||
|
help="Output CSV path"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--json",
|
||||||
|
action="store_true",
|
||||||
|
help="Also output JSON summary"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Resolve paths
|
||||||
|
repo_root = Path(__file__).parent.parent.parent
|
||||||
|
stellaops_path = args.stellaops if args.stellaops.is_absolute() else repo_root / args.stellaops
|
||||||
|
baseline_path = args.baseline if args.baseline.is_absolute() else repo_root / args.baseline
|
||||||
|
output_path = args.output if args.output.is_absolute() else repo_root / args.output
|
||||||
|
|
||||||
|
print(f"StellaOps findings: {stellaops_path}")
|
||||||
|
print(f"Baseline results: {baseline_path}")
|
||||||
|
|
||||||
|
# Load findings
|
||||||
|
stellaops_findings = load_stellaops_findings(stellaops_path)
|
||||||
|
print(f"Loaded {len(stellaops_findings)} StellaOps findings")
|
||||||
|
|
||||||
|
baseline_findings = load_baseline_findings(baseline_path)
|
||||||
|
print(f"Loaded {len(baseline_findings)} baseline findings")
|
||||||
|
|
||||||
|
# Compare
|
||||||
|
results = compare_findings(stellaops_findings, baseline_findings)
|
||||||
|
metrics = compute_comparison_metrics(results)
|
||||||
|
|
||||||
|
print(f"\nComparison Results:")
|
||||||
|
print(f" Total comparisons: {metrics['total_comparisons']}")
|
||||||
|
print(f" Agreements: {metrics['agreements']} ({metrics['agreement_rate']:.1%})")
|
||||||
|
print(f" FP reductions: {metrics['fp_reductions']}")
|
||||||
|
print(f" StellaOps unique: {metrics['stellaops_unique']}")
|
||||||
|
print(f" Baseline unique: {metrics['baseline_unique']}")
|
||||||
|
|
||||||
|
# Write outputs
|
||||||
|
write_comparison_csv(results, output_path)
|
||||||
|
print(f"\nWrote comparison to: {output_path}")
|
||||||
|
|
||||||
|
if args.json:
|
||||||
|
json_path = output_path.with_suffix('.json')
|
||||||
|
with open(json_path, 'w', encoding='utf-8') as f:
|
||||||
|
json.dump({
|
||||||
|
"metrics": metrics,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"cve_id": r.cve_id,
|
||||||
|
"purl": r.purl,
|
||||||
|
"stellaops_status": r.stellaops_status,
|
||||||
|
"baseline_status": r.baseline_status,
|
||||||
|
"agreement": r.agreement,
|
||||||
|
"reachability": r.stellaops_reachability,
|
||||||
|
"notes": r.notes
|
||||||
|
}
|
||||||
|
for r in results
|
||||||
|
]
|
||||||
|
}, f, indent=2, sort_keys=True)
|
||||||
|
print(f"Wrote JSON to: {json_path}")
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
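compare.py accepts a few baseline export shapes (a top-level `vulnerabilities`, `findings`, or `results` array whose entries carry an id, purl, and status). A minimal, hypothetical baseline file and invocation, assuming it is run from the repository root (the purls below are made up for illustration):

```python
import json
import subprocess

# Hypothetical baseline export in one of the shapes load_baseline_findings understands.
baseline = {
    "vulnerabilities": [
        {"cve_id": "CVE-2023-38545", "purl": "pkg:generic/curl-case@1.0.0", "status": "affected"},
        {"cve_id": "CVE-2015-7547", "purl": "pkg:generic/glibc-case@1.0.0", "status": "not_affected"},
    ]
}
with open("baseline.json", "w", encoding="utf-8") as f:
    json.dump(baseline, f, indent=2)

# Compare against the bench findings and emit the CSV plus a JSON summary.
subprocess.run(
    ["python", "bench/tools/compare.py", "--baseline", "baseline.json", "--json"],
    check=True,
)
```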
183
bench/tools/replay.sh
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
# BENCH-AUTO-401-019: Reachability replay script
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||||
|
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m'
|
||||||
|
|
||||||
|
log_info() { echo -e "${GREEN}[INFO]${NC} $*"; }
|
||||||
|
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||||
|
log_error() { echo -e "${RED}[ERROR]${NC} $*"; }
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 <manifest-or-findings-dir> [--output DIR] [--verify]"
|
||||||
|
echo ""
|
||||||
|
echo "Replay reachability manifests from bench findings."
|
||||||
|
echo ""
|
||||||
|
echo "Options:"
|
||||||
|
echo " --output DIR Output directory for replay results"
|
||||||
|
echo " --verify Verify replay outputs against ground truth"
|
||||||
|
echo " --help, -h Show this help"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
INPUT=""
|
||||||
|
OUTPUT_DIR="${REPO_ROOT}/bench/results/replay"
|
||||||
|
VERIFY=false
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--output)
|
||||||
|
OUTPUT_DIR="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--verify)
|
||||||
|
VERIFY=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
usage
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
if [[ -z "$INPUT" ]]; then
|
||||||
|
INPUT="$1"
|
||||||
|
else
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ -z "$INPUT" ]]; then
|
||||||
|
# Default to bench/findings
|
||||||
|
INPUT="${REPO_ROOT}/bench/findings"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -e "$INPUT" ]]; then
|
||||||
|
log_error "Input not found: $INPUT"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "$OUTPUT_DIR"
|
||||||
|
|
||||||
|
log_info "Replay input: $INPUT"
|
||||||
|
log_info "Output directory: $OUTPUT_DIR"
|
||||||
|
|
||||||
|
# Collect all reachability evidence files
|
||||||
|
EVIDENCE_FILES=()
|
||||||
|
|
||||||
|
if [[ -d "$INPUT" ]]; then
|
||||||
|
# Directory of findings
|
||||||
|
while IFS= read -r -d '' file; do
|
||||||
|
EVIDENCE_FILES+=("$file")
|
||||||
|
done < <(find "$INPUT" -name "reachability.json" -print0 2>/dev/null)
|
||||||
|
elif [[ -f "$INPUT" ]]; then
|
||||||
|
# Single manifest file
|
||||||
|
EVIDENCE_FILES+=("$INPUT")
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${#EVIDENCE_FILES[@]} -eq 0 ]]; then
|
||||||
|
log_warn "No reachability evidence files found"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Found ${#EVIDENCE_FILES[@]} evidence file(s)"
|
||||||
|
|
||||||
|
# Process each evidence file
|
||||||
|
TOTAL=0
|
||||||
|
PASSED=0
|
||||||
|
FAILED=0
|
||||||
|
|
||||||
|
for evidence_file in "${EVIDENCE_FILES[@]}"; do
|
||||||
|
TOTAL=$((TOTAL + 1))
|
||||||
|
finding_dir=$(dirname "$(dirname "$evidence_file")")
|
||||||
|
finding_id=$(basename "$finding_dir")
|
||||||
|
|
||||||
|
log_info "Processing: $finding_id"
|
||||||
|
|
||||||
|
# Extract metadata
|
||||||
|
metadata_file="${finding_dir}/metadata.json"
|
||||||
|
if [[ ! -f "$metadata_file" ]]; then
|
||||||
|
log_warn " No metadata.json found, skipping"
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Parse evidence
|
||||||
|
evidence_hash=$(python3 -c "
|
||||||
|
import json
|
||||||
|
with open('$evidence_file') as f:
|
||||||
|
d = json.load(f)
|
||||||
|
paths = d.get('paths', [])
|
||||||
|
print(f'paths={len(paths)}')
|
||||||
|
print(f'variant={d.get(\"variant\", \"unknown\")}')
|
||||||
|
print(f'case_id={d.get(\"case_id\", \"unknown\")}')
|
||||||
|
" 2>/dev/null || echo "error")
|
||||||
|
|
||||||
|
if [[ "$evidence_hash" == "error" ]]; then
|
||||||
|
log_warn " Failed to parse evidence"
|
||||||
|
FAILED=$((FAILED + 1))
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo " $evidence_hash"
|
||||||
|
|
||||||
|
# Create replay output
|
||||||
|
replay_output="${OUTPUT_DIR}/${finding_id}"
|
||||||
|
mkdir -p "$replay_output"
|
||||||
|
|
||||||
|
# Copy evidence for replay
|
||||||
|
cp "$evidence_file" "$replay_output/evidence.json"
|
||||||
|
|
||||||
|
# If verify mode, check against ground truth
|
||||||
|
if [[ "$VERIFY" == true ]]; then
|
||||||
|
ground_truth=$(python3 -c "
|
||||||
|
import json
|
||||||
|
with open('$evidence_file') as f:
|
||||||
|
d = json.load(f)
|
||||||
|
gt = d.get('ground_truth')
|
||||||
|
if gt:
|
||||||
|
print(f'variant={gt.get(\"variant\", \"unknown\")}')
|
||||||
|
print(f'paths={len(gt.get(\"paths\", []))}')
|
||||||
|
else:
|
||||||
|
print('no_ground_truth')
|
||||||
|
" 2>/dev/null || echo "error")
|
||||||
|
|
||||||
|
if [[ "$ground_truth" != "no_ground_truth" && "$ground_truth" != "error" ]]; then
|
||||||
|
log_info " Ground truth: $ground_truth"
|
||||||
|
PASSED=$((PASSED + 1))
|
||||||
|
else
|
||||||
|
log_warn " No ground truth available"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
PASSED=$((PASSED + 1))
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Record replay result
|
||||||
|
echo "{\"finding_id\": \"$finding_id\", \"status\": \"replayed\", \"timestamp\": \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"}" > "$replay_output/replay.json"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
echo ""
|
||||||
|
log_info "Replay Summary:"
|
||||||
|
log_info " Total: $TOTAL"
|
||||||
|
log_info " Passed: $PASSED"
|
||||||
|
log_info " Failed: $FAILED"
|
||||||
|
|
||||||
|
# Write summary file
|
||||||
|
echo "{
|
||||||
|
\"total\": $TOTAL,
|
||||||
|
\"passed\": $PASSED,
|
||||||
|
\"failed\": $FAILED,
|
||||||
|
\"timestamp\": \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"
|
||||||
|
}" > "$OUTPUT_DIR/summary.json"
|
||||||
|
|
||||||
|
log_info "Summary written to: $OUTPUT_DIR/summary.json"
|
||||||
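The inline python3 snippets embedded in replay.sh only pull a handful of fields out of each reachability evidence file. The same check as a standalone helper, a sketch rather than part of the script, could look like this:

```python
import json
from pathlib import Path


def summarize_evidence(path: Path) -> dict:
    """Return the fields replay.sh reports for one reachability evidence file."""
    data = json.loads(path.read_text(encoding="utf-8"))
    ground_truth = data.get("ground_truth") or {}
    return {
        "case_id": data.get("case_id", "unknown"),
        "variant": data.get("variant", "unknown"),
        "paths": len(data.get("paths", [])),
        "ground_truth_variant": ground_truth.get("variant", "unknown"),
    }


# Walk the default input directory the script uses when no argument is given.
for evidence in sorted(Path("bench/findings").rglob("reachability.json")):
    print(evidence, summarize_evidence(evidence))
```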
333
bench/tools/verify.py
Normal file
@@ -0,0 +1,333 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
# BENCH-AUTO-401-019: Offline VEX proof bundle verifier
|
||||||
|
|
||||||
|
"""
|
||||||
|
Offline verification of VEX proof bundles without network access.
|
||||||
|
|
||||||
|
Validates:
|
||||||
|
- DSSE envelope structure
|
||||||
|
- Payload type and format
|
||||||
|
- Evidence hash references
|
||||||
|
- Justification catalog membership
|
||||||
|
- CAS hash verification
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python bench/tools/verify.py --bundle PATH [--cas-root PATH] [--catalog PATH]
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class VerificationResult:
|
||||||
|
"""Result of a verification check."""
|
||||||
|
|
||||||
|
def __init__(self, passed: bool, message: str, details: str = ""):
|
||||||
|
self.passed = passed
|
||||||
|
self.message = message
|
||||||
|
self.details = details
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
status = "\033[0;32m✓\033[0m" if self.passed else "\033[0;31m✗\033[0m"
|
||||||
|
result = f"{status} {self.message}"
|
||||||
|
if self.details:
|
||||||
|
result += f"\n {self.details}"
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def sha256_hex(data: bytes) -> str:
|
||||||
|
"""Compute SHA-256 hash."""
|
||||||
|
return hashlib.sha256(data).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def blake3_hex(data: bytes) -> str:
|
||||||
|
"""Compute BLAKE3-256 hash (fallback to SHA-256)."""
|
||||||
|
try:
|
||||||
|
import blake3
|
||||||
|
return "blake3:" + blake3.blake3(data).hexdigest()
|
||||||
|
except ImportError:
|
||||||
|
return "sha256:" + sha256_hex(data)
|
||||||
|
|
||||||
|
|
||||||
|
def load_json(path: Path) -> dict | None:
|
||||||
|
"""Load JSON file."""
|
||||||
|
try:
|
||||||
|
with open(path, 'r', encoding='utf-8') as f:
|
||||||
|
return json.load(f)
|
||||||
|
except (json.JSONDecodeError, FileNotFoundError) as e:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def verify_dsse_structure(dsse: dict) -> list[VerificationResult]:
|
||||||
|
"""Verify DSSE envelope structure."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
# Check required fields
|
||||||
|
if "payloadType" not in dsse:
|
||||||
|
results.append(VerificationResult(False, "Missing payloadType"))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(True, f"payloadType: {dsse['payloadType']}"))
|
||||||
|
|
||||||
|
if "payload" not in dsse:
|
||||||
|
results.append(VerificationResult(False, "Missing payload"))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(True, "payload present"))
|
||||||
|
|
||||||
|
if "signatures" not in dsse or not dsse["signatures"]:
|
||||||
|
results.append(VerificationResult(False, "Missing or empty signatures"))
|
||||||
|
else:
|
||||||
|
sig_count = len(dsse["signatures"])
|
||||||
|
results.append(VerificationResult(True, f"Found {sig_count} signature(s)"))
|
||||||
|
|
||||||
|
# Check for placeholder signatures
|
||||||
|
for i, sig in enumerate(dsse["signatures"]):
|
||||||
|
sig_value = sig.get("sig", "")
|
||||||
|
if sig_value.startswith("PLACEHOLDER"):
|
||||||
|
results.append(VerificationResult(
|
||||||
|
False,
|
||||||
|
f"Signature {i} is placeholder",
|
||||||
|
"Bundle needs actual signing before deployment"
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
keyid = sig.get("keyid", "unknown")
|
||||||
|
results.append(VerificationResult(True, f"Signature {i} keyid: {keyid}"))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def decode_payload(dsse: dict) -> tuple[dict | None, list[VerificationResult]]:
|
||||||
|
"""Decode DSSE payload."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
payload_b64 = dsse.get("payload", "")
|
||||||
|
if not payload_b64:
|
||||||
|
results.append(VerificationResult(False, "Empty payload"))
|
||||||
|
return None, results
|
||||||
|
|
||||||
|
try:
|
||||||
|
payload_bytes = base64.b64decode(payload_b64)
|
||||||
|
payload = json.loads(payload_bytes)
|
||||||
|
results.append(VerificationResult(True, "Payload decoded successfully"))
|
||||||
|
return payload, results
|
||||||
|
except Exception as e:
|
||||||
|
results.append(VerificationResult(False, f"Failed to decode payload: {e}"))
|
||||||
|
return None, results
|
||||||
|
|
||||||
|
|
||||||
|
def verify_openvex(payload: dict) -> list[VerificationResult]:
|
||||||
|
"""Verify OpenVEX document structure."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
# Check OpenVEX context
|
||||||
|
context = payload.get("@context", "")
|
||||||
|
if "openvex" in context.lower():
|
||||||
|
results.append(VerificationResult(True, f"OpenVEX context: {context}"))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(False, f"Unexpected context: {context}"))
|
||||||
|
|
||||||
|
# Check statements
|
||||||
|
statements = payload.get("statements", [])
|
||||||
|
if not statements:
|
||||||
|
results.append(VerificationResult(False, "No VEX statements"))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(True, f"Contains {len(statements)} statement(s)"))
|
||||||
|
|
||||||
|
for i, stmt in enumerate(statements):
|
||||||
|
vuln = stmt.get("vulnerability", {})
|
||||||
|
vuln_id = vuln.get("name", vuln.get("@id", "unknown"))
|
||||||
|
status = stmt.get("status", "unknown")
|
||||||
|
results.append(VerificationResult(
|
||||||
|
True,
|
||||||
|
f"Statement {i}: {vuln_id} -> {status}"
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def verify_evidence_hashes(payload: dict, cas_root: Path | None) -> list[VerificationResult]:
|
||||||
|
"""Verify evidence hash references against CAS."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
statements = payload.get("statements", [])
|
||||||
|
for stmt in statements:
|
||||||
|
impact = stmt.get("impact_statement", "")
|
||||||
|
if "Evidence hash:" in impact:
|
||||||
|
hash_value = impact.split("Evidence hash:")[1].strip()
|
||||||
|
results.append(VerificationResult(True, f"Evidence hash: {hash_value[:16]}..."))
|
||||||
|
|
||||||
|
# Verify against CAS if root provided
|
||||||
|
if cas_root and cas_root.exists():
|
||||||
|
# Look for reachability.json in CAS
|
||||||
|
reach_file = cas_root / "reachability.json"
|
||||||
|
if reach_file.exists():
|
||||||
|
with open(reach_file, 'rb') as f:
|
||||||
|
content = f.read()
|
||||||
|
actual_hash = blake3_hex(content)
|
||||||
|
|
||||||
|
if actual_hash == hash_value or hash_value in actual_hash:
|
||||||
|
results.append(VerificationResult(True, "Evidence hash matches CAS"))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(
|
||||||
|
False,
|
||||||
|
"Evidence hash mismatch",
|
||||||
|
f"Expected: {hash_value[:32]}..., Got: {actual_hash[:32]}..."
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def verify_catalog_membership(payload: dict, catalog_path: Path) -> list[VerificationResult]:
|
||||||
|
"""Verify justification is in catalog."""
|
||||||
|
results = []
|
||||||
|
|
||||||
|
if not catalog_path.exists():
|
||||||
|
results.append(VerificationResult(False, f"Catalog not found: {catalog_path}"))
|
||||||
|
return results
|
||||||
|
|
||||||
|
catalog = load_json(catalog_path)
|
||||||
|
if catalog is None:
|
||||||
|
results.append(VerificationResult(False, "Failed to load catalog"))
|
||||||
|
return results
|
||||||
|
|
||||||
|
# Extract catalog entries
|
||||||
|
entries = catalog if isinstance(catalog, list) else catalog.get("entries", [])
|
||||||
|
catalog_ids = {e.get("id", "") for e in entries}
|
||||||
|
|
||||||
|
# Check each statement's justification
|
||||||
|
statements = payload.get("statements", [])
|
||||||
|
for stmt in statements:
|
||||||
|
justification = stmt.get("justification")
|
||||||
|
if justification:
|
||||||
|
if justification in catalog_ids:
|
||||||
|
results.append(VerificationResult(
|
||||||
|
True,
|
||||||
|
f"Justification '{justification}' in catalog"
|
||||||
|
))
|
||||||
|
else:
|
||||||
|
results.append(VerificationResult(
|
||||||
|
False,
|
||||||
|
f"Justification '{justification}' not in catalog"
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Offline VEX proof bundle verifier"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--bundle",
|
||||||
|
type=Path,
|
||||||
|
required=True,
|
||||||
|
help="Path to DSSE bundle file"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--cas-root",
|
||||||
|
type=Path,
|
||||||
|
default=None,
|
||||||
|
help="Path to CAS evidence directory"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--catalog",
|
||||||
|
type=Path,
|
||||||
|
default=Path("docs/benchmarks/vex-justifications.catalog.json"),
|
||||||
|
help="Path to justification catalog"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Resolve paths
|
||||||
|
repo_root = Path(__file__).parent.parent.parent
|
||||||
|
bundle_path = args.bundle if args.bundle.is_absolute() else repo_root / args.bundle
|
||||||
|
catalog_path = args.catalog if args.catalog.is_absolute() else repo_root / args.catalog
|
||||||
|
cas_root = args.cas_root if args.cas_root and args.cas_root.is_absolute() else (
|
||||||
|
repo_root / args.cas_root if args.cas_root else None
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f"Verifying: {bundle_path}")
|
||||||
|
print("")
|
||||||
|
|
||||||
|
all_results = []
|
||||||
|
passed = 0
|
||||||
|
failed = 0
|
||||||
|
|
||||||
|
# Load DSSE bundle
|
||||||
|
dsse = load_json(bundle_path)
|
||||||
|
if dsse is None:
|
||||||
|
print("\033[0;31m✗\033[0m Failed to load bundle")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
# Verify DSSE structure
|
||||||
|
print("DSSE Structure:")
|
||||||
|
results = verify_dsse_structure(dsse)
|
||||||
|
for r in results:
|
||||||
|
print(f" {r}")
|
||||||
|
if r.passed:
|
||||||
|
passed += 1
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
all_results.extend(results)
|
||||||
|
|
||||||
|
# Decode payload
|
||||||
|
print("\nPayload:")
|
||||||
|
payload, results = decode_payload(dsse)
|
||||||
|
for r in results:
|
||||||
|
print(f" {r}")
|
||||||
|
if r.passed:
|
||||||
|
passed += 1
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
all_results.extend(results)
|
||||||
|
|
||||||
|
if payload:
|
||||||
|
# Verify OpenVEX structure
|
||||||
|
payload_type = dsse.get("payloadType", "")
|
||||||
|
if "openvex" in payload_type.lower():
|
||||||
|
print("\nOpenVEX:")
|
||||||
|
results = verify_openvex(payload)
|
||||||
|
for r in results:
|
||||||
|
print(f" {r}")
|
||||||
|
if r.passed:
|
||||||
|
passed += 1
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
all_results.extend(results)
|
||||||
|
|
||||||
|
# Verify evidence hashes
|
||||||
|
print("\nEvidence:")
|
||||||
|
results = verify_evidence_hashes(payload, cas_root)
|
||||||
|
for r in results:
|
||||||
|
print(f" {r}")
|
||||||
|
if r.passed:
|
||||||
|
passed += 1
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
all_results.extend(results)
|
||||||
|
|
||||||
|
# Verify catalog membership
|
||||||
|
print("\nCatalog:")
|
||||||
|
results = verify_catalog_membership(payload, catalog_path)
|
||||||
|
for r in results:
|
||||||
|
print(f" {r}")
|
||||||
|
if r.passed:
|
||||||
|
passed += 1
|
||||||
|
else:
|
||||||
|
failed += 1
|
||||||
|
all_results.extend(results)
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
print(f"\n{'='*40}")
|
||||||
|
print(f"Passed: {passed}, Failed: {failed}")
|
||||||
|
|
||||||
|
return 0 if failed == 0 else 1
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
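A hedged usage sketch for the offline verifier above, run from the repository root. The bundle path follows the finding layout used elsewhere in this change, but the exact DSSE file name is an assumption, and the placeholder signature in the bench fixtures will be reported as a failed check:

```python
import subprocess

# Offline verification of one bench finding's DSSE bundle (file name assumed).
result = subprocess.run(
    [
        "python", "bench/tools/verify.py",
        "--bundle", "bench/findings/CVE-BENCH-RUNC-CVE-reachable/vex.dsse.json",
        "--catalog", "docs/benchmarks/vex-justifications.catalog.json",
    ],
    capture_output=True, text=True,
)
print(result.stdout)
print("all checks passed" if result.returncode == 0 else "one or more checks failed")
```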
198
bench/tools/verify.sh
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
# BENCH-AUTO-401-019: Online DSSE + Rekor verification script
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||||
|
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m'
|
||||||
|
|
||||||
|
log_pass() { echo -e "${GREEN}✓${NC} $*"; }
|
||||||
|
log_fail() { echo -e "${RED}✗${NC} $*"; }
|
||||||
|
log_warn() { echo -e "${YELLOW}!${NC} $*"; }
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 <dsse-file> [--catalog PATH] [--rekor-url URL]"
|
||||||
|
echo ""
|
||||||
|
echo "Verify a VEX proof bundle with DSSE signature and Rekor inclusion."
|
||||||
|
echo ""
|
||||||
|
echo "Options:"
|
||||||
|
echo " --catalog PATH Path to justification catalog (default: docs/benchmarks/vex-justifications.catalog.json)"
|
||||||
|
echo " --rekor-url URL Rekor URL (default: https://rekor.sigstore.dev)"
|
||||||
|
echo " --offline Skip Rekor verification"
|
||||||
|
echo " --help, -h Show this help"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
DSSE_FILE=""
|
||||||
|
CATALOG="${REPO_ROOT}/docs/benchmarks/vex-justifications.catalog.json"
|
||||||
|
REKOR_URL="https://rekor.sigstore.dev"
|
||||||
|
OFFLINE=false
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--catalog)
|
||||||
|
CATALOG="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--rekor-url)
|
||||||
|
REKOR_URL="$2"
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
--offline)
|
||||||
|
OFFLINE=true
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--help|-h)
|
||||||
|
usage
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
if [[ -z "$DSSE_FILE" ]]; then
|
||||||
|
DSSE_FILE="$1"
|
||||||
|
else
|
||||||
|
echo "Unknown option: $1"
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ -z "$DSSE_FILE" ]]; then
|
||||||
|
echo "Error: DSSE file required"
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$DSSE_FILE" ]]; then
|
||||||
|
echo "Error: DSSE file not found: $DSSE_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Verifying: $DSSE_FILE"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 1: Validate JSON structure
|
||||||
|
if ! python3 -c "import json; json.load(open('$DSSE_FILE'))" 2>/dev/null; then
|
||||||
|
log_fail "Invalid JSON"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_pass "Valid JSON structure"
|
||||||
|
|
||||||
|
# Step 2: Check DSSE envelope structure
|
||||||
|
PAYLOAD_TYPE=$(python3 -c "import json; print(json.load(open('$DSSE_FILE')).get('payloadType', ''))")
|
||||||
|
if [[ -z "$PAYLOAD_TYPE" ]]; then
|
||||||
|
log_fail "Missing payloadType"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_pass "DSSE payloadType: $PAYLOAD_TYPE"
|
||||||
|
|
||||||
|
# Step 3: Decode and validate payload
|
||||||
|
PAYLOAD_B64=$(python3 -c "import json; print(json.load(open('$DSSE_FILE')).get('payload', ''))")
|
||||||
|
if [[ -z "$PAYLOAD_B64" ]]; then
|
||||||
|
log_fail "Missing payload"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Decode payload
|
||||||
|
PAYLOAD_JSON=$(echo "$PAYLOAD_B64" | base64 -d 2>/dev/null || echo "")
|
||||||
|
if [[ -z "$PAYLOAD_JSON" ]]; then
|
||||||
|
log_fail "Failed to decode payload"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_pass "Payload decoded successfully"
|
||||||
|
|
||||||
|
# Step 4: Validate OpenVEX structure (if applicable)
|
||||||
|
if [[ "$PAYLOAD_TYPE" == *"openvex"* ]]; then
|
||||||
|
STATEMENTS_COUNT=$(echo "$PAYLOAD_JSON" | python3 -c "import json,sys; d=json.load(sys.stdin); print(len(d.get('statements', [])))")
|
||||||
|
if [[ "$STATEMENTS_COUNT" -eq 0 ]]; then
|
||||||
|
log_warn "OpenVEX has no statements"
|
||||||
|
else
|
||||||
|
log_pass "OpenVEX contains $STATEMENTS_COUNT statement(s)"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Step 5: Check signature presence
|
||||||
|
SIG_COUNT=$(python3 -c "import json; print(len(json.load(open('$DSSE_FILE')).get('signatures', [])))")
|
||||||
|
if [[ "$SIG_COUNT" -eq 0 ]]; then
|
||||||
|
log_fail "No signatures found"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_pass "Found $SIG_COUNT signature(s)"
|
||||||
|
|
||||||
|
# Step 6: Check for placeholder signatures
|
||||||
|
SIG_VALUE=$(python3 -c "import json; sigs=json.load(open('$DSSE_FILE')).get('signatures', []); print(sigs[0].get('sig', '') if sigs else '')")
|
||||||
|
if [[ "$SIG_VALUE" == "PLACEHOLDER"* ]]; then
|
||||||
|
log_warn "Signature is a placeholder (not yet signed)"
|
||||||
|
else
|
||||||
|
log_pass "Signature present (verification requires public key)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Step 7: Rekor verification (if online)
|
||||||
|
if [[ "$OFFLINE" == false ]]; then
|
||||||
|
# Check for rekor.txt in same directory
|
||||||
|
DSSE_DIR=$(dirname "$DSSE_FILE")
|
||||||
|
REKOR_FILE="${DSSE_DIR}/rekor.txt"
|
||||||
|
|
||||||
|
if [[ -f "$REKOR_FILE" ]]; then
|
||||||
|
LOG_INDEX=$(grep -E "^log_index:" "$REKOR_FILE" | cut -d: -f2 | tr -d ' ')
|
||||||
|
if [[ "$LOG_INDEX" != "PENDING" && -n "$LOG_INDEX" ]]; then
|
||||||
|
log_pass "Rekor log index: $LOG_INDEX"
|
||||||
|
|
||||||
|
# Verify with Rekor API
|
||||||
|
if command -v curl &>/dev/null; then
|
||||||
|
REKOR_RESP=$(curl -s "${REKOR_URL}/api/v1/log/entries?logIndex=${LOG_INDEX}" 2>/dev/null || echo "")
|
||||||
|
if [[ -n "$REKOR_RESP" && "$REKOR_RESP" != "null" ]]; then
|
||||||
|
log_pass "Rekor inclusion verified"
|
||||||
|
else
|
||||||
|
log_warn "Could not verify Rekor inclusion (may be offline or index invalid)"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "curl not available for Rekor verification"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "Rekor entry pending submission"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "No rekor.txt found - Rekor verification skipped"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "Offline mode - Rekor verification skipped"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Step 8: Check justification catalog membership
|
||||||
|
if [[ -f "$CATALOG" ]]; then
|
||||||
|
# Extract justification from payload if present
|
||||||
|
JUSTIFICATION=$(echo "$PAYLOAD_JSON" | python3 -c "
|
||||||
|
import json, sys
|
||||||
|
d = json.load(sys.stdin)
|
||||||
|
stmts = d.get('statements', [])
|
||||||
|
if stmts:
|
||||||
|
print(stmts[0].get('justification', ''))
|
||||||
|
" 2>/dev/null || echo "")
|
||||||
|
|
||||||
|
if [[ -n "$JUSTIFICATION" ]]; then
|
||||||
|
CATALOG_MATCH=$(python3 -c "
|
||||||
|
import json
|
||||||
|
catalog = json.load(open('$CATALOG'))
|
||||||
|
entries = catalog if isinstance(catalog, list) else catalog.get('entries', [])
|
||||||
|
ids = [e.get('id', '') for e in entries]
|
||||||
|
print('yes' if '$JUSTIFICATION' in ids else 'no')
|
||||||
|
" 2>/dev/null || echo "no")
|
||||||
|
|
||||||
|
if [[ "$CATALOG_MATCH" == "yes" ]]; then
|
||||||
|
log_pass "Justification '$JUSTIFICATION' found in catalog"
|
||||||
|
else
|
||||||
|
log_warn "Justification '$JUSTIFICATION' not in catalog"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
log_warn "Justification catalog not found at $CATALOG"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Verification complete."
|
||||||
87
datasets/reachability/README.md
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# Reachability Test Datasets
|
||||||
|
|
||||||
|
This directory contains ground truth samples for validating reachability analysis accuracy.
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
datasets/reachability/
|
||||||
|
├── README.md # This file
|
||||||
|
├── samples/ # Test samples by language
|
||||||
|
│ ├── csharp/
|
||||||
|
│ │ ├── simple-reachable/ # Positive: direct call path
|
||||||
|
│ │ └── dead-code/ # Negative: unreachable code
|
||||||
|
│ ├── java/
|
||||||
|
│ │ └── vulnerable-log4j/ # Positive: Log4Shell CVE
|
||||||
|
│ └── native/
|
||||||
|
│ └── stripped-elf/ # Positive: stripped binary
|
||||||
|
└── schema/
|
||||||
|
├── manifest.schema.json # Sample manifest schema
|
||||||
|
└── ground-truth.schema.json # Ground truth schema
|
||||||
|
```
|
||||||
|
|
||||||
|
## Sample Categories
|
||||||
|
|
||||||
|
### Positive (Reachable)
|
||||||
|
Samples where vulnerable code has a confirmed path from entry points:
|
||||||
|
- `csharp/simple-reachable` - Direct call to vulnerable API
|
||||||
|
- `java/vulnerable-log4j` - Log4Shell with runtime confirmation
|
||||||
|
- `native/stripped-elf` - Stripped ELF with heuristic analysis
|
||||||
|
|
||||||
|
### Negative (Unreachable)
|
||||||
|
Samples where vulnerable code exists but is never called:
|
||||||
|
- `csharp/dead-code` - Deprecated API replaced by safe implementation
|
||||||
|
|
||||||
|
## Schema Reference
|
||||||
|
|
||||||
|
### manifest.json
|
||||||
|
Sample metadata including:
|
||||||
|
- `sampleId` - Unique identifier
|
||||||
|
- `language` - Primary language (java, csharp, native, etc.)
|
||||||
|
- `category` - positive, negative, or contested
|
||||||
|
- `vulnerabilities` - CVEs and affected symbols
|
||||||
|
- `artifacts` - Binary/SBOM file references
|
||||||
|
|
||||||
|
### ground-truth.json
|
||||||
|
Expected outcomes including:
|
||||||
|
- `targets` - Symbols with expected lattice states
|
||||||
|
- `entryPoints` - Program entry points
|
||||||
|
- `expectedUncertainty` - Expected uncertainty tier
|
||||||
|
- `expectedGateDecisions` - Expected policy gate outcomes
|
||||||
|
|
||||||
|
## Lattice States
|
||||||
|
|
||||||
|
| Code | Name | Description |
|
||||||
|
|------|------|-------------|
|
||||||
|
| U | Unknown | No analysis performed |
|
||||||
|
| SR | StaticallyReachable | Static analysis finds path |
|
||||||
|
| SU | StaticallyUnreachable | Static analysis finds no path |
|
||||||
|
| RO | RuntimeObserved | Runtime probe observed execution |
|
||||||
|
| RU | RuntimeUnobserved | Runtime probe did not observe |
|
||||||
|
| CR | ConfirmedReachable | Both static and runtime confirm |
|
||||||
|
| CU | ConfirmedUnreachable | Both static and runtime confirm unreachable |
|
||||||
|
| X | Contested | Static and runtime evidence conflict |
|
||||||
|
|
||||||
|
## Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Validate schemas
|
||||||
|
npx ajv validate -s schema/ground-truth.schema.json -d samples/**/ground-truth.json
|
||||||
|
|
||||||
|
# Run benchmark tests
|
||||||
|
dotnet test --filter "GroundTruth" src/Scanner/__Tests/StellaOps.Scanner.Reachability.Benchmarks/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Adding New Samples
|
||||||
|
|
||||||
|
1. Create directory: `samples/{language}/{sample-name}/`
|
||||||
|
2. Add `manifest.json` with sample metadata
|
||||||
|
3. Add `ground-truth.json` with expected outcomes
|
||||||
|
4. Include `reasoning` for each target explaining the expected state
|
||||||
|
5. Validate against schema before committing
|
||||||
|
|
||||||
|
## Related Documentation
|
||||||
|
|
||||||
|
- [Ground Truth Schema](../../docs/reachability/ground-truth-schema.md)
|
||||||
|
- [Lattice Model](../../docs/reachability/lattice.md)
|
||||||
|
- [Policy Gates](../../docs/reachability/policy-gate.md)
|
||||||
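The README validates samples with ajv; an equivalent check in Python, a sketch assuming the `jsonschema` package is available, walks every sample and validates its ground-truth.json against the schema listed in the directory tree:

```python
import json
from pathlib import Path

from jsonschema import validate  # pip install jsonschema

root = Path("datasets/reachability")
schema = json.loads((root / "schema" / "ground-truth.schema.json").read_text(encoding="utf-8"))

for gt_path in sorted((root / "samples").rglob("ground-truth.json")):
    instance = json.loads(gt_path.read_text(encoding="utf-8"))
    validate(instance=instance, schema=schema)  # raises ValidationError on mismatch
    print(f"ok: {gt_path}")
```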
@@ -0,0 +1,86 @@
|
|||||||
|
{
|
||||||
|
"schema": "ground-truth-v1",
|
||||||
|
"sampleId": "sample:csharp:dead-code:001",
|
||||||
|
"generatedAt": "2025-12-13T12:00:00Z",
|
||||||
|
"generator": {
|
||||||
|
"name": "manual-annotation",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"annotator": "scanner-guild"
|
||||||
|
},
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"symbolId": "sym:csharp:JsonConvert.DeserializeObject",
|
||||||
|
"display": "Newtonsoft.Json.JsonConvert.DeserializeObject<T>(string, JsonSerializerSettings)",
|
||||||
|
"purl": "pkg:nuget/Newtonsoft.Json@13.0.1",
|
||||||
|
"expected": {
|
||||||
|
"latticeState": "CU",
|
||||||
|
"bucket": "unreachable",
|
||||||
|
"reachable": false,
|
||||||
|
"confidence": 0.95,
|
||||||
|
"pathLength": null,
|
||||||
|
"path": null
|
||||||
|
},
|
||||||
|
"reasoning": "DeserializeObject referenced in deprecated LegacyParser class but LegacyParser is never instantiated - new SafeParser uses System.Text.Json instead"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"symbolId": "sym:csharp:LegacyParser.ParseJson",
|
||||||
|
"display": "SampleApp.LegacyParser.ParseJson(string)",
|
||||||
|
"purl": "pkg:generic/SampleApp@1.0.0",
|
||||||
|
"expected": {
|
||||||
|
"latticeState": "SU",
|
||||||
|
"bucket": "unreachable",
|
||||||
|
"reachable": false,
|
||||||
|
"confidence": 0.90,
|
||||||
|
"pathLength": null,
|
||||||
|
"path": null
|
||||||
|
},
|
||||||
|
"reasoning": "LegacyParser.ParseJson exists but LegacyParser is never instantiated - replaced by SafeParser"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"symbolId": "sym:csharp:SafeParser.ParseJson",
|
||||||
|
"display": "SampleApp.SafeParser.ParseJson(string)",
|
||||||
|
"purl": "pkg:generic/SampleApp@1.0.0",
|
||||||
|
"expected": {
|
||||||
|
"latticeState": "SR",
|
||||||
|
"bucket": "direct",
|
||||||
|
"reachable": true,
|
||||||
|
"confidence": 0.95,
|
||||||
|
"pathLength": 2,
|
||||||
|
"path": [
|
||||||
|
"sym:csharp:Program.Main",
|
||||||
|
"sym:csharp:SafeParser.ParseJson"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"reasoning": "SafeParser.ParseJson is the active implementation called from Main"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"entryPoints": [
|
||||||
|
{
|
||||||
|
"symbolId": "sym:csharp:Program.Main",
|
||||||
|
"display": "SampleApp.Program.Main(string[])",
|
||||||
|
"phase": "runtime",
|
||||||
|
"source": "manifest"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"expectedUncertainty": {
|
||||||
|
"states": [],
|
||||||
|
"aggregateTier": "T4",
|
||||||
|
"riskScore": 0.0
|
||||||
|
},
|
||||||
|
"expectedGateDecisions": [
|
||||||
|
{
|
||||||
|
"vulnId": "CVE-2024-21907",
|
||||||
|
"targetSymbol": "sym:csharp:JsonConvert.DeserializeObject",
|
||||||
|
"requestedStatus": "not_affected",
|
||||||
|
"expectedDecision": "allow",
|
||||||
|
"expectedReason": "CU state allows not_affected - confirmed unreachable"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"vulnId": "CVE-2024-21907",
|
||||||
|
"targetSymbol": "sym:csharp:JsonConvert.DeserializeObject",
|
||||||
|
"requestedStatus": "affected",
|
||||||
|
"expectedDecision": "warn",
|
||||||
|
"expectedReason": "Marking as affected when CU suggests false positive"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
27
datasets/reachability/samples/csharp/dead-code/manifest.json
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"sampleId": "sample:csharp:dead-code:001",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"createdAt": "2025-12-13T12:00:00Z",
|
||||||
|
"language": "csharp",
|
||||||
|
"category": "negative",
|
||||||
|
"description": "C# app where vulnerable code exists but is never called - deprecated API replaced by safe implementation",
|
||||||
|
"source": {
|
||||||
|
"repository": "synthetic",
|
||||||
|
"commit": "synthetic-sample",
|
||||||
|
"buildToolchain": "dotnet:10.0"
|
||||||
|
},
|
||||||
|
"vulnerabilities": [
|
||||||
|
{
|
||||||
|
"vulnId": "CVE-2024-21907",
|
||||||
|
"purl": "pkg:nuget/Newtonsoft.Json@13.0.1",
|
||||||
|
"affectedSymbol": "Newtonsoft.Json.JsonConvert.DeserializeObject"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"artifacts": [
|
||||||
|
{
|
||||||
|
"path": "artifacts/app.dll",
|
||||||
|
"hash": "sha256:0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
"type": "application/x-msdownload"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,79 @@
{
  "schema": "ground-truth-v1",
  "sampleId": "sample:csharp:simple-reachable:001",
  "generatedAt": "2025-12-13T12:00:00Z",
  "generator": {
    "name": "manual-annotation",
    "version": "1.0.0",
    "annotator": "scanner-guild"
  },
  "targets": [
    {
      "symbolId": "sym:csharp:JsonConvert.DeserializeObject",
      "display": "Newtonsoft.Json.JsonConvert.DeserializeObject<T>(string, JsonSerializerSettings)",
      "purl": "pkg:nuget/Newtonsoft.Json@13.0.1",
      "expected": {
        "latticeState": "SR",
        "bucket": "direct",
        "reachable": true,
        "confidence": 0.95,
        "pathLength": 2,
        "path": [
          "sym:csharp:Program.Main",
          "sym:csharp:JsonConvert.DeserializeObject"
        ]
      },
      "reasoning": "Direct call from Main() to JsonConvert.DeserializeObject with TypeNameHandling.All settings"
    },
    {
      "symbolId": "sym:csharp:JsonConvert.SerializeObject",
      "display": "Newtonsoft.Json.JsonConvert.SerializeObject(object)",
      "purl": "pkg:nuget/Newtonsoft.Json@13.0.1",
      "expected": {
        "latticeState": "SU",
        "bucket": "unreachable",
        "reachable": false,
        "confidence": 0.90,
        "pathLength": null,
        "path": null
      },
      "reasoning": "SerializeObject is present in the dependency but never called from any entry point"
    }
  ],
  "entryPoints": [
    {
      "symbolId": "sym:csharp:Program.Main",
      "display": "SampleApp.Program.Main(string[])",
      "phase": "runtime",
      "source": "manifest"
    }
  ],
  "expectedUncertainty": {
    "states": [],
    "aggregateTier": "T4",
    "riskScore": 0.0
  },
  "expectedGateDecisions": [
    {
      "vulnId": "CVE-2024-21907",
      "targetSymbol": "sym:csharp:JsonConvert.DeserializeObject",
      "requestedStatus": "not_affected",
      "expectedDecision": "block",
      "expectedBlockedBy": "LatticeState",
      "expectedReason": "SR state incompatible with not_affected - code path exists from entry point"
    },
    {
      "vulnId": "CVE-2024-21907",
      "targetSymbol": "sym:csharp:JsonConvert.DeserializeObject",
      "requestedStatus": "affected",
      "expectedDecision": "allow"
    },
    {
      "vulnId": "CVE-2024-21907",
      "targetSymbol": "sym:csharp:JsonConvert.SerializeObject",
      "requestedStatus": "not_affected",
      "expectedDecision": "allow",
      "expectedReason": "SU state allows not_affected - unreachable code path"
    }
  ]
}
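Each target in these ground-truth files couples a lattice state with an explicit path, path length, and confidence, so the annotations can be cross-checked mechanically before they feed the evaluation harness. Below is a minimal consistency check in Python, offered as a sketch only; the invariants enforced here are inferred from the fixtures, not taken from the dataset tooling:

```python
import json
import sys

# Buckets that imply a reachable target, inferred from the fixtures above.
REACHABLE_BUCKETS = {"direct", "runtime", "entrypoint"}

def check_ground_truth(path: str) -> list[str]:
    """Return a list of internal inconsistencies found in one ground-truth file."""
    with open(path, encoding="utf-8") as fh:
        truth = json.load(fh)

    problems = []
    entry_ids = {ep["symbolId"] for ep in truth.get("entryPoints", [])}

    for target in truth["targets"]:
        expected = target["expected"]
        sym = target["symbolId"]

        # pathLength must agree with the annotated path when one is present.
        if expected.get("path") is not None:
            if expected.get("pathLength") != len(expected["path"]):
                problems.append(f"{sym}: pathLength != len(path)")
            if expected["path"] and expected["path"][0] not in entry_ids:
                problems.append(f"{sym}: path does not start at a declared entry point")

        # Reachability flag and bucket must tell the same story.
        if expected.get("reachable") is True and expected["bucket"] not in REACHABLE_BUCKETS:
            problems.append(f"{sym}: reachable=true but bucket={expected['bucket']}")
        if expected.get("reachable") is False and expected["bucket"] != "unreachable":
            problems.append(f"{sym}: reachable=false but bucket={expected['bucket']}")

    return problems

if __name__ == "__main__":
    for file_path in sys.argv[1:]:
        for problem in check_ground_truth(file_path):
            print(f"{file_path}: {problem}")
```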
@@ -0,0 +1,27 @@
{
  "sampleId": "sample:csharp:simple-reachable:001",
  "version": "1.0.0",
  "createdAt": "2025-12-13T12:00:00Z",
  "language": "csharp",
  "category": "positive",
  "description": "Simple C# console app with direct call path to vulnerable Newtonsoft.Json TypeNameHandling usage",
  "source": {
    "repository": "synthetic",
    "commit": "synthetic-sample",
    "buildToolchain": "dotnet:10.0"
  },
  "vulnerabilities": [
    {
      "vulnId": "CVE-2024-21907",
      "purl": "pkg:nuget/Newtonsoft.Json@13.0.1",
      "affectedSymbol": "Newtonsoft.Json.JsonConvert.DeserializeObject"
    }
  ],
  "artifacts": [
    {
      "path": "artifacts/app.dll",
      "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000001",
      "type": "application/x-msdownload"
    }
  ]
}
@@ -0,0 +1,108 @@
{
  "schema": "ground-truth-v1",
  "sampleId": "sample:java:vulnerable-log4j:001",
  "generatedAt": "2025-12-13T12:00:00Z",
  "generator": {
    "name": "manual-annotation",
    "version": "1.0.0",
    "annotator": "security-team"
  },
  "targets": [
    {
      "symbolId": "sym:java:log4j.JndiLookup.lookup",
      "display": "org.apache.logging.log4j.core.lookup.JndiLookup.lookup(LogEvent, String)",
      "purl": "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
      "expected": {
        "latticeState": "CR",
        "bucket": "runtime",
        "reachable": true,
        "confidence": 0.98,
        "pathLength": 4,
        "path": [
          "sym:java:HttpRequestHandler.handle",
          "sym:java:LogManager.getLogger",
          "sym:java:Logger.info",
          "sym:java:log4j.JndiLookup.lookup"
        ]
      },
      "reasoning": "Confirmed reachable via runtime probe - HTTP request handler logs user-controlled input which triggers JNDI lookup via message substitution"
    },
    {
      "symbolId": "sym:java:log4j.JndiManager.lookup",
      "display": "org.apache.logging.log4j.core.net.JndiManager.lookup(String)",
      "purl": "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
      "expected": {
        "latticeState": "CU",
        "bucket": "unreachable",
        "reachable": false,
        "confidence": 0.92,
        "pathLength": null,
        "path": null
      },
      "reasoning": "JndiManager.lookup is present in log4j-core but the direct JndiManager usage path is not exercised - only JndiLookup wrapper is used"
    },
    {
      "symbolId": "sym:java:log4j.ScriptLookup.lookup",
      "display": "org.apache.logging.log4j.core.lookup.ScriptLookup.lookup(LogEvent, String)",
      "purl": "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
      "expected": {
        "latticeState": "SU",
        "bucket": "unreachable",
        "reachable": false,
        "confidence": 0.85,
        "pathLength": null,
        "path": null
      },
      "reasoning": "ScriptLookup exists in log4j-core but is disabled by default and no configuration enables it"
    }
  ],
  "entryPoints": [
    {
      "symbolId": "sym:java:HttpRequestHandler.handle",
      "display": "com.example.app.HttpRequestHandler.handle(HttpExchange)",
      "phase": "runtime",
      "source": "servlet"
    },
    {
      "symbolId": "sym:java:Application.main",
      "display": "com.example.app.Application.main(String[])",
      "phase": "main",
      "source": "manifest"
    }
  ],
  "expectedUncertainty": {
    "states": [],
    "aggregateTier": "T4",
    "riskScore": 0.0
  },
  "expectedGateDecisions": [
    {
      "vulnId": "CVE-2021-44228",
      "targetSymbol": "sym:java:log4j.JndiLookup.lookup",
      "requestedStatus": "not_affected",
      "expectedDecision": "block",
      "expectedBlockedBy": "LatticeState",
      "expectedReason": "CR state blocks not_affected - runtime evidence confirms reachability"
    },
    {
      "vulnId": "CVE-2021-44228",
      "targetSymbol": "sym:java:log4j.JndiLookup.lookup",
      "requestedStatus": "affected",
      "expectedDecision": "allow"
    },
    {
      "vulnId": "CVE-2021-44228",
      "targetSymbol": "sym:java:log4j.JndiManager.lookup",
      "requestedStatus": "not_affected",
      "expectedDecision": "allow",
      "expectedReason": "CU state allows not_affected - confirmed unreachable"
    },
    {
      "vulnId": "CVE-2021-44228",
      "targetSymbol": "sym:java:log4j.ScriptLookup.lookup",
      "requestedStatus": "not_affected",
      "expectedDecision": "warn",
      "expectedReason": "SU state allows not_affected but with warning - static analysis only, no runtime confirmation"
    }
  ]
}
@@ -0,0 +1,32 @@
{
  "sampleId": "sample:java:vulnerable-log4j:001",
  "version": "1.0.0",
  "createdAt": "2025-12-13T12:00:00Z",
  "language": "java",
  "category": "positive",
  "description": "Log4Shell CVE-2021-44228 reachable via JNDI lookup in logging path from HTTP request handler",
  "source": {
    "repository": "synthetic",
    "commit": "synthetic-sample",
    "buildToolchain": "maven:3.9.0,jdk:17"
  },
  "vulnerabilities": [
    {
      "vulnId": "CVE-2021-44228",
      "purl": "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
      "affectedSymbol": "org.apache.logging.log4j.core.lookup.JndiLookup.lookup"
    }
  ],
  "artifacts": [
    {
      "path": "artifacts/app.jar",
      "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000004",
      "type": "application/java-archive"
    },
    {
      "path": "artifacts/sbom.cdx.json",
      "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000005",
      "type": "application/vnd.cyclonedx+json"
    }
  ]
}
@@ -0,0 +1,100 @@
{
  "schema": "ground-truth-v1",
  "sampleId": "sample:native:stripped-elf:001",
  "generatedAt": "2025-12-13T12:00:00Z",
  "generator": {
    "name": "manual-annotation",
    "version": "1.0.0",
    "annotator": "scanner-guild"
  },
  "targets": [
    {
      "symbolId": "sym:binary:ossl_punycode_decode",
      "display": "ossl_punycode_decode",
      "purl": "pkg:deb/ubuntu/openssl@3.0.2?arch=amd64",
      "expected": {
        "latticeState": "SR",
        "bucket": "direct",
        "reachable": true,
        "confidence": 0.85,
        "pathLength": 4,
        "path": [
          "sym:binary:_start",
          "sym:binary:main",
          "sym:binary:SSL_connect",
          "sym:binary:ossl_punycode_decode"
        ]
      },
      "reasoning": "punycode_decode is reachable via SSL certificate validation during SSL_connect - lower confidence due to stripped binary heuristics"
    },
    {
      "symbolId": "sym:binary:sub_401000",
      "display": "sub_401000 (heuristic function)",
      "purl": "pkg:generic/app@1.0.0",
      "expected": {
        "latticeState": "U",
        "bucket": "unknown",
        "reachable": null,
        "confidence": 0.4,
        "pathLength": null,
        "path": null
      },
      "reasoning": "Stripped symbol detected by heuristic CFG analysis - function boundaries uncertain"
    }
  ],
  "entryPoints": [
    {
      "symbolId": "sym:binary:_start",
      "display": "_start",
      "phase": "load",
      "source": "e_entry"
    },
    {
      "symbolId": "sym:binary:main",
      "display": "main",
      "phase": "runtime",
      "source": "symbol"
    },
    {
      "symbolId": "init:binary:0x401000",
      "display": "DT_INIT_ARRAY[0]",
      "phase": "init",
      "source": "DT_INIT_ARRAY"
    }
  ],
  "expectedUncertainty": {
    "states": [
      {
        "code": "U1",
        "entropy": 0.35
      }
    ],
    "aggregateTier": "T2",
    "riskScore": 0.25
  },
  "expectedGateDecisions": [
    {
      "vulnId": "CVE-2022-3602",
      "targetSymbol": "sym:binary:ossl_punycode_decode",
      "requestedStatus": "not_affected",
      "expectedDecision": "block",
      "expectedBlockedBy": "LatticeState",
      "expectedReason": "SR state blocks not_affected - static analysis shows reachability"
    },
    {
      "vulnId": "CVE-2022-3602",
      "targetSymbol": "sym:binary:ossl_punycode_decode",
      "requestedStatus": "affected",
      "expectedDecision": "warn",
      "expectedReason": "T2 uncertainty tier requires review for affected status"
    },
    {
      "vulnId": "CVE-2022-3602",
      "targetSymbol": "sym:binary:sub_401000",
      "requestedStatus": "not_affected",
      "expectedDecision": "block",
      "expectedBlockedBy": "UncertaintyTier",
      "expectedReason": "Unknown state with U1 uncertainty blocks not_affected without justification"
    }
  ]
}
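The reachable: null annotation on the heuristic sub_401000 target matters once these files are used for scoring: unknown targets should drop out of binary reachable/unreachable metrics rather than count as misses. A possible scoring sketch in Python follows; the analyzer-output shape is assumed purely for illustration, and only the ground-truth shape comes from the files above:

```python
import json

# Lattice states treated as "reachable" for the binary metric; inferred from the
# fixtures above (SR = statically reachable, CR = confirmed at runtime). Other
# states would need an explicit mapping before scoring.
REACHABLE_STATES = {"SR", "CR"}

def score(ground_truth_path: str, analyzer_states: dict[str, str]) -> dict:
    """Compare analyzer lattice states against one ground-truth file.

    analyzer_states maps symbolId -> lattice state reported by an analyzer run;
    that shape is hypothetical and not defined by this dataset.
    """
    with open(ground_truth_path, encoding="utf-8") as fh:
        truth = json.load(fh)

    exact = total = 0
    tp = fp = fn = tn = 0

    for target in truth["targets"]:
        expected = target["expected"]
        got_state = analyzer_states.get(target["symbolId"])

        total += 1
        if got_state == expected["latticeState"]:
            exact += 1

        # Targets annotated reachable=null (unknown) are excluded from the
        # binary reachable/unreachable confusion counts.
        if expected["reachable"] is None:
            continue
        got_reachable = got_state in REACHABLE_STATES
        if expected["reachable"] and got_reachable:
            tp += 1
        elif expected["reachable"]:
            fn += 1
        elif got_reachable:
            fp += 1
        else:
            tn += 1

    return {
        "exactStateAccuracy": exact / total if total else None,
        "truePositives": tp,
        "falsePositives": fp,
        "falseNegatives": fn,
        "trueNegatives": tn,
    }
```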
@@ -0,0 +1,27 @@
{
  "sampleId": "sample:native:stripped-elf:001",
  "version": "1.0.0",
  "createdAt": "2025-12-13T12:00:00Z",
  "language": "native",
  "category": "positive",
  "description": "Stripped ELF binary linking to vulnerable OpenSSL version with reachable SSL_connect path",
  "source": {
    "repository": "synthetic",
    "commit": "synthetic-sample",
    "buildToolchain": "gcc:13.0,openssl:3.0.2"
  },
  "vulnerabilities": [
    {
      "vulnId": "CVE-2022-3602",
      "purl": "pkg:deb/ubuntu/openssl@3.0.2?arch=amd64",
      "affectedSymbol": "ossl_punycode_decode"
    }
  ],
  "artifacts": [
    {
      "path": "artifacts/app",
      "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000003",
      "type": "application/x-executable"
    }
  ]
}
189
datasets/reachability/schema/ground-truth.schema.json
Normal file
@@ -0,0 +1,189 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "$id": "https://stellaops.io/schemas/reachability/ground-truth.schema.json",
  "title": "Reachability Ground Truth",
  "description": "Ground truth annotations for reachability test samples",
  "type": "object",
  "required": ["schema", "sampleId", "generatedAt", "generator", "targets", "entryPoints"],
  "properties": {
    "schema": {
      "type": "string",
      "const": "ground-truth-v1"
    },
    "sampleId": {
      "type": "string",
      "pattern": "^sample:[a-z]+:[a-z0-9-]+:[0-9]+$"
    },
    "generatedAt": {
      "type": "string",
      "format": "date-time"
    },
    "generator": {
      "type": "object",
      "required": ["name", "version"],
      "properties": {
        "name": {
          "type": "string"
        },
        "version": {
          "type": "string"
        },
        "annotator": {
          "type": "string"
        }
      }
    },
    "targets": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/target"
      }
    },
    "entryPoints": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/entryPoint"
      }
    },
    "expectedUncertainty": {
      "$ref": "#/definitions/uncertainty"
    },
    "expectedGateDecisions": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/gateDecision"
      }
    }
  },
  "definitions": {
    "target": {
      "type": "object",
      "required": ["symbolId", "expected", "reasoning"],
      "properties": {
        "symbolId": {
          "type": "string",
          "pattern": "^sym:[a-z]+:.+"
        },
        "display": {
          "type": "string"
        },
        "purl": {
          "type": "string"
        },
        "expected": {
          "type": "object",
          "required": ["latticeState", "bucket", "reachable", "confidence"],
          "properties": {
            "latticeState": {
              "type": "string",
              "enum": ["U", "SR", "SU", "RO", "RU", "CR", "CU", "X"]
            },
            "bucket": {
              "type": "string",
              "enum": ["unknown", "direct", "runtime", "unreachable", "entrypoint"]
            },
            "reachable": {
              "type": ["boolean", "null"]
            },
            "confidence": {
              "type": "number",
              "minimum": 0,
              "maximum": 1
            },
            "pathLength": {
              "type": ["integer", "null"],
              "minimum": 0
            },
            "path": {
              "type": ["array", "null"],
              "items": {
                "type": "string"
              }
            }
          }
        },
        "reasoning": {
          "type": "string"
        }
      }
    },
    "entryPoint": {
      "type": "object",
      "required": ["symbolId", "phase", "source"],
      "properties": {
        "symbolId": {
          "type": "string"
        },
        "display": {
          "type": "string"
        },
        "phase": {
          "type": "string",
          "enum": ["load", "init", "runtime", "main", "fini"]
        },
        "source": {
          "type": "string"
        }
      }
    },
    "uncertainty": {
      "type": "object",
      "required": ["aggregateTier"],
      "properties": {
        "states": {
          "type": "array",
          "items": {
            "type": "object",
            "required": ["code", "entropy"],
            "properties": {
              "code": {
                "type": "string",
                "enum": ["U1", "U2", "U3", "U4"]
              },
              "entropy": {
                "type": "number",
                "minimum": 0,
                "maximum": 1
              }
            }
          }
        },
        "aggregateTier": {
          "type": "string",
          "enum": ["T1", "T2", "T3", "T4"]
        },
        "riskScore": {
          "type": "number",
          "minimum": 0,
          "maximum": 1
        }
      }
    },
    "gateDecision": {
      "type": "object",
      "required": ["vulnId", "targetSymbol", "requestedStatus", "expectedDecision"],
      "properties": {
        "vulnId": {
          "type": "string"
        },
        "targetSymbol": {
          "type": "string"
        },
        "requestedStatus": {
          "type": "string",
          "enum": ["affected", "not_affected", "under_investigation", "fixed"]
        },
        "expectedDecision": {
          "type": "string",
          "enum": ["allow", "block", "warn"]
        },
        "expectedBlockedBy": {
          "type": "string"
        },
        "expectedReason": {
          "type": "string"
        }
      }
    }
  }
}
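The schema above can be run against the sample annotations directly. Below is a short validation sketch using the third-party jsonschema package; the sample directory and ground-truth file name are placeholders rather than the repository's actual layout:

```python
import json
from pathlib import Path

from jsonschema import Draft7Validator  # third-party: pip install jsonschema

# Paths below are placeholders for wherever the schema and sample annotations live.
schema_path = Path("datasets/reachability/schema/ground-truth.schema.json")
samples_dir = Path("datasets/reachability/samples")  # hypothetical location

schema = json.loads(schema_path.read_text(encoding="utf-8"))
Draft7Validator.check_schema(schema)  # fail fast if the schema itself is malformed
validator = Draft7Validator(schema)

for truth_file in sorted(samples_dir.rglob("ground-truth.json")):
    instance = json.loads(truth_file.read_text(encoding="utf-8"))
    errors = sorted(validator.iter_errors(instance), key=lambda e: list(e.path))
    if not errors:
        print(f"{truth_file}: OK")
        continue
    for error in errors:
        location = "/".join(str(part) for part in error.path) or "<root>"
        print(f"{truth_file}: {location}: {error.message}")
```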
Some files were not shown because too many files have changed in this diff.