CI/CD consolidation

This commit is contained in:
StellaOps Bot
2025-12-26 17:32:23 +02:00
parent a866eb6277
commit c786faae84
638 changed files with 3821 additions and 181 deletions

View File

@@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "$0")/../.." && pwd)"
cd "$ROOT_DIR"
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
export DOTNET_NOLOGO=1
# Restore once for the Concelier solution.
dotnet restore src/Concelier/StellaOps.Concelier.sln
# Build the two test projects with analyzers disabled to keep CI fast.
dotnet build src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj \
-c Release -p:DisableAnalyzers=true
dotnet build src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj \
-c Release -p:DisableAnalyzers=true
# Run filtered attestation tests; keep logs in TestResults.
RESULTS=TestResults/concelier-attestation
mkdir -p "$RESULTS"
core_log="$RESULTS/core.trx"
web_log="$RESULTS/web.trx"
set +e
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj \
-c Release --no-build --filter EvidenceBundleAttestationBuilderTests \
--logger "trx;LogFileName=$(basename "$core_log")" --results-directory "$RESULTS"
CORE_EXIT=$?
dotnet test src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj \
-c Release --no-build --filter InternalAttestationVerify \
--logger "trx;LogFileName=$(basename "$web_log")" --results-directory "$RESULTS"
WEB_EXIT=$?
set -e
if [[ $CORE_EXIT -ne 0 || $WEB_EXIT -ne 0 ]]; then
echo "Attestation test run failed: core=$CORE_EXIT web=$WEB_EXIT" >&2
exit 1
fi
echo "Attestation tests succeeded; results in $RESULTS"

View File

@@ -1,124 +0,0 @@
# Cosign binaries (runtime/signals signing)
## Preferred (system)
- Version: `v3.0.2`
- Path: `/usr/local/bin/cosign` (installed on WSL Debian host)
- Breaking change: v3 requires `--bundle <file>` when signing blobs; older `--output-signature`/`--output-certificate` pairs are deprecated.
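Illustration of the flag change (file names are placeholders; the signing scripts documented below select the right form automatically):
```bash
# v3: one bundle file carries the signature and verification material
cosign sign-blob --key cosign.key --yes --tlog-upload=false \
  --bundle artifact.sigstore.json artifact.yaml
# v2.6.0 fallback: detached signature file
cosign sign-blob --key cosign.key --yes --tlog-upload=false \
  --output-signature artifact.dsse artifact.yaml
```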
## Offline fallback (repo-pinned)
- Version: `v2.6.0`
- Binary: `tools/cosign/cosign` (symlink to `tools/cosign/v2.6.0/cosign-linux-amd64`)
- SHA256: `ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9`
- Check: `cd tools/cosign/v2.6.0 && sha256sum -c cosign_checksums.txt --ignore-missing`
## Usage examples
- v3 DSSE blob: `cosign sign-blob --key cosign.key --predicate-type stella.ops/confidenceDecayConfig@v1 --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
- v3 verify: `cosign verify-blob --bundle confidence_decay_config.sigstore.json decay/confidence_decay_config.yaml`
- To force the offline fallback for ad-hoc commands, export `PATH=./tools/cosign:$PATH`; the signing scripts prefer `/usr/local/bin/cosign`, so point them at the fallback with `COSIGN_BIN=./tools/cosign/cosign` instead.
## CI Workflow: signals-dsse-sign.yml
The `.gitea/workflows/signals-dsse-sign.yml` workflow automates DSSE signing for Signals artifacts.
### Required Secrets
| Secret | Description | Required |
|--------|-------------|----------|
| `COSIGN_PRIVATE_KEY_B64` | Base64-encoded cosign private key | Yes (for production) |
| `COSIGN_PASSWORD` | Password for the private key | If key is encrypted |
| `CI_EVIDENCE_LOCKER_TOKEN` | Token for Evidence Locker upload | Optional |
### Trigger Options
1. **Automatic**: On push to `main` when signals artifacts change
2. **Manual**: Via workflow_dispatch with options:
- `out_dir`: Output directory (default: `evidence-locker/signals/2025-12-01`)
- `allow_dev_key`: Set to `1` for testing with dev key
### Setting Up CI Secrets
```bash
# Generate production key pair (do this once, securely)
cosign generate-key-pair
# Base64 encode the private key
cat cosign.key | base64 -w0 > cosign.key.b64
# Add to Gitea secrets:
# - COSIGN_PRIVATE_KEY_B64: contents of cosign.key.b64
# - COSIGN_PASSWORD: password used during key generation
```
## CI / secrets (manual usage)
- CI should provide a base64-encoded private key via secret `COSIGN_PRIVATE_KEY_B64` and optional password in `COSIGN_PASSWORD`.
- Example bootstrap in jobs:
```bash
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > /tmp/cosign.key
chmod 600 /tmp/cosign.key
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" cosign version
```
- For local dev, copy your own key to `tools/cosign/cosign.key` or export `COSIGN_PRIVATE_KEY_B64` before running signing scripts. Never commit real keys; only `cosign.key.example` lives in git.
## Development signing key
A development key pair is provided for local testing and smoke tests:
| File | Description |
|------|-------------|
| `tools/cosign/cosign.dev.key` | Private key (password-protected) |
| `tools/cosign/cosign.dev.pub` | Public key for verification |
### Usage
```bash
# Sign signals artifacts with dev key
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
OUT_DIR=docs/modules/signals/dev-test \
tools/cosign/sign-signals.sh
# Verify a signature
cosign verify-blob \
--key tools/cosign/cosign.dev.pub \
--bundle docs/modules/signals/dev-test/confidence_decay_config.sigstore.json \
docs/modules/signals/decay/confidence_decay_config.yaml
```
### Security Notes
- Password: `stellaops-dev` (do not reuse elsewhere)
- **NOT** for production or Evidence Locker ingestion
- Real signing requires the Signals Guild key via `COSIGN_PRIVATE_KEY_B64` (CI) or `tools/cosign/cosign.key` (local drop-in)
- `sign-signals.sh` requires `COSIGN_ALLOW_DEV_KEY=1` to use the dev key; otherwise it refuses
- The signing helper disables tlog upload (`--tlog-upload=false`) and auto-accepts prompts (`--yes`) for offline runs
## Signing Scripts
### sign-signals.sh
Signs decay config, unknowns manifest, and heuristics catalog with DSSE envelopes.
```bash
# Production (CI secret or cosign.key drop-in)
OUT_DIR=evidence-locker/signals/2025-12-01 tools/cosign/sign-signals.sh
# Development (dev key)
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
OUT_DIR=docs/modules/signals/dev-test \
tools/cosign/sign-signals.sh
```
### Key Resolution Order
1. `COSIGN_KEY_FILE` environment variable
2. `COSIGN_PRIVATE_KEY_B64` (decoded to temp file)
3. `tools/cosign/cosign.key` (production drop-in)
4. `tools/cosign/cosign.dev.key` (only if `COSIGN_ALLOW_DEV_KEY=1`)
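A condensed sketch of that resolution logic (error handling and temp-key cleanup elided; see `sign-signals.sh` for the full version):
```bash
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
  KEY_FILE="$COSIGN_KEY_FILE"
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
  KEY_FILE="$(mktemp)"
  echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$KEY_FILE"
  chmod 600 "$KEY_FILE"
elif [[ -f tools/cosign/cosign.key ]]; then
  KEY_FILE="tools/cosign/cosign.key"
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f tools/cosign/cosign.dev.key ]]; then
  KEY_FILE="tools/cosign/cosign.dev.key"   # dev key: smoke tests only
else
  echo "No signing key configured" >&2
  exit 2
fi
```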
### sign-authority-gaps.sh
Signs Authority gap artefacts (AU1-AU10, RR1-RR10) under `docs/modules/authority/gaps/artifacts/`.
```bash
# Production (Authority key via CI secret or cosign.key drop-in)
OUT_DIR=docs/modules/authority/gaps/dsse/2025-12-04 tools/cosign/sign-authority-gaps.sh
# Development (dev key, smoke only)
COSIGN_ALLOW_DEV_KEY=1 COSIGN_PASSWORD=stellaops-dev \
OUT_DIR=docs/modules/authority/gaps/dev-smoke/2025-12-04 \
tools/cosign/sign-authority-gaps.sh
```
- Outputs `.sigstore.json` bundles (cosign v3) or `.dsse` signatures (v2 fallback) plus `SHA256SUMS` in `OUT_DIR`.
- tlog upload disabled (`--tlog-upload=false`) and prompts auto-accepted (`--yes`) for offline use.
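To spot-check a signed gap artefact offline, verify the bundle against the matching Authority public key (the key path below is illustrative):
```bash
cosign verify-blob \
  --key path/to/authority.pub \
  --bundle docs/modules/authority/gaps/dsse/2025-12-04/rekor-receipt-policy.sigstore.json \
  docs/modules/authority/gaps/artifacts/rekor-receipt-policy.v1.json
```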

View File

@@ -1 +0,0 @@
v2.6.0/cosign-linux-amd64

View File

@@ -1,11 +0,0 @@
-----BEGIN ENCRYPTED SIGSTORE PRIVATE KEY-----
eyJrZGYiOnsibmFtZSI6InNjcnlwdCIsInBhcmFtcyI6eyJOIjo2NTUzNiwiciI6
OCwicCI6MX0sInNhbHQiOiJ5dlhpaXliR2lTR0NPS2x0Q2M1dlFhTy91S3pBVzNs
Skl3QTRaU2dEMTAwPSJ9LCJjaXBoZXIiOnsibmFtZSI6Im5hY2wvc2VjcmV0Ym94
Iiwibm9uY2UiOiIyNHA0T2xJZnJxdnhPVnM3dlY2MXNwVGpkNk80cVBEVCJ9LCJj
aXBoZXJ0ZXh0IjoiTHRWSGRqVi94MXJrYXhscGxJbVB5dkVtc2NBYTB5dW5oakZ5
UUFiZ1RSNVdZL3lCS0tYMWdFb09hclZDWksrQU0yY0tIM2tJQWlJNWlMd1AvV3c5
Q3k2SVY1ek4za014cExpcjJ1QVZNV3c3Y3BiYUhnNjV4TzNOYkEwLzJOSi84R0dN
NWt1QXhJRWsraER3ZWJ4Tld4WkRtNEZ4NTJVcVJxa2NPT09vNk9xWXB4OWFMaVZw
RjgzRElGZFpRK2R4K05RUnUxUmNrKzBtOHc9PSJ9
-----END ENCRYPTED SIGSTORE PRIVATE KEY-----

View File

@@ -1,4 +0,0 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfoI+9RFCTcfjeMqpCQ3FAyvKwBQU
YAIM2cfDR8W98OxnXV+gfV5Dhfoi8qofAnG/vC7DbBlX2t/gT7GKUZAChA==
-----END PUBLIC KEY-----

View File

@@ -1,8 +0,0 @@
# Placeholder development cosign key
#
# Do not use in production. Generate your own:
# cosign generate-key-pair
#
# Store the private key securely (e.g., CI secret COSIGN_PRIVATE_KEY_B64).
#
# This file exists only as a path stub for tooling; it is not a real key.

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic DSSE signing helper for Authority gap artefacts (AU1-AU10, RR1-RR10).
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
COSIGN_BIN="${COSIGN_BIN:-}"
# Detect cosign binary
if [[ -z "$COSIGN_BIN" ]]; then
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
COSIGN_BIN="/usr/local/bin/cosign"
elif command -v cosign >/dev/null 2>&1; then
COSIGN_BIN="$(command -v cosign)"
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
COSIGN_BIN="$ROOT/tools/cosign/cosign"
else
echo "cosign not found; install or set COSIGN_BIN" >&2
exit 1
fi
fi
# Resolve key
TMP_KEY=""
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
KEY_FILE="$COSIGN_KEY_FILE"
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
TMP_KEY="$(mktemp)"
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
chmod 600 "$TMP_KEY"
KEY_FILE="$TMP_KEY"
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
KEY_FILE="$ROOT/tools/cosign/cosign.key"
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
else
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
exit 2
fi
OUT_BASE="${OUT_DIR:-$ROOT/docs/modules/authority/gaps/dsse/2025-12-04}"
if [[ "$OUT_BASE" != /* ]]; then
OUT_BASE="$ROOT/$OUT_BASE"
fi
mkdir -p "$OUT_BASE"
ARTEFACTS=(
"docs/modules/authority/gaps/artifacts/authority-scope-role-catalog.v1.json|authority-scope-role-catalog"
"docs/modules/authority/gaps/artifacts/authority-jwks-metadata.schema.json|authority-jwks-metadata.schema"
"docs/modules/authority/gaps/artifacts/crypto-profile-registry.v1.json|crypto-profile-registry"
"docs/modules/authority/gaps/artifacts/authority-offline-verifier-bundle.v1.json|authority-offline-verifier-bundle"
"docs/modules/authority/gaps/artifacts/authority-abac.schema.json|authority-abac.schema"
"docs/modules/authority/gaps/artifacts/rekor-receipt-policy.v1.json|rekor-receipt-policy"
"docs/modules/authority/gaps/artifacts/rekor-receipt.schema.json|rekor-receipt.schema"
"docs/modules/authority/gaps/artifacts/rekor-receipt-bundle.v1.json|rekor-receipt-bundle"
)
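# Prefer v3 bundle output when the detected cosign reports GitVersion v3.x;
# otherwise fall back to detached .dsse signatures (v2 behaviour).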
USE_BUNDLE=0
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
USE_BUNDLE=1
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
USE_BUNDLE=1
fi
SHA_FILE="$OUT_BASE/SHA256SUMS"
: > "$SHA_FILE"
for entry in "${ARTEFACTS[@]}"; do
IFS="|" read -r path stem <<<"$entry"
if [[ ! -f "$ROOT/$path" ]]; then
echo "Missing artefact: $path" >&2
exit 3
fi
if (( USE_BUNDLE )); then
bundle="$OUT_BASE/${stem}.sigstore.json"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--bundle "$bundle" \
"$ROOT/$path"
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
else
sig="$OUT_BASE/${stem}.dsse"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--output-signature "$sig" \
"$ROOT/$path"
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
fi
printf "%s %s\n" "$(sha256sum "$ROOT/$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$ROOT/$path")" >> "$SHA_FILE"
echo "Signed $path"
done
echo "Signed artefacts written to $OUT_BASE"
if [[ -n "$TMP_KEY" ]]; then
rm -f "$TMP_KEY"
fi

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic DSSE signing helper for Signals artifacts.
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
COSIGN_BIN="${COSIGN_BIN:-}"
# Detect cosign binary (v3 preferred).
if [[ -z "$COSIGN_BIN" ]]; then
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
COSIGN_BIN="/usr/local/bin/cosign"
elif command -v cosign >/dev/null 2>&1; then
COSIGN_BIN="$(command -v cosign)"
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
COSIGN_BIN="$ROOT/tools/cosign/cosign"
else
echo "cosign not found; install or set COSIGN_BIN" >&2
exit 1
fi
fi
# Resolve key
TMP_KEY=""
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
KEY_FILE="$COSIGN_KEY_FILE"
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
TMP_KEY="$(mktemp)"
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
chmod 600 "$TMP_KEY"
KEY_FILE="$TMP_KEY"
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
KEY_FILE="$ROOT/tools/cosign/cosign.key"
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
else
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
exit 2
fi
OUT_BASE="${OUT_DIR:-$ROOT/evidence-locker/signals/2025-12-01}"
# Normalize OUT_BASE to absolute to avoid pushd-relative path issues.
if [[ "$OUT_BASE" != /* ]]; then
OUT_BASE="$ROOT/$OUT_BASE"
fi
mkdir -p "$OUT_BASE"
ARTIFACTS=(
"decay/confidence_decay_config.yaml|stella.ops/confidenceDecayConfig@v1|confidence_decay_config"
"unknowns/unknowns_scoring_manifest.json|stella.ops/unknownsScoringManifest@v1|unknowns_scoring_manifest"
"heuristics/heuristics.catalog.json|stella.ops/heuristicCatalog@v1|heuristics_catalog"
)
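# Prefer v3 bundle output when the detected cosign reports GitVersion v3.x;
# otherwise fall back to detached .dsse signatures (v2 behaviour).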
USE_BUNDLE=0
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
USE_BUNDLE=1
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
USE_BUNDLE=1
fi
pushd "$ROOT/docs/modules/signals" >/dev/null
SHA_FILE="$OUT_BASE/SHA256SUMS"
: > "$SHA_FILE"
for entry in "${ARTIFACTS[@]}"; do
IFS="|" read -r path predicate stem <<<"$entry"
if [[ ! -f "$path" ]]; then
echo "Missing artifact: $path" >&2
exit 3
fi
if (( USE_BUNDLE )); then
bundle="$OUT_BASE/${stem}.sigstore.json"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--bundle "$bundle" \
"$path"
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
else
sig="$OUT_BASE/${stem}.dsse"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--output-signature "$sig" \
"$path"
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
fi
printf "%s %s\n" "$(sha256sum "$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$path")" >> "$SHA_FILE"
done
popd >/dev/null
echo "Signed artifacts written to $OUT_BASE"
if [[ -n "$TMP_KEY" ]]; then
rm -f "$TMP_KEY"
fi

View File

@@ -1,40 +0,0 @@
e8c634db1252725eabfd517f02e6ebf0d07bfba5b4779d7b45ef373ceff07b38 cosign-2.6.0-1.aarch64.rpm
9de55601c34fe7a8eaecb7a2fab93da032dd91d423a04ae6ac17e3f5ed99ec72 cosign-2.6.0-1.armv7hl.rpm
f7281a822306c35f2bd66c055ba6f77a7298de3375a401b12664035b8b323fdf cosign-2.6.0-1.ppc64le.rpm
814b890a07b56bcc6a42dfdf9004fadfe45c112e9b11a0c2f4ebf45568e72b4c cosign-2.6.0-1.riscv64.rpm
19241a09cc065f062d63a9c9ce45ed7c7ff839b93672be4688334b925809d266 cosign-2.6.0-1.s390x.rpm
52709467f072043f24553c6dd1e0f287eeeedb23340dd90a4438b8506df0a0bc cosign-2.6.0-1.x86_64.rpm
83b0fb42bc265e62aef7de49f4979b7957c9b7320d362a9f20046b2f823330f3 cosign-darwin-amd64
3bcbcfc41d89e162e47ba08f70ffeffaac567f663afb3545c0265a5041ce652d cosign-darwin-amd64_2.6.0_darwin_amd64.sbom.json
dea5b83b8b375b99ac803c7bdb1f798963dbeb47789ceb72153202e7f20e8d07 cosign-darwin-arm64
c09a84869eb31fcf334e54d0a9f81bf466ba7444dc975a8fe46b94d742288980 cosign-darwin-arm64_2.6.0_darwin_arm64.sbom.json
ea5c65f99425d6cfbb5c4b5de5dac035f14d09131c1a0ea7c7fc32eab39364f9 cosign-linux-amd64
b4ccc276a5cc326f87d81fd1ae12f12a8dba64214ec368a39401522cccae7f9a cosign-linux-amd64_2.6.0_linux_amd64.sbom.json
641e05c21ce423cd263a49b1f9ffca58e2df022cb12020dcea63f8317c456950 cosign-linux-arm
e09684650882fd721ed22b716ffc399ee11426cd4d1c9b4fec539cba8bf46b86 cosign-linux-arm64
d05d37f6965c3f3c77260171289281dbf88d1f2b07e865bf9d4fd94d9f2fe5c4 cosign-linux-arm64_2.6.0_linux_arm64.sbom.json
1b8b96535a7c30dbecead51ac3f51f559b31d8ab1dd4842562f857ebb1941fa5 cosign-linux-arm_2.6.0_linux_arm.sbom.json
6fa93dbd97664ccce6c3e5221e22e14547b0d202ba829e2b34a3479266b33751 cosign-linux-pivkey-pkcs11key-amd64
17b9803701f5908476d5904492b7a4d1568b86094c3fbb5a06afaa62a6910e8c cosign-linux-pivkey-pkcs11key-amd64_2.6.0_linux_amd64.sbom.json
fbb78394e6fc19a2f34fea4ba03ea796aca84b666b6cdf65f46775f295fc9103 cosign-linux-pivkey-pkcs11key-arm64
35ac308bd9c59844e056f6251ab76184bfc321cb1b3ac337fdb94a9a289d4d44 cosign-linux-pivkey-pkcs11key-arm64_2.6.0_linux_arm64.sbom.json
bd9cc643ec8a517ca66b22221b830dc9d6064bd4f3b76579e4e28b6af5cfba5f cosign-linux-ppc64le
ef04b0e087b95ce1ba7a902ecc962e50bfc974da0bd6b5db59c50880215a3f06 cosign-linux-ppc64le_2.6.0_linux_ppc64le.sbom.json
17c8ff6a5dc48d3802b511c3eb7495da6142397ace28af9a1baa58fb34fad75c cosign-linux-riscv64
2007628a662808f221dc1983d9fba2676df32bb98717f89360cd191c929492ba cosign-linux-riscv64_2.6.0_linux_riscv64.sbom.json
7f7f042e7131950c658ff87079ac9080e7d64392915f06811f06a96238c242c1 cosign-linux-s390x
e22a35083b21552c80bafb747c022aa2aad302c861a392199bc2a8fad22dd6b5 cosign-linux-s390x_2.6.0_linux_s390x.sbom.json
7beb4dd1e19a72c328bbf7c0d7342d744edbf5cbb082f227b2b76e04a21c16ef cosign-windows-amd64.exe
8110eab8c5842caf93cf05dd26f260b6836d93b0263e49e06c1bd22dd5abb82c cosign-windows-amd64.exe_2.6.0_windows_amd64.sbom.json
7713d587f8668ce8f2a48556ee17f47c281cfb90102adfdb7182de62bc016cab cosign_2.6.0_aarch64.apk
c51b6437559624ef88b29a1ddd88d0782549b585dbbae0a5cb2fcc02bec72687 cosign_2.6.0_amd64.deb
438baaa35101e9982081c6450a44ea19e04cd4d2aba283ed52242e451736990b cosign_2.6.0_arm64.deb
8dc33858a68e18bf0cc2cb18c2ba0a7d829aa59ad3125366b24477e7d6188024 cosign_2.6.0_armhf.deb
88397077deee943690033276eef5206f7c60a30ea5f6ced66a51601ce79d0d0e cosign_2.6.0_armv7.apk
ca45b82cde86634705187f2361363e67c70c23212283594ff942d583a543f9dd cosign_2.6.0_ppc64el.deb
497f1a6d3899493153a4426286e673422e357224f3f931fdc028455db2fb5716 cosign_2.6.0_ppc64le.apk
1e37d9c3d278323095899897236452858c0bc49b52a48c3bcf8ce7a236bf2ee1 cosign_2.6.0_riscv64.apk
f2f65cf3d115fa5b25c61f6692449df2f4da58002a99e3efacc52a848fd3bca8 cosign_2.6.0_riscv64.deb
af0a62231880fd3495bbd1f5d4c64384034464b80930b7ffcd819d7152e75759 cosign_2.6.0_s390x.apk
e282d9337e4ba163a48ff1175855a6f6d6fbb562bc6c576c93944a6126984203 cosign_2.6.0_s390x.deb
382a842b2242656ecd442ae461c4dc454a366ed50d41a2dafcce8b689bfd03e4 cosign_2.6.0_x86_64.apk

View File

@@ -1,28 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic hashing helper for DevPortal SDK snippet packs and offline bundle artefacts.
# Usage:
# SNIPPET_DIR=src/DevPortal/StellaOps.DevPortal.Site/snippets \
# OUT_SHA=src/DevPortal/StellaOps.DevPortal.Site/SHA256SUMS.devportal-stubs \
# tools/devportal/hash-snippets.sh
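# Verify afterwards from the repo root (recorded paths are repo-relative):
#   sha256sum -c src/DevPortal/StellaOps.DevPortal.Site/SHA256SUMS.devportal-stubs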
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
SNIPPET_DIR="${SNIPPET_DIR:-$ROOT/src/DevPortal/StellaOps.DevPortal.Site/snippets}"
OUT_SHA="${OUT_SHA:-$ROOT/src/DevPortal/StellaOps.DevPortal.Site/SHA256SUMS.devportal-stubs}"
# Normalize to absolute paths so the relative values shown in the usage header
# still work after the cd below.
if [[ "$SNIPPET_DIR" != /* ]]; then
SNIPPET_DIR="$ROOT/$SNIPPET_DIR"
fi
if [[ "$OUT_SHA" != /* ]]; then
OUT_SHA="$ROOT/$OUT_SHA"
fi
if [[ ! -d "$SNIPPET_DIR" ]]; then
echo "Snippet dir not found: $SNIPPET_DIR" >&2
exit 1
fi
mkdir -p "$(dirname "$OUT_SHA")"
: > "$OUT_SHA"
cd "$SNIPPET_DIR"
find . -type f -print0 | sort -z | while IFS= read -r -d '' f; do
sha=$(sha256sum "$f" | cut -d' ' -f1)
# Strip the leading "./" from find output; two-space separator for `sha256sum -c`.
printf "%s  %s\n" "$sha" "${SNIPPET_DIR#"$ROOT"/}/${f#./}" >> "$OUT_SHA"
echo "hashed $f"
done
echo "Hashes written to $OUT_SHA"

View File

@@ -1,27 +0,0 @@
#!/usr/bin/env bash
# Thin wrapper to strip the harness-injected "workdir:" switch that breaks dotnet/msbuild parsing.
set -euo pipefail
real_dotnet="$(command -v dotnet)"
if [[ -z "${real_dotnet}" ]]; then
echo "dotnet executable not found in PATH" >&2
exit 1
fi
filtered_args=()
for arg in "$@"; do
# Drop any argument that is exactly or contains the injected workdir switch.
if [[ "${arg}" == *"workdir:"* ]]; then
# If the arg also contains other comma-separated parts, keep the non-workdir pieces.
IFS=',' read -r -a parts <<< "${arg}"
for part in "${parts[@]}"; do
[[ "${part}" == *"workdir:"* || -z "${part}" ]] && continue
filtered_args+=("${part}")
done
continue
fi
filtered_args+=("${arg}")
done
exec "${real_dotnet}" "${filtered_args[@]}"

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env bash
# CI runner profile for Concelier /linksets tests without harness workdir injection.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
PROJECT="$ROOT_DIR/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj"
DOTNET_EXE=$(command -v dotnet || true)
if [[ -z "$DOTNET_EXE" ]]; then
echo "dotnet not found" >&2; exit 1; fi
export VSTEST_DISABLE_APPDOMAIN=1
export DOTNET_CLI_UI_LANGUAGE=en
export DOTNET_CLI_TELEMETRY_OPTOUT=1
# Prefer the curated offline feed to avoid network flakiness during CI.
export NUGET_PACKAGES="${ROOT_DIR}/.nuget/packages"
RESTORE_SOURCE="--source ${ROOT_DIR}/.nuget/packages --ignore-failed-sources"
# Ensure Mongo2Go can find OpenSSL 1.1 (needed by bundled mongod)
OPENSSL11_DIR="$ROOT_DIR/tools/openssl1.1/lib"
if [[ -d "$OPENSSL11_DIR" ]]; then
export LD_LIBRARY_PATH="$OPENSSL11_DIR:${LD_LIBRARY_PATH:-}"
fi
RESULTS_DIR="$ROOT_DIR/out/test-results/linksets"
mkdir -p "$RESULTS_DIR"
# Restore explicitly against offline cache, then run tests without restoring again.
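# (RESTORE_SOURCE is intentionally unquoted so its flags split into separate arguments.)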
"$ROOT_DIR/tools/dotnet-filter.sh" restore "$PROJECT" $RESTORE_SOURCE
exec "$ROOT_DIR/tools/dotnet-filter.sh" test "$PROJECT" --no-restore --filter "Linksets" --results-directory "$RESULTS_DIR" --logger "trx;LogFileName=linksets.trx"

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Stub lint: enforce docs tag placeholder until full checks land.
if git diff --cached --name-only | grep -q '^docs/'; then
echo "[stub] docs touched: ensure commit includes 'docs:' trailer (value or 'n/a')"
fi
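# Example of adding the requested trailer (git >= 2.32 supports --trailer):
#   git commit -m "docs: update cosign notes" --trailer "docs:n/a"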

View File

@@ -1,31 +0,0 @@
AWSSDK.S3|3.7.305.6
CycloneDX.Core|10.0.1
Google.Protobuf|3.27.2
Grpc.Net.Client|2.65.0
Grpc.Tools|2.65.0
Microsoft.Data.Sqlite|9.0.0-rc.1.24451.1
Microsoft.Extensions.Configuration.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Configuration.Abstractions|9.0.0
Microsoft.Extensions.Configuration.Binder|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.DependencyInjection.Abstractions|9.0.0
Microsoft.Extensions.Diagnostics.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Diagnostics.HealthChecks|10.0.0-rc.2.25502.107
Microsoft.Extensions.Hosting.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http.Polly|10.0.0-rc.2.25502.107
Microsoft.Extensions.Http|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Logging.Abstractions|9.0.0
Microsoft.Extensions.Options.ConfigurationExtensions|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|10.0.0-rc.2.25502.107
Microsoft.Extensions.Options|9.0.0
MongoDB.Driver|3.5.0
NATS.Client.Core|2.0.0
NATS.Client.JetStream|2.0.0
RoaringBitmap|0.0.9
Serilog.AspNetCore|8.0.1
Serilog.Extensions.Hosting|8.0.0
Serilog.Sinks.Console|5.0.1
StackExchange.Redis|2.7.33
System.Text.Json|10.0.0-preview.7.25380.108

View File

@@ -1,14 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
<DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
<EnableDefaultItems>false</EnableDefaultItems>
</PropertyGroup>
<ItemGroup>
<PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[9.0.0]" />
<PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[9.0.0]" />
<PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[9.0.0]" />
<PackageDownload Include="Microsoft.Extensions.Options" Version="[9.0.0]" />
</ItemGroup>
</Project>

View File

@@ -1,45 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<RestorePackagesPath>../../local-nugets/packages</RestorePackagesPath>
<DisableImplicitFrameworkReferences>true</DisableImplicitFrameworkReferences>
<EnableDefaultItems>false</EnableDefaultItems>
</PropertyGroup>
<ItemGroup>
<PackageDownload Include="AWSSDK.Core" Version="[4.0.1.3]" />
<PackageDownload Include="AWSSDK.KeyManagementService" Version="[4.0.6]" />
<PackageDownload Include="AWSSDK.S3" Version="[3.7.305.6]" />
<PackageDownload Include="CycloneDX.Core" Version="[10.0.2]" />
<PackageDownload Include="Google.Protobuf" Version="[3.27.2]" />
<PackageDownload Include="Grpc.Net.Client" Version="[2.65.0]" />
<PackageDownload Include="Grpc.Tools" Version="[2.65.0]" />
<PackageDownload Include="Microsoft.Data.Sqlite" Version="[9.0.0-rc.1.24451.1]" />
<PackageDownload Include="Microsoft.Extensions.Configuration.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Configuration.Binder" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Diagnostics.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Hosting.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Http.Polly" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Http" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Logging.Abstractions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="Microsoft.Extensions.Options" Version="[10.0.0-rc.2.25502.107]" />
<PackageDownload Include="NATS.Client.Core" Version="[2.0.0]" />
<PackageDownload Include="NATS.Client.JetStream" Version="[2.0.0]" />
<PackageDownload Include="RoaringBitmap" Version="[0.0.9]" />
<PackageDownload Include="Serilog.AspNetCore" Version="[8.0.1]" />
<PackageDownload Include="Serilog.Extensions.Hosting" Version="[8.0.0]" />
<PackageDownload Include="Serilog.Sinks.Console" Version="[5.0.1]" />
<PackageDownload Include="StackExchange.Redis" Version="[2.8.37]" />
<PackageDownload Include="System.Text.Json" Version="[10.0.0-preview.7.25380.108]" />
<PackageDownload Include="Google.Api.CommonProtos" Version="[2.17.0]" />
<PackageDownload Include="Google.Api.Gax" Version="[4.11.0]" />
<PackageDownload Include="Google.Api.Gax.Grpc" Version="[4.11.0]" />
<PackageDownload Include="Google.Api.Gax.Grpc.GrpcCore" Version="[4.11.0]" />
<PackageDownload Include="Google.Apis" Version="[1.69.0]" />
<PackageDownload Include="Google.Apis.Auth" Version="[1.69.0]" />
<PackageDownload Include="Google.Apis.Core" Version="[1.64.0]" />
</ItemGroup>
</Project>

View File

@@ -1,28 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
PACKAGES_DIR="$ROOT/.nuget/packages"
TMP_DIR="$ROOT/tmp/sbomservice-feed"
PROJECT="$TMP_DIR/probe.csproj"
mkdir -p "$TMP_DIR" "$PACKAGES_DIR"
cat > "$PROJECT" <<'CS'
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" />
<PackageReference Include="Pkcs11Interop" Version="4.1.0" />
</ItemGroup>
</Project>
CS
dotnet restore "$PROJECT" \
--packages "$PACKAGES_DIR" \
--ignore-failed-sources \
/p:RestoreUseStaticGraphEvaluation=true \
/p:RestorePackagesWithLockFile=false
find "$PACKAGES_DIR" -name '*.nupkg' -maxdepth 5 -type f -printf '%P\n' | sort

Binary file not shown.

View File

@@ -1,23 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Runs only the Airgap bundle determinism tests for Concelier WebService.
# Intended for CI runners with warmed NuGet cache; keeps outputs deterministic.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
RESULTS_DIR="${RESULTS_DIR:-${ROOT_DIR}/TestResults}"
mkdir -p "${RESULTS_DIR}"
pushd "${ROOT_DIR}" >/dev/null
dotnet test \
src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj \
-c Release \
--filter AirgapBundleBuilderTests \
--logger "trx;LogFileName=airgap-bundle.trx" \
-- ResultsDirectory="${RESULTS_DIR}"
popd >/dev/null
echo "Airgap bundle tests complete. TRX: ${RESULTS_DIR}/airgap-bundle.trx"

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Minimal helper to run the LNM-21-002/003-related slices with TRX output.
# Use a clean environment to reduce "invalid test source" issues seen locally.
export DOTNET_CLI_TELEMETRY_OPTOUT=1
export DOTNET_ROLL_FORWARD=Major
root_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
pushd "$root_dir" >/dev/null
dotnet test \
src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj \
--filter "AdvisoryObservationAggregationTests" \
--logger "trx;LogFileName=core-linksets.trx"
dotnet test \
src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj \
--filter "ConcelierMongoLinksetStoreTests" \
--logger "trx;LogFileName=storage-linksets.trx"
popd >/dev/null

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
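# Stages the 2025-12-05 Signals DSSE bundles plus their source artefacts,
# re-verifies SHA256SUMS, builds a deterministic tar, and uploads it to the
# Evidence Locker. Requires EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN.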
STAGED_DIR="evidence-locker/signals/2025-12-05"
MODULE_ROOT="docs/modules/signals"
TAR_OUT="/tmp/signals-evidence.tar"
if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
exit 1
fi
tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
rsync -a --relative \
"$STAGED_DIR/SHA256SUMS" \
"$STAGED_DIR/confidence_decay_config.sigstore.json" \
"$STAGED_DIR/unknowns_scoring_manifest.sigstore.json" \
"$STAGED_DIR/heuristics_catalog.sigstore.json" \
"$MODULE_ROOT/decay/confidence_decay_config.yaml" \
"$MODULE_ROOT/unknowns/unknowns_scoring_manifest.json" \
"$MODULE_ROOT/heuristics/heuristics.catalog.json" \
"$tmpdir/"
pushd "$tmpdir/$STAGED_DIR" >/dev/null
sha256sum --check SHA256SUMS
popd >/dev/null
# Build deterministic tarball
pushd "$tmpdir" >/dev/null
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
-cf "$TAR_OUT" .
popd >/dev/null
sha256sum "$TAR_OUT"
curl --retry 3 --retry-delay 2 --fail \
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
-X PUT "$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar" \
--data-binary "@$TAR_OUT"
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/signals/2025-12-05/"

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
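# Usage: $0 [tar-path]   (default: evidence-locker/signals/2025-12-05/signals-evidence.tar)
# Verifies the tar against EXPECTED_SHA (override via env; set it empty to skip
# the outer check), then extracts and re-checks the inner SHA256SUMS.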
TAR_PATH=${1:-evidence-locker/signals/2025-12-05/signals-evidence.tar}
EXPECTED_SHA=${EXPECTED_SHA:-a17910b8e90aaf44d4546057db22cdc791105dd41feb14f0c9b7c8bac5392e0d}
if [[ ! -f "$TAR_PATH" ]]; then
echo "missing tar: $TAR_PATH" >&2
exit 1
fi
sha=$(sha256sum "$TAR_PATH" | awk '{print $1}')
if [[ -n "$EXPECTED_SHA" && "$sha" != "$EXPECTED_SHA" ]]; then
echo "sha mismatch: got $sha expected $EXPECTED_SHA" >&2
exit 2
fi
tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
tar -xf "$TAR_PATH" -C "$tmpdir"
(cd "$tmpdir/evidence-locker/signals/2025-12-05" && sha256sum --check SHA256SUMS)
echo "OK: tar hash=${sha} (expected=${EXPECTED_SHA:-<not set>}); inner SHA256SUMS verified"

View File

@@ -1,143 +0,0 @@
// Framework detection for Go projects
package main
import (
"golang.org/x/tools/go/ssa"
"strings"
)
// FrameworkPattern defines detection patterns for a framework
type FrameworkPattern struct {
Name string
Packages []string
EntrypointFns []string
HandlerType string
}
// Known Go web frameworks
var frameworkPatterns = []FrameworkPattern{
{
Name: "net/http",
Packages: []string{"net/http"},
EntrypointFns: []string{"HandleFunc", "Handle", "ListenAndServe"},
HandlerType: "http_handler",
},
{
Name: "gin",
Packages: []string{"github.com/gin-gonic/gin"},
EntrypointFns: []string{"GET", "POST", "PUT", "DELETE", "PATCH", "Run"},
HandlerType: "http_handler",
},
{
Name: "echo",
Packages: []string{"github.com/labstack/echo"},
EntrypointFns: []string{"GET", "POST", "PUT", "DELETE", "PATCH", "Start"},
HandlerType: "http_handler",
},
{
Name: "fiber",
Packages: []string{"github.com/gofiber/fiber"},
EntrypointFns: []string{"Get", "Post", "Put", "Delete", "Patch", "Listen"},
HandlerType: "http_handler",
},
{
Name: "chi",
Packages: []string{"github.com/go-chi/chi"},
EntrypointFns: []string{"Get", "Post", "Put", "Delete", "Patch", "Route"},
HandlerType: "http_handler",
},
{
Name: "mux",
Packages: []string{"github.com/gorilla/mux"},
EntrypointFns: []string{"HandleFunc", "Handle", "NewRouter"},
HandlerType: "http_handler",
},
{
Name: "grpc",
Packages: []string{"google.golang.org/grpc"},
EntrypointFns: []string{"RegisterServer", "NewServer"},
HandlerType: "grpc_method",
},
{
Name: "cobra",
Packages: []string{"github.com/spf13/cobra"},
EntrypointFns: []string{"Execute", "AddCommand", "Run"},
HandlerType: "cli_command",
},
}
// DetectFramework checks if a function is related to a known framework
func DetectFramework(fn *ssa.Function) *FrameworkPattern {
if fn.Pkg == nil {
return nil
}
pkgPath := fn.Pkg.Pkg.Path()
for _, pattern := range frameworkPatterns {
for _, pkg := range pattern.Packages {
if strings.Contains(pkgPath, pkg) {
return &pattern
}
}
}
return nil
}
// DetectFrameworkEntrypoint checks if a call is a framework route registration
func DetectFrameworkEntrypoint(call *ssa.Call) *Entrypoint {
callee := call.Call.StaticCallee()
if callee == nil || callee.Pkg == nil {
return nil
}
pkgPath := callee.Pkg.Pkg.Path()
fnName := callee.Name()
for _, pattern := range frameworkPatterns {
for _, pkg := range pattern.Packages {
if strings.Contains(pkgPath, pkg) {
for _, epFn := range pattern.EntrypointFns {
if fnName == epFn {
nodeID := makeSymbolID(callee)
return &Entrypoint{
ID: nodeID,
Type: pattern.HandlerType,
}
}
}
}
}
}
return nil
}
// IsHTTPHandler checks if a function signature matches http.Handler
func IsHTTPHandler(fn *ssa.Function) bool {
sig := fn.Signature
// Check for (http.ResponseWriter, *http.Request) signature
if sig.Params().Len() == 2 {
p0 := sig.Params().At(0).Type().String()
p1 := sig.Params().At(1).Type().String()
if strings.Contains(p0, "ResponseWriter") && strings.Contains(p1, "Request") {
return true
}
}
// Check for gin.Context, echo.Context, fiber.Ctx, etc.
if sig.Params().Len() >= 1 {
p0 := sig.Params().At(0).Type().String()
if strings.Contains(p0, "gin.Context") ||
strings.Contains(p0, "echo.Context") ||
strings.Contains(p0, "fiber.Ctx") ||
strings.Contains(p0, "chi.") {
return true
}
}
return false
}

View File

@@ -1,12 +0,0 @@
module github.com/stella-ops/stella-callgraph-go
go 1.21
require (
golang.org/x/tools v0.16.0
)
require (
golang.org/x/mod v0.14.0 // indirect
golang.org/x/sys v0.15.0 // indirect
)

View File

@@ -1,395 +0,0 @@
// stella-callgraph-go
// Call graph extraction tool for Go projects using SSA analysis.
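// Example invocation (binary name illustrative; flags defined in main below):
//   stella-callgraph-go -path ./myproject -algo rta -json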
package main
import (
"encoding/json"
"flag"
"fmt"
"os"
"path/filepath"
"strings"
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/callgraph/cha"
"golang.org/x/tools/go/callgraph/rta"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
)
// CallGraphResult is the output structure
type CallGraphResult struct {
Module string `json:"module"`
Nodes []Node `json:"nodes"`
Edges []Edge `json:"edges"`
Entrypoints []Entrypoint `json:"entrypoints"`
}
// Node represents a function in the call graph
type Node struct {
ID string `json:"id"`
Package string `json:"package"`
Name string `json:"name"`
Signature string `json:"signature"`
Position Position `json:"position"`
Visibility string `json:"visibility"`
Annotations []string `json:"annotations"`
}
// Edge represents a call between functions
type Edge struct {
From string `json:"from"`
To string `json:"to"`
Kind string `json:"kind"`
Site Position `json:"site"`
}
// Position in source code
type Position struct {
File string `json:"file"`
Line int `json:"line"`
Column int `json:"column"`
}
// Entrypoint represents an entry point function
type Entrypoint struct {
ID string `json:"id"`
Type string `json:"type"`
Route string `json:"route,omitempty"`
Method string `json:"method,omitempty"`
}
func main() {
var (
projectPath string
algorithm string
jsonFormat bool
)
flag.StringVar(&projectPath, "path", ".", "Path to Go project")
flag.StringVar(&algorithm, "algo", "cha", "Call graph algorithm: cha, rta, or static")
flag.BoolVar(&jsonFormat, "json", false, "Output formatted JSON")
flag.Parse()
if len(flag.Args()) > 0 {
projectPath = flag.Args()[0]
}
result, err := analyzeProject(projectPath, algorithm)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
}
var output []byte
if jsonFormat {
output, err = json.MarshalIndent(result, "", " ")
} else {
output, err = json.Marshal(result)
}
if err != nil {
fmt.Fprintf(os.Stderr, "Error encoding JSON: %v\n", err)
os.Exit(1)
}
fmt.Println(string(output))
}
func analyzeProject(projectPath string, algorithm string) (*CallGraphResult, error) {
absPath, err := filepath.Abs(projectPath)
if err != nil {
return nil, fmt.Errorf("invalid path: %w", err)
}
// Load packages
cfg := &packages.Config{
Mode: packages.LoadAllSyntax,
Dir: absPath,
}
pkgs, err := packages.Load(cfg, "./...")
if err != nil {
return nil, fmt.Errorf("failed to load packages: %w", err)
}
if len(pkgs) == 0 {
return nil, fmt.Errorf("no packages found")
}
// Check for errors
for _, pkg := range pkgs {
if len(pkg.Errors) > 0 {
// Log but continue
for _, e := range pkg.Errors {
fmt.Fprintf(os.Stderr, "Warning: %v\n", e)
}
}
}
// Build SSA
prog, _ := ssautil.AllPackages(pkgs, ssa.SanityCheckFunctions)
prog.Build()
// Extract module name
moduleName := extractModuleName(absPath, pkgs)
// Build call graph using the specified algorithm
var cg *callgraph.Graph
switch algorithm {
case "rta":
// RTA (Rapid Type Analysis) - more precise for programs with main
mains := ssautil.MainPackages(prog.AllPackages())
if len(mains) > 0 {
var roots []*ssa.Function
for _, main := range mains {
if mainFn := main.Func("main"); mainFn != nil {
roots = append(roots, mainFn)
}
if initFn := main.Func("init"); initFn != nil {
roots = append(roots, initFn)
}
}
if len(roots) > 0 {
rtaResult := rta.Analyze(roots, true)
cg = rtaResult.CallGraph
}
}
if cg == nil {
// Fall back to CHA if no main packages
cg = cha.CallGraph(prog)
}
case "cha":
// CHA (Class Hierarchy Analysis) - sound but less precise
cg = cha.CallGraph(prog)
default:
// Default to CHA
cg = cha.CallGraph(prog)
}
// Collect nodes and edges from call graph
nodes := make([]Node, 0)
edges := make([]Edge, 0)
entrypoints := make([]Entrypoint, 0)
seenNodes := make(map[string]bool)
seenEdges := make(map[string]bool)
// If we have a call graph, use it for edges
if cg != nil {
callgraph.GraphVisitEdges(cg, func(edge *callgraph.Edge) error {
if edge.Caller.Func == nil || edge.Callee.Func == nil {
return nil
}
callerID := makeSymbolID(edge.Caller.Func)
calleeID := makeSymbolID(edge.Callee.Func)
// Add caller node if not seen
if !seenNodes[callerID] {
seenNodes[callerID] = true
nodes = append(nodes, makeNodeFromFunction(prog, edge.Caller.Func))
}
// Add callee node if not seen
if !seenNodes[calleeID] {
seenNodes[calleeID] = true
nodes = append(nodes, makeNodeFromFunction(prog, edge.Callee.Func))
}
// Add edge
edgeKey := fmt.Sprintf("%s|%s", callerID, calleeID)
if !seenEdges[edgeKey] {
seenEdges[edgeKey] = true
kind := "direct"
if edge.Site != nil {
if _, ok := edge.Site.(*ssa.Go); ok {
kind = "goroutine"
} else if _, ok := edge.Site.(*ssa.Defer); ok {
kind = "defer"
}
}
var site Position
if edge.Site != nil {
pos := prog.Fset.Position(edge.Site.Pos())
site = Position{
File: pos.Filename,
Line: pos.Line,
}
}
edges = append(edges, Edge{
From: callerID,
To: calleeID,
Kind: kind,
Site: site,
})
}
return nil
})
}
// Also scan all functions to find any missing nodes and entrypoints
for _, pkg := range prog.AllPackages() {
if pkg == nil {
continue
}
for _, member := range pkg.Members {
fn, ok := member.(*ssa.Function)
if !ok {
continue
}
nodeID := makeSymbolID(fn)
if !seenNodes[nodeID] {
seenNodes[nodeID] = true
nodes = append(nodes, makeNodeFromFunction(prog, fn))
}
// Check for entrypoints
if ep := detectEntrypoint(fn); ep != nil {
entrypoints = append(entrypoints, *ep)
}
}
}
return &CallGraphResult{
Module: moduleName,
Nodes: nodes,
Edges: edges,
Entrypoints: entrypoints,
}, nil
}
func makeNodeFromFunction(prog *ssa.Program, fn *ssa.Function) Node {
pos := prog.Fset.Position(fn.Pos())
pkgPath := ""
if fn.Pkg != nil {
pkgPath = fn.Pkg.Pkg.Path()
}
return Node{
ID: makeSymbolID(fn),
Package: pkgPath,
Name: fn.Name(),
Signature: fn.Signature.String(),
Position: Position{
File: pos.Filename,
Line: pos.Line,
Column: pos.Column,
},
Visibility: getVisibility(fn.Name()),
Annotations: detectAnnotations(fn),
}
}
func extractModuleName(projectPath string, pkgs []*packages.Package) string {
// Try to get from go.mod
goModPath := filepath.Join(projectPath, "go.mod")
if data, err := os.ReadFile(goModPath); err == nil {
lines := strings.Split(string(data), "\n")
for _, line := range lines {
if strings.HasPrefix(line, "module ") {
return strings.TrimSpace(strings.TrimPrefix(line, "module "))
}
}
}
// Fall back to first package path
if len(pkgs) > 0 {
return pkgs[0].PkgPath
}
return filepath.Base(projectPath)
}
func makeSymbolID(fn *ssa.Function) string {
if fn.Pkg == nil {
return fmt.Sprintf("go:external/%s", fn.Name())
}
pkg := fn.Pkg.Pkg.Path()
if fn.Signature.Recv() != nil {
// Method
recv := fn.Signature.Recv().Type().String()
recv = strings.TrimPrefix(recv, "*")
if idx := strings.LastIndex(recv, "."); idx >= 0 {
recv = recv[idx+1:]
}
return fmt.Sprintf("go:%s.%s.%s", pkg, recv, fn.Name())
}
return fmt.Sprintf("go:%s.%s", pkg, fn.Name())
}
func getVisibility(name string) string {
if len(name) == 0 {
return "private"
}
if name[0] >= 'A' && name[0] <= 'Z' {
return "public"
}
return "private"
}
func detectAnnotations(fn *ssa.Function) []string {
// Go doesn't have annotations, but we can detect patterns
annotations := make([]string, 0)
// Detect handler patterns from naming
if strings.HasSuffix(fn.Name(), "Handler") {
annotations = append(annotations, "handler")
}
if strings.HasSuffix(fn.Name(), "Middleware") {
annotations = append(annotations, "middleware")
}
return annotations
}
func detectEntrypoint(fn *ssa.Function) *Entrypoint {
name := fn.Name()
pkg := ""
if fn.Pkg != nil {
pkg = fn.Pkg.Pkg.Path()
}
nodeID := makeSymbolID(fn)
// main.main
if name == "main" && strings.HasSuffix(pkg, "main") {
return &Entrypoint{
ID: nodeID,
Type: "cli_command",
}
}
// init functions
if name == "init" {
return &Entrypoint{
ID: nodeID,
Type: "background_job",
}
}
// HTTP handler patterns
if strings.HasSuffix(name, "Handler") || strings.Contains(name, "Handle") {
return &Entrypoint{
ID: nodeID,
Type: "http_handler",
}
}
// gRPC patterns
if strings.HasSuffix(name, "Server") && strings.HasPrefix(name, "Register") {
return &Entrypoint{
ID: nodeID,
Type: "grpc_method",
}
}
return nil
}

View File

@@ -1,178 +0,0 @@
// -----------------------------------------------------------------------------
// framework-detect.js
// Framework detection patterns for JavaScript/TypeScript projects.
// -----------------------------------------------------------------------------
/**
* Framework detection patterns
*/
export const frameworkPatterns = {
express: {
packageNames: ['express'],
patterns: [
/const\s+\w+\s*=\s*require\(['"]express['"]\)/,
/import\s+\w+\s+from\s+['"]express['"]/,
/app\.(get|post|put|delete|patch)\s*\(/
],
entrypointType: 'http_handler'
},
fastify: {
packageNames: ['fastify'],
patterns: [
/require\(['"]fastify['"]\)/,
/import\s+\w+\s+from\s+['"]fastify['"]/,
/fastify\.(get|post|put|delete|patch)\s*\(/
],
entrypointType: 'http_handler'
},
koa: {
packageNames: ['koa', '@koa/router'],
patterns: [
/require\(['"]koa['"]\)/,
/import\s+\w+\s+from\s+['"]koa['"]/,
/router\.(get|post|put|delete|patch)\s*\(/
],
entrypointType: 'http_handler'
},
hapi: {
packageNames: ['@hapi/hapi'],
patterns: [
/require\(['"]@hapi\/hapi['"]\)/,
/import\s+\w+\s+from\s+['"]@hapi\/hapi['"]/,
/server\.route\s*\(/
],
entrypointType: 'http_handler'
},
nestjs: {
packageNames: ['@nestjs/core', '@nestjs/common'],
patterns: [
/@Controller\s*\(/,
/@Get\s*\(/,
/@Post\s*\(/,
/@Put\s*\(/,
/@Delete\s*\(/,
/@Patch\s*\(/
],
entrypointType: 'http_handler'
},
socketio: {
packageNames: ['socket.io'],
patterns: [
/require\(['"]socket\.io['"]\)/,
/import\s+\w+\s+from\s+['"]socket\.io['"]/,
/io\.on\s*\(\s*['"]connection['"]/,
/socket\.on\s*\(/
],
entrypointType: 'websocket_handler'
},
awsLambda: {
packageNames: ['aws-lambda', '@types/aws-lambda'],
patterns: [
/exports\.handler\s*=/,
/export\s+(const|async function)\s+handler/,
/module\.exports\.handler/,
/APIGatewayProxyHandler/,
/APIGatewayEvent/
],
entrypointType: 'lambda'
},
azureFunctions: {
packageNames: ['@azure/functions'],
patterns: [
/require\(['"]@azure\/functions['"]\)/,
/import\s+\w+\s+from\s+['"]@azure\/functions['"]/,
/app\.(http|timer|queue|blob)\s*\(/
],
entrypointType: 'cloud_function'
},
gcpFunctions: {
packageNames: ['@google-cloud/functions-framework'],
patterns: [
/require\(['"]@google-cloud\/functions-framework['"]\)/,
/functions\.(http|cloudEvent)\s*\(/
],
entrypointType: 'cloud_function'
},
electron: {
packageNames: ['electron'],
patterns: [
/require\(['"]electron['"]\)/,
/import\s+\{[^}]*\}\s+from\s+['"]electron['"]/,
/ipcMain\.on\s*\(/,
/ipcRenderer\.on\s*\(/
],
entrypointType: 'event_handler'
},
grpc: {
packageNames: ['@grpc/grpc-js', 'grpc'],
patterns: [
/require\(['"]@grpc\/grpc-js['"]\)/,
/addService\s*\(/,
/loadPackageDefinition\s*\(/
],
entrypointType: 'grpc_method'
}
};
/**
* Detect frameworks from package.json dependencies
* @param {object} packageJson
* @returns {string[]}
*/
export function detectFrameworks(packageJson) {
const detected = [];
const allDeps = {
...packageJson.dependencies,
...packageJson.devDependencies
};
for (const [framework, config] of Object.entries(frameworkPatterns)) {
for (const pkgName of config.packageNames) {
if (allDeps[pkgName]) {
detected.push(framework);
break;
}
}
}
return detected;
}
/**
* Detect frameworks from source code patterns
* @param {string} content
* @returns {string[]}
*/
export function detectFrameworksFromCode(content) {
const detected = [];
for (const [framework, config] of Object.entries(frameworkPatterns)) {
for (const pattern of config.patterns) {
if (pattern.test(content)) {
detected.push(framework);
break;
}
}
}
return detected;
}
/**
* Get entrypoint type for a detected framework
* @param {string} framework
* @returns {string}
*/
export function getEntrypointType(framework) {
return frameworkPatterns[framework]?.entrypointType || 'unknown';
}

View File

@@ -1,478 +0,0 @@
#!/usr/bin/env node
// -----------------------------------------------------------------------------
// stella-callgraph-node
// Call graph extraction tool for JavaScript/TypeScript projects.
// Uses Babel AST for static analysis.
// -----------------------------------------------------------------------------
import { readFileSync, readdirSync, statSync, existsSync } from 'fs';
import { join, extname, relative, dirname } from 'path';
import { parse } from '@babel/parser';
import traverse from '@babel/traverse';
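// Note: @babel/traverse is published as CommonJS, so under this ESM import the
// callable lives on the `.default` property (hence `traverse.default(...)` below).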
import { buildSinkLookup, matchSink } from './sink-detect.js';
// Pre-build sink lookup for fast detection
const sinkLookup = buildSinkLookup();
/**
* Main entry point
*/
async function main() {
const args = process.argv.slice(2);
if (args.length === 0 || args.includes('--help')) {
printUsage();
process.exit(0);
}
const targetPath = args[0];
const outputFormat = args.includes('--json') ? 'json' : 'ndjson';
try {
const result = await analyzeProject(targetPath);
if (outputFormat === 'json') {
console.log(JSON.stringify(result, null, 2));
} else {
console.log(JSON.stringify(result));
}
} catch (error) {
console.error(`Error: ${error.message}`);
process.exit(1);
}
}
function printUsage() {
console.log(`
stella-callgraph-node - JavaScript/TypeScript call graph extractor
Usage:
stella-callgraph-node <project-path> [options]
Options:
--json Output formatted JSON instead of NDJSON
--help Show this help message
Example:
stella-callgraph-node ./my-express-app --json
`);
}
/**
* Analyze a JavaScript/TypeScript project
* @param {string} projectPath
* @returns {Promise<CallGraphResult>}
*/
async function analyzeProject(projectPath) {
const packageJsonPath = join(projectPath, 'package.json');
let packageInfo = { name: 'unknown', version: '0.0.0' };
if (existsSync(packageJsonPath)) {
const content = readFileSync(packageJsonPath, 'utf-8');
packageInfo = JSON.parse(content);
}
const sourceFiles = findSourceFiles(projectPath);
const nodes = [];
const edges = [];
const entrypoints = [];
const sinks = [];
for (const file of sourceFiles) {
try {
const content = readFileSync(file, 'utf-8');
const relativePath = relative(projectPath, file);
const result = analyzeFile(content, relativePath, packageInfo.name);
nodes.push(...result.nodes);
edges.push(...result.edges);
entrypoints.push(...result.entrypoints);
sinks.push(...result.sinks);
} catch (error) {
// Skip files that can't be parsed
console.error(`Warning: Could not parse ${file}: ${error.message}`);
}
}
return {
module: packageInfo.name,
version: packageInfo.version,
nodes: deduplicateNodes(nodes),
edges: deduplicateEdges(edges),
entrypoints,
sinks: deduplicateSinks(sinks)
};
}
/**
* Find all JavaScript/TypeScript source files
* @param {string} dir
* @returns {string[]}
*/
function findSourceFiles(dir) {
const files = [];
const excludeDirs = ['node_modules', 'dist', 'build', '.git', 'coverage', '__tests__'];
const extensions = ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs'];
function walk(currentDir) {
const entries = readdirSync(currentDir);
for (const entry of entries) {
const fullPath = join(currentDir, entry);
const stat = statSync(fullPath);
if (stat.isDirectory()) {
if (!excludeDirs.includes(entry) && !entry.startsWith('.')) {
walk(fullPath);
}
} else if (stat.isFile()) {
const ext = extname(entry);
if (extensions.includes(ext) && !entry.includes('.test.') && !entry.includes('.spec.')) {
files.push(fullPath);
}
}
}
}
walk(dir);
return files.sort();
}
/**
* Analyze a single source file
* @param {string} content
* @param {string} relativePath
* @param {string} packageName
* @returns {{ nodes: any[], edges: any[], entrypoints: any[] }}
*/
function analyzeFile(content, relativePath, packageName) {
const nodes = [];
const edges = [];
const entrypoints = [];
const sinks = [];
const moduleBase = relativePath.replace(/\.[^.]+$/, '').replace(/\\/g, '/');
// Parse with Babel
const ast = parse(content, {
sourceType: 'module',
plugins: [
'typescript',
'jsx',
'decorators-legacy',
'classProperties',
'classPrivateProperties',
'classPrivateMethods',
'dynamicImport',
'optionalChaining',
'nullishCoalescingOperator'
],
errorRecovery: true
});
// Track current function context for edges
let currentFunction = null;
traverse.default(ast, {
// Function declarations
FunctionDeclaration(path) {
const name = path.node.id?.name;
if (!name) return;
const nodeId = `js:${packageName}/${moduleBase}.${name}`;
const isExported = path.parent.type === 'ExportNamedDeclaration' ||
path.parent.type === 'ExportDefaultDeclaration';
nodes.push({
id: nodeId,
package: packageName,
name,
signature: getFunctionSignature(path.node),
position: {
file: relativePath,
line: path.node.loc?.start.line || 0,
column: path.node.loc?.start.column || 0
},
visibility: isExported ? 'public' : 'private',
annotations: []
});
// Check for route handlers
const routeInfo = detectRouteHandler(path);
if (routeInfo) {
entrypoints.push({
id: nodeId,
type: routeInfo.type,
route: routeInfo.route,
method: routeInfo.method
});
}
currentFunction = nodeId;
},
// Arrow functions assigned to variables
VariableDeclarator(path) {
if (path.node.init?.type === 'ArrowFunctionExpression' ||
path.node.init?.type === 'FunctionExpression') {
const name = path.node.id?.name;
if (!name) return;
const nodeId = `js:${packageName}/${moduleBase}.${name}`;
const parent = path.parentPath?.parent;
const isExported = parent?.type === 'ExportNamedDeclaration';
nodes.push({
id: nodeId,
package: packageName,
name,
signature: getFunctionSignature(path.node.init),
position: {
file: relativePath,
line: path.node.loc?.start.line || 0,
column: path.node.loc?.start.column || 0
},
visibility: isExported ? 'public' : 'private',
annotations: []
});
currentFunction = nodeId;
}
},
// Class methods
ClassMethod(path) {
const className = path.parentPath?.parent?.id?.name;
const methodName = path.node.key?.name;
if (!className || !methodName) return;
const nodeId = `js:${packageName}/${moduleBase}.${className}.${methodName}`;
nodes.push({
id: nodeId,
package: packageName,
name: `${className}.${methodName}`,
signature: getFunctionSignature(path.node),
position: {
file: relativePath,
line: path.node.loc?.start.line || 0,
column: path.node.loc?.start.column || 0
},
visibility: path.node.accessibility || 'public',
annotations: getDecorators(path)
});
// Check for controller/handler patterns
if (className.endsWith('Controller') || className.endsWith('Handler')) {
entrypoints.push({
id: nodeId,
type: 'http_handler',
route: null,
method: null
});
}
currentFunction = nodeId;
},
// Call expressions (edges)
CallExpression(path) {
if (!currentFunction) return;
const callee = path.node.callee;
let targetId = null;
let objName = null;
let methodName = null;
if (callee.type === 'Identifier') {
targetId = `js:${packageName}/${moduleBase}.${callee.name}`;
methodName = callee.name;
} else if (callee.type === 'MemberExpression') {
objName = callee.object?.name || 'unknown';
methodName = callee.property?.name || 'unknown';
targetId = `js:external/${objName}.${methodName}`;
}
if (targetId) {
edges.push({
from: currentFunction,
to: targetId,
kind: 'direct',
site: {
file: relativePath,
line: path.node.loc?.start.line || 0
}
});
}
// Detect security sinks
if (methodName) {
const sinkMatch = matchSink(objName || methodName, methodName, sinkLookup);
if (sinkMatch) {
sinks.push({
caller: currentFunction,
category: sinkMatch.category,
method: `${objName ? objName + '.' : ''}${methodName}`,
site: {
file: relativePath,
line: path.node.loc?.start.line || 0,
column: path.node.loc?.start.column || 0
}
});
}
}
// Detect Express/Fastify route registration
detectRouteRegistration(path, entrypoints, packageName, moduleBase, relativePath);
}
});
return { nodes, edges, entrypoints, sinks };
}
/**
* Get function signature string
* @param {object} node
* @returns {string}
*/
function getFunctionSignature(node) {
const params = node.params?.map(p => {
if (p.type === 'Identifier') {
return p.name;
} else if (p.type === 'AssignmentPattern') {
return p.left?.name || 'arg';
} else if (p.type === 'RestElement') {
return `...${p.argument?.name || 'args'}`;
}
return 'arg';
}) || [];
const isAsync = node.async ? 'async ' : '';
return `${isAsync}(${params.join(', ')})`;
}
/**
* Get decorators from a path
* @param {object} path
* @returns {string[]}
*/
function getDecorators(path) {
const decorators = path.node.decorators || [];
return decorators.map(d => {
if (d.expression?.callee?.name) {
return `@${d.expression.callee.name}`;
} else if (d.expression?.name) {
return `@${d.expression.name}`;
}
return '@unknown';
});
}
/**
* Detect if function is a route handler
* @param {object} path
* @returns {{ type: string, route: string | null, method: string | null } | null}
*/
function detectRouteHandler(path) {
const name = path.node.id?.name?.toLowerCase();
if (!name) return null;
// Common handler naming patterns
if (name.includes('handler') || name.includes('controller')) {
return { type: 'http_handler', route: null, method: null };
}
// Lambda handler pattern
if (name === 'handler' || name === 'main') {
return { type: 'lambda', route: null, method: null };
}
return null;
}
/**
* Detect Express/Fastify route registration
* @param {object} path
* @param {any[]} entrypoints
* @param {string} packageName
* @param {string} moduleBase
* @param {string} relativePath
*/
function detectRouteRegistration(path, entrypoints, packageName, moduleBase, relativePath) {
const callee = path.node.callee;
if (callee.type !== 'MemberExpression') return;
const methodName = callee.property?.name?.toLowerCase();
const httpMethods = ['get', 'post', 'put', 'delete', 'patch', 'options', 'head'];
if (!httpMethods.includes(methodName)) return;
// Get route path from first argument
const firstArg = path.node.arguments?.[0];
let routePath = null;
if (firstArg?.type === 'StringLiteral') {
routePath = firstArg.value;
}
if (routePath) {
const handlerName = `${methodName.toUpperCase()}_${routePath.replace(/[/:{}*?]/g, '_')}`;
const nodeId = `js:${packageName}/${moduleBase}.${handlerName}`;
entrypoints.push({
id: nodeId,
type: 'http_handler',
route: routePath,
method: methodName.toUpperCase()
});
}
}
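// Example (illustrative): `app.get('/users/:id', handler)` yields the entrypoint
// { id: "js:<pkg>/<module>.GET__users__id", type: "http_handler", route: "/users/:id", method: "GET" }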
/**
* Remove duplicate nodes
* @param {any[]} nodes
* @returns {any[]}
*/
function deduplicateNodes(nodes) {
const seen = new Set();
return nodes.filter(n => {
if (seen.has(n.id)) return false;
seen.add(n.id);
return true;
});
}
/**
* Remove duplicate edges
* @param {any[]} edges
* @returns {any[]}
*/
function deduplicateEdges(edges) {
const seen = new Set();
return edges.filter(e => {
const key = `${e.from}|${e.to}`;
if (seen.has(key)) return false;
seen.add(key);
return true;
});
}
/**
* Remove duplicate sinks
* @param {any[]} sinks
* @returns {any[]}
*/
function deduplicateSinks(sinks) {
const seen = new Set();
return sinks.filter(s => {
const key = `${s.caller}|${s.category}|${s.method}|${s.site.file}:${s.site.line}`;
if (seen.has(key)) return false;
seen.add(key);
return true;
});
}
// Run
main().catch(console.error);

View File

@@ -1,675 +0,0 @@
// -----------------------------------------------------------------------------
// index.test.js
// Sprint: SPRINT_3600_0004_0001 (Node.js Babel Integration)
// Tasks: NODE-017, NODE-018 - Unit tests for AST parsing and entrypoint detection
// Description: Tests for call graph extraction from JavaScript/TypeScript.
// -----------------------------------------------------------------------------
import { test, describe, beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import { parse } from '@babel/parser';
import traverse from '@babel/traverse';
// Test utilities for AST parsing
function parseCode(code, options = {}) {
return parse(code, {
sourceType: 'module',
plugins: [
'typescript',
'jsx',
'decorators-legacy',
'classProperties',
'classPrivateProperties',
'classPrivateMethods',
'dynamicImport',
'optionalChaining',
'nullishCoalescingOperator'
],
errorRecovery: true,
...options
});
}
describe('Babel Parser Integration', () => {
test('parses simple JavaScript function', () => {
const code = `
function hello(name) {
return 'Hello, ' + name;
}
`;
const ast = parseCode(code);
assert.ok(ast);
assert.equal(ast.type, 'File');
assert.ok(ast.program.body.length > 0);
});
test('parses arrow function', () => {
const code = `
const greet = (name) => {
return \`Hello, \${name}\`;
};
`;
const ast = parseCode(code);
assert.ok(ast);
let foundArrow = false;
traverse.default(ast, {
ArrowFunctionExpression() {
foundArrow = true;
}
});
assert.ok(foundArrow, 'Should find arrow function');
});
test('parses async function', () => {
const code = `
async function fetchData(url) {
const response = await fetch(url);
return response.json();
}
`;
const ast = parseCode(code);
let isAsync = false;
traverse.default(ast, {
FunctionDeclaration(path) {
isAsync = path.node.async;
}
});
assert.ok(isAsync, 'Should detect async function');
});
test('parses class with methods', () => {
const code = `
class UserController {
async getUser(id) {
return this.userService.findById(id);
}
async createUser(data) {
return this.userService.create(data);
}
}
`;
const ast = parseCode(code);
const methods = [];
traverse.default(ast, {
ClassMethod(path) {
methods.push(path.node.key.name);
}
});
assert.deepEqual(methods.sort(), ['createUser', 'getUser']);
});
test('parses TypeScript with types', () => {
const code = `
interface User {
id: string;
name: string;
}
function getUser(id: string): Promise<User> {
return db.query<User>('SELECT * FROM users WHERE id = $1', [id]);
}
`;
const ast = parseCode(code);
assert.ok(ast);
let foundFunction = false;
traverse.default(ast, {
FunctionDeclaration(path) {
if (path.node.id.name === 'getUser') {
foundFunction = true;
}
}
});
assert.ok(foundFunction, 'Should parse TypeScript function');
});
test('parses JSX components', () => {
const code = `
function Button({ onClick, children }) {
return <button onClick={onClick}>{children}</button>;
}
`;
const ast = parseCode(code);
let foundJSX = false;
traverse.default(ast, {
JSXElement() {
foundJSX = true;
}
});
assert.ok(foundJSX, 'Should parse JSX');
});
test('parses decorators', () => {
const code = `
@Controller('/users')
class UserController {
@Get('/:id')
async getUser(@Param('id') id: string) {
return this.userService.findById(id);
}
}
`;
const ast = parseCode(code);
const decorators = [];
traverse.default(ast, {
ClassDeclaration(path) {
if (path.node.decorators) {
decorators.push(...path.node.decorators.map(d =>
d.expression?.callee?.name || d.expression?.name
));
}
},
ClassMethod(path) {
if (path.node.decorators) {
decorators.push(...path.node.decorators.map(d =>
d.expression?.callee?.name || d.expression?.name
));
}
}
});
assert.ok(decorators.includes('Controller'));
assert.ok(decorators.includes('Get'));
});
test('parses dynamic imports', () => {
const code = `
async function loadModule(name) {
const module = await import(\`./modules/\${name}\`);
return module.default;
}
`;
const ast = parseCode(code);
let foundDynamicImport = false;
traverse.default(ast, {
Import() {
foundDynamicImport = true;
}
});
assert.ok(foundDynamicImport, 'Should detect dynamic import');
});
test('parses optional chaining', () => {
const code = `
const name = user?.profile?.name ?? 'Anonymous';
`;
const ast = parseCode(code);
let foundOptionalChain = false;
traverse.default(ast, {
OptionalMemberExpression() {
foundOptionalChain = true;
}
});
assert.ok(foundOptionalChain, 'Should parse optional chaining');
});
test('parses class private fields', () => {
const code = `
class Counter {
#count = 0;
increment() {
this.#count++;
}
get value() {
return this.#count;
}
}
`;
const ast = parseCode(code);
let foundPrivateField = false;
traverse.default(ast, {
ClassPrivateProperty() {
foundPrivateField = true;
}
});
assert.ok(foundPrivateField, 'Should parse private class field');
});
});
describe('Function Declaration Extraction', () => {
test('extracts function name', () => {
const code = `
function processRequest(req, res) {
res.json({ status: 'ok' });
}
`;
const ast = parseCode(code);
let functionName = null;
traverse.default(ast, {
FunctionDeclaration(path) {
functionName = path.node.id.name;
}
});
assert.equal(functionName, 'processRequest');
});
test('extracts function parameters', () => {
const code = `
function greet(firstName, lastName, options = {}) {
return \`Hello, \${firstName} \${lastName}\`;
}
`;
const ast = parseCode(code);
let params = [];
traverse.default(ast, {
FunctionDeclaration(path) {
params = path.node.params.map(p => {
if (p.type === 'Identifier') return p.name;
if (p.type === 'AssignmentPattern') return p.left.name;
return 'unknown';
});
}
});
assert.deepEqual(params, ['firstName', 'lastName', 'options']);
});
test('detects exported functions', () => {
const code = `
export function publicFunction() {}
function privateFunction() {}
export default function defaultFunction() {}
`;
const ast = parseCode(code);
const functions = { public: [], private: [] };
traverse.default(ast, {
FunctionDeclaration(path) {
const name = path.node.id?.name;
if (!name) return;
const isExported =
path.parent.type === 'ExportNamedDeclaration' ||
path.parent.type === 'ExportDefaultDeclaration';
if (isExported) {
functions.public.push(name);
} else {
functions.private.push(name);
}
}
});
assert.deepEqual(functions.public.sort(), ['defaultFunction', 'publicFunction']);
assert.deepEqual(functions.private, ['privateFunction']);
});
});
describe('Call Expression Extraction', () => {
test('extracts direct function calls', () => {
const code = `
function main() {
helper();
processData();
}
`;
const ast = parseCode(code);
const calls = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'Identifier') {
calls.push(path.node.callee.name);
}
}
});
assert.deepEqual(calls.sort(), ['helper', 'processData']);
});
test('extracts method calls', () => {
const code = `
function handler() {
db.query('SELECT * FROM users');
fs.readFile('./config.json');
console.log('done');
}
`;
const ast = parseCode(code);
const methodCalls = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const obj = path.node.callee.object.name;
const method = path.node.callee.property.name;
methodCalls.push(`${obj}.${method}`);
}
}
});
assert.ok(methodCalls.includes('db.query'));
assert.ok(methodCalls.includes('fs.readFile'));
assert.ok(methodCalls.includes('console.log'));
});
test('extracts chained method calls', () => {
const code = `
const result = data
.filter(x => x.active)
.map(x => x.name)
.join(', ');
`;
const ast = parseCode(code);
const methods = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const method = path.node.callee.property.name;
methods.push(method);
}
}
});
assert.ok(methods.includes('filter'));
assert.ok(methods.includes('map'));
assert.ok(methods.includes('join'));
});
});
describe('Framework Entrypoint Detection', () => {
test('detects Express route handlers', () => {
const code = `
const express = require('express');
const app = express();
app.get('/users', (req, res) => {
res.json(users);
});
app.post('/users', async (req, res) => {
const user = await createUser(req.body);
res.json(user);
});
app.delete('/users/:id', (req, res) => {
deleteUser(req.params.id);
res.sendStatus(204);
});
`;
const ast = parseCode(code);
const routes = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const method = path.node.callee.property.name?.toLowerCase();
const httpMethods = ['get', 'post', 'put', 'delete', 'patch'];
if (httpMethods.includes(method)) {
const routeArg = path.node.arguments[0];
if (routeArg?.type === 'StringLiteral') {
routes.push({ method: method.toUpperCase(), path: routeArg.value });
}
}
}
}
});
assert.equal(routes.length, 3);
assert.ok(routes.some(r => r.method === 'GET' && r.path === '/users'));
assert.ok(routes.some(r => r.method === 'POST' && r.path === '/users'));
assert.ok(routes.some(r => r.method === 'DELETE' && r.path === '/users/:id'));
});
test('detects Fastify route handlers', () => {
const code = `
const fastify = require('fastify')();
fastify.get('/health', async (request, reply) => {
return { status: 'ok' };
});
fastify.route({
method: 'POST',
url: '/items',
handler: async (request, reply) => {
return { id: 1 };
}
});
`;
const ast = parseCode(code);
const routes = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const method = path.node.callee.property.name?.toLowerCase();
if (['get', 'post', 'put', 'delete', 'patch', 'route'].includes(method)) {
const routeArg = path.node.arguments[0];
if (routeArg?.type === 'StringLiteral') {
routes.push({ method: method.toUpperCase(), path: routeArg.value });
}
}
}
}
});
assert.ok(routes.some(r => r.path === '/health'));
});
test('detects NestJS controller decorators', () => {
const code = `
@Controller('users')
export class UsersController {
@Get()
findAll() {
return this.usersService.findAll();
}
@Get(':id')
findOne(@Param('id') id: string) {
return this.usersService.findOne(id);
}
@Post()
create(@Body() createUserDto: CreateUserDto) {
return this.usersService.create(createUserDto);
}
}
`;
const ast = parseCode(code);
const handlers = [];
traverse.default(ast, {
ClassMethod(path) {
const decorators = path.node.decorators || [];
for (const decorator of decorators) {
const name = decorator.expression?.callee?.name || decorator.expression?.name;
if (['Get', 'Post', 'Put', 'Delete', 'Patch'].includes(name)) {
handlers.push({
method: name.toUpperCase(),
handler: path.node.key.name
});
}
}
}
});
assert.equal(handlers.length, 3);
assert.ok(handlers.some(h => h.handler === 'findAll'));
assert.ok(handlers.some(h => h.handler === 'findOne'));
assert.ok(handlers.some(h => h.handler === 'create'));
});
test('detects Koa router handlers', () => {
const code = `
const Router = require('koa-router');
const router = new Router();
router.get('/items', async (ctx) => {
ctx.body = await getItems();
});
router.post('/items', async (ctx) => {
ctx.body = await createItem(ctx.request.body);
});
`;
const ast = parseCode(code);
const routes = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const objName = path.node.callee.object.name;
const method = path.node.callee.property.name?.toLowerCase();
if (objName === 'router' && ['get', 'post', 'put', 'delete'].includes(method)) {
const routeArg = path.node.arguments[0];
if (routeArg?.type === 'StringLiteral') {
routes.push({ method: method.toUpperCase(), path: routeArg.value });
}
}
}
}
});
assert.equal(routes.length, 2);
assert.ok(routes.some(r => r.method === 'GET' && r.path === '/items'));
assert.ok(routes.some(r => r.method === 'POST' && r.path === '/items'));
});
test('detects AWS Lambda handlers', () => {
const code = `
export const handler = async (event, context) => {
const body = JSON.parse(event.body);
return {
statusCode: 200,
body: JSON.stringify({ message: 'Success' })
};
};
export const main = async (event) => {
return { statusCode: 200 };
};
`;
const ast = parseCode(code);
const handlers = [];
traverse.default(ast, {
VariableDeclarator(path) {
const name = path.node.id?.name?.toLowerCase();
if (['handler', 'main'].includes(name)) {
if (path.node.init?.type === 'ArrowFunctionExpression') {
handlers.push(path.node.id.name);
}
}
}
});
assert.ok(handlers.includes('handler'));
assert.ok(handlers.includes('main'));
});
test('detects Hapi route handlers', () => {
const code = `
const server = Hapi.server({ port: 3000 });
server.route({
method: 'GET',
path: '/users',
handler: (request, h) => {
return getUsers();
}
});
server.route({
method: 'POST',
path: '/users',
handler: async (request, h) => {
return createUser(request.payload);
}
});
`;
const ast = parseCode(code);
let routeCount = 0;
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.type === 'MemberExpression') {
const method = path.node.callee.property.name;
if (method === 'route') {
routeCount++;
}
}
}
});
assert.equal(routeCount, 2);
});
});
describe('Module Import/Export Detection', () => {
test('detects CommonJS require', () => {
const code = `
const express = require('express');
const { Router } = require('express');
const db = require('./db');
`;
const ast = parseCode(code);
const imports = [];
traverse.default(ast, {
CallExpression(path) {
if (path.node.callee.name === 'require') {
const arg = path.node.arguments[0];
if (arg?.type === 'StringLiteral') {
imports.push(arg.value);
}
}
}
});
assert.ok(imports.includes('express'));
assert.ok(imports.includes('./db'));
});
test('detects ES module imports', () => {
const code = `
import express from 'express';
import { Router, Request, Response } from 'express';
import * as fs from 'fs';
import db from './db.js';
`;
const ast = parseCode(code);
const imports = [];
traverse.default(ast, {
ImportDeclaration(path) {
imports.push(path.node.source.value);
}
});
assert.ok(imports.includes('express'));
assert.ok(imports.includes('fs'));
assert.ok(imports.includes('./db.js'));
});
test('detects ES module exports', () => {
const code = `
export function publicFn() {}
export const publicConst = 42;
export default class MainClass {}
export { helper, utils };
`;
const ast = parseCode(code);
let exportCount = 0;
traverse.default(ast, {
ExportNamedDeclaration() { exportCount++; },
ExportDefaultDeclaration() { exportCount++; }
});
assert.ok(exportCount >= 3);
});
});

View File

@@ -1,243 +0,0 @@
{
"name": "stella-callgraph-node",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "stella-callgraph-node",
"version": "1.0.0",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@babel/parser": "^7.23.0",
"@babel/traverse": "^7.23.0",
"@babel/types": "^7.23.0"
},
"bin": {
"stella-callgraph-node": "index.js"
},
"devDependencies": {
"@types/node": "^20.0.0"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@babel/code-frame": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
"integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
"license": "MIT",
"dependencies": {
"@babel/helper-validator-identifier": "^7.27.1",
"js-tokens": "^4.0.0",
"picocolors": "^1.1.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/generator": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz",
"integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==",
"license": "MIT",
"dependencies": {
"@babel/parser": "^7.28.5",
"@babel/types": "^7.28.5",
"@jridgewell/gen-mapping": "^0.3.12",
"@jridgewell/trace-mapping": "^0.3.28",
"jsesc": "^3.0.2"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-globals": {
"version": "7.28.0",
"resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
"integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
"integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
"integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/parser": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
"integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
"license": "MIT",
"dependencies": {
"@babel/types": "^7.28.5"
},
"bin": {
"parser": "bin/babel-parser.js"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@babel/template": {
"version": "7.27.2",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
"integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/parser": "^7.27.2",
"@babel/types": "^7.27.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/traverse": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz",
"integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==",
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
"@babel/helper-globals": "^7.28.0",
"@babel/parser": "^7.28.5",
"@babel/template": "^7.27.2",
"@babel/types": "^7.28.5",
"debug": "^4.3.1"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/types": {
"version": "7.28.5",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
"integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.27.1",
"@babel/helper-validator-identifier": "^7.28.5"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.13",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
"integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
"license": "MIT",
"dependencies": {
"@jridgewell/sourcemap-codec": "^1.5.0",
"@jridgewell/trace-mapping": "^0.3.24"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.5",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.31",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
"integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.1.0",
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@types/node": {
"version": "20.19.27",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.27.tgz",
"integrity": "sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"license": "MIT"
},
"node_modules/jsesc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
"integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
"license": "MIT",
"bin": {
"jsesc": "bin/jsesc"
},
"engines": {
"node": ">=6"
}
},
"node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"license": "ISC"
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
}
}
}

View File

@@ -1,33 +0,0 @@
{
"name": "stella-callgraph-node",
"version": "1.0.0",
"description": "Call graph extraction tool for JavaScript/TypeScript using Babel AST",
"main": "index.js",
"type": "module",
"bin": {
"stella-callgraph-node": "./index.js"
},
"scripts": {
"start": "node index.js",
"test": "node --test"
},
"keywords": [
"callgraph",
"ast",
"babel",
"static-analysis",
"security"
],
"license": "AGPL-3.0-or-later",
"dependencies": {
"@babel/parser": "^7.23.0",
"@babel/traverse": "^7.23.0",
"@babel/types": "^7.23.0"
},
"devDependencies": {
"@types/node": "^20.0.0"
},
"engines": {
"node": ">=18.0.0"
}
}

View File

@@ -1,230 +0,0 @@
// -----------------------------------------------------------------------------
// sink-detect.js
// Security sink detection patterns for JavaScript/TypeScript.
// -----------------------------------------------------------------------------
/**
* Sink detection patterns organized by category.
*/
export const sinkPatterns = {
command_injection: {
category: 'command_injection',
patterns: [
{ module: 'child_process', methods: ['exec', 'execSync', 'spawn', 'spawnSync', 'execFile', 'execFileSync', 'fork'] },
{ module: 'shelljs', methods: ['exec', 'which', 'cat', 'sed', 'grep', 'rm', 'cp', 'mv', 'mkdir'] },
{ object: 'process', methods: ['exec'] }
]
},
sql_injection: {
category: 'sql_injection',
patterns: [
{ object: 'connection', methods: ['query', 'execute'] },
{ object: 'pool', methods: ['query', 'execute'] },
{ object: 'client', methods: ['query'] },
{ module: 'mysql', methods: ['query', 'execute'] },
{ module: 'mysql2', methods: ['query', 'execute'] },
{ module: 'pg', methods: ['query'] },
{ module: 'sqlite3', methods: ['run', 'exec', 'all', 'get'] },
{ module: 'knex', methods: ['raw', 'whereRaw', 'havingRaw', 'orderByRaw'] },
{ module: 'sequelize', methods: ['query', 'literal'] },
{ module: 'typeorm', methods: ['query', 'createQueryBuilder'] },
{ module: 'prisma', methods: ['$queryRaw', '$executeRaw', '$queryRawUnsafe', '$executeRawUnsafe'] }
]
},
file_write: {
category: 'file_write',
patterns: [
{ module: 'fs', methods: ['writeFile', 'writeFileSync', 'appendFile', 'appendFileSync', 'createWriteStream', 'rename', 'renameSync', 'unlink', 'unlinkSync', 'rmdir', 'rmdirSync', 'rm', 'rmSync'] },
{ module: 'fs/promises', methods: ['writeFile', 'appendFile', 'rename', 'unlink', 'rmdir', 'rm'] }
]
},
file_read: {
category: 'file_read',
patterns: [
{ module: 'fs', methods: ['readFile', 'readFileSync', 'createReadStream', 'readdir', 'readdirSync'] },
{ module: 'fs/promises', methods: ['readFile', 'readdir'] }
]
},
deserialization: {
category: 'deserialization',
patterns: [
{ global: true, methods: ['eval', 'Function'] },
{ object: 'JSON', methods: ['parse'] },
{ module: 'vm', methods: ['runInContext', 'runInNewContext', 'runInThisContext', 'createScript'] },
{ module: 'serialize-javascript', methods: ['deserialize'] },
{ module: 'node-serialize', methods: ['unserialize'] },
{ module: 'js-yaml', methods: ['load', 'loadAll'] }
]
},
ssrf: {
category: 'ssrf',
patterns: [
{ module: 'http', methods: ['request', 'get'] },
{ module: 'https', methods: ['request', 'get'] },
{ module: 'axios', methods: ['get', 'post', 'put', 'delete', 'patch', 'request'] },
{ module: 'node-fetch', methods: ['default'] },
{ global: true, methods: ['fetch'] },
{ module: 'got', methods: ['get', 'post', 'put', 'delete', 'patch'] },
{ module: 'superagent', methods: ['get', 'post', 'put', 'delete', 'patch'] },
{ module: 'request', methods: ['get', 'post', 'put', 'delete', 'patch'] },
{ module: 'undici', methods: ['request', 'fetch'] }
]
},
path_traversal: {
category: 'path_traversal',
patterns: [
{ module: 'path', methods: ['join', 'resolve', 'normalize'] },
{ module: 'fs', methods: ['readFile', 'readFileSync', 'writeFile', 'writeFileSync', 'access', 'accessSync', 'stat', 'statSync'] }
]
},
weak_crypto: {
category: 'weak_crypto',
patterns: [
{ module: 'crypto', methods: ['createCipher', 'createDecipher', 'createCipheriv', 'createDecipheriv'] },
{ object: 'crypto', methods: ['createHash'] } // flagged so weak digests (MD5, SHA-1) can be reviewed downstream
]
},
ldap_injection: {
category: 'ldap_injection',
patterns: [
{ module: 'ldapjs', methods: ['search', 'modify', 'add', 'del'] },
{ module: 'activedirectory', methods: ['find', 'findUser', 'findGroup'] }
]
},
nosql_injection: {
category: 'nosql_injection',
patterns: [
{ module: 'mongodb', methods: ['find', 'findOne', 'updateOne', 'updateMany', 'deleteOne', 'deleteMany', 'aggregate'] },
{ module: 'mongoose', methods: ['find', 'findOne', 'findById', 'updateOne', 'updateMany', 'deleteOne', 'deleteMany', 'where', 'aggregate'] }
]
},
xss: {
category: 'xss',
patterns: [
{ object: 'document', methods: ['write', 'writeln'] },
{ object: 'element', methods: ['innerHTML', 'outerHTML'] },
{ module: 'dangerouslySetInnerHTML', methods: ['__html'] } // React pattern
]
},
log_injection: {
category: 'log_injection',
patterns: [
{ object: 'console', methods: ['log', 'info', 'warn', 'error', 'debug'] },
{ module: 'winston', methods: ['log', 'info', 'warn', 'error', 'debug'] },
{ module: 'pino', methods: ['info', 'warn', 'error', 'debug', 'trace'] },
{ module: 'bunyan', methods: ['info', 'warn', 'error', 'debug', 'trace'] }
]
},
regex_dos: {
category: 'regex_dos',
patterns: [
{ object: 'RegExp', methods: ['test', 'exec', 'match'] },
{ global: true, methods: ['RegExp'] }
]
}
};
/**
* Build a lookup map for fast sink detection.
* @returns {Map<string, { category: string, method: string }>}
*/
export function buildSinkLookup() {
const lookup = new Map();
for (const config of Object.values(sinkPatterns)) {
for (const pattern of config.patterns) {
for (const method of pattern.methods) {
// Key formats: "module:method", "object.method", "global:method"
if (pattern.module) {
lookup.set(`${pattern.module}:${method}`, { category: config.category, method });
}
if (pattern.object) {
lookup.set(`${pattern.object}.${method}`, { category: config.category, method });
}
if (pattern.global) {
lookup.set(`global:${method}`, { category: config.category, method });
}
}
}
}
return lookup;
}
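// Example keys produced (these are asserted by the accompanying unit tests):
// "child_process:exec", "connection.query", "global:eval", "global:fetch"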
/**
* Check if a call expression is a security sink.
* @param {string} objectOrModule - The object/module name (e.g., 'fs', 'child_process', 'connection')
* @param {string} methodName - The method being called
* @param {Map} sinkLookup - Pre-built sink lookup map
* @returns {{ category: string, method: string } | null}
*/
export function matchSink(objectOrModule, methodName, sinkLookup) {
// Check module:method pattern
const moduleKey = `${objectOrModule}:${methodName}`;
if (sinkLookup.has(moduleKey)) {
return sinkLookup.get(moduleKey);
}
// Check object.method pattern
const objectKey = `${objectOrModule}.${methodName}`;
if (sinkLookup.has(objectKey)) {
return sinkLookup.get(objectKey);
}
// Check global functions
const globalKey = `global:${objectOrModule}`;
if (sinkLookup.has(globalKey)) {
return sinkLookup.get(globalKey);
}
// Check if methodName itself is a global sink (like eval)
const directGlobal = `global:${methodName}`;
if (sinkLookup.has(directGlobal)) {
return sinkLookup.get(directGlobal);
}
return null;
}
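// Usage sketch (values mirror the accompanying unit tests):
//   const lookup = buildSinkLookup();
//   matchSink('child_process', 'exec', lookup) // -> { category: 'command_injection', method: 'exec' }
//   matchSink('console', 'clear', lookup)      // -> null (not a sink)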
/**
* Common dangerous patterns that indicate direct user input flow.
*/
export const taintSources = [
'req.body',
'req.query',
'req.params',
'req.headers',
'req.cookies',
'request.body',
'request.query',
'request.params',
'event.body',
'event.queryStringParameters',
'event.pathParameters',
'ctx.request.body',
'ctx.request.query',
'ctx.params',
'process.env',
'process.argv'
];
/**
* Check if an identifier is a potential taint source.
* @param {string} identifier
* @returns {boolean}
*/
export function isTaintSource(identifier) {
return taintSources.some(source => identifier.includes(source));
}
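// Example: isTaintSource('req.body.username') -> true; isTaintSource('config.port') -> false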

View File

@@ -1,236 +0,0 @@
// -----------------------------------------------------------------------------
// sink-detect.test.js
// Sprint: SPRINT_3600_0004_0001 (Node.js Babel Integration)
// Tasks: NODE-019 - Unit tests for sink detection (all categories)
// Description: Tests for security sink detection patterns.
// -----------------------------------------------------------------------------
import { test, describe } from 'node:test';
import assert from 'node:assert/strict';
import { buildSinkLookup, matchSink, sinkPatterns, isTaintSource } from './sink-detect.js';
describe('buildSinkLookup', () => {
test('builds lookup map with all patterns', () => {
const lookup = buildSinkLookup();
assert.ok(lookup instanceof Map);
assert.ok(lookup.size > 0);
});
test('includes command injection sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('child_process:exec'));
assert.ok(lookup.has('child_process:spawn'));
assert.ok(lookup.has('child_process:execSync'));
});
test('includes SQL injection sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('connection.query'));
assert.ok(lookup.has('mysql:query'));
assert.ok(lookup.has('pg:query'));
assert.ok(lookup.has('knex:raw'));
});
test('includes file write sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('fs:writeFile'));
assert.ok(lookup.has('fs:writeFileSync'));
assert.ok(lookup.has('fs:appendFile'));
});
test('includes deserialization sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('global:eval'));
assert.ok(lookup.has('global:Function'));
assert.ok(lookup.has('vm:runInContext'));
});
test('includes SSRF sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('http:request'));
assert.ok(lookup.has('https:get'));
assert.ok(lookup.has('axios:get'));
assert.ok(lookup.has('global:fetch'));
});
test('includes NoSQL injection sinks', () => {
const lookup = buildSinkLookup();
assert.ok(lookup.has('mongodb:find'));
assert.ok(lookup.has('mongoose:findOne'));
assert.ok(lookup.has('mongodb:aggregate'));
});
});
describe('matchSink', () => {
const lookup = buildSinkLookup();
test('detects command injection via child_process.exec', () => {
const result = matchSink('child_process', 'exec', lookup);
assert.ok(result);
assert.equal(result.category, 'command_injection');
assert.equal(result.method, 'exec');
});
test('detects command injection via child_process.spawn', () => {
const result = matchSink('child_process', 'spawn', lookup);
assert.ok(result);
assert.equal(result.category, 'command_injection');
});
test('detects SQL injection via connection.query', () => {
const result = matchSink('connection', 'query', lookup);
assert.ok(result);
assert.equal(result.category, 'sql_injection');
});
test('detects SQL injection via knex.raw', () => {
const result = matchSink('knex', 'raw', lookup);
assert.ok(result);
assert.equal(result.category, 'sql_injection');
});
test('detects SQL injection via prisma.$queryRaw', () => {
const result = matchSink('prisma', '$queryRaw', lookup);
assert.ok(result);
assert.equal(result.category, 'sql_injection');
});
test('detects file write via fs.writeFile', () => {
const result = matchSink('fs', 'writeFile', lookup);
assert.ok(result);
// fs.writeFile is categorized in both file_write and path_traversal
// The lookup returns path_traversal since it's processed later
assert.ok(['file_write', 'path_traversal'].includes(result.category));
});
test('detects deserialization via eval', () => {
const result = matchSink('eval', 'eval', lookup);
assert.ok(result);
assert.equal(result.category, 'deserialization');
});
test('detects SSRF via axios.get', () => {
const result = matchSink('axios', 'get', lookup);
assert.ok(result);
assert.equal(result.category, 'ssrf');
});
test('detects SSRF via fetch', () => {
const result = matchSink('fetch', 'fetch', lookup);
assert.ok(result);
assert.equal(result.category, 'ssrf');
});
test('detects NoSQL injection via mongoose.find', () => {
const result = matchSink('mongoose', 'find', lookup);
assert.ok(result);
assert.equal(result.category, 'nosql_injection');
});
test('detects weak crypto via crypto.createCipher', () => {
const result = matchSink('crypto', 'createCipher', lookup);
assert.ok(result);
assert.equal(result.category, 'weak_crypto');
});
test('detects LDAP injection via ldapjs.search', () => {
const result = matchSink('ldapjs', 'search', lookup);
assert.ok(result);
assert.equal(result.category, 'ldap_injection');
});
test('returns null for non-sink methods', () => {
const result = matchSink('console', 'clear', lookup);
assert.equal(result, null);
});
test('returns null for unknown objects', () => {
const result = matchSink('myCustomModule', 'doSomething', lookup);
assert.equal(result, null);
});
});
describe('sinkPatterns', () => {
test('has expected categories', () => {
const categories = Object.keys(sinkPatterns);
assert.ok(categories.includes('command_injection'));
assert.ok(categories.includes('sql_injection'));
assert.ok(categories.includes('file_write'));
assert.ok(categories.includes('deserialization'));
assert.ok(categories.includes('ssrf'));
assert.ok(categories.includes('nosql_injection'));
assert.ok(categories.includes('xss'));
assert.ok(categories.includes('log_injection'));
});
test('command_injection has child_process patterns', () => {
const cmdPatterns = sinkPatterns.command_injection.patterns;
const childProcessPattern = cmdPatterns.find(p => p.module === 'child_process');
assert.ok(childProcessPattern);
assert.ok(childProcessPattern.methods.includes('exec'));
assert.ok(childProcessPattern.methods.includes('spawn'));
assert.ok(childProcessPattern.methods.includes('fork'));
});
test('sql_injection covers major ORMs', () => {
const sqlPatterns = sinkPatterns.sql_injection.patterns;
const modules = sqlPatterns.map(p => p.module).filter(Boolean);
assert.ok(modules.includes('mysql'));
assert.ok(modules.includes('pg'));
assert.ok(modules.includes('knex'));
assert.ok(modules.includes('sequelize'));
assert.ok(modules.includes('prisma'));
});
test('ssrf covers HTTP clients', () => {
const ssrfPatterns = sinkPatterns.ssrf.patterns;
const modules = ssrfPatterns.map(p => p.module).filter(Boolean);
assert.ok(modules.includes('http'));
assert.ok(modules.includes('https'));
assert.ok(modules.includes('axios'));
assert.ok(modules.includes('got'));
});
});
describe('isTaintSource', () => {
test('detects req.body as taint source', () => {
assert.ok(isTaintSource('req.body'));
assert.ok(isTaintSource('req.body.username'));
});
test('detects req.query as taint source', () => {
assert.ok(isTaintSource('req.query'));
assert.ok(isTaintSource('req.query.id'));
});
test('detects req.params as taint source', () => {
assert.ok(isTaintSource('req.params'));
assert.ok(isTaintSource('req.params.userId'));
});
test('detects req.headers as taint source', () => {
assert.ok(isTaintSource('req.headers'));
assert.ok(isTaintSource('req.headers.authorization'));
});
test('detects event.body (Lambda) as taint source', () => {
assert.ok(isTaintSource('event.body'));
assert.ok(isTaintSource('event.queryStringParameters'));
});
test('detects ctx.request.body (Koa) as taint source', () => {
assert.ok(isTaintSource('ctx.request.body'));
assert.ok(isTaintSource('ctx.params'));
});
test('detects process.env as taint source', () => {
assert.ok(isTaintSource('process.env'));
assert.ok(isTaintSource('process.env.SECRET'));
});
test('does not flag safe identifiers', () => {
assert.ok(!isTaintSource('myLocalVariable'));
assert.ok(!isTaintSource('config.port'));
assert.ok(!isTaintSource('user.name'));
});
});

View File

@@ -1,168 +0,0 @@
#!/usr/bin/env python3
"""
stella-callgraph-python
Call graph extraction tool for Python projects using AST analysis.
"""
import argparse
import ast
import json
import sys
from pathlib import Path
from typing import Any
from ast_analyzer import PythonASTAnalyzer
from framework_detect import detect_frameworks
def main() -> int:
parser = argparse.ArgumentParser(
description="Extract call graphs from Python projects"
)
parser.add_argument(
"path",
help="Path to Python project or file"
)
parser.add_argument(
"--json",
action="store_true",
help="Output formatted JSON"
)
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="Verbose output"
)
args = parser.parse_args()
try:
result = analyze_project(Path(args.path), verbose=args.verbose)
if args.json:
print(json.dumps(result, indent=2))
else:
print(json.dumps(result))
return 0
except Exception as e:
print(f"Error: {e}", file=sys.stderr)
return 1
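# Example invocation (illustrative): `stella-callgraph-python /path/to/project --json -v`
# prints the call graph as pretty-printed JSON; warnings for unparseable files go to stderr.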
def analyze_project(project_path: Path, verbose: bool = False) -> dict[str, Any]:
"""Analyze a Python project and extract its call graph."""
if not project_path.exists():
raise FileNotFoundError(f"Path not found: {project_path}")
# Find project root (look for pyproject.toml, setup.py, etc.)
root = find_project_root(project_path)
package_name = extract_package_name(root)
# Detect frameworks
frameworks = detect_frameworks(root)
# Find Python source files
source_files = find_python_files(root)
if verbose:
print(f"Found {len(source_files)} Python files", file=sys.stderr)
# Analyze all files
analyzer = PythonASTAnalyzer(package_name, root, frameworks)
for source_file in source_files:
try:
with open(source_file, 'r', encoding='utf-8') as f:
content = f.read()
tree = ast.parse(content, filename=str(source_file))
relative_path = source_file.relative_to(root)
analyzer.analyze_file(tree, str(relative_path))
except SyntaxError as e:
if verbose:
print(f"Warning: Syntax error in {source_file}: {e}", file=sys.stderr)
except Exception as e:
if verbose:
print(f"Warning: Failed to parse {source_file}: {e}", file=sys.stderr)
return analyzer.get_result()
def find_project_root(path: Path) -> Path:
"""Find the project root by looking for marker files."""
markers = ['pyproject.toml', 'setup.py', 'setup.cfg', 'requirements.txt', '.git']
current = path.resolve()
if current.is_file():
current = current.parent
while current != current.parent:
for marker in markers:
if (current / marker).exists():
return current
current = current.parent
return path.resolve() if path.is_dir() else path.parent.resolve()
def extract_package_name(root: Path) -> str:
"""Extract package name from project metadata."""
# Try pyproject.toml
pyproject = root / 'pyproject.toml'
if pyproject.exists():
try:
import tomllib
with open(pyproject, 'rb') as f:
data = tomllib.load(f)
return data.get('project', {}).get('name', root.name)
except Exception:
pass
# Try setup.py
setup_py = root / 'setup.py'
if setup_py.exists():
try:
with open(setup_py, 'r') as f:
content = f.read()
# Simple regex-based extraction
import re
match = re.search(r"name\s*=\s*['\"]([^'\"]+)['\"]", content)
if match:
return match.group(1)
except Exception:
pass
return root.name
def find_python_files(root: Path) -> list[Path]:
"""Find all Python source files in the project."""
exclude_dirs = {
'__pycache__', '.git', '.tox', '.nox', '.mypy_cache',
'.pytest_cache', 'venv', '.venv', 'env', '.env',
'node_modules', 'dist', 'build', 'eggs'  # *.egg-info dirs are matched via endswith() below
}
files = []
for path in root.rglob('*.py'):
# Skip excluded directories
skip = False
for part in path.parts:
if part in exclude_dirs or part.endswith('.egg-info'):
skip = True
break
if not skip and not path.name.startswith('.'):
files.append(path)
return sorted(files)
if __name__ == '__main__':
sys.exit(main())

View File

@@ -1,322 +0,0 @@
"""
AST analyzer for Python call graph extraction.
"""
import ast
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Optional
@dataclass
class FunctionNode:
"""Represents a function in the call graph."""
id: str
package: str
name: str
qualified_name: str
file: str
line: int
visibility: str
annotations: list[str] = field(default_factory=list)
is_entrypoint: bool = False
entrypoint_type: Optional[str] = None
@dataclass
class CallEdge:
"""Represents a call between functions."""
from_id: str
to_id: str
kind: str
file: str
line: int
@dataclass
class Entrypoint:
"""Represents a detected entrypoint."""
id: str
type: str
route: Optional[str] = None
method: Optional[str] = None
class PythonASTAnalyzer:
"""Analyzes Python AST to extract call graph information."""
def __init__(self, package_name: str, root: Path, frameworks: list[str]):
self.package_name = package_name
self.root = root
self.frameworks = frameworks
self.nodes: dict[str, FunctionNode] = {}
self.edges: list[CallEdge] = []
self.entrypoints: list[Entrypoint] = []
self.current_function: Optional[str] = None
self.current_file: str = ""
self.current_class: Optional[str] = None
def analyze_file(self, tree: ast.AST, relative_path: str) -> None:
"""Analyze a single Python file."""
self.current_file = relative_path
self.current_function = None
self.current_class = None
visitor = FunctionVisitor(self)
visitor.visit(tree)
def get_result(self) -> dict[str, Any]:
"""Get the analysis result as a dictionary."""
return {
"module": self.package_name,
"nodes": [self._node_to_dict(n) for n in self.nodes.values()],
"edges": [self._edge_to_dict(e) for e in self._dedupe_edges()],
"entrypoints": [self._entrypoint_to_dict(e) for e in self.entrypoints]
}
def _node_to_dict(self, node: FunctionNode) -> dict[str, Any]:
return {
"id": node.id,
"package": node.package,
"name": node.name,
"signature": node.qualified_name,
"position": {
"file": node.file,
"line": node.line,
"column": 0
},
"visibility": node.visibility,
"annotations": node.annotations
}
def _edge_to_dict(self, edge: CallEdge) -> dict[str, Any]:
return {
"from": edge.from_id,
"to": edge.to_id,
"kind": edge.kind,
"site": {
"file": edge.file,
"line": edge.line
}
}
def _entrypoint_to_dict(self, ep: Entrypoint) -> dict[str, Any]:
result: dict[str, Any] = {
"id": ep.id,
"type": ep.type
}
if ep.route:
result["route"] = ep.route
if ep.method:
result["method"] = ep.method
return result
def _dedupe_edges(self) -> list[CallEdge]:
seen: set[str] = set()
result: list[CallEdge] = []
for edge in self.edges:
key = f"{edge.from_id}|{edge.to_id}"
if key not in seen:
seen.add(key)
result.append(edge)
return result
def make_symbol_id(self, name: str, class_name: Optional[str] = None) -> str:
"""Create a symbol ID for a function or method."""
module_base = self.current_file.removesuffix('.py').replace('/', '.').replace('\\', '.')  # strip only the extension, not interior ".py" substrings
if class_name:
return f"py:{self.package_name}/{module_base}.{class_name}.{name}"
return f"py:{self.package_name}/{module_base}.{name}"
def add_function(
self,
name: str,
line: int,
decorators: list[str],
class_name: Optional[str] = None,
is_private: bool = False
) -> str:
"""Add a function node to the graph."""
symbol_id = self.make_symbol_id(name, class_name)
qualified_name = f"{class_name}.{name}" if class_name else name
visibility = "private" if is_private or name.startswith('_') else "public"
node = FunctionNode(
id=symbol_id,
package=self.package_name,
name=name,
qualified_name=qualified_name,
file=self.current_file,
line=line,
visibility=visibility,
annotations=decorators
)
self.nodes[symbol_id] = node
# Detect entrypoints
entrypoint = self._detect_entrypoint(name, decorators, class_name)
if entrypoint:
node.is_entrypoint = True
node.entrypoint_type = entrypoint.type
self.entrypoints.append(entrypoint)
return symbol_id
def add_call(self, target_name: str, line: int) -> None:
"""Add a call edge from the current function."""
if not self.current_function:
return
# Try to resolve the target
target_id = self._resolve_target(target_name)
self.edges.append(CallEdge(
from_id=self.current_function,
to_id=target_id,
kind="direct",
file=self.current_file,
line=line
))
def _resolve_target(self, name: str) -> str:
"""Resolve a call target to a symbol ID."""
# Check if it's a known local function
for node_id, node in self.nodes.items():
if node.name == name or node.qualified_name == name:
return node_id
# External or unresolved
return f"py:external/{name}"
def _detect_entrypoint(
self,
name: str,
decorators: list[str],
class_name: Optional[str]
) -> Optional[Entrypoint]:
"""Detect if a function is an entrypoint based on frameworks and decorators."""
symbol_id = self.make_symbol_id(name, class_name)
for decorator in decorators:
# Flask routes
if 'route' in decorator.lower() or decorator.lower() in ['get', 'post', 'put', 'delete', 'patch']:
route = self._extract_route_from_decorator(decorator)
method = self._extract_method_from_decorator(decorator)
return Entrypoint(id=symbol_id, type="http_handler", route=route, method=method)
# FastAPI routes
if decorator.lower() in ['get', 'post', 'put', 'delete', 'patch', 'api_route']:
route = self._extract_route_from_decorator(decorator)
return Entrypoint(id=symbol_id, type="http_handler", route=route, method=decorator.upper())
# Celery tasks
if 'task' in decorator.lower() or 'shared_task' in decorator.lower():
return Entrypoint(id=symbol_id, type="background_job")
# Click commands
if 'command' in decorator.lower() or 'group' in decorator.lower():
return Entrypoint(id=symbol_id, type="cli_command")
# Django views (class-based)
if class_name and class_name.endswith('View'):
if name in ['get', 'post', 'put', 'delete', 'patch']:
return Entrypoint(id=symbol_id, type="http_handler", method=name.upper())
# main() function
if name == 'main' and not class_name:
return Entrypoint(id=symbol_id, type="cli_command")
return None
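# Example (illustrative): a function decorated with @app.route('/users', methods=['GET'])
# is reported as Entrypoint(type="http_handler", route="/users", method="GET").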
def _extract_route_from_decorator(self, decorator: str) -> Optional[str]:
"""Extract route path from decorator string."""
import re
match = re.search(r"['\"]([/\w{}<>:.-]+)['\"]", decorator)
return match.group(1) if match else None
def _extract_method_from_decorator(self, decorator: str) -> Optional[str]:
"""Extract HTTP method from decorator string."""
import re
methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS']
for method in methods:
if method.lower() in decorator.lower():
return method
match = re.search(r"methods\s*=\s*\[([^\]]+)\]", decorator)
if match:
return match.group(1).strip("'\"").upper()
return None
class FunctionVisitor(ast.NodeVisitor):
"""AST visitor that extracts function definitions and calls."""
def __init__(self, analyzer: PythonASTAnalyzer):
self.analyzer = analyzer
def visit_ClassDef(self, node: ast.ClassDef) -> None:
"""Visit class definitions."""
old_class = self.analyzer.current_class
self.analyzer.current_class = node.name
self.generic_visit(node)
self.analyzer.current_class = old_class
def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
"""Visit function definitions."""
self._visit_function(node)
def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
"""Visit async function definitions."""
self._visit_function(node)
def _visit_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None:
"""Common logic for function and async function definitions."""
decorators = [ast.unparse(d) for d in node.decorator_list]
is_private = node.name.startswith('_') and not node.name.startswith('__')
symbol_id = self.analyzer.add_function(
name=node.name,
line=node.lineno,
decorators=decorators,
class_name=self.analyzer.current_class,
is_private=is_private
)
# Visit function body for calls
old_function = self.analyzer.current_function
self.analyzer.current_function = symbol_id
for child in ast.walk(node):
if isinstance(child, ast.Call):
target_name = self._get_call_target(child)
if target_name:
self.analyzer.add_call(target_name, child.lineno)
self.analyzer.current_function = old_function
def _get_call_target(self, node: ast.Call) -> Optional[str]:
"""Extract the target name from a Call node."""
if isinstance(node.func, ast.Name):
return node.func.id
elif isinstance(node.func, ast.Attribute):
parts = self._get_attribute_parts(node.func)
return '.'.join(parts)
return None
def _get_attribute_parts(self, node: ast.Attribute) -> list[str]:
"""Get all parts of an attribute chain."""
parts: list[str] = []
current: ast.expr = node
while isinstance(current, ast.Attribute):
parts.insert(0, current.attr)
current = current.value
if isinstance(current, ast.Name):
parts.insert(0, current.id)
return parts
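# Example: the call target db.session.query yields ["db", "session", "query"].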

View File

@@ -1,250 +0,0 @@
"""
Framework detection for Python projects.
"""
from pathlib import Path
import re
# Framework patterns
FRAMEWORK_PATTERNS = {
"flask": {
"packages": ["flask"],
"imports": [r"from flask import", r"import flask"],
"patterns": [r"@\w+\.route\(", r"Flask\(__name__\)"],
"entrypoint_type": "http_handler"
},
"fastapi": {
"packages": ["fastapi"],
"imports": [r"from fastapi import", r"import fastapi"],
"patterns": [r"@\w+\.(get|post|put|delete|patch)\(", r"FastAPI\("],
"entrypoint_type": "http_handler"
},
"django": {
"packages": ["django"],
"imports": [r"from django", r"import django"],
"patterns": [r"urlpatterns\s*=", r"class \w+View\(", r"@api_view\("],
"entrypoint_type": "http_handler"
},
"click": {
"packages": ["click"],
"imports": [r"from click import", r"import click"],
"patterns": [r"@click\.command\(", r"@click\.group\(", r"@\w+\.command\("],
"entrypoint_type": "cli_command"
},
"typer": {
"packages": ["typer"],
"imports": [r"from typer import", r"import typer"],
"patterns": [r"typer\.Typer\(", r"@\w+\.command\("],
"entrypoint_type": "cli_command"
},
"celery": {
"packages": ["celery"],
"imports": [r"from celery import", r"import celery"],
"patterns": [r"@\w+\.task\(", r"@shared_task\(", r"Celery\("],
"entrypoint_type": "background_job"
},
"dramatiq": {
"packages": ["dramatiq"],
"imports": [r"from dramatiq import", r"import dramatiq"],
"patterns": [r"@dramatiq\.actor\("],
"entrypoint_type": "background_job"
},
"rq": {
"packages": ["rq"],
"imports": [r"from rq import", r"import rq"],
"patterns": [r"@job\(", r"queue\.enqueue\("],
"entrypoint_type": "background_job"
},
"sanic": {
"packages": ["sanic"],
"imports": [r"from sanic import", r"import sanic"],
"patterns": [r"@\w+\.route\(", r"Sanic\("],
"entrypoint_type": "http_handler"
},
"aiohttp": {
"packages": ["aiohttp"],
"imports": [r"from aiohttp import", r"import aiohttp"],
"patterns": [r"web\.Application\(", r"@routes\.(get|post|put|delete)\("],
"entrypoint_type": "http_handler"
},
"tornado": {
"packages": ["tornado"],
"imports": [r"from tornado import", r"import tornado"],
"patterns": [r"class \w+Handler\(", r"tornado\.web\.Application\("],
"entrypoint_type": "http_handler"
},
"aws_lambda": {
"packages": ["aws_lambda_powertools", "boto3"],
"imports": [r"def handler\(event", r"def lambda_handler\("],
"patterns": [r"def handler\(event,\s*context\)", r"@logger\.inject_lambda_context"],
"entrypoint_type": "lambda"
},
"azure_functions": {
"packages": ["azure.functions"],
"imports": [r"import azure\.functions"],
"patterns": [r"@func\.route\(", r"func\.HttpRequest"],
"entrypoint_type": "cloud_function"
},
"grpc": {
"packages": ["grpcio", "grpc"],
"imports": [r"import grpc", r"from grpc import"],
"patterns": [r"_pb2_grpc\.add_\w+Servicer_to_server\("],
"entrypoint_type": "grpc_method"
},
"graphql": {
"packages": ["graphene", "strawberry", "ariadne"],
"imports": [r"import graphene", r"import strawberry", r"import ariadne"],
"patterns": [r"@strawberry\.(type|mutation|query)\(", r"class \w+\(graphene\.ObjectType\)"],
"entrypoint_type": "graphql_resolver"
}
}
def detect_frameworks(project_root: Path) -> list[str]:
"""Detect frameworks used in a Python project."""
detected: set[str] = set()
# Check pyproject.toml
pyproject = project_root / "pyproject.toml"
if pyproject.exists():
detected.update(_detect_from_pyproject(pyproject))
# Check requirements.txt
requirements = project_root / "requirements.txt"
if requirements.exists():
detected.update(_detect_from_requirements(requirements))
# Check setup.py
setup_py = project_root / "setup.py"
if setup_py.exists():
detected.update(_detect_from_setup_py(setup_py))
# Scan source files for import patterns
detected.update(_detect_from_source(project_root))
return sorted(detected)
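# Example (illustrative): a project whose requirements.txt pins "fastapi>=0.100"
# and whose sources match "@app.get(" is reported as ["fastapi"].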
def _detect_from_pyproject(path: Path) -> set[str]:
"""Detect frameworks from pyproject.toml."""
detected: set[str] = set()
try:
import tomllib
with open(path, 'rb') as f:
data = tomllib.load(f)
# Check dependencies
deps = set()
deps.update(data.get("project", {}).get("dependencies", []))
deps.update(data.get("project", {}).get("optional-dependencies", {}).get("dev", []))
# Poetry format
poetry = data.get("tool", {}).get("poetry", {})
deps.update(poetry.get("dependencies", {}).keys())
deps.update(poetry.get("dev-dependencies", {}).keys())
for dep in deps:
# Extract package name (remove version specifier)
pkg = re.split(r'[<>=!~\[]', dep)[0].strip().lower()
for framework, config in FRAMEWORK_PATTERNS.items():
if pkg in config["packages"]:
detected.add(framework)
except Exception:
pass
return detected
def _detect_from_requirements(path: Path) -> set[str]:
"""Detect frameworks from requirements.txt."""
detected: set[str] = set()
try:
with open(path, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#'):
continue
# Extract package name
pkg = re.split(r'[<>=!~\[]', line)[0].strip().lower()
for framework, config in FRAMEWORK_PATTERNS.items():
if pkg in config["packages"]:
detected.add(framework)
except Exception:
pass
return detected
def _detect_from_setup_py(path: Path) -> set[str]:
"""Detect frameworks from setup.py."""
detected: set[str] = set()
try:
with open(path, 'r') as f:
content = f.read()
# Look for install_requires
for framework, config in FRAMEWORK_PATTERNS.items():
for pkg in config["packages"]:
if f'"{pkg}"' in content or f"'{pkg}'" in content:
detected.add(framework)
except Exception:
pass
return detected


def _detect_from_source(project_root: Path) -> set[str]:
    """Detect frameworks by scanning Python source files."""
    detected: set[str] = set()
    exclude_dirs = {
        '__pycache__', '.git', '.tox', '.nox', 'venv', '.venv', 'env', '.env',
        'node_modules', 'dist', 'build'
    }
    # Cap the scan to keep startup fast on large source trees.
    max_files = 50
    scanned = 0
    for py_file in project_root.rglob('*.py'):
        if scanned >= max_files:
            break
        # Skip files under excluded directories.
        if any(part in exclude_dirs for part in py_file.parts):
            continue
        try:
            with open(py_file, 'r', encoding='utf-8') as f:
                content = f.read(4096)  # Only read first 4KB
            for framework, config in FRAMEWORK_PATTERNS.items():
                if framework in detected:
                    continue
                for pattern in config["imports"] + config["patterns"]:
                    if re.search(pattern, content):
                        detected.add(framework)
                        break
            scanned += 1
        except Exception:
            continue
    return detected


def get_entrypoint_type(framework: str) -> str:
    """Get the entrypoint type for a framework."""
    return FRAMEWORK_PATTERNS.get(framework, {}).get("entrypoint_type", "unknown")
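

# Minimal CLI sketch (assumed wiring; not part of the original module):
if __name__ == "__main__":
    import sys

    root = Path(sys.argv[1]) if len(sys.argv) > 1 else Path(".")
    for fw in detect_frameworks(root):
        print(f"{fw}: entrypoint_type={get_entrypoint_type(fw)}")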

View File

@@ -1,2 +0,0 @@
# stella-callgraph-python requirements
# No external dependencies - uses Python 3.11+ stdlib only

View File

@@ -1,95 +0,0 @@
<#
Recomputes the SHA-256 hash for every entry in one or more SHA256SUMS files,
preserving each line's original layout (hash-first or path-first) and its
separator. Files may be passed as an argument array or via the pipeline.
#>
param(
    [Parameter(Mandatory = $true, ValueFromPipeline = $true)]
    [string[]] $ShaFiles,

    [string] $RepoRoot = (Resolve-Path (Join-Path $PSScriptRoot "..")).Path,

    [switch] $WhatIf
)

begin {
    Set-StrictMode -Version Latest
    $ErrorActionPreference = "Stop"

    function Resolve-HashTargetPath {
        param(
            [Parameter(Mandatory = $true)]
            [string] $ShaFileDirectory,

            [Parameter(Mandatory = $true)]
            [string] $PathText
        )

        $candidateRepoRoot = Join-Path $RepoRoot $PathText
        if (Test-Path -LiteralPath $candidateRepoRoot -PathType Leaf) {
            return (Resolve-Path -LiteralPath $candidateRepoRoot).Path
        }

        $candidateLocal = Join-Path $ShaFileDirectory $PathText
        if (Test-Path -LiteralPath $candidateLocal -PathType Leaf) {
            return (Resolve-Path -LiteralPath $candidateLocal).Path
        }

        throw "SHA256SUMS entry not found: '$PathText' (checked repo root and '$ShaFileDirectory')"
    }

    function Write-Utf8NoBomLf {
        param(
            [Parameter(Mandatory = $true)]
            [string] $Path,

            [Parameter(Mandatory = $true)]
            [string] $Content
        )

        $utf8NoBom = New-Object System.Text.UTF8Encoding($false)
        $bytes = $utf8NoBom.GetBytes($Content)
        [System.IO.File]::WriteAllBytes($Path, $bytes)
    }
}

process {
    # A process block is required so every pipeline item is handled;
    # without it, only the last item piped in would bind to $ShaFiles.
    foreach ($shaFile in $ShaFiles) {
        $shaFilePath = (Resolve-Path -LiteralPath $shaFile).Path
        $shaFileDir = Split-Path -Parent $shaFilePath
        $inputLines = Get-Content -LiteralPath $shaFilePath
        $outputLines = New-Object System.Collections.Generic.List[string]

        foreach ($line in $inputLines) {
            $trimmed = $line.Trim()
            if ($trimmed.Length -eq 0 -or $trimmed.StartsWith("#")) {
                $outputLines.Add($line)
                continue
            }

            $pathText = $null
            $format = $null
            $separator = " "

            if ($trimmed -match "^([0-9a-fA-F]{64})(\s+)(.+)$") {
                $format = "hash-first"
                $separator = $Matches[2]
                $pathText = $Matches[3].Trim()
            }
            elseif ($trimmed -match "^(.+?)(\s+)([0-9a-fA-F]{64})$") {
                $format = "path-first"
                $separator = $Matches[2]
                $pathText = $Matches[1].Trim()
            }
            else {
                throw "Unrecognized SHA256SUMS line format in '$shaFilePath': $line"
            }

            $targetPath = Resolve-HashTargetPath -ShaFileDirectory $shaFileDir -PathText $pathText
            $hash = (Get-FileHash -Algorithm SHA256 -LiteralPath $targetPath).Hash.ToLowerInvariant()

            if ($format -eq "hash-first") {
                $outputLines.Add("$hash$separator$pathText")
            }
            else {
                $outputLines.Add("$pathText$separator$hash")
            }
        }

        $content = ($outputLines -join "`n") + "`n"

        if ($WhatIf) {
            Write-Output "[whatif] Would update $shaFilePath"
            continue
        }

        Write-Utf8NoBomLf -Path $shaFilePath -Content $content
        Write-Output "Updated $shaFilePath"
    }
}
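
# Usage sketch (the script's on-disk name is not shown in this diff; the
# relative path below is an assumption):
#   ./update-sha256sums.ps1 docs/modules/zastava/SHA256SUMS -WhatIf
#   Get-ChildItem -Recurse -Filter SHA256SUMS |
#       ForEach-Object FullName |
#       ./update-sha256sums.ps1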

View File

@@ -1,46 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Upload both Zastava and Signals evidence bundles to the locker.
# Requires EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN.
EVIDENCE_LOCKER_URL=${EVIDENCE_LOCKER_URL:-}
CI_EVIDENCE_LOCKER_TOKEN=${CI_EVIDENCE_LOCKER_TOKEN:-}
if [[ -z "$EVIDENCE_LOCKER_URL" || -z "$CI_EVIDENCE_LOCKER_TOKEN" ]]; then
  echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
  exit 1
fi

# Defaults (override via environment)
ZASTAVA_TAR=${ZASTAVA_TAR:-evidence-locker/zastava/2025-12-02/zastava-evidence.tar}
ZASTAVA_VERIFY=${ZASTAVA_VERIFY:-tools/zastava-verify-evidence-tar.sh}
ZASTAVA_PATH="$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar"
SIGNALS_TAR=${SIGNALS_TAR:-evidence-locker/signals/2025-12-05/signals-evidence.tar}
SIGNALS_VERIFY=${SIGNALS_VERIFY:-tools/signals-verify-evidence-tar.sh}
SIGNALS_PATH="$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar"

# Verify bundles locally before upload (warn if a verifier is unavailable)
if [[ -x "$ZASTAVA_VERIFY" ]]; then
  "$ZASTAVA_VERIFY" "$ZASTAVA_TAR"
else
  echo "warning: $ZASTAVA_VERIFY not found or not executable; skipping verification" >&2
fi
if [[ -x "$SIGNALS_VERIFY" ]]; then
  "$SIGNALS_VERIFY" "$SIGNALS_TAR"
else
  echo "warning: $SIGNALS_VERIFY not found or not executable; skipping verification" >&2
fi

# Upload Zastava
curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$ZASTAVA_PATH" \
  --data-binary @"$ZASTAVA_TAR"
echo "Uploaded Zastava evidence to $ZASTAVA_PATH"

# Upload Signals
curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$SIGNALS_PATH" \
  --data-binary @"$SIGNALS_TAR"
echo "Uploaded Signals evidence to $SIGNALS_PATH"

View File

@@ -1,48 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
  echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
  exit 1
fi

STAGED_DIR="evidence-locker/zastava/2025-12-02"
TAR_OUT="/tmp/zastava-evidence.tar"
MODULE_ROOT="docs/modules/zastava"

test -d "$MODULE_ROOT" || { echo "missing module root $MODULE_ROOT" >&2; exit 1; }
mkdir -p "$STAGED_DIR"

tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT

rsync -a --relative \
  "$MODULE_ROOT/SHA256SUMS" \
  "$MODULE_ROOT/schemas/" \
  "$MODULE_ROOT/exports/" \
  "$MODULE_ROOT/thresholds.yaml" \
  "$MODULE_ROOT/thresholds.yaml.dsse" \
  "$MODULE_ROOT/kit/verify.sh" \
  "$MODULE_ROOT/kit/README.md" \
  "$MODULE_ROOT/kit/ed25519.pub" \
  "$MODULE_ROOT/kit/zastava-kit.tzst" \
  "$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
  "$MODULE_ROOT/evidence/README.md" \
  "$tmpdir/"

pushd "$tmpdir/docs/modules/zastava" >/dev/null
sha256sum --check SHA256SUMS
# Build deterministic tarball for reproducibility (payloads + DSSE)
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
  -cf "$TAR_OUT" .
popd >/dev/null
sha256sum "$TAR_OUT"

curl --retry 3 --retry-delay 2 --fail \
  -H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
  -X PUT "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" \
  --data-binary "@$TAR_OUT"
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/zastava/2025-12-02/"

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

TAR_PATH=${1:-evidence-locker/zastava/2025-12-02/zastava-evidence.tar}
EXPECTED_SHA=${EXPECTED_SHA:-e1d67424273828c48e9bf5b495a96c2ebcaf1ef2c308f60d8b9c62b8a1b735ae}

if [[ ! -f "$TAR_PATH" ]]; then
  echo "missing tar: $TAR_PATH" >&2
  exit 1
fi

sha=$(sha256sum "$TAR_PATH" | awk '{print $1}')
if [[ "$sha" != "$EXPECTED_SHA" ]]; then
  echo "sha mismatch: got $sha expected $EXPECTED_SHA" >&2
  exit 2
fi

tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
tar -xf "$TAR_PATH" -C "$tmpdir"
(cd "$tmpdir" && sha256sum --check SHA256SUMS)
echo "OK: tar hash matches and inner SHA256SUMS verified"