CI/CD consolidation

StellaOps Bot
2025-12-26 17:32:23 +02:00
parent a866eb6277
commit c786faae84
638 changed files with 3821 additions and 181 deletions

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail
# DEVOPS-CONTAINERS-46-001: build air-gap bundle from existing buildx OCI archive
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <image-tag> [bundle-dir]" >&2
exit 64
fi
IMAGE_TAG=$1
BUNDLE_DIR=${2:-"out/bundles/$(echo "$IMAGE_TAG" | tr '/:' '__')"}
SRC_DIR="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
OCI_ARCHIVE="${SRC_DIR}/image.oci"
if [[ ! -f "$OCI_ARCHIVE" ]]; then
echo "[airgap] OCI archive not found at $OCI_ARCHIVE. Run build-multiarch first." >&2
exit 66
fi
mkdir -p "$BUNDLE_DIR"
SBOM_FILE=""
if [[ -f "${SRC_DIR}/sbom.syft.json" ]]; then
SBOM_FILE="${SRC_DIR}/sbom.syft.json"
fi
cat > "${BUNDLE_DIR}/bundle-manifest.json" <<EOF
{
"image": "${IMAGE_TAG}",
"oci_archive": "image.oci",
"sbom": "$( [[ -n "$SBOM_FILE" ]] && echo sbom.syft.json || echo null )",
"created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
}
EOF
cp "$OCI_ARCHIVE" "${BUNDLE_DIR}/image.oci"
[[ -n "$SBOM_FILE" ]] && cp "$SBOM_FILE" "${BUNDLE_DIR}/sbom.syft.json"
[[ -f "${SRC_DIR}/image.sha256" ]] && cp "${SRC_DIR}/image.sha256" "${BUNDLE_DIR}/image.sha256"
[[ -f "${SRC_DIR}/image.sig" ]] && cp "${SRC_DIR}/image.sig" "${BUNDLE_DIR}/image.sig"
tar -C "$BUNDLE_DIR" -czf "${BUNDLE_DIR}.tgz" .
echo "[airgap] bundle created at ${BUNDLE_DIR}.tgz"
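# Example invocation (script and tag names illustrative; build-multiarch must have produced the OCI archive first):
#   ./build-airgap-bundle.sh registry.example.com/stellaops/scanner:1.2.3
# stages out/bundles/registry.example.com_stellaops_scanner_1.2.3/ plus the sibling .tgz.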

View File

@@ -0,0 +1,131 @@
#!/usr/bin/env bash
set -euo pipefail
# DEVOPS-CLI-41-001: Build multi-platform CLI binaries with SBOM and checksums.
# Updated: SPRINT_5100_0001_0001 - CLI Consolidation: includes Aoc and Symbols plugins
RIDS="${RIDS:-linux-x64,win-x64,osx-arm64}"
CONFIG="${CONFIG:-Release}"
PROJECT="src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
OUT_ROOT="out/cli"
SBOM_TOOL="${SBOM_TOOL:-syft}" # syft|none
SIGN="${SIGN:-false}"
COSIGN_KEY="${COSIGN_KEY:-}"
# CLI Plugins to include in the distribution
# SPRINT_5100_0001_0001: CLI Consolidation - stella aoc and stella symbols
PLUGIN_PROJECTS=(
"src/Cli/__Libraries/StellaOps.Cli.Plugins.Aoc/StellaOps.Cli.Plugins.Aoc.csproj"
"src/Cli/__Libraries/StellaOps.Cli.Plugins.Symbols/StellaOps.Cli.Plugins.Symbols.csproj"
)
PLUGIN_MANIFESTS=(
"src/Cli/plugins/cli/StellaOps.Cli.Plugins.Aoc/stellaops.cli.plugins.aoc.manifest.json"
"src/Cli/plugins/cli/StellaOps.Cli.Plugins.Symbols/stellaops.cli.plugins.symbols.manifest.json"
)
IFS=',' read -ra TARGETS <<< "$RIDS"
mkdir -p "$OUT_ROOT"
if ! command -v dotnet >/dev/null 2>&1; then
echo "[cli-build] dotnet CLI not found" >&2
exit 69
fi
generate_sbom() {
local dir="$1"
local sbom="$2"
if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
syft "dir:${dir}" -o json > "$sbom"
fi
}
sign_file() {
local file="$1"
if [[ "$SIGN" == "true" && -n "$COSIGN_KEY" && -x "$(command -v cosign || true)" ]]; then
COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${file}.sig" "$file"
fi
}
for rid in "${TARGETS[@]}"; do
echo "[cli-build] publishing for $rid"
out_dir="${OUT_ROOT}/${rid}"
publish_dir="${out_dir}/publish"
plugins_dir="${publish_dir}/plugins/cli"
mkdir -p "$publish_dir"
mkdir -p "$plugins_dir"
# Build main CLI
dotnet publish "$PROJECT" -c "$CONFIG" -r "$rid" \
-o "$publish_dir" \
--self-contained true \
-p:PublishSingleFile=true \
-p:PublishTrimmed=false \
-p:DebugType=None \
>/dev/null
# Build and copy plugins
# SPRINT_5100_0001_0001: CLI Consolidation
for i in "${!PLUGIN_PROJECTS[@]}"; do
plugin_project="${PLUGIN_PROJECTS[$i]}"
manifest_path="${PLUGIN_MANIFESTS[$i]}"
if [[ ! -f "$plugin_project" ]]; then
echo "[cli-build] WARNING: Plugin project not found: $plugin_project"
continue
fi
# Get plugin name from project path
plugin_name=$(basename "$(dirname "$plugin_project")")
plugin_out="${plugins_dir}/${plugin_name}"
mkdir -p "$plugin_out"
echo "[cli-build] building plugin: $plugin_name"
dotnet publish "$plugin_project" -c "$CONFIG" -r "$rid" \
-o "$plugin_out" \
--self-contained false \
-p:DebugType=None \
>/dev/null 2>&1 || echo "[cli-build] WARNING: Plugin build failed for $plugin_name (may have pre-existing errors)"
# Copy manifest file
if [[ -f "$manifest_path" ]]; then
cp "$manifest_path" "$plugin_out/"
else
echo "[cli-build] WARNING: Manifest not found: $manifest_path"
fi
done
# Package
# Package (archive the publish directory contents; tar for POSIX RIDs, zip for Windows)
archive_ext="tar.gz"
if [[ "$rid" == win-* ]]; then
archive_ext="zip"
fi
archive_name="stella-cli-${rid}.${archive_ext}"
archive_path="${out_dir}/${archive_name}"
if [[ "$archive_ext" == "zip" ]]; then
(cd "$publish_dir" && zip -qr "../${archive_name}" .)
else
tar -C "$publish_dir" -czf "$archive_path" .
fi
sha256sum "$archive_path" > "${archive_path}.sha256"
sign_file "$archive_path"
# SBOM
generate_sbom "$publish_dir" "${archive_path}.sbom.json"
done
# Build manifest
manifest="${OUT_ROOT}/manifest.json"
# Derive plugin ids from the manifest filenames (strip the .manifest.json suffix).
plugin_ids=$(printf '"%s",' "${PLUGIN_MANIFESTS[@]##*/}" | sed 's/\.manifest\.json"/"/g; s/,$//')
cat > "$manifest" <<EOF
{
"generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
"config": "$CONFIG",
"rids": [$(printf '"%s",' "${TARGETS[@]}" | sed 's/,$//')],
"plugins": ["stellaops.cli.plugins.aoc", "stellaops.cli.plugins.symbols"],
"artifacts_root": "$OUT_ROOT",
"notes": "CLI Consolidation (SPRINT_5100_0001_0001) - includes aoc and symbols plugins"
}
EOF
echo "[cli-build] artifacts in $OUT_ROOT"
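# Example invocation (script name and overrides illustrative):
#   RIDS=linux-x64 CONFIG=Release SBOM_TOOL=none ./build-cli.sh
# yields out/cli/linux-x64/stella-cli-linux-x64.tar.gz with .sha256 and manifest.json alongside.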

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env bash
set -euo pipefail
# Multi-arch buildx helper for DEVOPS-CONTAINERS-44-001
# Requirements: docker CLI with buildx, optional syft (for SBOM) and cosign (for signing).
usage() {
echo "Usage: $0 <image-tag> <context-dir> [--platform linux/amd64,linux/arm64] [--push] [--sbom syft|none] [--sign <cosign-key>]" >&2
exit 64
}
if [[ $# -lt 2 ]]; then
usage
fi
IMAGE_TAG=$1; shift
CONTEXT_DIR=$1; shift
PLATFORMS="linux/amd64,linux/arm64"
PUSH=false
SBOM_TOOL="syft"
COSIGN_KEY=""
while [[ $# -gt 0 ]]; do
case "$1" in
--platform) PLATFORMS="$2"; shift 2;;
--push) PUSH=true; shift;;
--sbom) SBOM_TOOL="$2"; shift 2;;
--sign) COSIGN_KEY="$2"; shift 2;;
*) echo "Unknown option: $1" >&2; usage;;
esac
done
if ! command -v docker >/dev/null 2>&1; then
echo "[buildx] docker CLI not found" >&2
exit 69
fi
OUT_ROOT="out/buildx/$(echo "$IMAGE_TAG" | tr '/:' '__')"
mkdir -p "$OUT_ROOT"
BUILDER_NAME="stellaops-multiarch"
if ! docker buildx inspect "$BUILDER_NAME" >/dev/null 2>&1; then
docker buildx create --name "$BUILDER_NAME" --driver docker-container --use >/dev/null
else
docker buildx use "$BUILDER_NAME" >/dev/null
fi
BUILD_OPTS=(
--platform "$PLATFORMS"
-t "$IMAGE_TAG"
--provenance=false
--sbom=false
--output "type=oci,dest=${OUT_ROOT}/image.oci"
)
if $PUSH; then
# Combining --push with an OCI --output requires a buildx release with multi-exporter support.
BUILD_OPTS+=("--push")
fi
echo "[buildx] building $IMAGE_TAG for $PLATFORMS"
docker buildx build "${BUILD_OPTS[@]}" "$CONTEXT_DIR"
echo "[buildx] computing digest"
IMAGE_DIGEST=$(sha256sum "${OUT_ROOT}/image.oci" | awk '{print $1}')
echo "$IMAGE_DIGEST image.oci" > "${OUT_ROOT}/image.sha256"
if [[ "$SBOM_TOOL" == "syft" ]] && command -v syft >/dev/null 2>&1; then
echo "[buildx] generating SBOM via syft"
syft "oci-archive:${OUT_ROOT}/image.oci" -o json > "${OUT_ROOT}/sbom.syft.json"
else
echo "[buildx] skipping SBOM (tool=$SBOM_TOOL, syft available? $(command -v syft >/dev/null && echo yes || echo no))"
fi
if [[ -n "$COSIGN_KEY" ]] && command -v cosign >/dev/null 2>&1; then
echo "[buildx] signing digest with cosign key"
COSIGN_EXPERIMENTAL=1 cosign sign-blob --key "$COSIGN_KEY" --output-signature "${OUT_ROOT}/image.sig" --output-certificate "${OUT_ROOT}/image.cert" "${OUT_ROOT}/image.oci"
else
echo "[buildx] signature skipped (no key provided or cosign missing)"
fi
cat > "${OUT_ROOT}/build-metadata.json" <<EOF
{
"image": "${IMAGE_TAG}",
"platforms": "${PLATFORMS}",
"pushed": ${PUSH},
"digest_sha256": "${IMAGE_DIGEST}",
"generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
"sbom": "$( [[ -f ${OUT_ROOT}/sbom.syft.json ]] && echo sbom.syft.json || echo null )"
}
EOF
echo "[buildx] artifacts written to ${OUT_ROOT}"
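# Example invocation (image, context, and script name illustrative):
#   ./build-multiarch.sh registry.example.com/app:1.0.0 ./docker/app --platform linux/amd64 --sbom syft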

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail
STAGED_DIR="evidence-locker/signals/2025-12-05"
MODULE_ROOT="docs/modules/signals"
TAR_OUT="/tmp/signals-evidence.tar"
if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
exit 1
fi
tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
rsync -a --relative \
"$STAGED_DIR/SHA256SUMS" \
"$STAGED_DIR/confidence_decay_config.sigstore.json" \
"$STAGED_DIR/unknowns_scoring_manifest.sigstore.json" \
"$STAGED_DIR/heuristics_catalog.sigstore.json" \
"$MODULE_ROOT/decay/confidence_decay_config.yaml" \
"$MODULE_ROOT/unknowns/unknowns_scoring_manifest.json" \
"$MODULE_ROOT/heuristics/heuristics.catalog.json" \
"$tmpdir/"
pushd "$tmpdir/$STAGED_DIR" >/dev/null
sha256sum --check SHA256SUMS
popd >/dev/null
# Build deterministic tarball
pushd "$tmpdir" >/dev/null
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
-cf "$TAR_OUT" .
popd >/dev/null
sha256sum "$TAR_OUT"
curl --retry 3 --retry-delay 2 --fail \
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
-X PUT "$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar" \
--data-binary "@$TAR_OUT"
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/signals/2025-12-05/"
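# Example run (endpoint and script name illustrative):
#   EVIDENCE_LOCKER_URL=https://locker.example.internal CI_EVIDENCE_LOCKER_TOKEN=... ./upload-signals-evidence.sh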

View File

@@ -0,0 +1,46 @@
#!/usr/bin/env bash
set -euo pipefail
# Upload both Zastava and Signals evidence bundles to the locker.
# Requires EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN.
EVIDENCE_LOCKER_URL=${EVIDENCE_LOCKER_URL:-}
CI_EVIDENCE_LOCKER_TOKEN=${CI_EVIDENCE_LOCKER_TOKEN:-}
if [[ -z "$EVIDENCE_LOCKER_URL" || -z "$CI_EVIDENCE_LOCKER_TOKEN" ]]; then
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
exit 1
fi
# Defaults
ZASTAVA_TAR=${ZASTAVA_TAR:-evidence-locker/zastava/2025-12-02/zastava-evidence.tar}
ZASTAVA_VERIFY=${ZASTAVA_VERIFY:-tools/zastava-verify-evidence-tar.sh}
ZASTAVA_PATH="$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar"
SIGNALS_TAR=${SIGNALS_TAR:-evidence-locker/signals/2025-12-05/signals-evidence.tar}
SIGNALS_VERIFY=${SIGNALS_VERIFY:-tools/signals-verify-evidence-tar.sh}
SIGNALS_PATH="$EVIDENCE_LOCKER_URL/signals/2025-12-05/signals-evidence.tar"
# Verify
if [[ -x "$ZASTAVA_VERIFY" ]]; then
"$ZASTAVA_VERIFY" "$ZASTAVA_TAR"
fi
if [[ -x "$SIGNALS_VERIFY" ]]; then
"$SIGNALS_VERIFY" "$SIGNALS_TAR"
fi
# Upload Zastava
curl --retry 3 --retry-delay 2 --fail \
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
-X PUT "$ZASTAVA_PATH" \
--data-binary @"$ZASTAVA_TAR"
echo "Uploaded Zastava evidence to $ZASTAVA_PATH"
# Upload Signals
curl --retry 3 --retry-delay 2 --fail \
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
-X PUT "$SIGNALS_PATH" \
--data-binary @"$SIGNALS_TAR"
echo "Uploaded Signals evidence to $SIGNALS_PATH"

View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ -z "${EVIDENCE_LOCKER_URL:-}" || -z "${CI_EVIDENCE_LOCKER_TOKEN:-}" ]]; then
echo "EVIDENCE_LOCKER_URL and CI_EVIDENCE_LOCKER_TOKEN are required" >&2
exit 1
fi
STAGED_DIR="evidence-locker/zastava/2025-12-02"
TAR_OUT="/tmp/zastava-evidence.tar"
MODULE_ROOT="docs/modules/zastava"
test -d "$MODULE_ROOT" || { echo "missing module root $MODULE_ROOT" >&2; exit 1; }
mkdir -p "$STAGED_DIR"
tmpdir=$(mktemp -d)
trap 'rm -rf "$tmpdir"' EXIT
rsync -a --relative \
"$MODULE_ROOT/SHA256SUMS" \
"$MODULE_ROOT/schemas/" \
"$MODULE_ROOT/exports/" \
"$MODULE_ROOT/thresholds.yaml" \
"$MODULE_ROOT/thresholds.yaml.dsse" \
"$MODULE_ROOT/kit/verify.sh" \
"$MODULE_ROOT/kit/README.md" \
"$MODULE_ROOT/kit/ed25519.pub" \
"$MODULE_ROOT/kit/zastava-kit.tzst" \
"$MODULE_ROOT/kit/zastava-kit.tzst.dsse" \
"$MODULE_ROOT/evidence/README.md" \
"$tmpdir/"
pushd "$tmpdir/docs/modules/zastava" >/dev/null
sha256sum --check SHA256SUMS
# Build deterministic tarball for reproducibility (payloads + DSSE)
tar --sort=name --mtime="UTC 1970-01-01" --owner=0 --group=0 --numeric-owner \
-cf "$TAR_OUT" .
popd >/dev/null
sha256sum "$TAR_OUT"
curl --retry 3 --retry-delay 2 --fail \
-H "Authorization: Bearer $CI_EVIDENCE_LOCKER_TOKEN" \
-X PUT "$EVIDENCE_LOCKER_URL/zastava/2025-12-02/zastava-evidence.tar" \
--data-binary "@$TAR_OUT"
echo "Uploaded $TAR_OUT to $EVIDENCE_LOCKER_URL/zastava/2025-12-02/"

View File

@@ -0,0 +1,287 @@
#!/usr/bin/env bash
# =============================================================================
# compute-reachability-metrics.sh
# Computes reachability metrics against ground-truth corpus
#
# Usage: ./compute-reachability-metrics.sh [options]
# --corpus-path PATH Path to ground-truth corpus (default: src/__Tests/reachability/corpus)
# --output FILE Output JSON file (default: stdout)
# --dry-run Show what would be computed without running scanner
# --strict Exit non-zero if any threshold is violated
# --verbose Enable verbose output
#
# Output: JSON with recall, precision, accuracy metrics per vulnerability class
# =============================================================================
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Default paths
CORPUS_PATH="${REPO_ROOT}/src/__Tests/reachability/corpus"
OUTPUT_FILE=""
DRY_RUN=false
STRICT=false
VERBOSE=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--corpus-path)
CORPUS_PATH="$2"
shift 2
;;
--output)
OUTPUT_FILE="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--strict)
STRICT=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
-h|--help)
head -20 "$0" | tail -15
exit 0
;;
*)
echo "Unknown option: $1" >&2
exit 1
;;
esac
done
log() {
if [[ "${VERBOSE}" == "true" ]]; then
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
fi
}
error() {
echo "[ERROR] $*" >&2
}
# Validate corpus exists
if [[ ! -d "${CORPUS_PATH}" ]]; then
error "Corpus directory not found: ${CORPUS_PATH}"
exit 1
fi
MANIFEST_FILE="${CORPUS_PATH}/manifest.json"
if [[ ! -f "${MANIFEST_FILE}" ]]; then
error "Corpus manifest not found: ${MANIFEST_FILE}"
exit 1
fi
log "Loading corpus from ${CORPUS_PATH}"
log "Manifest: ${MANIFEST_FILE}"
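# The manifest is expected to look roughly like this (only the fields read below are
# required; the id, path, and counts are illustrative):
# {
#   "fixtures": [
#     { "id": "fx-0001", "path": "fixtures/fx-0001", "class": "runtime_dep",
#       "expected_reachable": 3, "expected_unreachable": 1 }
#   ]
# }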
# Initialize counters for each vulnerability class
declare -A true_positives
declare -A false_positives
declare -A false_negatives
declare -A total_expected
CLASSES=("runtime_dep" "os_pkg" "code" "config")
for class in "${CLASSES[@]}"; do
true_positives[$class]=0
false_positives[$class]=0
false_negatives[$class]=0
total_expected[$class]=0
done
if [[ "${DRY_RUN}" == "true" ]]; then
log "[DRY RUN] Would process corpus fixtures..."
# Generate mock metrics for dry-run
cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"corpus_path": "${CORPUS_PATH}",
"dry_run": true,
"metrics": {
"runtime_dep": {
"recall": 0.96,
"precision": 0.94,
"f1_score": 0.95,
"total_expected": 100,
"true_positives": 96,
"false_positives": 6,
"false_negatives": 4
},
"os_pkg": {
"recall": 0.98,
"precision": 0.97,
"f1_score": 0.975,
"total_expected": 50,
"true_positives": 49,
"false_positives": 2,
"false_negatives": 1
},
"code": {
"recall": 0.92,
"precision": 0.90,
"f1_score": 0.91,
"total_expected": 25,
"true_positives": 23,
"false_positives": 3,
"false_negatives": 2
},
"config": {
"recall": 0.88,
"precision": 0.85,
"f1_score": 0.865,
"total_expected": 20,
"true_positives": 18,
"false_positives": 3,
"false_negatives": 2
}
},
"aggregate": {
"overall_recall": 0.9538,
"overall_precision": 0.9302,
"reachability_accuracy": 0.9268
}
}
EOF
exit 0
fi
# Process each fixture in the corpus
log "Processing corpus fixtures..."
# Read manifest and iterate fixtures
FIXTURE_COUNT=$(jq -r '.fixtures | length' "${MANIFEST_FILE}")
log "Found ${FIXTURE_COUNT} fixtures"
for i in $(seq 0 $((FIXTURE_COUNT - 1))); do
FIXTURE_ID=$(jq -r ".fixtures[$i].id" "${MANIFEST_FILE}")
FIXTURE_PATH="${CORPUS_PATH}/$(jq -r ".fixtures[$i].path" "${MANIFEST_FILE}")"
FIXTURE_CLASS=$(jq -r ".fixtures[$i].class" "${MANIFEST_FILE}")
EXPECTED_REACHABLE=$(jq -r ".fixtures[$i].expected_reachable // 0" "${MANIFEST_FILE}")
EXPECTED_UNREACHABLE=$(jq -r ".fixtures[$i].expected_unreachable // 0" "${MANIFEST_FILE}")
log "Processing fixture: ${FIXTURE_ID} (class: ${FIXTURE_CLASS})"
if [[ ! -d "${FIXTURE_PATH}" ]] && [[ ! -f "${FIXTURE_PATH}" ]]; then
error "Fixture not found: ${FIXTURE_PATH}"
continue
fi
# Update expected counts
total_expected[$FIXTURE_CLASS]=$((${total_expected[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))
# Run scanner on fixture (deterministic mode, offline)
SCAN_RESULT_FILE=$(mktemp)  # cleaned up at the end of each iteration (a looped EXIT trap would leak all but the last file)
if dotnet run --project "${REPO_ROOT}/src/Scanner/StellaOps.Scanner.Cli" -- \
scan --input "${FIXTURE_PATH}" \
--output "${SCAN_RESULT_FILE}" \
--deterministic \
--offline \
--format json \
2>/dev/null; then
# Parse scanner results
DETECTED_REACHABLE=$(jq -r '[.findings[] | select(.reachable == true)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")
DETECTED_UNREACHABLE=$(jq -r '[.findings[] | select(.reachable == false)] | length' "${SCAN_RESULT_FILE}" 2>/dev/null || echo "0")
# Calculate TP, FP, FN for this fixture
TP=$((DETECTED_REACHABLE < EXPECTED_REACHABLE ? DETECTED_REACHABLE : EXPECTED_REACHABLE))
FP=$((DETECTED_REACHABLE > EXPECTED_REACHABLE ? DETECTED_REACHABLE - EXPECTED_REACHABLE : 0))
FN=$((EXPECTED_REACHABLE - TP))
true_positives[$FIXTURE_CLASS]=$((${true_positives[$FIXTURE_CLASS]} + TP))
false_positives[$FIXTURE_CLASS]=$((${false_positives[$FIXTURE_CLASS]} + FP))
false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + FN))
else
error "Scanner failed for fixture: ${FIXTURE_ID}"
false_negatives[$FIXTURE_CLASS]=$((${false_negatives[$FIXTURE_CLASS]} + EXPECTED_REACHABLE))
fi
rm -f "${SCAN_RESULT_FILE}"
done
# Calculate metrics per class
calculate_metrics() {
local class=$1
local tp=${true_positives[$class]}
local fp=${false_positives[$class]}
local fn=${false_negatives[$class]}
local total=${total_expected[$class]}
local recall=0
local precision=0
local f1=0
# bc prints fractions without a leading zero (".95"), which is invalid JSON; normalize with sed.
if [[ $((tp + fn)) -gt 0 ]]; then
recall=$(echo "scale=4; $tp / ($tp + $fn)" | bc | sed 's/^\./0./')
fi
if [[ $((tp + fp)) -gt 0 ]]; then
precision=$(echo "scale=4; $tp / ($tp + $fp)" | bc | sed 's/^\./0./')
fi
if (( $(echo "$recall + $precision > 0" | bc -l) )); then
f1=$(echo "scale=4; 2 * $recall * $precision / ($recall + $precision)" | bc | sed 's/^\./0./')
fi
echo "{\"recall\": $recall, \"precision\": $precision, \"f1_score\": $f1, \"total_expected\": $total, \"true_positives\": $tp, \"false_positives\": $fp, \"false_negatives\": $fn}"
}
# Generate output JSON
OUTPUT=$(cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"corpus_path": "${CORPUS_PATH}",
"dry_run": false,
"metrics": {
"runtime_dep": $(calculate_metrics "runtime_dep"),
"os_pkg": $(calculate_metrics "os_pkg"),
"code": $(calculate_metrics "code"),
"config": $(calculate_metrics "config")
},
"aggregate": {
"overall_recall": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${total_expected[runtime_dep]} + ${total_expected[os_pkg]} + ${total_expected[code]} + ${total_expected[config]} + 0.0001)" | bc),
"overall_precision": $(echo "scale=4; (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]}) / (${true_positives[runtime_dep]} + ${true_positives[os_pkg]} + ${true_positives[code]} + ${true_positives[config]} + ${false_positives[runtime_dep]} + ${false_positives[os_pkg]} + ${false_positives[code]} + ${false_positives[config]} + 0.0001)" | bc)
}
}
EOF
)
# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
echo "${OUTPUT}" > "${OUTPUT_FILE}"
log "Results written to ${OUTPUT_FILE}"
else
echo "${OUTPUT}"
fi
# Check thresholds in strict mode
if [[ "${STRICT}" == "true" ]]; then
THRESHOLDS_FILE="${SCRIPT_DIR}/reachability-thresholds.yaml"
if [[ -f "${THRESHOLDS_FILE}" ]]; then
log "Checking thresholds from ${THRESHOLDS_FILE}"
# Extract thresholds and check
MIN_RECALL=$(yq -r '.thresholds.runtime_dependency_recall.min // 0.95' "${THRESHOLDS_FILE}")
ACTUAL_RECALL=$(echo "${OUTPUT}" | jq -r '.metrics.runtime_dep.recall')
if (( $(echo "$ACTUAL_RECALL < $MIN_RECALL" | bc -l) )); then
error "Runtime dependency recall ${ACTUAL_RECALL} below threshold ${MIN_RECALL}"
exit 1
fi
log "All thresholds passed"
fi
fi
exit 0
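# Example strict CI run (output path illustrative):
#   ./compute-reachability-metrics.sh --corpus-path src/__Tests/reachability/corpus \
#     --output out/reachability-metrics.json --strict --verbose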

View File

@@ -0,0 +1,313 @@
#!/usr/bin/env bash
# =============================================================================
# compute-ttfs-metrics.sh
# Computes Time-to-First-Signal (TTFS) metrics from test runs
#
# Usage: ./compute-ttfs-metrics.sh [options]
# --results-path PATH Path to test results directory
# --output FILE Output JSON file (default: stdout)
# --baseline FILE Baseline TTFS file for comparison
# --dry-run Show what would be computed
# --strict Exit non-zero if thresholds are violated
# --verbose Enable verbose output
#
# Output: JSON with TTFS p50, p95, p99 metrics and regression status
# =============================================================================
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Default paths
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
OUTPUT_FILE=""
BASELINE_FILE="${REPO_ROOT}/src/__Tests/__Benchmarks/baselines/ttfs-baseline.json"
DRY_RUN=false
STRICT=false
VERBOSE=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--results-path)
RESULTS_PATH="$2"
shift 2
;;
--output)
OUTPUT_FILE="$2"
shift 2
;;
--baseline)
BASELINE_FILE="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--strict)
STRICT=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
-h|--help)
head -20 "$0" | tail -15
exit 0
;;
*)
echo "Unknown option: $1" >&2
exit 1
;;
esac
done
log() {
if [[ "${VERBOSE}" == "true" ]]; then
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
fi
}
error() {
echo "[ERROR] $*" >&2
}
warn() {
echo "[WARN] $*" >&2
}
# Calculate percentiles from sorted array
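# Nearest-rank (floor-index) percentile: assumes the input array is already sorted
# numerically and does not interpolate between ranks.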
percentile() {
local -n arr=$1
local p=$2
local n=${#arr[@]}
if [[ $n -eq 0 ]]; then
echo "0"
return
fi
local idx=$(echo "scale=0; ($n - 1) * $p / 100" | bc)
echo "${arr[$idx]}"
}
if [[ "${DRY_RUN}" == "true" ]]; then
log "[DRY RUN] Would process TTFS metrics..."
cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"dry_run": true,
"results_path": "${RESULTS_PATH}",
"metrics": {
"ttfs_ms": {
"p50": 1250,
"p95": 3500,
"p99": 5200,
"min": 450,
"max": 8500,
"mean": 1850,
"sample_count": 100
},
"by_scan_type": {
"image_scan": {
"p50": 2100,
"p95": 4500,
"p99": 6800
},
"filesystem_scan": {
"p50": 850,
"p95": 1800,
"p99": 2500
},
"sbom_scan": {
"p50": 320,
"p95": 650,
"p99": 950
}
}
},
"baseline_comparison": {
"baseline_path": "${BASELINE_FILE}",
"p50_regression_pct": -2.5,
"p95_regression_pct": 1.2,
"regression_detected": false
}
}
EOF
exit 0
fi
# Validate results directory
if [[ ! -d "${RESULTS_PATH}" ]]; then
error "Results directory not found: ${RESULTS_PATH}"
exit 1
fi
log "Processing TTFS results from ${RESULTS_PATH}"
# Collect all TTFS values from result files
declare -a ttfs_values=()
declare -a image_ttfs=()
declare -a fs_ttfs=()
declare -a sbom_ttfs=()
# Find and process all result files (globstar makes ** recurse, including the top level)
shopt -s globstar nullglob
for result_file in "${RESULTS_PATH}"/**/*.json; do
[[ -f "${result_file}" ]] || continue
log "Processing: ${result_file}"
# Extract TTFS value if present
TTFS=$(jq -r '.ttfs_ms // .time_to_first_signal_ms // empty' "${result_file}" 2>/dev/null || true)
SCAN_TYPE=$(jq -r '.scan_type // "unknown"' "${result_file}" 2>/dev/null || echo "unknown")
if [[ -n "${TTFS}" ]] && [[ "${TTFS}" != "null" ]]; then
ttfs_values+=("${TTFS}")
case "${SCAN_TYPE}" in
image|image_scan|container)
image_ttfs+=("${TTFS}")
;;
filesystem|fs|fs_scan)
fs_ttfs+=("${TTFS}")
;;
sbom|sbom_scan)
sbom_ttfs+=("${TTFS}")
;;
esac
fi
done
# Sort arrays for percentile calculation
IFS=$'\n' ttfs_sorted=($(sort -n <<<"${ttfs_values[*]}")); unset IFS
IFS=$'\n' image_sorted=($(sort -n <<<"${image_ttfs[*]}")); unset IFS
IFS=$'\n' fs_sorted=($(sort -n <<<"${fs_ttfs[*]}")); unset IFS
IFS=$'\n' sbom_sorted=($(sort -n <<<"${sbom_ttfs[*]}")); unset IFS
# Calculate overall metrics
SAMPLE_COUNT=${#ttfs_values[@]}
if [[ $SAMPLE_COUNT -eq 0 ]]; then
warn "No TTFS samples found"
P50=0
P95=0
P99=0
MIN=0
MAX=0
MEAN=0
else
P50=$(percentile ttfs_sorted 50)
P95=$(percentile ttfs_sorted 95)
P99=$(percentile ttfs_sorted 99)
MIN=${ttfs_sorted[0]}
MAX=${ttfs_sorted[-1]}
# Calculate mean
SUM=0
for v in "${ttfs_values[@]}"; do
SUM=$((SUM + v))
done
MEAN=$((SUM / SAMPLE_COUNT))
fi
# Calculate per-type metrics
IMAGE_P50=$(percentile image_sorted 50)
IMAGE_P95=$(percentile image_sorted 95)
IMAGE_P99=$(percentile image_sorted 99)
FS_P50=$(percentile fs_sorted 50)
FS_P95=$(percentile fs_sorted 95)
FS_P99=$(percentile fs_sorted 99)
SBOM_P50=$(percentile sbom_sorted 50)
SBOM_P95=$(percentile sbom_sorted 95)
SBOM_P99=$(percentile sbom_sorted 99)
# Compare against baseline if available
REGRESSION_DETECTED=false
P50_REGRESSION_PCT=0
P95_REGRESSION_PCT=0
if [[ -f "${BASELINE_FILE}" ]]; then
log "Comparing against baseline: ${BASELINE_FILE}"
BASELINE_P50=$(jq -r '.metrics.ttfs_ms.p50 // 0' "${BASELINE_FILE}")
BASELINE_P95=$(jq -r '.metrics.ttfs_ms.p95 // 0' "${BASELINE_FILE}")
if [[ $BASELINE_P50 -gt 0 ]]; then
P50_REGRESSION_PCT=$(echo "scale=2; (${P50} - ${BASELINE_P50}) * 100 / ${BASELINE_P50}" | bc)
fi
if [[ $BASELINE_P95 -gt 0 ]]; then
P95_REGRESSION_PCT=$(echo "scale=2; (${P95} - ${BASELINE_P95}) * 100 / ${BASELINE_P95}" | bc)
fi
# Check for regression (>10% increase)
if (( $(echo "${P50_REGRESSION_PCT} > 10" | bc -l) )) || (( $(echo "${P95_REGRESSION_PCT} > 10" | bc -l) )); then
REGRESSION_DETECTED=true
warn "TTFS regression detected: p50=${P50_REGRESSION_PCT}%, p95=${P95_REGRESSION_PCT}%"
fi
fi
# Generate output
OUTPUT=$(cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"dry_run": false,
"results_path": "${RESULTS_PATH}",
"metrics": {
"ttfs_ms": {
"p50": ${P50},
"p95": ${P95},
"p99": ${P99},
"min": ${MIN},
"max": ${MAX},
"mean": ${MEAN},
"sample_count": ${SAMPLE_COUNT}
},
"by_scan_type": {
"image_scan": {
"p50": ${IMAGE_P50:-0},
"p95": ${IMAGE_P95:-0},
"p99": ${IMAGE_P99:-0}
},
"filesystem_scan": {
"p50": ${FS_P50:-0},
"p95": ${FS_P95:-0},
"p99": ${FS_P99:-0}
},
"sbom_scan": {
"p50": ${SBOM_P50:-0},
"p95": ${SBOM_P95:-0},
"p99": ${SBOM_P99:-0}
}
}
},
"baseline_comparison": {
"baseline_path": "${BASELINE_FILE}",
"p50_regression_pct": ${P50_REGRESSION_PCT},
"p95_regression_pct": ${P95_REGRESSION_PCT},
"regression_detected": ${REGRESSION_DETECTED}
}
}
EOF
)
# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
echo "${OUTPUT}" > "${OUTPUT_FILE}"
log "Results written to ${OUTPUT_FILE}"
else
echo "${OUTPUT}"
fi
# Strict mode: fail on regression
if [[ "${STRICT}" == "true" ]] && [[ "${REGRESSION_DETECTED}" == "true" ]]; then
error "TTFS regression exceeds threshold"
exit 1
fi
exit 0
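# Example regression check (results path illustrative; the baseline is the repo default):
#   ./compute-ttfs-metrics.sh --results-path out/bench/results \
#     --baseline src/__Tests/__Benchmarks/baselines/ttfs-baseline.json --strict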

View File

@@ -0,0 +1,326 @@
#!/usr/bin/env bash
# =============================================================================
# enforce-performance-slos.sh
# Enforces scan time and compute budget SLOs in CI
#
# Usage: ./enforce-performance-slos.sh [options]
# --results-path PATH Path to benchmark results directory
# --slos-file FILE Path to SLO definitions (default: scripts/ci/performance-slos.yaml)
# --output FILE Output JSON file (default: stdout)
# --dry-run Show what would be enforced
# --strict Exit non-zero if any SLO is violated
# --verbose Enable verbose output
#
# Output: JSON with SLO evaluation results and violations
# =============================================================================
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
# Default paths
RESULTS_PATH="${REPO_ROOT}/src/__Tests/__Benchmarks/results"
SLOS_FILE="${SCRIPT_DIR}/performance-slos.yaml"
OUTPUT_FILE=""
DRY_RUN=false
STRICT=false
VERBOSE=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--results-path)
RESULTS_PATH="$2"
shift 2
;;
--slos-file)
SLOS_FILE="$2"
shift 2
;;
--output)
OUTPUT_FILE="$2"
shift 2
;;
--dry-run)
DRY_RUN=true
shift
;;
--strict)
STRICT=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
-h|--help)
head -20 "$0" | tail -15
exit 0
;;
*)
echo "Unknown option: $1" >&2
exit 1
;;
esac
done
log() {
if [[ "${VERBOSE}" == "true" ]]; then
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] $*" >&2
fi
}
error() {
echo "[ERROR] $*" >&2
}
warn() {
echo "[WARN] $*" >&2
}
if [[ "${DRY_RUN}" == "true" ]]; then
log "[DRY RUN] Would enforce performance SLOs..."
cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"dry_run": true,
"results_path": "${RESULTS_PATH}",
"slos_file": "${SLOS_FILE}",
"slo_evaluations": {
"scan_time_p95": {
"slo_name": "Scan Time P95",
"threshold_ms": 30000,
"actual_ms": 25000,
"passed": true,
"margin_pct": 16.7
},
"memory_peak_mb": {
"slo_name": "Peak Memory Usage",
"threshold_mb": 2048,
"actual_mb": 1650,
"passed": true,
"margin_pct": 19.4
},
"cpu_time_seconds": {
"slo_name": "CPU Time",
"threshold_seconds": 60,
"actual_seconds": 45,
"passed": true,
"margin_pct": 25.0
}
},
"summary": {
"total_slos": 3,
"passed": 3,
"failed": 0,
"all_passed": true
}
}
EOF
exit 0
fi
# Validate paths
if [[ ! -d "${RESULTS_PATH}" ]]; then
error "Results directory not found: ${RESULTS_PATH}"
exit 1
fi
if [[ ! -f "${SLOS_FILE}" ]]; then
warn "SLOs file not found: ${SLOS_FILE}, using defaults"
fi
log "Enforcing SLOs from ${SLOS_FILE}"
log "Results path: ${RESULTS_PATH}"
# Initialize evaluation results
declare -A slo_results
VIOLATIONS=()
TOTAL_SLOS=0
PASSED_SLOS=0
# Define default SLOs
declare -A SLOS
SLOS["scan_time_p95_ms"]=30000
SLOS["scan_time_p99_ms"]=60000
SLOS["memory_peak_mb"]=2048
SLOS["cpu_time_seconds"]=120
SLOS["sbom_gen_time_ms"]=10000
SLOS["policy_eval_time_ms"]=5000
# Load SLOs from file if it exists. Only flat top-level "key: value" (or "key: {threshold: N}")
# entries are consumed; nested sections fall back to the built-in defaults above.
if [[ -f "${SLOS_FILE}" ]]; then
while IFS=: read -r key value; do
key=$(echo "$key" | tr -d ' ')
value=$(echo "$value" | tr -d ' ')
if [[ -n "$key" ]] && [[ -n "$value" ]] && [[ "$key" != "#"* ]]; then
SLOS["$key"]=$value
log "Loaded SLO: ${key}=${value}"
fi
done < <(yq -r 'to_entries | .[] | "\(.key):\(.value.threshold // .value)"' "${SLOS_FILE}" 2>/dev/null || true)
fi
# Collect metrics from results
SCAN_TIMES=()
MEMORY_VALUES=()
CPU_TIMES=()
SBOM_TIMES=()
POLICY_TIMES=()
shopt -s globstar nullglob
for result_file in "${RESULTS_PATH}"/**/*.json; do
[[ -f "${result_file}" ]] || continue
log "Processing: ${result_file}"
# Extract metrics
SCAN_TIME=$(jq -r '.duration_ms // .scan_time_ms // empty' "${result_file}" 2>/dev/null || true)
MEMORY=$(jq -r '.peak_memory_mb // .memory_mb // empty' "${result_file}" 2>/dev/null || true)
CPU_TIME=$(jq -r '.cpu_time_seconds // .cpu_seconds // empty' "${result_file}" 2>/dev/null || true)
SBOM_TIME=$(jq -r '.sbom_generation_ms // empty' "${result_file}" 2>/dev/null || true)
POLICY_TIME=$(jq -r '.policy_evaluation_ms // empty' "${result_file}" 2>/dev/null || true)
[[ -n "${SCAN_TIME}" ]] && SCAN_TIMES+=("${SCAN_TIME}")
[[ -n "${MEMORY}" ]] && MEMORY_VALUES+=("${MEMORY}")
[[ -n "${CPU_TIME}" ]] && CPU_TIMES+=("${CPU_TIME}")
[[ -n "${SBOM_TIME}" ]] && SBOM_TIMES+=("${SBOM_TIME}")
[[ -n "${POLICY_TIME}" ]] && POLICY_TIMES+=("${POLICY_TIME}")
done
# Helper: calculate percentile from array
calc_percentile() {
local -n values=$1
local pct=$2
if [[ ${#values[@]} -eq 0 ]]; then
echo "0"
return
fi
IFS=$'\n' sorted=($(sort -n <<<"${values[*]}")); unset IFS
local n=${#sorted[@]}
local idx=$(echo "scale=0; ($n - 1) * $pct / 100" | bc)
echo "${sorted[$idx]}"
}
# Helper: calculate max from array
calc_max() {
local -n values=$1
if [[ ${#values[@]} -eq 0 ]]; then
echo "0"
return
fi
local max=0
for v in "${values[@]}"; do
if (( $(echo "$v > $max" | bc -l) )); then
max=$v
fi
done
echo "$max"
}
# Evaluate each SLO. Called directly (not via command substitution) so the counters and
# VIOLATIONS survive in the parent shell; the result JSON is returned through SLO_JSON.
evaluate_slo() {
local name=$1
local threshold=$2
local actual=$3
local unit=$4
TOTAL_SLOS=$((TOTAL_SLOS + 1))
local passed=true
local margin_pct=0
if (( $(echo "$actual > $threshold" | bc -l) )); then
passed=false
margin_pct=$(echo "scale=2; ($actual - $threshold) * 100 / $threshold" | bc | sed 's/^\./0./')
VIOLATIONS+=("${name}: ${actual}${unit} exceeds threshold ${threshold}${unit} (+${margin_pct}%)")
warn "SLO VIOLATION: ${name} = ${actual}${unit} (threshold: ${threshold}${unit})"
else
PASSED_SLOS=$((PASSED_SLOS + 1))
margin_pct=$(echo "scale=2; ($threshold - $actual) * 100 / $threshold" | bc | sed 's/^\./0./')
log "SLO PASSED: ${name} = ${actual}${unit} (threshold: ${threshold}${unit}, margin: ${margin_pct}%)"
fi
SLO_JSON="{\"slo_name\": \"${name}\", \"threshold\": ${threshold}, \"actual\": ${actual}, \"unit\": \"${unit}\", \"passed\": ${passed}, \"margin_pct\": ${margin_pct}}"
}
# Calculate actuals
SCAN_P95=$(calc_percentile SCAN_TIMES 95)
SCAN_P99=$(calc_percentile SCAN_TIMES 99)
MEMORY_MAX=$(calc_max MEMORY_VALUES)
CPU_MAX=$(calc_max CPU_TIMES)
SBOM_P95=$(calc_percentile SBOM_TIMES 95)
POLICY_P95=$(calc_percentile POLICY_TIMES 95)
# Run evaluations
evaluate_slo "Scan Time P95" "${SLOS[scan_time_p95_ms]}" "${SCAN_P95}" "ms"; SLO_SCAN_P95="$SLO_JSON"
evaluate_slo "Scan Time P99" "${SLOS[scan_time_p99_ms]}" "${SCAN_P99}" "ms"; SLO_SCAN_P99="$SLO_JSON"
evaluate_slo "Peak Memory" "${SLOS[memory_peak_mb]}" "${MEMORY_MAX}" "MB"; SLO_MEMORY="$SLO_JSON"
evaluate_slo "CPU Time" "${SLOS[cpu_time_seconds]}" "${CPU_MAX}" "s"; SLO_CPU="$SLO_JSON"
evaluate_slo "SBOM Generation P95" "${SLOS[sbom_gen_time_ms]}" "${SBOM_P95}" "ms"; SLO_SBOM="$SLO_JSON"
evaluate_slo "Policy Evaluation P95" "${SLOS[policy_eval_time_ms]}" "${POLICY_P95}" "ms"; SLO_POLICY="$SLO_JSON"
# Generate output
ALL_PASSED=true
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
ALL_PASSED=false
fi
# Build violations JSON array
VIOLATIONS_JSON="[]"
if [[ ${#VIOLATIONS[@]} -gt 0 ]]; then
VIOLATIONS_JSON="["
for i in "${!VIOLATIONS[@]}"; do
[[ $i -gt 0 ]] && VIOLATIONS_JSON+=","
VIOLATIONS_JSON+="\"${VIOLATIONS[$i]}\""
done
VIOLATIONS_JSON+="]"
fi
OUTPUT=$(cat <<EOF
{
"timestamp": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"dry_run": false,
"results_path": "${RESULTS_PATH}",
"slos_file": "${SLOS_FILE}",
"slo_evaluations": {
"scan_time_p95": ${SLO_SCAN_P95},
"scan_time_p99": ${SLO_SCAN_P99},
"memory_peak_mb": ${SLO_MEMORY},
"cpu_time_seconds": ${SLO_CPU},
"sbom_gen_time_ms": ${SLO_SBOM},
"policy_eval_time_ms": ${SLO_POLICY}
},
"summary": {
"total_slos": ${TOTAL_SLOS},
"passed": ${PASSED_SLOS},
"failed": $((TOTAL_SLOS - PASSED_SLOS)),
"all_passed": ${ALL_PASSED},
"violations": ${VIOLATIONS_JSON}
}
}
EOF
)
# Output results
if [[ -n "${OUTPUT_FILE}" ]]; then
echo "${OUTPUT}" > "${OUTPUT_FILE}"
log "Results written to ${OUTPUT_FILE}"
else
echo "${OUTPUT}"
fi
# Strict mode: fail on violations
if [[ "${STRICT}" == "true" ]] && [[ "${ALL_PASSED}" == "false" ]]; then
error "Performance SLO violations detected"
for v in "${VIOLATIONS[@]}"; do
error " - ${v}"
done
exit 1
fi
exit 0
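# Example CI gate run (results path illustrative):
#   ./enforce-performance-slos.sh --results-path out/bench/results \
#     --slos-file scripts/ci/performance-slos.yaml --strict --output out/slo-report.json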

View File

@@ -0,0 +1,94 @@
# =============================================================================
# Performance SLOs (Service Level Objectives)
# Reference: Testing and Quality Guardrails Technical Reference
#
# These SLOs define the performance budgets for CI quality gates.
# Violations will be flagged and may block releases.
# =============================================================================
# Scan Time SLOs (milliseconds)
scan_time:
p50:
threshold: 15000
description: "50th percentile scan time"
severity: "info"
p95:
threshold: 30000
description: "95th percentile scan time - primary SLO"
severity: "warning"
p99:
threshold: 60000
description: "99th percentile scan time - tail latency"
severity: "critical"
# Memory Usage SLOs (megabytes)
memory:
peak_mb:
threshold: 2048
description: "Peak memory usage during scan"
severity: "warning"
average_mb:
threshold: 1024
description: "Average memory usage"
severity: "info"
# CPU Time SLOs (seconds)
cpu:
max_seconds:
threshold: 120
description: "Maximum CPU time per scan"
severity: "warning"
average_seconds:
threshold: 60
description: "Average CPU time per scan"
severity: "info"
# Component-Specific SLOs (milliseconds)
components:
sbom_generation:
p95:
threshold: 10000
description: "SBOM generation time P95"
severity: "warning"
policy_evaluation:
p95:
threshold: 5000
description: "Policy evaluation time P95"
severity: "warning"
reachability_analysis:
p95:
threshold: 20000
description: "Reachability analysis time P95"
severity: "warning"
vulnerability_matching:
p95:
threshold: 8000
description: "Vulnerability matching time P95"
severity: "warning"
# Resource Budget SLOs
resource_budgets:
disk_io_mb:
threshold: 500
description: "Maximum disk I/O per scan"
network_calls:
threshold: 0
description: "Network calls (should be zero for offline scans)"
temp_storage_mb:
threshold: 1024
description: "Maximum temporary storage usage"
# Regression Thresholds
regression:
max_degradation_pct: 10
warning_threshold_pct: 5
baseline_window_days: 30
# Override Configuration
overrides:
allowed_labels:
- "performance-override"
- "large-scan"
required_approvers:
- "platform"
- "performance"
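# Note: enforce-performance-slos.sh reads only flat top-level "key: value" (or
# "key: {threshold: N}") entries, e.g. a line such as "scan_time_p95_ms: 30000"
# (illustrative); the nested sections above document intent and map to the script's
# built-in defaults.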

View File

@@ -0,0 +1,102 @@
# =============================================================================
# Reachability Quality Gate Thresholds
# Reference: Testing and Quality Guardrails Technical Reference
#
# These thresholds are enforced by CI quality gates. Violations will block PRs
# unless an override is explicitly approved.
# =============================================================================
thresholds:
# Runtime dependency recall: percentage of runtime dependency vulns detected
runtime_dependency_recall:
min: 0.95
description: "Percentage of runtime dependency vulnerabilities detected"
severity: "critical"
# OS package recall: percentage of OS package vulns detected
os_package_recall:
min: 0.97
description: "Percentage of OS package vulnerabilities detected"
severity: "critical"
# Code vulnerability recall: percentage of code-level vulns detected
code_vulnerability_recall:
min: 0.90
description: "Percentage of code vulnerabilities detected"
severity: "high"
# Configuration vulnerability recall
config_vulnerability_recall:
min: 0.85
description: "Percentage of configuration vulnerabilities detected"
severity: "medium"
# False positive rate for unreachable findings
unreachable_false_positives:
max: 0.05
description: "Rate of false positives for unreachable findings"
severity: "high"
# Reachability underreport rate: missed reachable findings
reachability_underreport:
max: 0.10
description: "Rate of reachable findings incorrectly marked unreachable"
severity: "critical"
# Overall precision across all classes
overall_precision:
min: 0.90
description: "Overall precision across all vulnerability classes"
severity: "high"
# F1 score threshold
f1_score_min:
min: 0.90
description: "Minimum F1 score across vulnerability classes"
severity: "high"
# Class-specific thresholds
class_thresholds:
runtime_dep:
recall_min: 0.95
precision_min: 0.92
f1_min: 0.93
os_pkg:
recall_min: 0.97
precision_min: 0.95
f1_min: 0.96
code:
recall_min: 0.90
precision_min: 0.88
f1_min: 0.89
config:
recall_min: 0.85
precision_min: 0.80
f1_min: 0.82
# Regression detection settings
regression:
# Maximum allowed regression from baseline (percentage points)
max_recall_regression: 0.02
max_precision_regression: 0.03
# Path to baseline metrics file
baseline_path: "bench/baselines/reachability-baseline.json"
# How many consecutive failures before blocking
failure_threshold: 2
# Override configuration
overrides:
# Allow temporary bypass for specific PR labels
bypass_labels:
- "quality-gate-override"
- "wip"
# Require explicit approval from these teams
required_approvers:
- "platform"
- "reachability"

File diff suppressed because it is too large

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""Ensure CLI parity matrix contains no outstanding blockers before release."""
from __future__ import annotations
import pathlib
import re
import sys
REPO_ROOT = pathlib.Path(__file__).resolve().parents[2]
PARITY_DOC = REPO_ROOT / "docs/cli-vs-ui-parity.md"
BLOCKERS = {
"🟥": "blocking gap",
"❌": "missing feature",
"🚫": "unsupported",
}
WARNINGS = {
"🟡": "partial support",
"⚠️": "warning",
}
def main() -> int:
if not PARITY_DOC.exists():
print(f"❌ Parity matrix not found at {PARITY_DOC}", file=sys.stderr)
return 1
text = PARITY_DOC.read_text(encoding="utf-8")
blockers: list[str] = []
warnings: list[str] = []
for line in text.splitlines():
for symbol, label in BLOCKERS.items():
if symbol in line:
blockers.append(f"{label}: {line.strip()}")
for symbol, label in WARNINGS.items():
if symbol in line:
warnings.append(f"{label}: {line.strip()}")
if blockers:
print("❌ CLI parity gate failed — blocking items present:", file=sys.stderr)
for item in blockers:
print(f" - {item}", file=sys.stderr)
return 1
if warnings:
print("⚠️ CLI parity gate warnings detected:", file=sys.stderr)
for item in warnings:
print(f" - {item}", file=sys.stderr)
print("Treat warnings as failures until parity matrix is fully green.", file=sys.stderr)
return 1
print("✅ CLI parity matrix has no blocking or warning entries.")
return 0
if __name__ == "__main__":
raise SystemExit(main())
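# Example: a matrix row such as "| stella scan | UI ✅ | CLI 🟥 |" (illustrative) is
# reported as "blocking gap: ..." and makes the gate exit 1.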

View File

@@ -0,0 +1,334 @@
#!/usr/bin/env python3
"""Verify release artefacts (SBOMs, provenance, signatures, manifest hashes)."""
from __future__ import annotations
import argparse
import hashlib
import json
import pathlib
import sys
from collections import OrderedDict
from typing import Any, Mapping, Optional
from build_release import dump_yaml  # type: ignore[import-not-found]
class VerificationError(Exception):
"""Raised when release artefacts fail verification."""
def compute_sha256(path: pathlib.Path) -> str:
sha = hashlib.sha256()
with path.open("rb") as handle:
for chunk in iter(lambda: handle.read(1024 * 1024), b""):
sha.update(chunk)
return sha.hexdigest()
def parse_sha_file(path: pathlib.Path) -> Optional[str]:
if not path.exists():
return None
content = path.read_text(encoding="utf-8").strip()
if not content:
return None
return content.split()[0]
def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path:
candidate = pathlib.Path(path_str.replace("\\", "/"))
if candidate.is_absolute():
return candidate
for base in (release_dir, release_dir.parent, release_dir.parent.parent):
resolved = (base / candidate).resolve()
if resolved.exists():
return resolved
# Fall back to release_dir joined path even if missing to surface in caller.
return (release_dir / candidate).resolve()
def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]:
manifest_path = release_dir / "release.json"
if not manifest_path.exists():
raise VerificationError(f"Release manifest JSON missing at {manifest_path}")
try:
with manifest_path.open("r", encoding="utf-8") as handle:
return json.load(handle, object_pairs_hook=OrderedDict)
except json.JSONDecodeError as exc:
raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc
def verify_manifest_hashes(
manifest: Mapping[str, Any],
release_dir: pathlib.Path,
errors: list[str],
) -> None:
yaml_path = release_dir / "release.yaml"
if not yaml_path.exists():
errors.append(f"Missing release.yaml at {yaml_path}")
return
recorded_yaml_sha = parse_sha_file(yaml_path.with_name(yaml_path.name + ".sha256"))
actual_yaml_sha = compute_sha256(yaml_path)
if recorded_yaml_sha and recorded_yaml_sha != actual_yaml_sha:
errors.append(
f"release.yaml.sha256 recorded {recorded_yaml_sha} but file hashes to {actual_yaml_sha}"
)
json_path = release_dir / "release.json"
recorded_json_sha = parse_sha_file(json_path.with_name(json_path.name + ".sha256"))
actual_json_sha = compute_sha256(json_path)
if recorded_json_sha and recorded_json_sha != actual_json_sha:
errors.append(
f"release.json.sha256 recorded {recorded_json_sha} but file hashes to {actual_json_sha}"
)
checksums = manifest.get("checksums")
if isinstance(checksums, Mapping):
recorded_digest = checksums.get("sha256")
base_manifest = OrderedDict(manifest)
base_manifest.pop("checksums", None)
yaml_without_checksums = dump_yaml(base_manifest)
computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest()
if recorded_digest != computed_digest:
errors.append(
"Manifest checksum mismatch: "
f"recorded {recorded_digest}, computed {computed_digest}"
)
def verify_artifact_entry(
entry: Mapping[str, Any],
release_dir: pathlib.Path,
label: str,
component_name: str,
errors: list[str],
) -> None:
path_str = entry.get("path")
if not path_str:
errors.append(f"{component_name}: {label} missing 'path' field.")
return
resolved = resolve_path(str(path_str), release_dir)
if not resolved.exists():
errors.append(f"{component_name}: {label} path does not exist → {resolved}")
return
recorded_sha = entry.get("sha256")
if recorded_sha:
actual_sha = compute_sha256(resolved)
if actual_sha != recorded_sha:
errors.append(
f"{component_name}: {label} SHA mismatch for {resolved} "
f"(recorded {recorded_sha}, computed {actual_sha})"
)
def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
for component in manifest.get("components", []):
if not isinstance(component, Mapping):
errors.append("Component entry is not a mapping.")
continue
name = str(component.get("name", "<unknown>"))
for key, label in (
("sbom", "SBOM"),
("provenance", "provenance"),
("signature", "signature"),
("metadata", "metadata"),
):
entry = component.get(key)
if not entry:
continue
if not isinstance(entry, Mapping):
errors.append(f"{name}: {label} entry must be a mapping.")
continue
verify_artifact_entry(entry, release_dir, label, name, errors)
def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
for collection, label in (
("charts", "chart"),
("compose", "compose file"),
):
for item in manifest.get(collection, []):
if not isinstance(item, Mapping):
errors.append(f"{collection} entry is not a mapping.")
continue
path_value = item.get("path")
if not path_value:
errors.append(f"{collection} entry missing path.")
continue
resolved = resolve_path(str(path_value), release_dir)
if not resolved.exists():
errors.append(f"{label} missing file → {resolved}")
continue
recorded_sha = item.get("sha256")
if recorded_sha:
actual_sha = compute_sha256(resolved)
if actual_sha != recorded_sha:
errors.append(
f"{label} SHA mismatch for {resolved} "
f"(recorded {recorded_sha}, computed {actual_sha})"
)
def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
debug = manifest.get("debugStore")
if not isinstance(debug, Mapping):
return
manifest_path_str = debug.get("manifest")
manifest_data: Optional[Mapping[str, Any]] = None
if manifest_path_str:
manifest_path = resolve_path(str(manifest_path_str), release_dir)
if not manifest_path.exists():
errors.append(f"Debug manifest missing → {manifest_path}")
else:
recorded_sha = debug.get("sha256")
if recorded_sha:
actual_sha = compute_sha256(manifest_path)
if actual_sha != recorded_sha:
errors.append(
f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})"
)
sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256")
sidecar_sha = parse_sha_file(sha_sidecar)
if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha:
errors.append(
f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}"
)
try:
with manifest_path.open("r", encoding="utf-8") as handle:
manifest_data = json.load(handle)
except json.JSONDecodeError as exc:
errors.append(f"Debug manifest JSON invalid: {exc}")
directory = debug.get("directory")
if directory:
debug_dir = resolve_path(str(directory), release_dir)
if not debug_dir.exists():
errors.append(f"Debug directory missing → {debug_dir}")
if manifest_data:
artifacts = manifest_data.get("artifacts")
if not isinstance(artifacts, list) or not artifacts:
errors.append("Debug manifest contains no artefacts.")
return
declared_entries = debug.get("entries")
if isinstance(declared_entries, int) and declared_entries != len(artifacts):
errors.append(
f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts."
)
for artefact in artifacts:
if not isinstance(artefact, Mapping):
errors.append("Debug manifest artefact entry is not a mapping.")
continue
debug_path = artefact.get("debugPath")
artefact_sha = artefact.get("sha256")
if not debug_path or not artefact_sha:
errors.append("Debug manifest artefact missing debugPath or sha256.")
continue
resolved_debug = resolve_path(str(debug_path), release_dir)
if not resolved_debug.exists():
errors.append(f"Debug artefact missing → {resolved_debug}")
continue
actual_sha = compute_sha256(resolved_debug)
if actual_sha != artefact_sha:
errors.append(
f"Debug artefact SHA mismatch for {resolved_debug} "
f"(recorded {artefact_sha}, computed {actual_sha})"
)
def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None:
sig_path_value = signature.get("path")
if not sig_path_value:
errors.append(f"{component_name}: {label} signature missing path.")
return
sig_path = resolve_path(str(sig_path_value), release_dir)
if not sig_path.exists():
errors.append(f"{component_name}: {label} signature missing → {sig_path}")
return
recorded_sha = signature.get("sha256")
if recorded_sha:
actual_sha = compute_sha256(sig_path)
if actual_sha != recorded_sha:
errors.append(
f"{component_name}: {label} signature SHA mismatch for {sig_path} "
f"(recorded {recorded_sha}, computed {actual_sha})"
)
def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None:
cli_entries = manifest.get("cli")
if not cli_entries:
return
if not isinstance(cli_entries, list):
errors.append("CLI manifest section must be a list.")
return
for entry in cli_entries:
if not isinstance(entry, Mapping):
errors.append("CLI entry must be a mapping.")
continue
runtime = entry.get("runtime", "<unknown>")
component_name = f"cli[{runtime}]"
archive = entry.get("archive")
if not isinstance(archive, Mapping):
errors.append(f"{component_name}: archive metadata missing or invalid.")
else:
verify_artifact_entry(archive, release_dir, "archive", component_name, errors)
signature = archive.get("signature")
if isinstance(signature, Mapping):
verify_signature(signature, release_dir, "archive", component_name, errors)
elif signature is not None:
errors.append(f"{component_name}: archive signature must be an object.")
sbom = entry.get("sbom")
if sbom:
if not isinstance(sbom, Mapping):
errors.append(f"{component_name}: sbom entry must be a mapping.")
else:
verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors)
signature = sbom.get("signature")
if isinstance(signature, Mapping):
verify_signature(signature, release_dir, "sbom", component_name, errors)
elif signature is not None:
errors.append(f"{component_name}: sbom signature must be an object.")
def verify_release(release_dir: pathlib.Path) -> None:
if not release_dir.exists():
raise VerificationError(f"Release directory not found: {release_dir}")
manifest = load_manifest(release_dir)
errors: list[str] = []
verify_manifest_hashes(manifest, release_dir, errors)
verify_components(manifest, release_dir, errors)
verify_cli_entries(manifest, release_dir, errors)
verify_collections(manifest, release_dir, errors)
verify_debug_store(manifest, release_dir, errors)
if errors:
bullet_list = "\n - ".join(errors)
raise VerificationError(f"Release verification failed:\n - {bullet_list}")
def parse_args(argv: list[str] | None = None) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--release-dir",
type=pathlib.Path,
default=pathlib.Path("out/release"),
help="Path to the release artefact directory (default: %(default)s)",
)
return parser.parse_args(argv)
def main(argv: list[str] | None = None) -> int:
args = parse_args(argv)
try:
verify_release(args.release_dir.resolve())
except VerificationError as exc:
print(str(exc), file=sys.stderr)
return 1
print(f"✅ Release artefacts verified OK in {args.release_dir}")
return 0
if __name__ == "__main__":
raise SystemExit(main())
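# Example (default layout): python verify_release.py --release-dir out/release
# prints the success line and exits 0 only when every recorded SHA-256 matches on disk.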

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic DSSE signing helper for Authority gap artefacts (AU1–AU10, RR1–RR10).
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
COSIGN_BIN="${COSIGN_BIN:-}"
# Detect cosign binary
if [[ -z "$COSIGN_BIN" ]]; then
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
COSIGN_BIN="/usr/local/bin/cosign"
elif command -v cosign >/dev/null 2>&1; then
COSIGN_BIN="$(command -v cosign)"
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
COSIGN_BIN="$ROOT/tools/cosign/cosign"
else
echo "cosign not found; install or set COSIGN_BIN" >&2
exit 1
fi
fi
# Resolve key
TMP_KEY=""
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
KEY_FILE="$COSIGN_KEY_FILE"
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
TMP_KEY="$(mktemp)"
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
chmod 600 "$TMP_KEY"
KEY_FILE="$TMP_KEY"
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
KEY_FILE="$ROOT/tools/cosign/cosign.key"
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
else
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
exit 2
fi
OUT_BASE="${OUT_DIR:-$ROOT/docs/modules/authority/gaps/dsse/2025-12-04}"
if [[ "$OUT_BASE" != /* ]]; then
OUT_BASE="$ROOT/$OUT_BASE"
fi
mkdir -p "$OUT_BASE"
ARTEFACTS=(
"docs/modules/authority/gaps/artifacts/authority-scope-role-catalog.v1.json|authority-scope-role-catalog"
"docs/modules/authority/gaps/artifacts/authority-jwks-metadata.schema.json|authority-jwks-metadata.schema"
"docs/modules/authority/gaps/artifacts/crypto-profile-registry.v1.json|crypto-profile-registry"
"docs/modules/authority/gaps/artifacts/authority-offline-verifier-bundle.v1.json|authority-offline-verifier-bundle"
"docs/modules/authority/gaps/artifacts/authority-abac.schema.json|authority-abac.schema"
"docs/modules/authority/gaps/artifacts/rekor-receipt-policy.v1.json|rekor-receipt-policy"
"docs/modules/authority/gaps/artifacts/rekor-receipt.schema.json|rekor-receipt.schema"
"docs/modules/authority/gaps/artifacts/rekor-receipt-bundle.v1.json|rekor-receipt-bundle"
)
USE_BUNDLE=0
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
USE_BUNDLE=1
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
USE_BUNDLE=1
fi
SHA_FILE="$OUT_BASE/SHA256SUMS"
: > "$SHA_FILE"
for entry in "${ARTEFACTS[@]}"; do
IFS="|" read -r path stem <<<"$entry"
if [[ ! -f "$ROOT/$path" ]]; then
echo "Missing artefact: $path" >&2
exit 3
fi
if (( USE_BUNDLE )); then
bundle="$OUT_BASE/${stem}.sigstore.json"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--bundle "$bundle" \
"$ROOT/$path"
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
else
sig="$OUT_BASE/${stem}.dsse"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--output-signature "$sig" \
"$ROOT/$path"
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
fi
printf "%s %s\n" "$(sha256sum "$ROOT/$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$ROOT/$path")" >> "$SHA_FILE"
echo "Signed $path"
done
echo "Signed artefacts written to $OUT_BASE"
if [[ -n "$TMP_KEY" ]]; then
rm -f "$TMP_KEY"
fi
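# Example invocation (script name illustrative; the dev key must never reach the Evidence Locker):
#   COSIGN_ALLOW_DEV_KEY=1 OUT_DIR=docs/modules/authority/gaps/dsse/2025-12-04 ./sign-authority-gap-artifacts.sh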

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
set -euo pipefail
# Signs a policy file with cosign and verifies it. Intended for CI and offline use.
# Requires COSIGN_KEY_B64 (private key PEM base64) or KMS envs; optional COSIGN_PASSWORD.
usage() {
cat <<'USAGE'
Usage: sign-policy.sh --file <path> [--out-dir out/policy-sign]
Env:
COSIGN_KEY_B64 base64-encoded PEM private key (if not using KMS)
COSIGN_PASSWORD passphrase for the key (can be empty for test keys)
COSIGN_PUBLIC_KEY_PATH optional path to write public key for verify step
USAGE
}
FILE=""
OUT_DIR="out/policy-sign"
while [[ $# -gt 0 ]]; do
case "$1" in
--file) FILE="$2"; shift 2;;
--out-dir) OUT_DIR="$2"; shift 2;;
-h|--help) usage; exit 0;;
*) echo "Unknown arg: $1" >&2; usage; exit 1;;
esac
done
if [[ -z "$FILE" ]]; then echo "--file is required" >&2; exit 1; fi
if [[ ! -f "$FILE" ]]; then echo "file not found: $FILE" >&2; exit 1; fi
mkdir -p "$OUT_DIR"
BASENAME=$(basename "$FILE")
SIG="$OUT_DIR/${BASENAME}.sig"
PUB_OUT="${COSIGN_PUBLIC_KEY_PATH:-$OUT_DIR/cosign.pub}"
if [[ -n "${COSIGN_KEY_B64:-}" ]]; then
KEYFILE="$OUT_DIR/cosign.key"
printf "%s" "$COSIGN_KEY_B64" | base64 -d > "$KEYFILE"
chmod 600 "$KEYFILE"
export COSIGN_KEY="$KEYFILE"
fi
export COSIGN_PASSWORD=${COSIGN_PASSWORD:-}
command -v cosign >/dev/null 2>&1 || { echo "cosign not found in PATH" >&2; exit 1; }
# --yes skips the interactive prompt and --tlog-upload=false keeps signing offline,
# matching the other signing helpers in this repo. COSIGN_KEY holds either the key
# file written above or a KMS URI exported by the caller.
cosign sign-blob --yes --tlog-upload=false --key "${COSIGN_KEY:?set COSIGN_KEY_B64 or export COSIGN_KEY}" --output-signature "$SIG" "$FILE"
cosign public-key --key "$COSIGN_KEY" > "$PUB_OUT"
cosign verify-blob --insecure-ignore-tlog=true --key "$PUB_OUT" --signature "$SIG" "$FILE"
printf "Signed %s -> %s\nPublic key -> %s\n" "$FILE" "$SIG" "$PUB_OUT"

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env bash
set -euo pipefail
# Deterministic DSSE signing helper for Signals artifacts.
# Prefers system cosign v3 (bundle) and falls back to repo-pinned v2.6.0.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
COSIGN_BIN="${COSIGN_BIN:-}"
# Detect cosign binary (v3 preferred).
if [[ -z "$COSIGN_BIN" ]]; then
if command -v /usr/local/bin/cosign >/dev/null 2>&1; then
COSIGN_BIN="/usr/local/bin/cosign"
elif command -v cosign >/dev/null 2>&1; then
COSIGN_BIN="$(command -v cosign)"
elif [[ -x "$ROOT/tools/cosign/cosign" ]]; then
COSIGN_BIN="$ROOT/tools/cosign/cosign"
else
echo "cosign not found; install or set COSIGN_BIN" >&2
exit 1
fi
fi
# Resolve key
TMP_KEY=""
if [[ -n "${COSIGN_KEY_FILE:-}" ]]; then
KEY_FILE="$COSIGN_KEY_FILE"
elif [[ -n "${COSIGN_PRIVATE_KEY_B64:-}" ]]; then
TMP_KEY="$(mktemp)"
echo "$COSIGN_PRIVATE_KEY_B64" | base64 -d > "$TMP_KEY"
chmod 600 "$TMP_KEY"
KEY_FILE="$TMP_KEY"
elif [[ -f "$ROOT/tools/cosign/cosign.key" ]]; then
KEY_FILE="$ROOT/tools/cosign/cosign.key"
elif [[ "${COSIGN_ALLOW_DEV_KEY:-0}" == "1" && -f "$ROOT/tools/cosign/cosign.dev.key" ]]; then
echo "[warn] Using development key (tools/cosign/cosign.dev.key); NOT for production/Evidence Locker" >&2
KEY_FILE="$ROOT/tools/cosign/cosign.dev.key"
else
echo "No signing key: set COSIGN_PRIVATE_KEY_B64 or COSIGN_KEY_FILE, or place key at tools/cosign/cosign.key" >&2
exit 2
fi
OUT_BASE="${OUT_DIR:-$ROOT/evidence-locker/signals/2025-12-01}"
# Normalize OUT_BASE to absolute to avoid pushd-relative path issues.
if [[ "$OUT_BASE" != /* ]]; then
OUT_BASE="$ROOT/$OUT_BASE"
fi
mkdir -p "$OUT_BASE"
ARTIFACTS=(
"decay/confidence_decay_config.yaml|stella.ops/confidenceDecayConfig@v1|confidence_decay_config"
"unknowns/unknowns_scoring_manifest.json|stella.ops/unknownsScoringManifest@v1|unknowns_scoring_manifest"
"heuristics/heuristics.catalog.json|stella.ops/heuristicCatalog@v1|heuristics_catalog"
)
USE_BUNDLE=0
if $COSIGN_BIN version --json 2>/dev/null | grep -q '"GitVersion":"v3'; then
USE_BUNDLE=1
elif $COSIGN_BIN version 2>/dev/null | grep -q 'GitVersion:.*v3\.'; then
USE_BUNDLE=1
fi
pushd "$ROOT/docs/modules/signals" >/dev/null
SHA_FILE="$OUT_BASE/SHA256SUMS"
: > "$SHA_FILE"
for entry in "${ARTIFACTS[@]}"; do
IFS="|" read -r path predicate stem <<<"$entry"
if [[ ! -f "$path" ]]; then
echo "Missing artifact: $path" >&2
exit 3
fi
if (( USE_BUNDLE )); then
bundle="$OUT_BASE/${stem}.sigstore.json"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--bundle "$bundle" \
"$path"
printf "%s %s\n" "$(sha256sum "$bundle" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$bundle")" >> "$SHA_FILE"
else
sig="$OUT_BASE/${stem}.dsse"
COSIGN_PASSWORD="${COSIGN_PASSWORD:-}" \
"$COSIGN_BIN" sign-blob \
--key "$KEY_FILE" \
--yes \
--tlog-upload=false \
--output-signature "$sig" \
"$path"
printf "%s %s\n" "$(sha256sum "$sig" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$sig")" >> "$SHA_FILE"
fi
printf "%s %s\n" "$(sha256sum "$path" | cut -d' ' -f1)" "$(realpath --relative-to="$OUT_BASE" "$path")" >> "$SHA_FILE"
done
popd >/dev/null
echo "Signed artifacts written to $OUT_BASE"
if [[ -n "$TMP_KEY" ]]; then
rm -f "$TMP_KEY"
fi
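SHA256SUMS stores every path relative to OUT_BASE (artifact entries resolve back into the repo via ../), so the manifest can be re-checked offline from that directory with GNU coreutils:
(cd evidence-locker/signals/2025-12-01 && sha256sum -c SHA256SUMS)  # default OUT_DIR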

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env bash
set -euo pipefail
# DEVOPS-SCAN-90-004: run determinism harness/tests and collect report
ROOT="$(git rev-parse --show-toplevel)"
OUT="${ROOT}/out/scanner-determinism"
mkdir -p "$OUT"
PROJECT="src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj"
echo "[determinism] running dotnet test (filter=Determinism)"
dotnet test "$PROJECT" --no-build --logger "trx;LogFileName=determinism.trx" --filter Determinism
find "$(dirname "$PROJECT")" -name "*.trx" -print -exec cp {} "$OUT/" \;
echo "[determinism] summarizing"
printf "project=%s\n" "$PROJECT" > "$OUT/summary.txt"
printf "timestamp=%s\n" "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> "$OUT/summary.txt"
tar -C "$OUT" -czf "$OUT/determinism-artifacts.tgz" .
echo "[determinism] artifacts at $OUT"

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
root_dir=$(cd "$(dirname "$0")/.." && pwd)
verifier="$root_dir/packs/verify_offline_bundle.py"
# The good fixture must verify cleanly.
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/good" --manifest bundle.json --require-dsse
# The bad fixture must be rejected; if the verifier unexpectedly accepts it, fail this check.
python3 "$verifier" --bundle "$root_dir/packs/__fixtures__/bad" --manifest bundle-missing-quota.json --require-dsse && exit 1 || true
echo "fixture checks completed"

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bash
# Safe-ish workspace cleanup when the runner hits “No space left on device”.
# Deletes build/test outputs that are regenerated; preserves offline caches and sources.
set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
echo "Cleaning workspace outputs under: ${ROOT}"
rm -rf "${ROOT}/TestResults" || true
rm -rf "${ROOT}/out" || true
rm -rf "${ROOT}/artifacts" || true
# Trim common temp locations if they exist in repo workspace
[ -d "${ROOT}/tmp" ] && find "${ROOT}/tmp" -mindepth 1 -maxdepth 1 -exec rm -rf {} +
echo "Done. Consider also clearing any runner-level /tmp outside the workspace if safe."

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Thin wrapper to strip the harness-injected "workdir:" switch that breaks dotnet/msbuild parsing.
set -euo pipefail
# `|| true` keeps `set -e` from exiting before the friendly error below can print.
real_dotnet="$(command -v dotnet || true)"
if [[ -z "${real_dotnet}" ]]; then
echo "dotnet executable not found in PATH" >&2
exit 1
fi
filtered_args=()
for arg in "$@"; do
# Drop any argument that is exactly or contains the injected workdir switch.
if [[ "${arg}" == *"workdir:"* ]]; then
# If the arg also contains other comma-separated parts, keep the non-workdir pieces.
IFS=',' read -r -a parts <<< "${arg}"
for part in "${parts[@]}"; do
[[ "${part}" == *"workdir:"* || -z "${part}" ]] && continue
filtered_args+=("${part}")
done
continue
fi
filtered_args+=("${arg}")
done
exec "${real_dotnet}" "${filtered_args[@]}"

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
set -euo pipefail
# Ensures the OpenSSL 1.1 shim is discoverable for Mongo2Go by exporting LD_LIBRARY_PATH.
# Source this script for local runs (exports do not survive a child process); in CI the
# GITHUB_ENV block below persists the variables for subsequent steps.
# Safe for repeated invocation; respects the STELLAOPS_OPENSSL11_SHIM override.
ROOT=${STELLAOPS_REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}
SHIM_DIR=${STELLAOPS_OPENSSL11_SHIM:-"${ROOT}/src/__Tests/native/openssl-1.1/linux-x64"}
if [[ ! -d "${SHIM_DIR}" ]]; then
echo "::warning ::OpenSSL 1.1 shim directory not found at ${SHIM_DIR}; Mongo2Go tests may fail" >&2
exit 0
fi
export LD_LIBRARY_PATH="${SHIM_DIR}:${LD_LIBRARY_PATH:-}"
export STELLAOPS_OPENSSL11_SHIM="${SHIM_DIR}"
# Persist for subsequent CI steps when available
if [[ -n "${GITHUB_ENV:-}" ]]; then
{
echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}"
echo "STELLAOPS_OPENSSL11_SHIM=${STELLAOPS_OPENSSL11_SHIM}"
} >> "${GITHUB_ENV}"
fi
echo "OpenSSL 1.1 shim enabled (LD_LIBRARY_PATH=${LD_LIBRARY_PATH})"

View File

@@ -0,0 +1,53 @@
#!/bin/bash
# validate-compose.sh - Validate all Docker Compose profiles
# Used by CI/CD pipelines to ensure Compose configurations are valid
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
COMPOSE_DIR="${REPO_ROOT}/devops/compose"
# Default profiles to validate
PROFILES=(dev stage prod airgap mirror)
echo "=== Docker Compose Validation ==="
echo "Compose directory: $COMPOSE_DIR"
# Check if compose directory exists
if [[ ! -d "$COMPOSE_DIR" ]]; then
echo "::warning::Compose directory not found at $COMPOSE_DIR"
exit 0
fi
# Check for base docker-compose.yml
BASE_COMPOSE="$COMPOSE_DIR/docker-compose.yml"
if [[ ! -f "$BASE_COMPOSE" ]]; then
echo "::warning::Base docker-compose.yml not found at $BASE_COMPOSE"
exit 0
fi
FAILED=0
for profile in "${PROFILES[@]}"; do
OVERLAY="$COMPOSE_DIR/docker-compose.$profile.yml"
if [[ -f "$OVERLAY" ]]; then
echo "=== Validating docker-compose.$profile.yml ==="
if docker compose -f "$BASE_COMPOSE" -f "$OVERLAY" config --quiet 2>&1; then
echo "✓ Profile '$profile' is valid"
else
echo "✗ Profile '$profile' validation failed"
FAILED=1
fi
else
echo "⊘ Skipping profile '$profile' (no overlay file)"
fi
done
if [[ $FAILED -eq 1 ]]; then
echo "::error::One or more Compose profiles failed validation"
exit 1
fi
echo "=== All Compose profiles valid! ==="

View File

@@ -0,0 +1,59 @@
#!/bin/bash
# validate-helm.sh - Validate Helm charts
# Used by CI/CD pipelines to ensure Helm charts are valid
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"
HELM_DIR="${REPO_ROOT}/devops/helm"
echo "=== Helm Chart Validation ==="
echo "Helm directory: $HELM_DIR"
# Check if helm is installed
if ! command -v helm &>/dev/null; then
echo "::error::Helm is not installed"
exit 1
fi
# Check if helm directory exists
if [[ ! -d "$HELM_DIR" ]]; then
echo "::warning::Helm directory not found at $HELM_DIR"
exit 0
fi
FAILED=0
# Find all Chart.yaml files (indicates a Helm chart)
while IFS= read -r -d '' chart_file; do
chart_dir="$(dirname "$chart_file")"
chart_name="$(basename "$chart_dir")"
echo "=== Validating chart: $chart_name ==="
# Lint the chart
if helm lint "$chart_dir" 2>&1; then
echo "✓ Chart '$chart_name' lint passed"
else
echo "✗ Chart '$chart_name' lint failed"
FAILED=1
continue
fi
# Template the chart (dry-run); keep stderr visible so failures are actionable in CI logs.
if helm template "$chart_name" "$chart_dir" >/dev/null; then
echo "✓ Chart '$chart_name' template succeeded"
else
echo "✗ Chart '$chart_name' template failed"
FAILED=1
fi
done < <(find "$HELM_DIR" -name "Chart.yaml" -print0)
if [[ $FAILED -eq 1 ]]; then
echo "::error::One or more Helm charts failed validation"
exit 1
fi
echo "=== All Helm charts valid! ==="

View File

@@ -0,0 +1,244 @@
#!/bin/bash
# scripts/validate-sbom.sh
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
# Task: SCHEMA-8200-004 - Create validate-sbom.sh wrapper for sbom-utility
#
# Validates SBOM files against official CycloneDX JSON schemas.
# Uses sbom-utility for CycloneDX validation.
#
# Usage:
# ./scripts/validate-sbom.sh <sbom-file> [--schema <schema-path>]
# ./scripts/validate-sbom.sh src/__Tests/__Benchmarks/golden-corpus/sample.cyclonedx.json
# ./scripts/validate-sbom.sh --all # Validate all CycloneDX fixtures
#
# Exit codes:
# 0 - All validations passed
# 1 - Validation failed or error
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
DEFAULT_SCHEMA="${SCHEMA_DIR}/cyclonedx-bom-1.6.schema.json"
SBOM_UTILITY_VERSION="v0.16.0"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() {
echo -e "${GREEN}[INFO]${NC} $*"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $*"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $*"
}
check_sbom_utility() {
if ! command -v sbom-utility &> /dev/null; then
log_warn "sbom-utility not found in PATH"
log_info "Installing sbom-utility ${SBOM_UTILITY_VERSION}..."
# Detect OS and architecture
local os arch
case "$(uname -s)" in
Linux*) os="linux";;
Darwin*) os="darwin";;
MINGW*|MSYS*|CYGWIN*) os="windows";;
*) log_error "Unsupported OS: $(uname -s)"; exit 1;;
esac
case "$(uname -m)" in
x86_64|amd64) arch="amd64";;
arm64|aarch64) arch="arm64";;
*) log_error "Unsupported architecture: $(uname -m)"; exit 1;;
esac
local url="https://github.com/CycloneDX/sbom-utility/releases/download/${SBOM_UTILITY_VERSION}/sbom-utility-${SBOM_UTILITY_VERSION}-${os}-${arch}.tar.gz"
local temp_dir
temp_dir=$(mktemp -d)
log_info "Downloading from ${url}..."
curl -sSfL "${url}" | tar xz -C "${temp_dir}"
if [[ "$os" == "windows" ]]; then
log_info "Please add ${temp_dir}/sbom-utility.exe to your PATH"
export PATH="${temp_dir}:${PATH}"
else
log_info "Installing to /usr/local/bin (may require sudo)..."
if [[ -w /usr/local/bin ]]; then
mv "${temp_dir}/sbom-utility" /usr/local/bin/
else
sudo mv "${temp_dir}/sbom-utility" /usr/local/bin/
fi
fi
rm -rf "${temp_dir}"
log_info "sbom-utility installed successfully"
fi
}
validate_cyclonedx() {
local sbom_file="$1"
local schema="${2:-$DEFAULT_SCHEMA}"
if [[ ! -f "$sbom_file" ]]; then
log_error "File not found: $sbom_file"
return 1
fi
if [[ ! -f "$schema" ]]; then
log_error "Schema not found: $schema"
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
return 1
fi
# Detect if it's a CycloneDX file
if ! grep -q '"bomFormat"' "$sbom_file" 2>/dev/null; then
log_warn "File does not appear to be CycloneDX: $sbom_file"
log_info "Skipping (use validate-spdx.sh for SPDX files)"
return 0
fi
log_info "Validating: $sbom_file"
# sbom-utility selects the matching CycloneDX schema from its embedded catalog based
# on the document's specVersion; the local schema path above is a presence check only.
if sbom-utility validate --input-file "$sbom_file" --format json 2>&1; then
log_info "✓ Validation passed: $sbom_file"
return 0
else
log_error "✗ Validation failed: $sbom_file"
return 1
fi
}
validate_all() {
local fixture_dir="${REPO_ROOT}/src/__Tests/__Benchmarks/golden-corpus"
local failed=0
local passed=0
local skipped=0
log_info "Validating all CycloneDX fixtures in ${fixture_dir}..."
if [[ ! -d "$fixture_dir" ]]; then
log_error "Fixture directory not found: $fixture_dir"
return 1
fi
while IFS= read -r -d '' file; do
if grep -q '"bomFormat".*"CycloneDX"' "$file" 2>/dev/null; then
if validate_cyclonedx "$file"; then
# Use assignment form: under `set -e`, ((passed++)) aborts the script when the counter is 0.
passed=$((passed + 1))
else
failed=$((failed + 1))
fi
else
log_info "Skipping non-CycloneDX file: $file"
skipped=$((skipped + 1))
fi
done < <(find "$fixture_dir" -type f -name '*.json' -print0)
echo ""
log_info "Validation Summary:"
log_info " Passed: ${passed}"
log_info " Failed: ${failed}"
log_info " Skipped: ${skipped}"
if [[ $failed -gt 0 ]]; then
log_error "Some validations failed!"
return 1
fi
log_info "All CycloneDX validations passed!"
return 0
}
usage() {
cat << EOF
Usage: $(basename "$0") [OPTIONS] <sbom-file>
Validates CycloneDX SBOM files against official JSON schemas.
Options:
--all Validate all CycloneDX fixtures in src/__Tests/__Benchmarks/golden-corpus/
--schema <path> Use custom schema file (default: docs/schemas/cyclonedx-bom-1.6.schema.json)
--help, -h Show this help message
Examples:
$(basename "$0") sample.cyclonedx.json
$(basename "$0") --schema custom-schema.json sample.json
$(basename "$0") --all
Exit codes:
0 All validations passed
1 Validation failed or error
EOF
}
main() {
local schema="$DEFAULT_SCHEMA"
local validate_all_flag=false
local files=()
while [[ $# -gt 0 ]]; do
case "$1" in
--all)
validate_all_flag=true
shift
;;
--schema)
schema="$2"
shift 2
;;
--help|-h)
usage
exit 0
;;
-*)
log_error "Unknown option: $1"
usage
exit 1
;;
*)
files+=("$1")
shift
;;
esac
done
# Ensure sbom-utility is available
check_sbom_utility
if [[ "$validate_all_flag" == "true" ]]; then
validate_all
exit $?
fi
if [[ ${#files[@]} -eq 0 ]]; then
log_error "No SBOM file specified"
usage
exit 1
fi
local failed=0
for file in "${files[@]}"; do
if ! validate_cyclonedx "$file" "$schema"; then
failed=$((failed + 1))
fi
done
if [[ $failed -gt 0 ]]; then
exit 1
fi
exit 0
}
main "$@"

View File

@@ -0,0 +1,277 @@
#!/bin/bash
# scripts/validate-spdx.sh
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
# Task: SCHEMA-8200-005 - Create validate-spdx.sh wrapper for SPDX validation
#
# Validates SPDX files against SPDX 3.0.1 JSON schema.
# Uses pyspdxtools (spdx-tools) for SPDX validation.
#
# Usage:
# ./scripts/validate-spdx.sh <spdx-file>
# ./scripts/validate-spdx.sh bench/golden-corpus/sample.spdx.json
# ./scripts/validate-spdx.sh --all # Validate all SPDX fixtures
#
# Exit codes:
# 0 - All validations passed
# 1 - Validation failed or error
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
DEFAULT_SCHEMA="${SCHEMA_DIR}/spdx-jsonld-3.0.1.schema.json"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() {
echo -e "${GREEN}[INFO]${NC} $*"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $*"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $*"
}
check_spdx_tools() {
if ! command -v pyspdxtools &> /dev/null; then
log_warn "pyspdxtools not found in PATH"
log_info "Installing spdx-tools via pip..."
if command -v pip3 &> /dev/null; then
pip3 install --user spdx-tools
elif command -v pip &> /dev/null; then
pip install --user spdx-tools
else
log_error "pip not found. Please install Python and pip first."
exit 1
fi
log_info "spdx-tools installed successfully"
# Refresh PATH for newly installed tools
if [[ -d "${HOME}/.local/bin" ]]; then
export PATH="${HOME}/.local/bin:${PATH}"
fi
fi
}
check_ajv() {
if ! command -v ajv &> /dev/null; then
log_warn "ajv-cli not found in PATH"
log_info "Installing ajv-cli via npm..."
if command -v npm &> /dev/null; then
npm install -g ajv-cli ajv-formats
else
log_warn "npm not found. JSON schema validation will be skipped."
return 1
fi
log_info "ajv-cli installed successfully"
fi
return 0
}
validate_spdx_schema() {
local spdx_file="$1"
local schema="$2"
if check_ajv; then
log_info "Validating against JSON schema: $schema"
if ajv validate -s "$schema" -d "$spdx_file" --spec=draft2020 2>&1; then
return 0
else
return 1
fi
else
log_warn "Skipping JSON schema validation (ajv not available)"
return 0
fi
}
validate_spdx() {
local spdx_file="$1"
local schema="${2:-$DEFAULT_SCHEMA}"
if [[ ! -f "$spdx_file" ]]; then
log_error "File not found: $spdx_file"
return 1
fi
# Detect if it's an SPDX file (JSON-LD format)
if ! grep -qE '"@context"|"spdxId"|"spdxVersion"' "$spdx_file" 2>/dev/null; then
log_warn "File does not appear to be SPDX: $spdx_file"
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
return 0
fi
log_info "Validating: $spdx_file"
local validation_passed=true
# Try pyspdxtools validation first (semantic validation)
if command -v pyspdxtools &> /dev/null; then
log_info "Running SPDX semantic validation..."
if pyspdxtools validate "$spdx_file" 2>&1; then
log_info "✓ SPDX semantic validation passed"
else
# pyspdxtools may not support SPDX 3.0 yet
log_warn "pyspdxtools validation failed or not supported for this format"
log_info "Falling back to JSON schema validation only"
fi
fi
# JSON schema validation (syntax validation)
if [[ -f "$schema" ]]; then
if validate_spdx_schema "$spdx_file" "$schema"; then
log_info "✓ JSON schema validation passed"
else
log_error "✗ JSON schema validation failed"
validation_passed=false
fi
else
log_warn "Schema file not found: $schema"
log_info "Skipping schema validation"
fi
if [[ "$validation_passed" == "true" ]]; then
log_info "✓ Validation passed: $spdx_file"
return 0
else
log_error "✗ Validation failed: $spdx_file"
return 1
fi
}
validate_all() {
local fixture_dir="${REPO_ROOT}/bench/golden-corpus"
local failed=0
local passed=0
local skipped=0
log_info "Validating all SPDX fixtures in ${fixture_dir}..."
if [[ ! -d "$fixture_dir" ]]; then
log_error "Fixture directory not found: $fixture_dir"
return 1
fi
while IFS= read -r -d '' file; do
# Check if it's an SPDX file
if grep -qE '"@context"|"spdxVersion"' "$file" 2>/dev/null; then
if validate_spdx "$file"; then
# Assignment form avoids the `set -e` pitfall of ((var++)) when var is 0.
passed=$((passed + 1))
else
failed=$((failed + 1))
fi
else
log_info "Skipping non-SPDX file: $file"
skipped=$((skipped + 1))
fi
done < <(find "$fixture_dir" -type f \( -name '*spdx*.json' -o -name '*.spdx.json' \) -print0)
echo ""
log_info "Validation Summary:"
log_info " Passed: ${passed}"
log_info " Failed: ${failed}"
log_info " Skipped: ${skipped}"
if [[ $failed -gt 0 ]]; then
log_error "Some validations failed!"
return 1
fi
log_info "All SPDX validations passed!"
return 0
}
usage() {
cat << EOF
Usage: $(basename "$0") [OPTIONS] <spdx-file>
Validates SPDX files against SPDX 3.0.1 JSON schema.
Options:
--all Validate all SPDX fixtures in bench/golden-corpus/
--schema <path> Use custom schema file (default: docs/schemas/spdx-jsonld-3.0.1.schema.json)
--help, -h Show this help message
Examples:
$(basename "$0") sample.spdx.json
$(basename "$0") --schema custom-schema.json sample.json
$(basename "$0") --all
Exit codes:
0 All validations passed
1 Validation failed or error
EOF
}
main() {
local schema="$DEFAULT_SCHEMA"
local validate_all_flag=false
local files=()
while [[ $# -gt 0 ]]; do
case "$1" in
--all)
validate_all_flag=true
shift
;;
--schema)
schema="$2"
shift 2
;;
--help|-h)
usage
exit 0
;;
-*)
log_error "Unknown option: $1"
usage
exit 1
;;
*)
files+=("$1")
shift
;;
esac
done
# Ensure tools are available
check_spdx_tools || true # Continue even if pyspdxtools install fails
if [[ "$validate_all_flag" == "true" ]]; then
validate_all
exit $?
fi
if [[ ${#files[@]} -eq 0 ]]; then
log_error "No SPDX file specified"
usage
exit 1
fi
local failed=0
for file in "${files[@]}"; do
if ! validate_spdx "$file" "$schema"; then
failed=$((failed + 1))
fi
done
if [[ $failed -gt 0 ]]; then
exit 1
fi
exit 0
}
main "$@"

View File

@@ -0,0 +1,261 @@
#!/bin/bash
# scripts/validate-vex.sh
# Sprint: SPRINT_8200_0001_0003 - SBOM Schema Validation in CI
# Task: SCHEMA-8200-006 - Create validate-vex.sh wrapper for OpenVEX validation
#
# Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
# Uses ajv-cli for JSON schema validation.
#
# Usage:
# ./scripts/validate-vex.sh <vex-file>
# ./scripts/validate-vex.sh bench/golden-corpus/sample.vex.json
# ./scripts/validate-vex.sh --all # Validate all VEX fixtures
#
# Exit codes:
# 0 - All validations passed
# 1 - Validation failed or error
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
SCHEMA_DIR="${REPO_ROOT}/docs/schemas"
DEFAULT_SCHEMA="${SCHEMA_DIR}/openvex-0.2.0.schema.json"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() {
echo -e "${GREEN}[INFO]${NC} $*"
}
log_warn() {
echo -e "${YELLOW}[WARN]${NC} $*"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $*"
}
check_ajv() {
if ! command -v ajv &> /dev/null; then
log_warn "ajv-cli not found in PATH"
log_info "Installing ajv-cli via npm..."
if command -v npm &> /dev/null; then
npm install -g ajv-cli ajv-formats
elif command -v npx &> /dev/null; then
log_info "Using npx for ajv (no global install)"
return 0
else
log_error "npm/npx not found. Please install Node.js first."
exit 1
fi
log_info "ajv-cli installed successfully"
fi
}
run_ajv() {
local schema="$1"
local data="$2"
if command -v ajv &> /dev/null; then
ajv validate -s "$schema" -d "$data" --spec=draft2020 2>&1
elif command -v npx &> /dev/null; then
npx ajv-cli validate -s "$schema" -d "$data" --spec=draft2020 2>&1
else
log_error "No ajv available"
return 1
fi
}
validate_openvex() {
local vex_file="$1"
local schema="${2:-$DEFAULT_SCHEMA}"
if [[ ! -f "$vex_file" ]]; then
log_error "File not found: $vex_file"
return 1
fi
if [[ ! -f "$schema" ]]; then
log_error "Schema not found: $schema"
log_info "Expected schema at: ${DEFAULT_SCHEMA}"
log_info "Download from: https://raw.githubusercontent.com/openvex/spec/main/openvex_json_schema.json"
return 1
fi
# Detect if it's an OpenVEX file. Match any openvex.dev context version (the legacy
# bare ns as well as versioned contexts such as .../ns/v0.2.0).
if ! grep -qE '"@context".*"https://openvex\.dev/ns' "$vex_file" 2>/dev/null; then
log_warn "File does not appear to be OpenVEX: $vex_file"
log_info "Skipping (use validate-sbom.sh for CycloneDX files)"
return 0
fi
log_info "Validating: $vex_file"
# Run ajv validation
if run_ajv "$schema" "$vex_file"; then
log_info "✓ Validation passed: $vex_file"
return 0
else
log_error "✗ Validation failed: $vex_file"
return 1
fi
}
validate_all() {
local failed=0
local passed=0
local skipped=0
# Search multiple directories for VEX files
local search_dirs=(
"${REPO_ROOT}/bench/golden-corpus"
"${REPO_ROOT}/bench/vex-lattice"
"${REPO_ROOT}/datasets"
)
log_info "Validating all OpenVEX fixtures..."
for fixture_dir in "${search_dirs[@]}"; do
if [[ ! -d "$fixture_dir" ]]; then
log_warn "Directory not found, skipping: $fixture_dir"
continue
fi
log_info "Searching in: $fixture_dir"
while IFS= read -r -d '' file; do
# Check whether it's an OpenVEX file (any openvex.dev context version)
if grep -qE '"@context".*"https://openvex\.dev/ns' "$file" 2>/dev/null; then
if validate_openvex "$file"; then
# Assignment form avoids the `set -e` pitfall of ((var++)) when var is 0.
passed=$((passed + 1))
else
failed=$((failed + 1))
fi
elif grep -q '"vex"' "$file" 2>/dev/null || [[ "$file" == *vex* ]]; then
# Might be VEX-related but not OpenVEX format
log_info "Checking potential VEX file: $file"
if grep -qE '"@context"' "$file" 2>/dev/null; then
if validate_openvex "$file"; then
passed=$((passed + 1))
else
failed=$((failed + 1))
fi
else
log_info "Skipping non-OpenVEX file: $file"
skipped=$((skipped + 1))
fi
else
skipped=$((skipped + 1))
fi
done < <(find "$fixture_dir" -type f \( -name '*vex*.json' -o -name '*.vex.json' -o -name '*openvex*.json' \) -print0 2>/dev/null || true)
done
echo ""
log_info "Validation Summary:"
log_info " Passed: ${passed}"
log_info " Failed: ${failed}"
log_info " Skipped: ${skipped}"
if [[ $failed -gt 0 ]]; then
log_error "Some validations failed!"
return 1
fi
if [[ $passed -eq 0 ]] && [[ $skipped -eq 0 ]]; then
log_warn "No OpenVEX files found to validate"
else
log_info "All OpenVEX validations passed!"
fi
return 0
}
usage() {
cat << EOF
Usage: $(basename "$0") [OPTIONS] <vex-file>
Validates OpenVEX files against the OpenVEX 0.2.0 JSON schema.
Options:
--all Validate all OpenVEX fixtures in bench/ and datasets/
--schema <path> Use custom schema file (default: docs/schemas/openvex-0.2.0.schema.json)
--help, -h Show this help message
Examples:
$(basename "$0") sample.vex.json
$(basename "$0") --schema custom-schema.json sample.json
$(basename "$0") --all
Exit codes:
0 All validations passed
1 Validation failed or error
EOF
}
main() {
local schema="$DEFAULT_SCHEMA"
local validate_all_flag=false
local files=()
while [[ $# -gt 0 ]]; do
case "$1" in
--all)
validate_all_flag=true
shift
;;
--schema)
schema="$2"
shift 2
;;
--help|-h)
usage
exit 0
;;
-*)
log_error "Unknown option: $1"
usage
exit 1
;;
*)
files+=("$1")
shift
;;
esac
done
# Ensure ajv is available
check_ajv
if [[ "$validate_all_flag" == "true" ]]; then
validate_all
exit $?
fi
if [[ ${#files[@]} -eq 0 ]]; then
log_error "No VEX file specified"
usage
exit 1
fi
local failed=0
for file in "${files[@]}"; do
if ! validate_openvex "$file" "$schema"; then
failed=$((failed + 1))
fi
done
if [[ $failed -gt 0 ]]; then
exit 1
fi
exit 0
}
main "$@"

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -euo pipefail
# Verifies binary artefacts live only in approved locations.
# Allowed roots: .nuget/packages (curated feed + cache), vendor (pinned binaries),
# offline (air-gap bundles/templates), plugins/tools/deploy/ops (module-owned binaries),
# plus third_party, docs/artifacts, samples, and per-module test fixture directories.
repo_root="$(git rev-parse --show-toplevel)"
cd "$repo_root"
# Extensions considered binary artefacts.
binary_ext="(nupkg|dll|exe|so|dylib|a|lib|tar|tar\\.gz|tgz|zip|jar|deb|rpm|bin)"
# Locations allowed to contain binaries (ERE prefixes; dots escaped so '.' is literal).
allowed_prefix="^(\\.nuget/packages|vendor|offline|plugins|tools|deploy|ops|third_party|docs/artifacts|samples|src/.*/Fixtures|src/.*/fixtures)/"
# Only consider files that currently exist in the working tree (skip deleted placeholders).
violations=$(git ls-files | while read -r f; do [[ -f "$f" ]] && echo "$f"; done | grep -E "\\.${binary_ext}$" | grep -Ev "$allowed_prefix" || true)
if [[ -n "$violations" ]]; then
echo "Binary artefacts found outside approved directories:" >&2
echo "$violations" >&2
exit 1
fi
printf "Binary layout OK (allowed roots: %s)\n" "$allowed_prefix"