test fixes and new product advisories work
This commit is contained in:
137
scripts/ebpf/docker-kernel-test.sh
Normal file
137
scripts/ebpf/docker-kernel-test.sh
Normal file
@@ -0,0 +1,137 @@
|
||||
#!/bin/bash
# ============================================================================
# Docker-based eBPF Kernel Compatibility Test
# Tests eBPF code on different Ubuntu versions (targeting different kernels)
#
# Usage: ./docker-kernel-test.sh <base_image> <kernel_version> <distro_name>
# Example: ./docker-kernel-test.sh ubuntu:20.04 5.4 focal
# ============================================================================

set -euo pipefail

# Disable MSYS path conversion for Docker commands (Git Bash on Windows would
# otherwise rewrite container paths such as /src into Windows paths).
export MSYS_NO_PATHCONV=1
export MSYS2_ARG_CONV_EXCL="*"

BASE_IMAGE="${1:-ubuntu:22.04}"
KERNEL_VERSION="${2:-5.15}"
DISTRO_NAME="${3:-jammy}"

# Resolve the repository root. On MSYS/Cygwin prefer `pwd -W` so Docker
# Desktop receives a Windows-style path, then normalise backslashes.
if [[ "$OSTYPE" == "msys" ]] || [[ "$OSTYPE" == "cygwin" ]] || [[ -n "${WINDIR:-}" ]]; then
    SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -W 2>/dev/null || pwd)"
    REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd -W 2>/dev/null || pwd)"
    REPO_ROOT="${REPO_ROOT//\\//}"
else
    SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
    REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
fi

OUTPUT_DIR="${REPO_ROOT}/out"

echo "=============================================="
echo "eBPF Kernel Compatibility Test"
echo "=============================================="
echo "Base image: ${BASE_IMAGE}"
echo "Target kernel: ${KERNEL_VERSION}"
echo "Distro: ${DISTRO_NAME}"
echo "Repo root: ${REPO_ROOT}"
echo ""

mkdir -p "${OUTPUT_DIR}" 2>/dev/null || true

IMAGE_TAG="stellaops-ebpf-test:${DISTRO_NAME}"

# Build the test image only when it is not already cached locally.
if ! docker image inspect "${IMAGE_TAG}" >/dev/null 2>&1; then
    echo "Building test container image..."

    # The Dockerfile is fed via stdin; the quoted delimiter keeps ${BASE_IMAGE}
    # a literal ARG reference for the Docker builder (no host expansion).
    docker build -t "${IMAGE_TAG}" --build-arg BASE_IMAGE="${BASE_IMAGE}" - <<'DOCKERFILE'
ARG BASE_IMAGE=ubuntu:22.04
FROM ${BASE_IMAGE}

ENV DEBIAN_FRONTEND=noninteractive
ENV DOTNET_NOLOGO=1
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
ENV DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
ENV TZ=UTC

RUN apt-get update && apt-get install -y \
    curl wget ca-certificates apt-transport-https \
    libc6 libicu-dev libssl-dev zlib1g \
    && rm -rf /var/lib/apt/lists/*

RUN wget https://dot.net/v1/dotnet-install.sh -O dotnet-install.sh \
    && chmod +x dotnet-install.sh \
    && ./dotnet-install.sh --channel 10.0 --install-dir /usr/share/dotnet \
    && ln -s /usr/share/dotnet/dotnet /usr/bin/dotnet \
    && rm dotnet-install.sh

WORKDIR /src
DOCKERFILE

    echo "Image built: ${IMAGE_TAG}"
else
    echo "Using cached image: ${IMAGE_TAG}"
fi

echo ""
echo "Running tests in container..."

# NOTE: the double-quoted command string below is expanded on the HOST, so
# ${DISTRO_NAME}/${KERNEL_VERSION} are baked in at launch time; \$ escapes
# defer evaluation to the container shell.
docker run --rm \
    -v "${REPO_ROOT}:/src" \
    -v "${OUTPUT_DIR}:/out" \
    -e STELLAOPS_UPDATE_FIXTURES=false \
    "${IMAGE_TAG}" \
    /bin/bash -c "
    # Fail fast inside the container too; without pipefail the 'tail'
    # filters below would silently mask restore/build failures.
    set -euo pipefail
    cd /src

    echo '=============================================='
    echo 'Environment Info'
    echo '=============================================='
    uname -a
    head -3 /etc/os-release
    dotnet --version
    echo ''

    echo '=============================================='
    echo 'Restoring packages'
    echo '=============================================='
    dotnet restore src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
        --configfile nuget.config 2>&1 | tail -5
    echo ''

    echo '=============================================='
    echo 'Building'
    echo '=============================================='
    dotnet build src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
        -c Release 2>&1 | tail -8
    echo ''

    echo '=============================================='
    echo 'Running Tests'
    echo '=============================================='
    # Capture the test exit status explicitly so 'set -e' does not abort
    # before we can print the per-kernel verdict below.
    TEST_EXIT_CODE=0
    dotnet test src/Signals/__Tests/StellaOps.Signals.Ebpf.Tests/StellaOps.Signals.Ebpf.Tests.csproj \
        -c Release --no-build \
        --logger 'trx;LogFileName=/out/ebpf-tests-${DISTRO_NAME}.trx' \
        --logger 'console;verbosity=minimal' || TEST_EXIT_CODE=\$?

    echo ''
    echo '=============================================='
    echo 'Test Results'
    echo '=============================================='
    if [ \$TEST_EXIT_CODE -eq 0 ]; then
        echo 'Kernel ${KERNEL_VERSION} (${DISTRO_NAME}): ALL TESTS PASSED'
    else
        echo 'Kernel ${KERNEL_VERSION} (${DISTRO_NAME}): TESTS FAILED'
        exit \$TEST_EXIT_CODE
    fi
    "

echo ""
echo "=============================================="
echo "Test complete for kernel ${KERNEL_VERSION}"
echo "=============================================="
|
||||
97
scripts/ebpf/run-multi-kernel-tests.sh
Normal file
97
scripts/ebpf/run-multi-kernel-tests.sh
Normal file
@@ -0,0 +1,97 @@
|
||||
#!/bin/bash
# ============================================================================
# Multi-Kernel eBPF Test Runner
# Runs eBPF tests on 3 major kernel versions: 5.4, 5.15, 6.x
#
# Usage: ./run-multi-kernel-tests.sh [--parallel]
# ============================================================================

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
PARALLEL="${1:-}"

echo "=============================================="
echo "Multi-Kernel eBPF Test Suite"
echo "=============================================="
echo "Testing on kernel versions: 5.4, 5.15, 6.x"
echo "Repository: ${REPO_ROOT}"
echo ""

# Kernel test matrix: version -> "base_image|distro_codename"
declare -A KERNELS=(
    ["5.4"]="ubuntu:20.04|focal"
    ["5.15"]="ubuntu:22.04|jammy"
    ["6.x"]="ubuntu:24.04|noble"
)

FAILED_KERNELS=()
PASSED_KERNELS=()

# Run the docker-based test suite for one kernel version.
# Arguments: $1 - kernel version key into KERNELS
# Returns:   exit status of docker-kernel-test.sh
# NOTE: result bookkeeping is deliberately done by the CALLER. In --parallel
# mode this function runs in a background subshell, where appends to
# PASSED_KERNELS/FAILED_KERNELS would be lost (the original version did the
# appends here, so parallel runs always reported an empty/successful summary).
run_kernel_test() {
    local kernel_version="$1"
    local config="${KERNELS[$kernel_version]}"
    local base_image="${config%|*}"
    local distro="${config#*|}"

    echo ""
    echo "=============================================="
    echo "Testing Kernel ${kernel_version} (${distro})"
    echo "=============================================="

    "${SCRIPT_DIR}/docker-kernel-test.sh" "${base_image}" "${kernel_version}" "${distro}"
}

if [ "${PARALLEL}" == "--parallel" ]; then
    echo "Running tests in parallel..."
    declare -A test_pids=()
    for kernel in "${!KERNELS[@]}"; do
        run_kernel_test "$kernel" &
        test_pids["$kernel"]=$!
    done

    # Wait for each job individually and record its result in the PARENT
    # shell, so the summary below reflects what actually happened.
    for kernel in "${!test_pids[@]}"; do
        if wait "${test_pids[$kernel]}"; then
            PASSED_KERNELS+=("$kernel")
        else
            FAILED_KERNELS+=("$kernel")
        fi
    done
else
    echo "Running tests sequentially..."
    for kernel in "5.4" "5.15" "6.x"; do
        if run_kernel_test "$kernel"; then
            PASSED_KERNELS+=("$kernel")
        else
            FAILED_KERNELS+=("$kernel")
        fi
    done
fi

echo ""
echo "=============================================="
echo "Multi-Kernel Test Summary"
echo "=============================================="
echo ""

if [ ${#PASSED_KERNELS[@]} -gt 0 ]; then
    echo "PASSED kernels:"
    for k in "${PASSED_KERNELS[@]}"; do
        echo " - Kernel ${k}"
    done
fi

if [ ${#FAILED_KERNELS[@]} -gt 0 ]; then
    echo ""
    echo "FAILED kernels:"
    for k in "${FAILED_KERNELS[@]}"; do
        echo " - Kernel ${k}"
    done
    echo ""
    echo "ERROR: Some kernel tests failed!"
    exit 1
fi

echo ""
echo "SUCCESS: All kernel versions passed!"
echo "Tested: 5.4 (focal), 5.15 (jammy), 6.x (noble)"
|
||||
182
scripts/release/collect-rekor-proofs.sh
Normal file
182
scripts/release/collect-rekor-proofs.sh
Normal file
@@ -0,0 +1,182 @@
|
||||
#!/bin/bash
# Copyright (c) StellaOps. All rights reserved.
# Licensed under the BUSL-1.1 license.
#
# collect-rekor-proofs.sh
# Collects Rekor transparency log inclusion proofs for release artifacts
#
# Usage: ./collect-rekor-proofs.sh --artifacts <dir> --output <dir>
#
# Prerequisites:
# - rekor-cli installed (https://github.com/sigstore/rekor)
# - jq installed
# - Artifacts must already be signed and uploaded to Rekor

set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Default values
ARTIFACTS_DIR="artifacts"
OUTPUT_DIR="rekor-proofs"
REKOR_SERVER="${REKOR_SERVER:-https://rekor.sigstore.dev}"
PUBLIC_KEY_FILE="cosign.pub"

# Parse arguments. ${2:?...} aborts with a clear message when an option is
# given without its value (previously this tripped 'set -u' with an
# unhelpful "unbound variable" error).
while [[ $# -gt 0 ]]; do
    case $1 in
        --artifacts)
            ARTIFACTS_DIR="${2:?--artifacts requires a value}"
            shift 2
            ;;
        --output)
            OUTPUT_DIR="${2:?--output requires a value}"
            shift 2
            ;;
        --public-key)
            PUBLIC_KEY_FILE="${2:?--public-key requires a value}"
            shift 2
            ;;
        --rekor-server)
            REKOR_SERVER="${2:?--rekor-server requires a value}"
            shift 2
            ;;
        --help)
            echo "Usage: $0 --artifacts <dir> --output <dir>"
            echo ""
            echo "Options:"
            echo " --artifacts Directory containing signed artifacts (default: artifacts)"
            echo " --output Output directory for Rekor proofs (default: rekor-proofs)"
            echo " --public-key Path to public key file (default: cosign.pub)"
            echo " --rekor-server Rekor server URL (default: https://rekor.sigstore.dev)"
            exit 0
            ;;
        *)
            echo -e "${RED}Unknown option: $1${NC}"
            exit 1
            ;;
    esac
done

# Check for rekor-cli; degrade gracefully so release pipelines without it
# still produce a (clearly marked) output file.
if ! command -v rekor-cli &> /dev/null; then
    echo -e "${YELLOW}Warning: rekor-cli not found. Skipping Rekor proof collection.${NC}"
    echo "Install from: https://github.com/sigstore/rekor/releases"
    mkdir -p "$OUTPUT_DIR"
    echo '{"warning": "rekor-cli not available", "proofs": []}' > "${OUTPUT_DIR}/inclusion-proofs.json"
    exit 0
fi

# jq is a hard dependency for proof aggregation below; fail early with a
# clear message instead of failing mid-run.
if ! command -v jq &> /dev/null; then
    echo -e "${RED}Error: jq not found; it is required to assemble inclusion proofs.${NC}"
    exit 1
fi

# Create output directories
mkdir -p "${OUTPUT_DIR}/log-entries"

echo -e "${GREEN}Collecting Rekor inclusion proofs${NC}"
echo " Artifacts: ${ARTIFACTS_DIR}"
echo " Output: ${OUTPUT_DIR}"
echo " Rekor Server: ${REKOR_SERVER}"

# Initialize inclusion proofs JSON (mutated by collect_proof below)
proofs_json='{"proofs": []}'
checkpoint=""
|
||||
|
||||
# Collect the Rekor inclusion proof for one signed artifact.
# Globals (read):  OUTPUT_DIR, REKOR_SERVER, color codes
# Globals (write): proofs_json (entry appended on success)
# Arguments:       $1 - path to the artifact file
# Artifacts without a companion .sig file, or without a Rekor entry, are
# skipped with a warning rather than treated as errors.
collect_proof() {
    local target="$1"
    local name
    name=$(basename "$target")

    if [[ ! -f "${target}.sig" ]]; then
        echo -e " ${YELLOW}Skipping ${name}: no signature file found${NC}"
        return
    fi

    echo " Processing: ${name}"

    # Look the artifact up in the transparency log.
    local found
    found=$(rekor-cli search --artifact "$target" --rekor_server "$REKOR_SERVER" 2>/dev/null) || {
        echo -e " ${YELLOW}No Rekor entry found${NC}"
        return
    }

    # Rekor entry UUIDs are 64 hex characters.
    local ids
    ids=$(grep -oE '[0-9a-f]{64}' <<<"$found" || true)
    if [[ -z "$ids" ]]; then
        echo -e " ${YELLOW}No matching entries in Rekor${NC}"
        return
    fi

    # Take the first UUID reported by the search.
    local uuid
    uuid=$(head -1 <<<"$ids")
    echo " Found entry: ${uuid}"

    # Download the full log entry for offline verification.
    local entry_file="${OUTPUT_DIR}/log-entries/${uuid}.json"
    if ! rekor-cli get --uuid "$uuid" --rekor_server "$REKOR_SERVER" --format json > "$entry_file" 2>/dev/null; then
        echo -e " ${YELLOW}Failed to retrieve entry details${NC}"
        return
    fi

    echo -e " ${GREEN}Saved log entry${NC}"

    # Pull out the fields the aggregate index needs; sentinel values keep
    # the JSON valid when the entry cannot be parsed.
    local log_index integrated_time
    log_index=$(jq -r '.LogIndex' "$entry_file" 2>/dev/null || echo "-1")
    integrated_time=$(jq -r '.IntegratedTime' "$entry_file" 2>/dev/null || echo "0")

    # Append this proof to the aggregate index.
    proofs_json=$(jq --arg uuid "$uuid" \
        --arg artifact "$name" \
        --argjson logIndex "$log_index" \
        --argjson integratedTime "$integrated_time" \
        --arg path "log-entries/${uuid}.json" \
        '.proofs += [{"uuid": $uuid, "artifactName": $artifact, "logIndex": $logIndex, "integratedTime": $integratedTime, "inclusionProofPath": $path}]' <<<"$proofs_json")
}
|
||||
|
||||
# Fetch the current Rekor checkpoint (signed tree head) for offline audit.
echo ""
echo "Fetching Rekor checkpoint..."
if checkpoint_result=$(curl -s "${REKOR_SERVER}/api/v1/log" 2>/dev/null); then
    printf '%s\n' "$checkpoint_result" > "${OUTPUT_DIR}/checkpoint.json"
    checkpoint=$(jq -r '.signedTreeHead // empty' <<<"$checkpoint_result" 2>/dev/null || true)
    if [[ -n "$checkpoint" ]]; then
        echo -e " ${GREEN}Checkpoint saved${NC}"
    fi
fi

# Collect proofs for every release bundle; unmatched globs leave the
# literal pattern behind, which the -f guard filters out.
echo ""
echo "Processing artifacts..."
for candidate in "${ARTIFACTS_DIR}"/stella-*.tar.gz "${ARTIFACTS_DIR}"/stella-*.zip; do
    if [[ -f "$candidate" ]]; then
        collect_proof "$candidate"
    fi
done

# Checksum manifests are only processed when they carry a signature.
for manifest in "${ARTIFACTS_DIR}"/*.sums "${ARTIFACTS_DIR}"/SHA256SUMS "${ARTIFACTS_DIR}"/SHA512SUMS; do
    if [[ -f "$manifest" ]] && [[ -f "${manifest}.sig" ]]; then
        collect_proof "$manifest"
    fi
done

# Persist the pretty-printed aggregate index.
jq '.' <<<"$proofs_json" > "${OUTPUT_DIR}/inclusion-proofs.json"

# Report how many proofs were collected.
proof_count=$(jq '.proofs | length' <<<"$proofs_json")

echo ""
echo -e "${GREEN}Collected ${proof_count} inclusion proof(s)${NC}"
echo "Files written to: ${OUTPUT_DIR}/"
echo ""
echo "Contents:"
ls -la "${OUTPUT_DIR}/"
|
||||
185
scripts/release/generate-slsa-provenance.sh
Normal file
185
scripts/release/generate-slsa-provenance.sh
Normal file
@@ -0,0 +1,185 @@
|
||||
#!/bin/bash
# Copyright (c) StellaOps. All rights reserved.
# Licensed under the BUSL-1.1 license.
#
# generate-slsa-provenance.sh
# Generates SLSA v1.0 provenance statements for release artifacts
#
# Usage: ./generate-slsa-provenance.sh --version <version> --commit <sha> --output <dir>

set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Default values (env vars let CI override the builder identity)
VERSION=""
COMMIT=""
OUTPUT_DIR="provenance"
ARTIFACTS_DIR="artifacts"
BUILDER_ID="${BUILDER_ID:-https://ci.stella-ops.org/builder/v1}"
BUILD_TYPE="${BUILD_TYPE:-https://stella-ops.io/ReleaseBuilder/v1}"
REPOSITORY_URI="${REPOSITORY_URI:-git+https://git.stella-ops.org/stella-ops.org/git.stella-ops.org}"

# Parse arguments. ${2:?...} aborts with a clear message when an option is
# given without its value (previously a trailing option tripped 'set -u'
# with an unhelpful "unbound variable" error).
while [[ $# -gt 0 ]]; do
    case $1 in
        --version)
            VERSION="${2:?--version requires a value}"
            shift 2
            ;;
        --commit)
            COMMIT="${2:?--commit requires a value}"
            shift 2
            ;;
        --output)
            OUTPUT_DIR="${2:?--output requires a value}"
            shift 2
            ;;
        --artifacts)
            ARTIFACTS_DIR="${2:?--artifacts requires a value}"
            shift 2
            ;;
        --builder-id)
            BUILDER_ID="${2:?--builder-id requires a value}"
            shift 2
            ;;
        --build-type)
            BUILD_TYPE="${2:?--build-type requires a value}"
            shift 2
            ;;
        --help)
            echo "Usage: $0 --version <version> --commit <sha> --output <dir>"
            echo ""
            echo "Options:"
            echo " --version Release version (required)"
            echo " --commit Git commit SHA (required)"
            echo " --output Output directory for provenance files (default: provenance)"
            echo " --artifacts Directory containing release artifacts (default: artifacts)"
            echo " --builder-id Builder ID URI (default: https://ci.stella-ops.org/builder/v1)"
            echo " --build-type Build type URI (default: https://stella-ops.io/ReleaseBuilder/v1)"
            exit 0
            ;;
        *)
            echo -e "${RED}Unknown option: $1${NC}"
            exit 1
            ;;
    esac
done

# Validate required arguments
if [[ -z "$VERSION" ]]; then
    echo -e "${RED}Error: --version is required${NC}"
    exit 1
fi

if [[ -z "$COMMIT" ]]; then
    echo -e "${RED}Error: --commit is required${NC}"
    exit 1
fi

# Create output directory
mkdir -p "$OUTPUT_DIR"

# Get timestamps (CI may supply the real build start time)
STARTED_ON="${BUILD_STARTED_ON:-$(date -u +%Y-%m-%dT%H:%M:%SZ)}"
FINISHED_ON="$(date -u +%Y-%m-%dT%H:%M:%SZ)"

# Get invocation ID from the CI environment, falling back to a random UUID.
# The 2>/dev/null on uuidgen silences the "command not found" noise the
# original fallback chain leaked to stderr on hosts without uuidgen.
INVOCATION_ID="${CI_JOB_ID:-${GITHUB_RUN_ID:-$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid 2>/dev/null || echo "local-build")}}"

echo -e "${GREEN}Generating SLSA v1.0 provenance for version ${VERSION}${NC}"
echo " Commit: ${COMMIT}"
echo " Builder: ${BUILDER_ID}"
echo " Output: ${OUTPUT_DIR}"
|
||||
|
||||
# Generate a SLSA v1.0 in-toto provenance statement for a single artifact.
# Globals (read): OUTPUT_DIR, VERSION, COMMIT, BUILD_TYPE, REPOSITORY_URI,
#                 BUILDER_ID, INVOCATION_ID, STARTED_ON, FINISHED_ON, GREEN, NC
# Arguments:      $1 - path to the artifact file
# Outputs:        writes ${OUTPUT_DIR}/<component>.slsa.intoto.jsonl
generate_provenance() {
    local artifact_path="$1"
    local artifact_name
    artifact_name=$(basename "$artifact_path")

    # Compute SHA-256 digest. The shasum fallback covers macOS/BSD hosts,
    # which ship shasum but not coreutils' sha256sum.
    local sha256
    if command -v sha256sum >/dev/null 2>&1; then
        sha256=$(sha256sum "$artifact_path" | cut -d' ' -f1)
    else
        sha256=$(shasum -a 256 "$artifact_path" | cut -d' ' -f1)
    fi

    # Component name from the artifact: "stella-<component>-..." -> "<component>"
    local component_name
    component_name=$(echo "$artifact_name" | sed -E 's/stella-([^-]+).*/\1/')

    local output_file="${OUTPUT_DIR}/${component_name}.slsa.intoto.jsonl"

    echo " Generating provenance for: ${artifact_name}"

    # Generate the SLSA v1.0 provenance statement. The unquoted EOF means
    # shell variables ARE expanded into the JSON template below.
    cat > "$output_file" << EOF
{
  "_type": "https://in-toto.io/Statement/v1",
  "subject": [
    {
      "name": "${artifact_name}",
      "digest": {
        "sha256": "${sha256}"
      }
    }
  ],
  "predicateType": "https://slsa.dev/provenance/v1",
  "predicate": {
    "buildDefinition": {
      "buildType": "${BUILD_TYPE}",
      "externalParameters": {
        "version": "${VERSION}",
        "repository": "${REPOSITORY_URI}",
        "ref": "refs/tags/v${VERSION}"
      },
      "internalParameters": {},
      "resolvedDependencies": [
        {
          "uri": "${REPOSITORY_URI}@refs/tags/v${VERSION}",
          "digest": {
            "gitCommit": "${COMMIT}"
          }
        }
      ]
    },
    "runDetails": {
      "builder": {
        "id": "${BUILDER_ID}",
        "version": {
          "stellaOps": "${VERSION}"
        }
      },
      "metadata": {
        "invocationId": "${INVOCATION_ID}",
        "startedOn": "${STARTED_ON}",
        "finishedOn": "${FINISHED_ON}"
      },
      "byproducts": []
    }
  }
}
EOF

    echo -e " ${GREEN}Created: ${output_file}${NC}"
}
|
||||
|
||||
# Find and process all release artifacts.
artifact_count=0
for artifact in "${ARTIFACTS_DIR}"/stella-*.tar.gz "${ARTIFACTS_DIR}"/stella-*.zip; do
    if [[ -f "$artifact" ]]; then
        generate_provenance "$artifact"
        # Deliberately NOT ((artifact_count++)): the arithmetic command
        # returns status 1 when the pre-increment value is 0, which under
        # 'set -e' aborted the script right after the first artifact.
        artifact_count=$((artifact_count + 1))
    fi
done

if [[ $artifact_count -eq 0 ]]; then
    echo -e "${YELLOW}Warning: No artifacts found in ${ARTIFACTS_DIR}${NC}"
    exit 0
fi

echo ""
echo -e "${GREEN}Generated ${artifact_count} provenance statement(s)${NC}"
echo "Files written to: ${OUTPUT_DIR}/"
|
||||
Reference in New Issue
Block a user