stabilize tests
scripts/test-stabilization/run-batch.sh (new file, 102 lines)
@@ -0,0 +1,102 @@
#!/bin/bash
# Run tests from a batch file with per-project timeout
# Usage: ./run-batch.sh <batch-file> <output-csv> <timeout-seconds>
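# Example invocation (file names are illustrative; all three arguments are
# optional and default to batch-001.txt, batch-001-results.csv, 300):
#   ./run-batch.sh batch-002.txt batch-002-results.csv 600
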
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

BATCH_FILE="$SCRIPT_DIR/${1:-batch-001.txt}"
OUTPUT_CSV="$REPO_ROOT/test-results/${2:-batch-001-results.csv}"
TIMEOUT_SECONDS="${3:-300}"

cd "$REPO_ROOT"

# Create output directory
mkdir -p "$(dirname "$OUTPUT_CSV")"

# CSV header
echo "Project,Status,Errors,Warnings,Total,Passed,Failed,Skipped,Duration,ExitCode" > "$OUTPUT_CSV"
echo "=========================================="
echo "Running batch: $BATCH_FILE"
echo "Output: $OUTPUT_CSV"
echo "Timeout: ${TIMEOUT_SECONDS}s per project"
echo "Repo root: $REPO_ROOT"
echo "=========================================="

total=0
passed=0
failed=0
build_errors=0
timeouts=0

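# The batch file is expected to contain one .csproj path per line,
# relative to the repo root; blank lines are skipped.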
while IFS= read -r project; do
    # Skip empty lines
    [[ -z "$project" ]] && continue

    project_name=$(basename "$project" .csproj)
    total=$((total + 1))

echo -n "[$total] Testing $project_name ... "
|
||||
|
||||
start_time=$(date +%s)
|
||||
|
||||
# Run test with timeout, capture output
|
||||
output_file="/tmp/test-output-$$.txt"
|
||||
|
||||
if timeout "${TIMEOUT_SECONDS}s" dotnet test "$REPO_ROOT/$project" --no-restore --verbosity minimal 2>&1 > "$output_file"; then
|
||||
exit_code=0
|
||||
else
|
||||
exit_code=$?
|
||||
fi
|
||||
|
||||
    end_time=$(date +%s)
    duration=$((end_time - start_time))

    # Parse the test summary for counts (default to 0 when no summary line is
    # present, e.g. when the build failed before any tests ran)
    test_total=$(grep -oP 'Total:\s*\K\d+' "$output_file" 2>/dev/null | head -1)
    test_passed=$(grep -oP 'Passed:\s*\K\d+' "$output_file" 2>/dev/null | head -1)
    test_failed=$(grep -oP 'Failed:\s*\K\d+' "$output_file" 2>/dev/null | head -1)
    test_skipped=$(grep -oP 'Skipped:\s*\K\d+' "$output_file" 2>/dev/null | head -1)
    test_total=${test_total:-0}
    test_passed=${test_passed:-0}
    test_failed=${test_failed:-0}
    test_skipped=${test_skipped:-0}
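    # Assumption: the counts come from the VSTest summary line printed by
    # `dotnet test`, which looks roughly like:
    #   Passed!  - Failed: 0, Passed: 42, Skipped: 1, Total: 43, Duration: 5 s
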
    # Count MSBuild error/warning diagnostics in the captured output
    # (grep -c prints 0 on no match; default covers a missing output file)
    errors=$(grep -c "error [A-Z]*[0-9]*:" "$output_file" 2>/dev/null)
    warnings=$(grep -c "warning [A-Z]*[0-9]*:" "$output_file" 2>/dev/null)
    errors=${errors:-0}
    warnings=${warnings:-0}
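    # Assumption: diagnostics follow the usual MSBuild format, e.g.:
    #   Foo/Bar.cs(12,34): error CS1002: ; expected
    #   Foo/Bar.cs(56,7): warning CS0168: The variable 'x' is declared but never used
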
    # Determine status (GNU timeout exits with 124 when the time limit is hit)
    if [[ $exit_code -eq 124 ]]; then
        status="Timeout"
        timeouts=$((timeouts + 1))
        echo "TIMEOUT (${duration}s)"
    elif [[ $errors -gt 0 ]]; then
        status="BuildError"
        build_errors=$((build_errors + 1))
        echo "BUILD ERROR ($errors errors, ${duration}s)"
    elif [[ $exit_code -eq 0 ]]; then
        status="Passed"
        passed=$((passed + 1))
        echo "PASSED ($test_passed/$test_total, ${duration}s)"
    else
        status="Failed"
        failed=$((failed + 1))
        echo "FAILED ($test_failed/$test_total failed, ${duration}s)"
    fi

    # Write to CSV
    echo "$project_name,$status,$errors,$warnings,$test_total,$test_passed,$test_failed,$test_skipped,$duration,$exit_code" >> "$OUTPUT_CSV"

    # Cleanup
    rm -f "$output_file"

done < "$BATCH_FILE"

echo ""
echo "=========================================="
echo "BATCH SUMMARY"
echo "=========================================="
echo "Total: $total"
echo "Passed: $passed"
echo "Failed: $failed"
echo "Build Errors: $build_errors"
echo "Timeouts: $timeouts"
echo "Results: $OUTPUT_CSV"
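
# To aggregate statuses from a finished batch CSV (illustrative one-liner):
#   awk -F, 'NR>1 {counts[$2]++} END {for (s in counts) print s, counts[s]}' "$OUTPUT_CSV"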