Fix build and code structure improvements. New but essential UI functionality. CI improvements. Documentation improvements. AI module improvements.

This commit is contained in:
StellaOps Bot
2025-12-26 21:54:17 +02:00
parent 335ff7da16
commit c2b9cd8d1f
3717 changed files with 264714 additions and 48202 deletions

View File

@@ -0,0 +1,93 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models for all StellaOps modules.
.DESCRIPTION
Iterates through all configured modules and runs Scaffold-Module.ps1 for each.
Use this after schema changes or for initial setup.
Exits non-zero if any module fails to scaffold.
.PARAMETER SkipMissing
Skip modules whose projects don't exist yet (default: true)
.EXAMPLE
.\Scaffold-AllModules.ps1
.EXAMPLE
.\Scaffold-AllModules.ps1 -SkipMissing:$false
#>
param(
    [bool]$SkipMissing = $true
)

$ErrorActionPreference = "Stop"

# Module definitions: Module name -> Schema name
$modules = @(
    @{ Module = "Unknowns"; Schema = "unknowns" },
    @{ Module = "PacksRegistry"; Schema = "packs" },
    @{ Module = "Authority"; Schema = "authority" },
    @{ Module = "Scanner"; Schema = "scanner" },
    @{ Module = "Scheduler"; Schema = "scheduler" },
    @{ Module = "TaskRunner"; Schema = "taskrunner" },
    @{ Module = "Policy"; Schema = "policy" },
    @{ Module = "Notify"; Schema = "notify" },
    @{ Module = "Concelier"; Schema = "vuln" },
    @{ Module = "Excititor"; Schema = "vex" },
    @{ Module = "Signals"; Schema = "signals" },
    @{ Module = "Attestor"; Schema = "proofchain" },
    @{ Module = "Signer"; Schema = "signer" }
)

$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
# Repository root is three levels above this script's directory.
$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.Parent.FullName

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for All Modules" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host ""

$successCount = 0
$skipCount = 0
$failCount = 0

foreach ($m in $modules) {
    # NOTE: multi-segment Join-Path requires PowerShell 6+ (this repo targets pwsh).
    $projectPath = Join-Path $RepoRoot "src" $m.Module "__Libraries" "StellaOps.$($m.Module).Persistence.EfCore"

    if (-not (Test-Path "$projectPath\*.csproj")) {
        if ($SkipMissing) {
            Write-Host "SKIP: $($m.Module) - Project not found" -ForegroundColor DarkGray
            $skipCount++
            continue
        } else {
            Write-Host "FAIL: $($m.Module) - Project not found at: $projectPath" -ForegroundColor Red
            $failCount++
            continue
        }
    }

    Write-Host ""
    Write-Host ">>> Scaffolding $($m.Module)..." -ForegroundColor Magenta
    try {
        # Reset so a stale exit code from an earlier native command cannot leak in.
        $global:LASTEXITCODE = 0
        & "$ScriptDir\Scaffold-Module.ps1" -Module $m.Module -Schema $m.Schema
        # BUGFIX: '&' does not throw when the child script exits non-zero, so a
        # plain try/catch silently counted failed scaffolds as successes.
        # Check the exit code explicitly and turn it into a terminating error.
        if ($LASTEXITCODE -ne 0) {
            throw "Scaffold-Module.ps1 exited with code $LASTEXITCODE"
        }
        $successCount++
    }
    catch {
        Write-Host "FAIL: $($m.Module) - $($_.Exception.Message)" -ForegroundColor Red
        $failCount++
    }
}

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Summary" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Success: $successCount"
Write-Host " Skipped: $skipCount"
Write-Host " Failed: $failCount"
Write-Host ""

if ($failCount -gt 0) {
    exit 1
}

View File

@@ -0,0 +1,162 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models from PostgreSQL schema.
.DESCRIPTION
This script performs database-first scaffolding for a StellaOps module:
1. Cleans existing generated files (Entities, CompiledModels, DbContext)
2. Scaffolds DbContext and entities from live PostgreSQL schema
3. Generates compiled models for startup performance
.PARAMETER Module
The module name (e.g., Unknowns, PacksRegistry, Authority)
.PARAMETER Schema
The PostgreSQL schema name (defaults to lowercase module name)
.PARAMETER ConnectionString
PostgreSQL connection string. If not provided, uses default dev connection.
.PARAMETER ProjectPath
Optional custom project path. Defaults to src/{Module}/__Libraries/StellaOps.{Module}.Persistence.EfCore
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns -Schema unknowns -ConnectionString "Host=localhost;Database=stellaops_platform;Username=unknowns_user;Password=unknowns_dev"
.EXAMPLE
.\Scaffold-Module.ps1 -Module PacksRegistry -Schema packs
#>
param(
    [Parameter(Mandatory=$true)]
    [string]$Module,
    [string]$Schema,
    [string]$ConnectionString,
    [string]$ProjectPath
)

$ErrorActionPreference = "Stop"

# Resolve repository root (three levels above this script's directory).
$RepoRoot = (Get-Item $PSScriptRoot).Parent.Parent.Parent.FullName

# Default schema to lowercase module name
if (-not $Schema) {
    $Schema = $Module.ToLower()
}

# Default connection string (local dev convention: <schema>_user / <schema>_dev).
if (-not $ConnectionString) {
    $user = "${Schema}_user"
    $password = "${Schema}_dev"
    $ConnectionString = "Host=localhost;Port=5432;Database=stellaops_platform;Username=$user;Password=$password;SearchPath=$Schema"
}

# Default project path (multi-segment Join-Path requires PowerShell 6+).
if (-not $ProjectPath) {
    $ProjectPath = Join-Path $RepoRoot "src" $Module "__Libraries" "StellaOps.$Module.Persistence.EfCore"
}

$ContextDir = "Context"
$EntitiesDir = "Entities"
$CompiledModelsDir = "CompiledModels"

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for Module: $Module" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Schema: $Schema"
Write-Host " Project: $ProjectPath"
# Deliberately do not echo the password portion of the connection string.
Write-Host " Connection: Host=localhost;Database=stellaops_platform;Username=${Schema}_user;..."
Write-Host ""

# Verify project exists
if (-not (Test-Path "$ProjectPath\*.csproj")) {
    # BUGFIX: with $ErrorActionPreference = "Stop", a bare Write-Error throws,
    # making the hint below and 'exit 1' unreachable. Use -ErrorAction Continue
    # so the message prints and we exit with a deterministic code.
    Write-Error "Project not found at: $ProjectPath" -ErrorAction Continue
    Write-Host "Create the project first with: dotnet new classlib -n StellaOps.$Module.Persistence.EfCore"
    exit 1
}

# Step 1: Clean existing generated files
Write-Host "[1/4] Cleaning existing generated files..." -ForegroundColor Yellow
$paths = @(
    (Join-Path $ProjectPath $EntitiesDir),
    (Join-Path $ProjectPath $CompiledModelsDir),
    (Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs")
)
foreach ($path in $paths) {
    if (Test-Path $path) {
        Remove-Item -Recurse -Force $path
        Write-Host " Removed: $path" -ForegroundColor DarkGray
    }
}
# Recreate directories
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $EntitiesDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $CompiledModelsDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $ContextDir) | Out-Null

# Step 2: Scaffold DbContext and entities
Write-Host "[2/4] Scaffolding DbContext and entities from schema '$Schema'..." -ForegroundColor Yellow
# Invoke dotnet directly rather than via Start-Process: array elements are
# passed as discrete arguments (no fragile embedded quoting of the connection
# string), and stdout/stderr stream straight to the console.
$scaffoldArgs = @(
    "ef", "dbcontext", "scaffold",
    $ConnectionString,
    "Npgsql.EntityFrameworkCore.PostgreSQL",
    "--project", $ProjectPath,
    "--schema", $Schema,
    "--context", "${Module}DbContext",
    "--context-dir", $ContextDir,
    "--output-dir", $EntitiesDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.Entities",
    "--context-namespace", "StellaOps.$Module.Persistence.EfCore.Context",
    "--data-annotations",
    "--no-onconfiguring",
    "--force"
)
& dotnet @scaffoldArgs
if ($LASTEXITCODE -ne 0) {
    Write-Error "Scaffold failed with exit code: $LASTEXITCODE" -ErrorAction Continue
    exit 1
}
Write-Host " Scaffolded entities to: $EntitiesDir" -ForegroundColor DarkGray

# Step 3: Generate compiled models
Write-Host "[3/4] Generating compiled models..." -ForegroundColor Yellow
$optimizeArgs = @(
    "ef", "dbcontext", "optimize",
    "--project", $ProjectPath,
    "--context", "StellaOps.$Module.Persistence.EfCore.Context.${Module}DbContext",
    "--output-dir", $CompiledModelsDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.CompiledModels"
)
& dotnet @optimizeArgs
if ($LASTEXITCODE -ne 0) {
    Write-Error "Compiled model generation failed with exit code: $LASTEXITCODE" -ErrorAction Continue
    exit 1
}
Write-Host " Generated compiled models to: $CompiledModelsDir" -ForegroundColor DarkGray

# Step 4: Summary
Write-Host "[4/4] Scaffolding complete!" -ForegroundColor Green
Write-Host ""
Write-Host "Generated files:" -ForegroundColor Cyan
$contextFile = Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs"
$entityFiles = Get-ChildItem -Path (Join-Path $ProjectPath $EntitiesDir) -Filter "*.cs" -ErrorAction SilentlyContinue
$compiledFiles = Get-ChildItem -Path (Join-Path $ProjectPath $CompiledModelsDir) -Filter "*.cs" -ErrorAction SilentlyContinue
Write-Host " Context: $(if (Test-Path $contextFile) { $contextFile } else { 'Not found' })"
Write-Host " Entities: $($entityFiles.Count) files"
Write-Host " Compiled Models: $($compiledFiles.Count) files"
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Yellow
Write-Host " 1. Review generated entities for any customization needs"
Write-Host " 2. Create repository implementations in Repositories/"
Write-Host " 3. Add DI registration in Extensions/"
Write-Host ""

View File

@@ -0,0 +1,88 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding for All StellaOps Modules
# ============================================================================
# Iterates through all configured modules and runs scaffold-module.sh for each.
# Use this after schema changes or for initial setup.
# Exits non-zero if any module fails to scaffold.
#
# Usage: ./scaffold-all-modules.sh [--no-skip-missing]
# ============================================================================
set -euo pipefail

SKIP_MISSING=true
if [ "${1:-}" = "--no-skip-missing" ]; then
  SKIP_MISSING=false
fi

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Module definitions: "Module:Schema"
MODULES=(
  "Unknowns:unknowns"
  "PacksRegistry:packs"
  "Authority:authority"
  "Scanner:scanner"
  "Scheduler:scheduler"
  "TaskRunner:taskrunner"
  "Policy:policy"
  "Notify:notify"
  "Concelier:vuln"
  "Excititor:vex"
  "Signals:signals"
  "Attestor:proofchain"
  "Signer:signer"
)

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for All Modules"
echo "============================================================================"
echo ""

SUCCESS_COUNT=0
SKIP_COUNT=0
FAIL_COUNT=0

for entry in "${MODULES[@]}"; do
  MODULE="${entry%%:*}"
  SCHEMA="${entry##*:}"
  PROJECT_PATH="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"

  # BUGFIX: '[ ! -f "$dir"/*.csproj ]' errors out when the glob matches more
  # than one file; compgen -G tests for "at least one match" safely.
  if ! compgen -G "$PROJECT_PATH/*.csproj" > /dev/null; then
    if [ "$SKIP_MISSING" = true ]; then
      echo "SKIP: $MODULE - Project not found"
      # BUGFIX: not ((SKIP_COUNT++)) — that returns status 1 when the counter
      # is 0, which aborts the whole script under 'set -e'.
      SKIP_COUNT=$((SKIP_COUNT + 1))
      continue
    else
      echo "FAIL: $MODULE - Project not found at: $PROJECT_PATH"
      FAIL_COUNT=$((FAIL_COUNT + 1))
      continue
    fi
  fi

  echo ""
  echo ">>> Scaffolding $MODULE..."
  # Running the child script inside 'if' keeps a failure from tripping set -e.
  if "$SCRIPT_DIR/scaffold-module.sh" "$MODULE" "$SCHEMA"; then
    SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
  else
    echo "FAIL: $MODULE - Scaffolding failed"
    FAIL_COUNT=$((FAIL_COUNT + 1))
  fi
done

echo ""
echo "============================================================================"
echo " Summary"
echo "============================================================================"
echo " Success: $SUCCESS_COUNT"
echo " Skipped: $SKIP_COUNT"
echo " Failed: $FAIL_COUNT"
echo ""

if [ "$FAIL_COUNT" -gt 0 ]; then
  exit 1
fi

View File

@@ -0,0 +1,113 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding Script for StellaOps Modules
# ============================================================================
# Usage: ./scaffold-module.sh <Module> [Schema] [ConnectionString]
#
# Examples:
#   ./scaffold-module.sh Unknowns
#   ./scaffold-module.sh Unknowns unknowns
#   ./scaffold-module.sh PacksRegistry packs "Host=localhost;..."
# ============================================================================
set -euo pipefail

# ${1:-} so 'set -u' does not abort before we can print the usage message.
MODULE=${1:-}
SCHEMA=${2:-$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')}
CONNECTION_STRING=${3:-}

if [ -z "$MODULE" ]; then
  echo "Usage: $0 <Module> [Schema] [ConnectionString]"
  echo ""
  echo "Examples:"
  echo "  $0 Unknowns"
  echo "  $0 Unknowns unknowns"
  echo "  $0 PacksRegistry packs \"Host=localhost;Database=stellaops_platform;Username=packs_user;Password=packs_dev\""
  exit 1
fi

# Resolve repository root
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Default connection string (local dev convention: <schema>_user / <schema>_dev).
# BUGFIX: use DB_USER/DB_PASSWORD, not USER/PASSWORD — USER is an exported
# environment variable on most systems, and overwriting it would leak the
# database user into the child 'dotnet' processes' environment.
if [ -z "$CONNECTION_STRING" ]; then
  DB_USER="${SCHEMA}_user"
  DB_PASSWORD="${SCHEMA}_dev"
  CONNECTION_STRING="Host=localhost;Port=5432;Database=stellaops_platform;Username=$DB_USER;Password=$DB_PASSWORD;SearchPath=$SCHEMA"
fi

PROJECT_DIR="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
CONTEXT_DIR="Context"
ENTITIES_DIR="Entities"
COMPILED_DIR="CompiledModels"

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for Module: $MODULE"
echo "============================================================================"
echo " Schema: $SCHEMA"
echo " Project: $PROJECT_DIR"
# Deliberately do not echo the password portion of the connection string.
echo " Connection: Host=localhost;Database=stellaops_platform;Username=${SCHEMA}_user;..."
echo ""

# Verify project exists.
# BUGFIX: '[ ! -f "$dir"/*.csproj ]' errors out when the glob matches more
# than one file; compgen -G tests for "at least one match" safely.
if ! compgen -G "$PROJECT_DIR/*.csproj" > /dev/null; then
  echo "ERROR: Project not found at: $PROJECT_DIR" >&2
  echo "Create the project first with: dotnet new classlib -n StellaOps.$MODULE.Persistence.EfCore"
  exit 1
fi

# Step 1: Clean existing generated files
echo "[1/4] Cleaning existing generated files..."
rm -rf "$PROJECT_DIR/$ENTITIES_DIR"
rm -rf "$PROJECT_DIR/$COMPILED_DIR"
rm -f "$PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
mkdir -p "$PROJECT_DIR/$ENTITIES_DIR"
mkdir -p "$PROJECT_DIR/$COMPILED_DIR"
mkdir -p "$PROJECT_DIR/$CONTEXT_DIR"
echo " Cleaned: $ENTITIES_DIR, $COMPILED_DIR, ${MODULE}DbContext.cs"

# Step 2: Scaffold DbContext and entities ('set -e' aborts on failure)
echo "[2/4] Scaffolding DbContext and entities from schema '$SCHEMA'..."
dotnet ef dbcontext scaffold \
  "$CONNECTION_STRING" \
  Npgsql.EntityFrameworkCore.PostgreSQL \
  --project "$PROJECT_DIR" \
  --schema "$SCHEMA" \
  --context "${MODULE}DbContext" \
  --context-dir "$CONTEXT_DIR" \
  --output-dir "$ENTITIES_DIR" \
  --namespace "StellaOps.$MODULE.Persistence.EfCore.Entities" \
  --context-namespace "StellaOps.$MODULE.Persistence.EfCore.Context" \
  --data-annotations \
  --no-onconfiguring \
  --force
echo " Scaffolded entities to: $ENTITIES_DIR"

# Step 3: Generate compiled models for faster DbContext startup
echo "[3/4] Generating compiled models..."
dotnet ef dbcontext optimize \
  --project "$PROJECT_DIR" \
  --context "StellaOps.$MODULE.Persistence.EfCore.Context.${MODULE}DbContext" \
  --output-dir "$COMPILED_DIR" \
  --namespace "StellaOps.$MODULE.Persistence.EfCore.CompiledModels"
echo " Generated compiled models to: $COMPILED_DIR"

# Step 4: Summary
echo "[4/4] Scaffolding complete!"
echo ""
echo "Generated files:"
echo " Context: $PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
echo " Entities: $(ls -1 "$PROJECT_DIR/$ENTITIES_DIR"/*.cs 2>/dev/null | wc -l) files"
echo " Compiled Models: $(ls -1 "$PROJECT_DIR/$COMPILED_DIR"/*.cs 2>/dev/null | wc -l) files"
echo ""
echo "Next steps:"
echo " 1. Review generated entities for any customization needs"
echo " 2. Create repository implementations in Repositories/"
echo " 3. Add DI registration in Extensions/"
echo ""

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env pwsh
# fix-duplicate-packages.ps1 - Remove duplicate PackageReference items from test projects
# These are already provided by Directory.Build.props
#
# Usage: ./fix-duplicate-packages.ps1 [-DryRun]
#   -DryRun  report which projects would change without writing any files.
# NOTE(review): "src" is a relative path, so this assumes the script is run
# from the repository root — confirm callers always invoke it from there.
param([switch]$DryRun)
# Packages supplied centrally by Directory.Build.props; a per-project copy is
# redundant and can cause version conflicts.
$packagesToRemove = @(
    "coverlet.collector",
    "Microsoft.NET.Test.Sdk",
    "Microsoft.AspNetCore.Mvc.Testing",
    "xunit",
    "xunit.runner.visualstudio",
    "Microsoft.Extensions.TimeProvider.Testing"
)
# SharpCompress is handled separately below: it is only duplicated in the
# *.Corpus.* projects.
$sharpCompressPackage = "SharpCompress"
# Find all test project files
$testProjects = Get-ChildItem -Path "src" -Filter "*.Tests.csproj" -Recurse
$corpusProjects = Get-ChildItem -Path "src" -Filter "*.Corpus.*.csproj" -Recurse
Write-Host "=== Fix Duplicate Package References ===" -ForegroundColor Cyan
Write-Host "Found $($testProjects.Count) test projects" -ForegroundColor Yellow
Write-Host "Found $($corpusProjects.Count) corpus projects (SharpCompress)" -ForegroundColor Yellow
$fixedCount = 0
foreach ($proj in $testProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false
    # Skip projects that opt out of common test infrastructure
    if ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") {
        Write-Host " Skipped (UseConcelierTestInfra=false): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }
    foreach ($pkg in $packagesToRemove) {
        # Match PackageReference for this package (various formats):
        # the self-closing form and the open/close pair with an empty body.
        $patterns = @(
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
        )
        foreach ($pattern in $patterns) {
            if ($content -match $pattern) {
                $content = $content -replace $pattern, ""
                $modified = $true
            }
        }
    }
    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    # Clean up ItemGroups with only whitespace/comments
    $content = $content -replace "(?s)<ItemGroup>\s*<!--[^-]*-->\s*</ItemGroup>", ""
    # NOTE: cleanup-only changes (no package removed) are intentionally not
    # written back, because $modified stays $false in that case.
    if ($modified) {
        $fixedCount++
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}
# Fix SharpCompress in corpus projects (same removal logic as above, for the
# single SharpCompress package).
foreach ($proj in $corpusProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false
    $patterns = @(
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
    )
    foreach ($pattern in $patterns) {
        if ($content -match $pattern) {
            $content = $content -replace $pattern, ""
            $modified = $true
        }
    }
    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    if ($modified) {
        $fixedCount++
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
if ($DryRun) {
    Write-Host "(Dry run - no changes made)" -ForegroundColor Yellow
}

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env pwsh
# fix-duplicate-projects.ps1 - Remove duplicate project entries from solution file
#
# A project is a duplicate when its name was already declared earlier in the
# file. The duplicate declaration, its EndProject line, and any GlobalSection
# lines mentioning its GUID are removed.
param(
    [string]$SlnPath = "src/StellaOps.sln"
)

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

$projectNames = @{}
$duplicateGuids = @()
$newLines = @()
$skipNextEndProject = $false

foreach ($line in $lines) {
    # Drop the EndProject line that closes a removed duplicate declaration.
    if ($skipNextEndProject -and $line -eq "EndProject") {
        $skipNextEndProject = $false
        continue
    }
    # Project declaration: capture the project name and its GUID.
    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]
        if ($projectNames.ContainsKey($name)) {
            $duplicateGuids += $guid
            Write-Host "Removing duplicate: $name ($guid)"
            $skipNextEndProject = $true
            continue
        } else {
            $projectNames[$name] = $true
        }
    }
    $newLines += $line
}

# Also remove duplicate GUIDs from GlobalSection
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        # GUIDs contain only hex digits and hyphens, so -match (regex) is safe.
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

# BUGFIX: piping an array to Out-File with -NoNewline writes the elements with
# no separator at all, collapsing the solution into one corrupt line. Join the
# lines with CRLF explicitly before writing (same approach as fix-sln-duplicates.ps1).
($finalLines -join "`r`n") | Out-File -FilePath $SlnPath -Encoding UTF8 -NoNewline

Write-Host "`nRemoved $($duplicateGuids.Count) duplicate projects"

View File

@@ -0,0 +1,55 @@
# Fix duplicate "using StellaOps.TestKit;" statements in C# files.
# Affected files carry the directive both at the top (correct) and again in
# the middle (wrong); keep the first occurrence and drop every later one.
$ErrorActionPreference = "Stop"

$srcPath = Join-Path $PSScriptRoot "..\..\src"
$pattern = "using StellaOps.TestKit;"

# Collect every .cs file whose raw text contains the directive at least once.
$files = Get-ChildItem -Path $srcPath -Recurse -Filter "*.cs" |
    Where-Object { (Get-Content $_.FullName -Raw) -match [regex]::Escape($pattern) }

Write-Host "Found $($files.Count) files with 'using StellaOps.TestKit;'" -ForegroundColor Cyan

$fixedCount = 0
$errorCount = 0

foreach ($file in $files) {
    try {
        $seenOnce = $false
        $droppedAny = $false
        # Stream the file line by line, emitting every line except repeat
        # occurrences of the directive (whitespace-insensitive comparison).
        $keptLines = foreach ($line in (Get-Content $file.FullName)) {
            if ($line.Trim() -eq $pattern) {
                if ($seenOnce) {
                    # Duplicate directive: drop it.
                    $droppedAny = $true
                    continue
                }
                $seenOnce = $true
            }
            $line
        }
        # Rewrite only when at least one duplicate was removed.
        if ($droppedAny) {
            $keptLines | Set-Content -Path $file.FullName -Encoding UTF8
            Write-Host "Fixed: $($file.Name)" -ForegroundColor Green
            $fixedCount++
        }
    } catch {
        Write-Host "Error processing $($file.FullName): $_" -ForegroundColor Red
        $errorCount++
    }
}

Write-Host ""
Write-Host "Summary:" -ForegroundColor Cyan
Write-Host " Files fixed: $fixedCount" -ForegroundColor Green
Write-Host " Errors: $errorCount" -ForegroundColor $(if ($errorCount -gt 0) { "Red" } else { "Green" })

View File

@@ -0,0 +1,51 @@
# Fix projects with UseConcelierTestInfra=false that don't have xunit
# These projects relied on TestKit for xunit, but now need their own reference
$ErrorActionPreference = "Stop"

# Derive the src folder from the script location instead of hard-coding a
# machine-specific absolute path (was E:\dev\git.stella-ops.org\src).
$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find test projects with UseConcelierTestInfra=false
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        (-not ($content -match "xunit\.v3")) -and # Skip xunit.v3 projects
        (-not ($content -match '<PackageReference\s+Include="xunit"')) # Skip projects that already have xunit
    }

Write-Host "Found $($projects.Count) projects needing xunit" -ForegroundColor Cyan

# Package references to inject (versions mirror Directory.Build.props).
$xunitPackages = @'
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
'@

$fixedCount = 0
foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw
    # Check if it has an ItemGroup with PackageReference
    if ($content -match '(<ItemGroup>[\s\S]*?<PackageReference)') {
        # BUGFIX: the -replace operator substitutes EVERY match, which used to
        # inject the packages into each PackageReference ItemGroup. Use
        # [regex]::Replace with a count of 1 so only the first group is touched.
        $rx = [regex]'(<ItemGroup>\s*\r?\n)(\s*<PackageReference)'
        $newContent = $rx.Replace($content, "`$1$xunitPackages`n`$2", 1)
    } else {
        # No PackageReference ItemGroup, add one before </Project>
        $itemGroup = @"
  <ItemGroup>
$xunitPackages
  </ItemGroup>
"@
        $newContent = $content -replace '</Project>', "$itemGroup`n</Project>"
    }
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,44 @@
# Fix project references in src/__Tests/** that point to wrong relative paths
# Pattern: ../../<Module>/... should be ../../../<Module>/...
$ErrorActionPreference = "Stop"

# Derive the tests folder from the script location instead of hard-coding a
# machine-specific absolute path (was E:\dev\git.stella-ops.org\src\__Tests).
$testsPath = Join-Path $PSScriptRoot "..\..\src\__Tests"

# Known module prefixes that exist at src/<Module>/
$modules = @("Signals", "Scanner", "Concelier", "Scheduler", "Authority", "Attestor",
    "BinaryIndex", "EvidenceLocker", "Excititor", "ExportCenter", "Gateway",
    "Graph", "IssuerDirectory", "Notify", "Orchestrator", "Policy", "AirGap",
    "Provenance", "Replay", "RiskEngine", "SbomService", "Signer", "TaskRunner",
    "Telemetry", "TimelineIndexer", "Unknowns", "VexHub", "VexLens", "VulnExplorer",
    "Zastava", "Cli", "Aoc", "Web", "Bench", "Cryptography", "PacksRegistry",
    "Notifier", "Findings")

$fixedCount = 0
Get-ChildItem -Path $testsPath -Recurse -Filter "*.csproj" | ForEach-Object {
    $proj = $_
    $content = Get-Content $proj.FullName -Raw
    $originalContent = $content
    foreach ($module in $modules) {
        # Rewrite ../../<Module>/ to ../../../<Module>/.
        # An already-correct 'Include="../../../<Module>/' cannot match this
        # pattern (the character after 'Include="../../' is '.' there, not the
        # module name), so a plain global replace is safe and idempotent.
        # BUGFIX: the old '-notmatch' guard skipped the entire file whenever it
        # contained one correct reference, leaving the remaining broken
        # references to the same module unfixed.
        $pattern = [regex]::Escape("Include=`"../../$module/")
        $content = $content -replace $pattern, "Include=`"../../../$module/"
    }
    # Fix __Libraries references that are one level short
    $content = $content -replace 'Include="../../__Libraries/', 'Include="../../../__Libraries/'
    if ($content -ne $originalContent) {
        Set-Content -Path $proj.FullName -Value $content -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,68 @@
#!/usr/bin/env pwsh
# fix-sln-duplicates.ps1 - Remove duplicate project entries from solution file
#
# A project counts as a duplicate when its name was already declared in an
# earlier Project(...) line. The duplicate's declaration line, its closing
# EndProject line, and every GlobalSection line mentioning its GUID are dropped.
param(
    [string]$SlnPath = "src/StellaOps.sln"
)
$ErrorActionPreference = "Stop"
Write-Host "=== Solution Duplicate Cleanup ===" -ForegroundColor Cyan
Write-Host "Solution: $SlnPath"
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Track seen project names
$seenProjects = @{}
$duplicateGuids = @()
$newLines = @()
$skipNext = $false
for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]
    if ($skipNext) {
        # Drop the line that closes a removed duplicate declaration.
        # NOTE(review): this assumes every Project/EndProject entry is exactly
        # two lines; a nested ProjectSection would break the assumption —
        # confirm the solution file never contains them.
        $skipNext = $false
        continue
    }
    # Check for project declaration; capture the project name and its GUID.
    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]
        if ($seenProjects.ContainsKey($name)) {
            Write-Host "Removing duplicate: $name ($guid)" -ForegroundColor Yellow
            $duplicateGuids += $guid
            # Skip this line and the next EndProject line
            $skipNext = $true
            continue
        } else {
            $seenProjects[$name] = $true
        }
    }
    $newLines += $line
}
# Remove GlobalSection references to duplicate GUIDs
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        # GUIDs contain only hex digits and hyphens, so using -match (regex)
        # with the raw GUID text is safe here.
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}
# Write back (join explicitly so lines keep their CRLF separators).
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host ""
Write-Host "Removed $($duplicateGuids.Count) duplicate projects" -ForegroundColor Green

View File

@@ -0,0 +1,40 @@
# Add <Using Include="Xunit" /> to test projects with UseConcelierTestInfra=false
# that have xunit but don't have the global using
$ErrorActionPreference = "Stop"

# Derive the src folder from the script location instead of hard-coding a
# machine-specific absolute path (was E:\dev\git.stella-ops.org\src).
$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find test projects with UseConcelierTestInfra=false that have xunit but no Using Include="Xunit"
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        ($content -match '<PackageReference\s+Include="xunit"') -and
        (-not ($content -match '<Using\s+Include="Xunit"'))
    }

Write-Host "Found $($projects.Count) projects needing Xunit using" -ForegroundColor Cyan

$fixedCount = 0
foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw
    # Insert the global using before the FIRST ProjectReference ItemGroup, or
    # fall back to inserting it just before </Project>.
    if ($content -match '(<ItemGroup>\s*\r?\n\s*<ProjectReference)') {
        $usingBlock = "  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n`n"
        # BUGFIX: the -replace operator substitutes EVERY match, which used to
        # duplicate the using block before each ProjectReference ItemGroup.
        # Replace only the first occurrence via [regex]::Replace with count 1.
        $rx = [regex]'(\s*)(<ItemGroup>\s*\r?\n\s*<ProjectReference)'
        $newContent = $rx.Replace($content, "$usingBlock`$1`$2", 1)
    } else {
        # Add before </Project>
        $usingBlock = "`n  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n"
        $newContent = $content -replace '</Project>', "$usingBlock</Project>"
    }
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,37 @@
# Fix xunit.v3 projects that conflict with Directory.Build.props xunit 2.x
# Add UseConcelierTestInfra=false to exclude them from common test infrastructure
$ErrorActionPreference = "Stop"
$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find all csproj files that reference xunit.v3
$xunitV3Projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object { (Get-Content $_.FullName -Raw) -match "xunit\.v3" }

Write-Host "Found $($xunitV3Projects.Count) projects with xunit.v3" -ForegroundColor Cyan

$fixedCount = 0
foreach ($proj in $xunitV3Projects) {
    $content = Get-Content $proj.FullName -Raw
    # Check if already has UseConcelierTestInfra set
    if ($content -match "<UseConcelierTestInfra>") {
        Write-Host " Skipped (already configured): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }
    # Add UseConcelierTestInfra=false after the FIRST <PropertyGroup> only.
    # BUGFIX: the -replace operator substitutes EVERY match, which used to
    # insert the property into every PropertyGroup in the project file.
    # [regex]::Replace with a count of 1 touches just the first one.
    $rx = [regex]'(<PropertyGroup>)'
    $newContent = $rx.Replace($content, "`$1`n    <UseConcelierTestInfra>false</UseConcelierTestInfra>", 1)
    # Only write if changed
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,247 @@
<#
.SYNOPSIS
Generates plugin configuration files for StellaOps modules.
.DESCRIPTION
This script generates plugin.json manifests and config.yaml files for all
plugins based on the plugin catalog definition.
.PARAMETER RepoRoot
Path to the repository root. Defaults to the parent of the devops folder.
.PARAMETER OutputDir
Output directory for generated configs. Defaults to etc/plugins/.
.PARAMETER Force
Overwrite existing configuration files.
.EXAMPLE
.\generate-plugin-configs.ps1
.\generate-plugin-configs.ps1 -Force
#>
param(
[string]$RepoRoot = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
[string]$OutputDir = "",
[switch]$Force
)
if (-not $OutputDir) {
$OutputDir = Join-Path $RepoRoot "etc/plugins"
}
# Plugin catalog - defines all plugins and their metadata
$PluginCatalog = @{
# Router transports
"router/transports" = @{
category = "router.transports"
plugins = @(
@{ id = "tcp"; name = "TCP Transport"; assembly = "StellaOps.Router.Transport.Tcp.dll"; enabled = $true; priority = 50 }
@{ id = "tls"; name = "TLS Transport"; assembly = "StellaOps.Router.Transport.Tls.dll"; enabled = $true; priority = 60 }
@{ id = "udp"; name = "UDP Transport"; assembly = "StellaOps.Router.Transport.Udp.dll"; enabled = $false; priority = 40 }
@{ id = "rabbitmq"; name = "RabbitMQ Transport"; assembly = "StellaOps.Router.Transport.RabbitMq.dll"; enabled = $false; priority = 30 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Router.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
# Excititor connectors
"excititor" = @{
category = "excititor.connectors"
plugins = @(
@{ id = "redhat-csaf"; name = "Red Hat CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.RedHat.CSAF.dll"; enabled = $true; priority = 100; vendor = "Red Hat" }
@{ id = "cisco-csaf"; name = "Cisco CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Cisco.CSAF.dll"; enabled = $false; priority = 90; vendor = "Cisco" }
@{ id = "msrc-csaf"; name = "Microsoft CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.MSRC.CSAF.dll"; enabled = $false; priority = 85; vendor = "Microsoft" }
@{ id = "oracle-csaf"; name = "Oracle CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Oracle.CSAF.dll"; enabled = $false; priority = 80; vendor = "Oracle" }
@{ id = "ubuntu-csaf"; name = "Ubuntu CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.dll"; enabled = $false; priority = 75; vendor = "Canonical" }
@{ id = "suse-rancher"; name = "SUSE Rancher VEX Hub"; assembly = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.dll"; enabled = $false; priority = 70; vendor = "SUSE" }
@{ id = "oci-openvex"; name = "OCI OpenVEX Connector"; assembly = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.dll"; enabled = $false; priority = 60 }
)
}
# Scanner language analyzers
"scanner/analyzers/lang" = @{
category = "scanner.analyzers.lang"
plugins = @(
@{ id = "dotnet"; name = ".NET Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.DotNet.dll"; enabled = $true; priority = 100 }
@{ id = "go"; name = "Go Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Go.dll"; enabled = $true; priority = 95 }
@{ id = "node"; name = "Node.js Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Node.dll"; enabled = $true; priority = 90 }
@{ id = "python"; name = "Python Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll"; enabled = $true; priority = 85 }
@{ id = "java"; name = "Java Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Java.dll"; enabled = $true; priority = 80 }
@{ id = "rust"; name = "Rust Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Rust.dll"; enabled = $false; priority = 75 }
@{ id = "ruby"; name = "Ruby Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Ruby.dll"; enabled = $false; priority = 70 }
@{ id = "php"; name = "PHP Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Php.dll"; enabled = $false; priority = 65 }
@{ id = "swift"; name = "Swift Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Swift.dll"; enabled = $false; priority = 60 }
@{ id = "cpp"; name = "C/C++ Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Cpp.dll"; enabled = $false; priority = 55 }
)
}
# Scanner OS analyzers
"scanner/analyzers/os" = @{
category = "scanner.analyzers.os"
plugins = @(
@{ id = "apk"; name = "Alpine APK Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Apk.dll"; enabled = $true; priority = 100 }
@{ id = "dpkg"; name = "Debian DPKG Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Dpkg.dll"; enabled = $true; priority = 95 }
@{ id = "rpm"; name = "RPM Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Rpm.dll"; enabled = $true; priority = 90 }
@{ id = "pacman"; name = "Arch Pacman Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Pacman.dll"; enabled = $false; priority = 80 }
@{ id = "homebrew"; name = "Homebrew Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Homebrew.dll"; enabled = $false; priority = 70 }
@{ id = "chocolatey"; name = "Chocolatey Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Chocolatey.dll"; enabled = $false; priority = 65 }
)
}
# Notify channels
"notify" = @{
category = "notify.channels"
plugins = @(
@{ id = "email"; name = "Email Notifier"; assembly = "StellaOps.Notify.Connectors.Email.dll"; enabled = $true; priority = 100 }
@{ id = "slack"; name = "Slack Notifier"; assembly = "StellaOps.Notify.Connectors.Slack.dll"; enabled = $true; priority = 90 }
@{ id = "webhook"; name = "Webhook Notifier"; assembly = "StellaOps.Notify.Connectors.Webhook.dll"; enabled = $true; priority = 80 }
@{ id = "teams"; name = "Microsoft Teams Notifier"; assembly = "StellaOps.Notify.Connectors.Teams.dll"; enabled = $false; priority = 85 }
@{ id = "pagerduty"; name = "PagerDuty Notifier"; assembly = "StellaOps.Notify.Connectors.PagerDuty.dll"; enabled = $false; priority = 75 }
@{ id = "opsgenie"; name = "OpsGenie Notifier"; assembly = "StellaOps.Notify.Connectors.OpsGenie.dll"; enabled = $false; priority = 70 }
@{ id = "telegram"; name = "Telegram Notifier"; assembly = "StellaOps.Notify.Connectors.Telegram.dll"; enabled = $false; priority = 65 }
@{ id = "discord"; name = "Discord Notifier"; assembly = "StellaOps.Notify.Connectors.Discord.dll"; enabled = $false; priority = 60 }
)
}
# Messaging transports
"messaging" = @{
category = "messaging.transports"
plugins = @(
@{ id = "valkey"; name = "Valkey Transport"; assembly = "StellaOps.Messaging.Transport.Valkey.dll"; enabled = $true; priority = 100 }
@{ id = "postgres"; name = "PostgreSQL Transport"; assembly = "StellaOps.Messaging.Transport.Postgres.dll"; enabled = $false; priority = 90 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Messaging.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
}
function New-PluginManifest {
    <#
    .SYNOPSIS
        Builds the plugin.json manifest content (a JSON string) for one plugin.
    .PARAMETER ModulePath
        Relative module directory. Currently unused; kept so the signature stays
        parallel to New-PluginConfig and existing call sites keep working.
    .PARAMETER Plugin
        Hashtable describing the plugin: id, name, assembly, enabled, priority,
        optional vendor.
    .PARAMETER Category
        Category string, e.g. "scanner.analyzers.lang" or "scanner/analyzers/lang".
    .OUTPUTS
        [string] JSON document conforming to plugin-manifest/v2.
    #>
    param(
        [string]$ModulePath,
        [hashtable]$Plugin,
        [string]$Category
    )
    # Normalize the category into one dash-separated token:
    # "scanner/analyzers/lang" or "scanner.analyzers.lang" -> "scanner-analyzers-lang".
    # (The previous Replace('/', '.') was dead work: every '.' it produced was
    # immediately replaced by '-' anyway; map each separator to '-' directly.)
    $categoryToken = $Category.Replace('/', '-').Replace('.', '-')
    $fullId = "stellaops.$categoryToken.$($Plugin.id)"
    $manifest = @{
        '$schema'     = "https://schema.stella-ops.org/plugin-manifest/v2.json"
        schemaVersion = "2.0"
        id            = $fullId
        name          = $Plugin.name
        version       = "1.0.0"
        assembly      = @{
            path = $Plugin.assembly
        }
        capabilities  = @()
        platforms     = @("linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64")
        compliance    = @("NIST")
        jurisdiction  = "world"
        priority      = $Plugin.priority
        enabled       = $Plugin.enabled
        metadata      = @{
            author  = "StellaOps"
            license = "AGPL-3.0-or-later"
        }
    }
    # Vendor is optional; only emit the key when the catalog entry provides one.
    if ($Plugin.vendor) {
        $manifest.metadata["vendor"] = $Plugin.vendor
    }
    return $manifest | ConvertTo-Json -Depth 10
}
function New-PluginConfig {
    <#
    .SYNOPSIS
        Builds the per-plugin config.yaml content for one plugin.
    .PARAMETER ModulePath
        Relative module directory. Currently unused; kept for signature
        compatibility with New-PluginManifest and existing call sites.
    .PARAMETER Plugin
        Hashtable describing the plugin: id, name, enabled, priority.
    .PARAMETER Category
        Category string; normalized the same way as in New-PluginManifest so
        both files agree on the plugin id.
    .OUTPUTS
        [string] YAML document.
    #>
    param(
        [string]$ModulePath,
        [hashtable]$Plugin,
        [string]$Category
    )
    # Same id derivation as New-PluginManifest: '/' and '.' both collapse to '-'.
    # (Drops the redundant intermediate Replace('/', '.') step.)
    $categoryToken = $Category.Replace('/', '-').Replace('.', '-')
    $fullId = "stellaops.$categoryToken.$($Plugin.id)"
    $config = @"
id: $fullId
name: $($Plugin.name)
enabled: $($Plugin.enabled.ToString().ToLower())
priority: $($Plugin.priority)
config:
  # Plugin-specific configuration
  # Add settings here as needed
"@
    return $config
}
function New-RegistryFile {
    <#
    .SYNOPSIS
        Renders registry.yaml content listing every plugin in a category.
    .PARAMETER Category
        Category string recorded verbatim in the file.
    .PARAMETER Plugins
        Array of plugin hashtables (id, enabled, priority).
    .OUTPUTS
        [string] YAML document.
    #>
    param(
        [string]$Category,
        [array]$Plugins
    )
    # Each plugin becomes a nested mapping under 'plugins:'. The properties must
    # be indented deeper than the plugin id, otherwise YAML parses them as
    # siblings of the id instead of its children.
    $entries = $Plugins | ForEach-Object {
        "  $($_.id):`n    enabled: $($_.enabled.ToString().ToLower())`n    priority: $($_.priority)`n    config: $($_.id)/config.yaml"
    }
    $registry = @"
version: "1.0"
category: $Category
defaults:
  enabled: false
  timeout: "00:05:00"
plugins:
$($entries -join "`n")
"@
    return $registry
}
# Main generation logic.
# NOTE(review): relies on $OutputDir, $Force and $PluginCatalog being defined
# earlier in this script (outside this section) - verify before refactoring.
# Existing files are preserved unless -Force is passed.
Write-Host "Generating plugin configurations to: $OutputDir" -ForegroundColor Cyan
foreach ($modulePath in $PluginCatalog.Keys) {
    $moduleConfig = $PluginCatalog[$modulePath]
    $moduleDir = Join-Path $OutputDir $modulePath
    Write-Host "Processing module: $modulePath" -ForegroundColor Yellow
    # Create module directory
    if (-not (Test-Path $moduleDir)) {
        New-Item -ItemType Directory -Path $moduleDir -Force | Out-Null
    }
    # Generate registry.yaml (category-wide plugin index)
    $registryPath = Join-Path $moduleDir "registry.yaml"
    if ($Force -or -not (Test-Path $registryPath)) {
        $registryContent = New-RegistryFile -Category $moduleConfig.category -Plugins $moduleConfig.plugins
        Set-Content -Path $registryPath -Value $registryContent -Encoding utf8
        Write-Host " Created: registry.yaml" -ForegroundColor Green
    }
    # Generate plugin configs (one directory per plugin id)
    foreach ($plugin in $moduleConfig.plugins) {
        $pluginDir = Join-Path $moduleDir $plugin.id
        if (-not (Test-Path $pluginDir)) {
            New-Item -ItemType Directory -Path $pluginDir -Force | Out-Null
        }
        # plugin.json (machine-readable manifest)
        $manifestPath = Join-Path $pluginDir "plugin.json"
        if ($Force -or -not (Test-Path $manifestPath)) {
            $manifestContent = New-PluginManifest -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $manifestPath -Value $manifestContent -Encoding utf8
            Write-Host " Created: $($plugin.id)/plugin.json" -ForegroundColor Green
        }
        # config.yaml (operator-editable settings)
        $configPath = Join-Path $pluginDir "config.yaml"
        if ($Force -or -not (Test-Path $configPath)) {
            $configContent = New-PluginConfig -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $configPath -Value $configContent -Encoding utf8
            Write-Host " Created: $($plugin.id)/config.yaml" -ForegroundColor Green
        }
    }
}
Write-Host "`nPlugin configuration generation complete!" -ForegroundColor Cyan

View File

@@ -0,0 +1,406 @@
#!/usr/bin/env bash
# =============================================================================
# CI COMMON FUNCTIONS
# =============================================================================
# Shared utility functions for local CI testing scripts.
#
# Usage:
#   source "$SCRIPT_DIR/lib/ci-common.sh"
#
# =============================================================================

# Prevent multiple sourcing.
# NOTE(review): 'return' assumes the file is sourced; executing it directly
# would make 'return' a top-level error.
[[ -n "${_CI_COMMON_LOADED:-}" ]] && return
_CI_COMMON_LOADED=1

# =============================================================================
# COLOR DEFINITIONS
# =============================================================================
# Emit ANSI escapes only when stdout is a terminal and TERM is usable;
# otherwise define every color as an empty string so callers can interpolate
# them unconditionally.
if [[ -t 1 ]] && [[ -n "${TERM:-}" ]] && [[ "${TERM}" != "dumb" ]]; then
  RED='\033[0;31m'
  GREEN='\033[0;32m'
  YELLOW='\033[0;33m'
  BLUE='\033[0;34m'
  MAGENTA='\033[0;35m'
  CYAN='\033[0;36m'
  WHITE='\033[0;37m'
  BOLD='\033[1m'
  DIM='\033[2m'
  RESET='\033[0m'
else
  RED=''
  GREEN=''
  YELLOW=''
  BLUE=''
  MAGENTA=''
  CYAN=''
  WHITE=''
  BOLD=''
  DIM=''
  RESET=''
fi
# =============================================================================
# LOGGING FUNCTIONS
# =============================================================================
# Logging helpers. All use 'echo -e' so the \033 color sequences above expand;
# note this also expands backslash escapes inside the message text itself.

# Log an info message (stdout).
log_info() {
  echo -e "${BLUE}[INFO]${RESET} $*"
}

# Log a success message (stdout).
log_success() {
  echo -e "${GREEN}[OK]${RESET} $*"
}

# Log a warning message (stderr).
log_warn() {
  echo -e "${YELLOW}[WARN]${RESET} $*" >&2
}

# Log an error message (stderr).
log_error() {
  echo -e "${RED}[ERROR]${RESET} $*" >&2
}

# Log a debug message (only if VERBOSE is true).
log_debug() {
  if [[ "${VERBOSE:-false}" == "true" ]]; then
    echo -e "${DIM}[DEBUG]${RESET} $*"
  fi
}

# Log a step in a process, e.g. "[2/5] Restoring packages".
log_step() {
  local step_num="$1"
  local total_steps="$2"
  local message="$3"
  echo -e "${CYAN}[${step_num}/${total_steps}]${RESET} ${BOLD}${message}${RESET}"
}

# Log a section header surrounded by blank lines.
log_section() {
  echo ""
  echo -e "${BOLD}${MAGENTA}=== $* ===${RESET}"
  echo ""
}

# Log a subsection header.
log_subsection() {
  echo -e "${CYAN}--- $* ---${RESET}"
}
# =============================================================================
# ERROR HANDLING
# =============================================================================
# Print an error message and abort the whole script.
die() {
  log_error "$@"
  exit 1
}

# Verify an executable is on PATH; optionally suggest how to install it.
# Returns 0 when found, 1 otherwise.
require_command() {
  local cmd="$1"
  local install_hint="${2:-}"
  command -v "$cmd" &>/dev/null && return 0
  log_error "Required command not found: $cmd"
  [[ -n "$install_hint" ]] && log_info "Install with: $install_hint"
  return 1
}

# Verify a regular file exists. Returns 0 when present, 1 otherwise.
require_file() {
  local file="$1"
  [[ -f "$file" ]] && return 0
  log_error "Required file not found: $file"
  return 1
}

# Verify a directory exists. Returns 0 when present, 1 otherwise.
require_dir() {
  local dir="$1"
  [[ -d "$dir" ]] && return 0
  log_error "Required directory not found: $dir"
  return 1
}
# =============================================================================
# TIMING FUNCTIONS
# =============================================================================
# Current Unix time in whole seconds.
get_timestamp() {
  date +%s
}

# Render a second count as "Nm Ss" (or just "Ss" under a minute).
format_duration() {
  local total="$1"
  local mins=$(( total / 60 ))
  local secs=$(( total % 60 ))
  if (( mins > 0 )); then
    printf '%dm %ds\n' "$mins" "$secs"
  else
    printf '%ds\n' "$secs"
  fi
}

# Begin timing: prints a start timestamp for later use with stop_timer.
start_timer() {
  get_timestamp
}

# Finish timing: logs "<label> completed in <duration>".
stop_timer() {
  local started="$1"
  local label="${2:-Operation}"
  local finished
  finished=$(get_timestamp)
  log_info "$label completed in $(format_duration $(( finished - started )))"
}
# =============================================================================
# STRING FUNCTIONS
# =============================================================================
# Convert string to lowercase.
# printf (not echo) so inputs like "-n" or backslash sequences survive intact.
to_lower() {
  printf '%s\n' "$1" | tr '[:upper:]' '[:lower:]'
}

# Convert string to uppercase.
to_upper() {
  printf '%s\n' "$1" | tr '[:lower:]' '[:upper:]'
}

# Trim leading and trailing whitespace using pure parameter expansion.
trim() {
  local var="$*"
  var="${var#"${var%%[![:space:]]*}"}"   # drop leading whitespace
  var="${var%"${var##*[![:space:]]}"}"   # drop trailing whitespace
  echo -n "$var"
}

# Join arguments with a delimiter: join_by , a b c -> "a,b,c".
# Now tolerates being called with zero elements (prints nothing) instead of
# tripping 'set -u' on the unset $1 after the first shift.
join_by() {
  local delimiter="$1"
  shift
  (( $# > 0 )) || return 0
  local first="$1"
  shift
  printf '%s' "$first" "${@/#/$delimiter}"
}
# =============================================================================
# ARRAY FUNCTIONS
# =============================================================================
# Check whether the first argument equals any of the remaining arguments.
# Usage: array_contains needle "${haystack[@]}"; returns 0 on a match.
array_contains() {
  local needle="$1"
  shift
  local candidate
  for candidate; do   # iterates over "$@"
    if [[ "$candidate" == "$needle" ]]; then
      return 0
    fi
  done
  return 1
}
# =============================================================================
# FILE FUNCTIONS
# =============================================================================
# Create a directory (with parents) unless it already exists.
ensure_dir() {
  local target="$1"
  if [[ -d "$target" ]]; then
    return 0
  fi
  mkdir -p "$target"
  log_debug "Created directory: $target"
}

# Resolve a path to an absolute path. Directories and files are resolved via
# cd/pwd; anything else is echoed back unchanged.
get_absolute_path() {
  local target="$1"
  if [[ -d "$target" ]]; then
    (cd "$target" && pwd)
  elif [[ -f "$target" ]]; then
    local parent
    parent=$(dirname "$target")
    echo "$(cd "$parent" && pwd)/$(basename "$target")"
  else
    echo "$target"
  fi
}
# =============================================================================
# GIT FUNCTIONS
# =============================================================================
# Get the repository root directory (empty output and nonzero status outside
# a git work tree; git errors are suppressed).
get_repo_root() {
  git rev-parse --show-toplevel 2>/dev/null
}

# Get current branch name (prints "HEAD" when detached).
get_current_branch() {
  git rev-parse --abbrev-ref HEAD 2>/dev/null
}

# Get current commit SHA (full 40-char form).
get_current_sha() {
  git rev-parse HEAD 2>/dev/null
}

# Get short commit SHA.
get_short_sha() {
  git rev-parse --short HEAD 2>/dev/null
}

# Check if working directory is clean: no staged, unstaged, or untracked
# changes reported by 'git status --porcelain'.
is_git_clean() {
  [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}

# Get list of changed files compared to main branch.
# Three-dot diff (merge-base...HEAD): only changes introduced on this branch.
get_changed_files() {
  local base_branch="${1:-main}"
  git diff --name-only "$base_branch"...HEAD 2>/dev/null
}
# =============================================================================
# MODULE DETECTION
# =============================================================================
# Map of module names to source paths.
# A module may own several space-separated directories (e.g. Scanner also owns
# src/BinaryIndex); detect_changed_modules word-splits the value on purpose.
declare -A MODULE_PATHS=(
  ["Scanner"]="src/Scanner src/BinaryIndex"
  ["Concelier"]="src/Concelier src/Excititor"
  ["Authority"]="src/Authority"
  ["Policy"]="src/Policy src/RiskEngine"
  ["Attestor"]="src/Attestor src/Provenance"
  ["EvidenceLocker"]="src/EvidenceLocker"
  ["ExportCenter"]="src/ExportCenter"
  ["Findings"]="src/Findings"
  ["SbomService"]="src/SbomService"
  ["Notify"]="src/Notify src/Notifier"
  ["Router"]="src/Router src/Gateway"
  ["Cryptography"]="src/Cryptography"
  ["AirGap"]="src/AirGap"
  ["Cli"]="src/Cli"
  ["AdvisoryAI"]="src/AdvisoryAI"
  ["ReachGraph"]="src/ReachGraph"
  ["Orchestrator"]="src/Orchestrator"
  ["PacksRegistry"]="src/PacksRegistry"
  ["Replay"]="src/Replay"
  ["Aoc"]="src/Aoc"
  ["IssuerDirectory"]="src/IssuerDirectory"
  ["Telemetry"]="src/Telemetry"
  ["Signals"]="src/Signals"
  ["Web"]="src/Web"
  ["DevPortal"]="src/DevPortal"
)

# Modules that use Node.js/npm instead of .NET
declare -a NODE_MODULES=("Web" "DevPortal")
# Detect which modules have changed based on git diff against a base branch.
# Outputs one of:
#   "ALL"           - infrastructure or shared-library change; rebuild everything
#   "NONE"          - no module-relevant changes
#   "Mod1 Mod2 ..." - space-separated list of affected modules
detect_changed_modules() {
  local base_branch="${1:-main}"
  local changed_files
  changed_files=$(get_changed_files "$base_branch")

  # Repo-wide build configuration affects every module; short-circuit before
  # doing any per-module matching. (Moved up from after the loop - the loop's
  # result was discarded in this case anyway, so output is unchanged.)
  if echo "$changed_files" | grep -qE "^(Directory\.Build\.props|Directory\.Packages\.props|nuget\.config)"; then
    echo "ALL"
    return
  fi
  # Shared library changes likewise affect everything.
  if echo "$changed_files" | grep -q "^src/__Libraries/"; then
    echo "ALL"
    return
  fi

  local changed_modules=()
  local module
  local paths
  local path
  for module in "${!MODULE_PATHS[@]}"; do
    paths="${MODULE_PATHS[$module]}"
    for path in $paths; do   # intentional word-splitting: value is space-separated
      if echo "$changed_files" | grep -q "^${path}/"; then
        # Guard the expansion: "${changed_modules[@]}" on an empty array trips
        # 'set -u' on bash < 4.4.
        if [[ ${#changed_modules[@]} -eq 0 ]] || ! array_contains "$module" "${changed_modules[@]}"; then
          changed_modules+=("$module")
        fi
        break
      fi
    done
  done

  if [[ ${#changed_modules[@]} -eq 0 ]]; then
    echo "NONE"
  else
    echo "${changed_modules[*]}"
  fi
}
# =============================================================================
# RESULT REPORTING
# =============================================================================
# Print one summary-table row: two fixed-width columns plus an optional third.
print_table_row() {
  printf '  %-30s %-15s %s\n' "$1" "$2" "${3:-}"
}

# Print a colored PASSED/FAILED row for a named check.
# Arguments: $1 - check name, $2 - "true" if it passed, $3 - optional duration.
print_status() {
  local name="$1"
  local passed="$2"
  local duration="${3:-}"
  local verdict
  if [[ "$passed" == "true" ]]; then
    verdict="${GREEN}PASSED${RESET}"
  else
    verdict="${RED}FAILED${RESET}"
  fi
  print_table_row "$name" "$verdict" "$duration"
}
# =============================================================================
# ENVIRONMENT LOADING
# =============================================================================
# Source an environment file, auto-exporting every variable it assigns
# (set -a / set +a around the source).
# Returns 0 if the file existed and was loaded, 1 otherwise.
load_env_file() {
  local env_file="$1"
  [[ -f "$env_file" ]] || return 1
  log_debug "Loading environment from: $env_file"
  set -a
  # shellcheck source=/dev/null
  source "$env_file"
  set +a
}

View File

@@ -0,0 +1,342 @@
#!/usr/bin/env bash
# =============================================================================
# CI DOCKER UTILITIES
# =============================================================================
# Docker-related utility functions for local CI testing.
#
# Usage:
#   source "$SCRIPT_DIR/lib/ci-docker.sh"
#
# =============================================================================

# Prevent multiple sourcing
[[ -n "${_CI_DOCKER_LOADED:-}" ]] && return
_CI_DOCKER_LOADED=1

# =============================================================================
# CONFIGURATION
# =============================================================================
# Every setting is overridable via the environment; defaults target the
# in-repo compose stack. Relative paths are joined to $REPO_ROOT, which the
# sourcing script is expected to set.
CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.ci.yaml}"
CI_IMAGE="${CI_IMAGE:-stellaops-ci:local}"
CI_DOCKERFILE="${CI_DOCKERFILE:-devops/docker/Dockerfile.ci}"
CI_PROJECT_NAME="${CI_PROJECT_NAME:-stellaops-ci}"

# Service names from docker-compose.ci.yaml (must stay in sync with that file).
CI_SERVICES=(postgres-ci valkey-ci nats-ci mock-registry minio-ci)
# =============================================================================
# DOCKER CHECK
# =============================================================================
# Check if Docker is available and running.
# Returns 0 when the CLI exists and the daemon responds, 1 otherwise.
check_docker() {
  if ! command -v docker &>/dev/null; then
    log_error "Docker is not installed or not in PATH"
    log_info "Install Docker: https://docs.docker.com/get-docker/"
    return 1
  fi
  if ! docker info &>/dev/null; then
    log_error "Docker daemon is not running"
    log_info "Start Docker Desktop or run: sudo systemctl start docker"
    return 1
  fi
  log_debug "Docker is available and running"
  return 0
}

# Check if Docker Compose is available.
# Side effect: sets the global DOCKER_COMPOSE to "docker compose" (plugin) or
# "docker-compose" (standalone); later callers expand it unquoted on purpose.
check_docker_compose() {
  if docker compose version &>/dev/null; then
    DOCKER_COMPOSE="docker compose"
    log_debug "Using Docker Compose plugin"
    return 0
  elif command -v docker-compose &>/dev/null; then
    DOCKER_COMPOSE="docker-compose"
    log_debug "Using standalone docker-compose"
    return 0
  else
    log_error "Docker Compose is not installed"
    log_info "Install with: docker compose plugin or standalone docker-compose"
    return 1
  fi
}
# =============================================================================
# CI SERVICES MANAGEMENT
# =============================================================================
# Start CI services.
# Arguments: service names to start; all compose services when none given.
# Blocks until the started services report healthy (see wait_for_services).
start_ci_services() {
  local services=("$@")
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
  if [[ ! -f "$compose_file" ]]; then
    log_error "Compose file not found: $compose_file"
    return 1
  fi
  check_docker || return 1
  check_docker_compose || return 1
  log_section "Starting CI Services"
  # $DOCKER_COMPOSE is intentionally unquoted: it may be the two-word
  # "docker compose" plugin invocation.
  if [[ ${#services[@]} -eq 0 ]]; then
    # Start all services
    log_info "Starting all CI services..."
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d
  else
    # Start specific services
    log_info "Starting services: ${services[*]}"
    $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" up -d "${services[@]}"
  fi
  # $? here is the exit status of whichever compose invocation ran above.
  local result=$?
  if [[ $result -ne 0 ]]; then
    log_error "Failed to start CI services"
    return $result
  fi
  # Wait for services to be healthy.
  # NOTE(review): expanding an empty "${services[@]}" trips 'set -u' on
  # bash < 4.4 - confirm the supported bash baseline.
  wait_for_services "${services[@]}"
}

# Stop CI services (containers removed, named volumes kept).
stop_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
  if [[ ! -f "$compose_file" ]]; then
    log_debug "Compose file not found, nothing to stop"
    return 0
  fi
  check_docker_compose || return 1
  log_section "Stopping CI Services"
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down
}

# Stop CI services and remove volumes and orphan containers (full teardown).
cleanup_ci_services() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
  if [[ ! -f "$compose_file" ]]; then
    return 0
  fi
  check_docker_compose || return 1
  log_section "Cleaning Up CI Services"
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" down -v --remove-orphans
}
# Check status of CI services ('compose ps' for the CI project).
# Mirrors stop_ci_services: silently succeeds when the compose file (and thus
# the stack) does not exist, instead of invoking compose with a bad -f path.
check_ci_services_status() {
  local compose_file="$REPO_ROOT/$CI_COMPOSE_FILE"
  if [[ ! -f "$compose_file" ]]; then
    log_debug "Compose file not found, nothing to report"
    return 0
  fi
  check_docker_compose || return 1
  log_subsection "CI Services Status"
  $DOCKER_COMPOSE -f "$compose_file" -p "$CI_PROJECT_NAME" ps
}
# =============================================================================
# HEALTH CHECKS
# =============================================================================
# Wait for a specific service to be healthy.
# Arguments: $1 - compose service name
#            $2 - timeout in seconds (default 60)
#            $3 - poll interval in seconds (default 2)
# Returns 0 when healthy (or running without a health check), 1 on timeout.
wait_for_service() {
  local service="$1"
  local timeout="${2:-60}"
  local interval="${3:-2}"
  log_info "Waiting for $service to be healthy..."
  local elapsed=0
  while [[ $elapsed -lt $timeout ]]; do
    local status
    # Compose v2 names containers "<project>-<service>-1".
    status=$(docker inspect --format='{{.State.Health.Status}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "not found")
    if [[ "$status" == "healthy" ]]; then
      log_success "$service is healthy"
      return 0
    elif [[ "$status" == "not found" ]]; then
      # Container might not have health check, check if running
      local running
      running=$(docker inspect --format='{{.State.Running}}' "${CI_PROJECT_NAME}-${service}-1" 2>/dev/null || echo "false")
      if [[ "$running" == "true" ]]; then
        log_success "$service is running (no health check)"
        return 0
      fi
    fi
    sleep "$interval"
    elapsed=$((elapsed + interval))
  done
  log_error "$service did not become healthy within ${timeout}s"
  return 1
}

# Wait for multiple services to be healthy (defaults to all CI_SERVICES when
# called with no arguments). Returns 1 if any service failed.
wait_for_services() {
  local services=("$@")
  local failed=0
  if [[ ${#services[@]} -eq 0 ]]; then
    services=("${CI_SERVICES[@]}")
  fi
  log_info "Waiting for services to be ready..."
  for service in "${services[@]}"; do
    if ! wait_for_service "$service" 60 2; then
      failed=1
    fi
  done
  return $failed
}

# Check if PostgreSQL is accepting connections; uses pg_isready when present,
# otherwise a weaker nc port probe.
check_postgres_ready() {
  local host="${1:-localhost}"
  local port="${2:-5433}"
  local user="${3:-stellaops_ci}"
  local db="${4:-stellaops_test}"
  if command -v pg_isready &>/dev/null; then
    pg_isready -h "$host" -p "$port" -U "$user" -d "$db" &>/dev/null
  else
    # Fallback to nc if pg_isready not available
    nc -z "$host" "$port" &>/dev/null
  fi
}

# Check if Valkey/Redis is accepting connections; prefers valkey-cli, then
# redis-cli, then a bare nc port probe.
check_valkey_ready() {
  local host="${1:-localhost}"
  local port="${2:-6380}"
  if command -v valkey-cli &>/dev/null; then
    valkey-cli -h "$host" -p "$port" ping &>/dev/null
  elif command -v redis-cli &>/dev/null; then
    redis-cli -h "$host" -p "$port" ping &>/dev/null
  else
    nc -z "$host" "$port" &>/dev/null
  fi
}
# =============================================================================
# CI DOCKER IMAGE MANAGEMENT
# =============================================================================
# Check if CI image exists locally.
ci_image_exists() {
  docker image inspect "$CI_IMAGE" &>/dev/null
}

# Build the CI Docker image.
# Arguments: $1 - "true" to force a rebuild even when the image exists.
# Globals:   REPO_ROOT, CI_DOCKERFILE, CI_IMAGE.
# Returns 0 on success (or image already present), 1 on failure.
build_ci_image() {
  local force_rebuild="${1:-false}"
  local dockerfile="$REPO_ROOT/$CI_DOCKERFILE"
  if [[ ! -f "$dockerfile" ]]; then
    log_error "Dockerfile not found: $dockerfile"
    return 1
  fi
  check_docker || return 1
  if ci_image_exists && [[ "$force_rebuild" != "true" ]]; then
    log_info "CI image already exists: $CI_IMAGE"
    log_info "Use --rebuild to force rebuild"
    return 0
  fi
  log_section "Building CI Docker Image"
  log_info "Dockerfile: $dockerfile"
  log_info "Image: $CI_IMAGE"
  # 'if ! cmd' rather than inspecting $? afterwards: the separate $? check is
  # an anti-pattern and becomes dead code under 'set -e', where a failed
  # docker build would exit the script before the check ran.
  if ! docker build -t "$CI_IMAGE" -f "$dockerfile" "$REPO_ROOT"; then
    log_error "Failed to build CI image"
    return 1
  fi
  log_success "CI image built successfully: $CI_IMAGE"
}
# =============================================================================
# CONTAINER EXECUTION
# =============================================================================
# Run a command inside the CI container.
# Arguments: the shell command line to execute (all args joined with spaces
#            and handed to 'bash -c' inside the container).
# Side effects: builds the CI image on first use; mounts the repo at /src.
run_in_ci_container() {
  local command="$*"
  check_docker || return 1
  if ! ci_image_exists; then
    log_info "CI image not found, building..."
    build_ci_image || return 1
  fi
  local docker_args=(
    --rm
    -v "$REPO_ROOT:/src"
    -v "$REPO_ROOT/TestResults:/src/TestResults"
    -e DOTNET_NOLOGO=1
    -e DOTNET_CLI_TELEMETRY_OPTOUT=1
    -e DOTNET_SYSTEM_GLOBALIZATION_INVARIANT=1
    -e TZ=UTC
    -w /src
  )
  # Mount Docker socket for Testcontainers
  if [[ -S /var/run/docker.sock ]]; then
    docker_args+=(-v /var/run/docker.sock:/var/run/docker.sock)
  fi
  # Load environment file if exists
  local env_file="$REPO_ROOT/devops/ci-local/.env.local"
  if [[ -f "$env_file" ]]; then
    docker_args+=(--env-file "$env_file")
  fi
  # Connect to CI network if services are running
  if docker network inspect stellaops-ci-net &>/dev/null; then
    docker_args+=(--network stellaops-ci-net)
  fi
  log_debug "Running in CI container: $command"
  # Quoting/expansion of $command happens in the container's bash, not here.
  docker run "${docker_args[@]}" "$CI_IMAGE" bash -c "$command"
}
# =============================================================================
# DOCKER NETWORK UTILITIES
# =============================================================================
# Print the IP address of a running container (empty when not found).
get_container_ip() {
  docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$1" 2>/dev/null
}

# Succeed (return 0) iff the named container is currently running.
is_container_running() {
  local state
  state=$(docker inspect -f '{{.State.Running}}' "$1" 2>/dev/null)
  [[ "$state" == "true" ]]
}

# Print the last N lines (default 100) of a container's combined stdout/stderr.
get_container_logs() {
  local container="$1"
  local lines="${2:-100}"
  docker logs --tail "$lines" "$container" 2>&1
}

View File

@@ -0,0 +1,475 @@
#!/usr/bin/env bash
# =============================================================================
# CI-WEB.SH - Angular Web Testing Utilities
# =============================================================================
# Functions for running Angular/Web frontend tests locally.
#
# Test Types:
#   - Unit Tests (Karma/Jasmine)
#   - E2E Tests (Playwright)
#   - Accessibility Tests (Axe-core)
#   - Lighthouse Audits
#   - Storybook Build
#
# =============================================================================

# Prevent direct execution: this file only defines functions and relies on
# the sourcing script for REPO_ROOT, DRY_RUN and the log_* helpers.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  echo "This script should be sourced, not executed directly."
  exit 1
fi

# =============================================================================
# CONSTANTS
# =============================================================================
WEB_DIR="${REPO_ROOT:-$(git rev-parse --show-toplevel)}/src/Web/StellaOps.Web"
# Minimum recommended Node.js major version (see check_node_version).
WEB_NODE_VERSION="20"

# Test categories for Web.
# NOTE(review): not referenced elsewhere in this file's visible portion -
# presumably consumed by the sourcing script; verify before removing.
WEB_TEST_CATEGORIES=(
  "web:unit"        # Karma unit tests
  "web:e2e"         # Playwright E2E
  "web:a11y"        # Accessibility
  "web:lighthouse"  # Performance/a11y audit
  "web:build"       # Production build
  "web:storybook"   # Storybook build
)
# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================
# Verify Node.js is installed and warn when its major version is below the
# recommended minimum. Returns 1 only when node is missing entirely.
check_node_version() {
  if ! command -v node &>/dev/null; then
    log_error "Node.js not found"
    log_info "Install Node.js $WEB_NODE_VERSION+: https://nodejs.org"
    return 1
  fi
  local version
  version=$(node --version | sed 's/^v//' | cut -d. -f1)
  # Guard: a non-numeric parse would make the arithmetic comparison below
  # error out; warn and carry on instead of crashing.
  if [[ ! "$version" =~ ^[0-9]+$ ]]; then
    log_warn "Could not parse Node.js version: $(node --version)"
    return 0
  fi
  if [[ "$version" -lt "$WEB_NODE_VERSION" ]]; then
    log_warn "Node.js version $version is below recommended $WEB_NODE_VERSION"
  else
    log_debug "Node.js version: $(node --version)"
  fi
  return 0
}

# Verify npm is installed. Returns 1 when missing.
check_npm() {
  if ! command -v npm &>/dev/null; then
    log_error "npm not found"
    return 1
  fi
  log_debug "npm version: $(npm --version)"
  return 0
}

# Aggregate toolchain check for the web app (node + npm + node_modules hint).
check_web_dependencies() {
  log_subsection "Checking Web Dependencies"
  check_node_version || return 1
  check_npm || return 1
  # Check if node_modules exists
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    log_warn "node_modules not found - will install dependencies"
  fi
  return 0
}
# =============================================================================
# SETUP
# =============================================================================
# Install npm dependencies for the web app: 'npm ci' when a lockfile exists
# (reproducible clean install), 'npm install' otherwise. Returns 1 on failure.
install_web_dependencies() {
  log_subsection "Installing Web Dependencies"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  pushd "$WEB_DIR" > /dev/null || return 1
  # Check if package-lock.json exists
  if [[ -f "package-lock.json" ]]; then
    log_info "Running npm ci (clean install)..."
    npm ci --prefer-offline --no-audit --no-fund || {
      log_error "npm ci failed"
      popd > /dev/null
      return 1
    }
  else
    log_info "Running npm install..."
    npm install --no-audit --no-fund || {
      log_error "npm install failed"
      popd > /dev/null
      return 1
    }
  fi
  popd > /dev/null
  log_success "Web dependencies installed"
  return 0
}

# Install dependencies only when node_modules is missing.
ensure_web_dependencies() {
  if [[ ! -d "$WEB_DIR/node_modules" ]]; then
    install_web_dependencies || return 1
  fi
  return 0
}
# =============================================================================
# TEST RUNNERS
# =============================================================================
# Run Angular unit tests via Karma/Jasmine (npm run test:ci).
# Honors DRY_RUN=true (prints the command instead of running it).
# NOTE(review): reads "$DRY_RUN" unguarded - an unset DRY_RUN trips 'set -u'
# in the sourcing script; confirm callers always set it.
run_web_unit_tests() {
  log_subsection "Running Web Unit Tests (Karma/Jasmine)"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  ensure_web_dependencies || return 1
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:ci"
    popd > /dev/null
    return 0
  fi
  # Run tests; $? is captured immediately so the timer/popd calls below
  # cannot clobber the npm exit status.
  npm run test:ci
  local result=$?
  stop_timer "$start_time" "Web unit tests"
  popd > /dev/null
  if [[ $result -eq 0 ]]; then
    log_success "Web unit tests passed"
  else
    log_error "Web unit tests failed"
  fi
  return $result
}

# Run Playwright E2E tests (npm run test:e2e), installing the chromium
# browser on first use. Returns the npm exit code.
run_web_e2e_tests() {
  log_subsection "Running Web E2E Tests (Playwright)"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  ensure_web_dependencies || return 1
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  # Install Playwright browsers if needed (checks both cache locations).
  if [[ ! -d "$HOME/.cache/ms-playwright" ]] && [[ ! -d "node_modules/.cache/ms-playwright" ]]; then
    log_info "Installing Playwright browsers..."
    npx playwright install --with-deps chromium || {
      log_warn "Playwright browser installation failed - E2E tests may fail"
    }
  fi
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:e2e"
    popd > /dev/null
    return 0
  fi
  # Run E2E tests
  npm run test:e2e
  local result=$?
  stop_timer "$start_time" "Web E2E tests"
  popd > /dev/null
  if [[ $result -eq 0 ]]; then
    log_success "Web E2E tests passed"
  else
    log_error "Web E2E tests failed"
  fi
  return $result
}

# Run accessibility tests (npm run test:a11y). Deliberately non-blocking:
# failures are logged as warnings and the function always returns 0.
run_web_a11y_tests() {
  log_subsection "Running Web Accessibility Tests (Axe)"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  ensure_web_dependencies || return 1
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run test:a11y"
    popd > /dev/null
    return 0
  fi
  # Run accessibility tests
  npm run test:a11y
  local result=$?
  stop_timer "$start_time" "Web accessibility tests"
  popd > /dev/null
  if [[ $result -eq 0 ]]; then
    log_success "Web accessibility tests passed"
  else
    log_warn "Web accessibility tests had issues (non-blocking)"
  fi
  # A11y tests are non-blocking by default
  return 0
}
# Build the Angular production bundle (npm run build --configuration
# production) and report the resulting dist/ size. Returns the npm exit code.
run_web_build() {
  log_subsection "Building Web Application"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  ensure_web_dependencies || return 1
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run build -- --configuration production"
    popd > /dev/null
    return 0
  fi
  # Build production bundle
  npm run build -- --configuration production --progress=false
  local result=$?
  stop_timer "$start_time" "Web build"
  popd > /dev/null
  if [[ $result -eq 0 ]]; then
    log_success "Web build completed"
    # Check bundle size (informational only)
    if [[ -d "$WEB_DIR/dist" ]]; then
      local size
      size=$(du -sh "$WEB_DIR/dist" 2>/dev/null | cut -f1)
      log_info "Bundle size: $size"
    fi
  else
    log_error "Web build failed"
  fi
  return $result
}

# Build the Storybook static site (npm run storybook:build).
# Returns the npm exit code.
run_web_storybook_build() {
  log_subsection "Building Storybook"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  ensure_web_dependencies || return 1
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: npm run storybook:build"
    popd > /dev/null
    return 0
  fi
  # Build Storybook
  npm run storybook:build
  local result=$?
  stop_timer "$start_time" "Storybook build"
  popd > /dev/null
  if [[ $result -eq 0 ]]; then
    log_success "Storybook build completed"
  else
    log_error "Storybook build failed"
  fi
  return $result
}
# Run a Lighthouse CI audit against the built app. Non-blocking: audit
# problems are warnings; only a missing web dir or failed prerequisite build
# returns non-zero.
run_web_lighthouse() {
  log_subsection "Running Lighthouse Audit"
  if [[ ! -d "$WEB_DIR" ]]; then
    log_error "Web directory not found: $WEB_DIR"
    return 1
  fi
  # Check if lighthouse is available.
  # (Fixed: '&>/dev/null 2>&1' duplicated the stderr redirect - '&>' already
  # covers both streams.)
  if ! command -v lhci &>/dev/null && ! npx lhci --version &>/dev/null; then
    log_warn "Lighthouse CI not installed - skipping audit"
    log_info "Install with: npm install -g @lhci/cli"
    return 0
  fi
  ensure_web_dependencies || return 1
  # Build first if not already built
  if [[ ! -d "$WEB_DIR/dist" ]]; then
    run_web_build || return 1
  fi
  pushd "$WEB_DIR" > /dev/null || return 1
  local start_time
  start_time=$(start_timer)
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would run: lhci autorun"
    popd > /dev/null
    return 0
  fi
  # Run Lighthouse against the static production bundle; failures downgrade
  # to a warning so the audit never blocks the pipeline.
  npx lhci autorun \
    --collect.staticDistDir=./dist/stellaops-web/browser \
    --collect.numberOfRuns=1 \
    --upload.target=filesystem \
    --upload.outputDir=./lighthouse-results 2>/dev/null || {
    log_warn "Lighthouse audit had issues"
  }
  stop_timer "$start_time" "Lighthouse audit"
  popd > /dev/null
  log_success "Lighthouse audit completed"
  return 0
}
# =============================================================================
# COMPOSITE RUNNERS
# =============================================================================
# Quick validation: production build, then unit tests if the build passed.
run_web_smoke() {
  log_section "Web Smoke Tests"
  log_info "Running quick web validation"
  local failed=0
  run_web_build || failed=1
  if [[ $failed -eq 0 ]]; then
    run_web_unit_tests || failed=1
  fi
  return $failed
}

# Full PR-gating suite: build, unit, E2E (each gated on the previous step
# passing), plus non-blocking a11y tests, then a summary table.
# Each step uses the set-e-safe 'rc=0; cmd || rc=$?' capture: the previous
# bare 'cmd' followed by reading $? would abort the whole sourcing script
# under 'set -e' before the summary could print. This also drops the fragile
# "${results[-1]}" re-parsing (negative indices need bash 4.3+).
run_web_pr_gating() {
  log_section "Web PR-Gating Tests"
  log_info "Running full web PR-gating suite"
  local failed=0
  local rc
  local results=()

  # Build gate
  rc=0; run_web_build || rc=$?
  results+=("Build:$rc")
  [[ $rc -ne 0 ]] && failed=1

  # Unit tests (skipped when the build failed)
  if [[ $failed -eq 0 ]]; then
    rc=0; run_web_unit_tests || rc=$?
    results+=("Unit:$rc")
    [[ $rc -ne 0 ]] && failed=1
  fi

  # E2E tests (skipped when an earlier gate failed)
  if [[ $failed -eq 0 ]]; then
    rc=0; run_web_e2e_tests || rc=$?
    results+=("E2E:$rc")
    [[ $rc -ne 0 ]] && failed=1
  fi

  # A11y tests are non-blocking; run_web_a11y_tests always returns 0 on a
  # completed run, so this row currently always reports PASSED.
  rc=0; run_web_a11y_tests || rc=$?
  results+=("A11y:$rc")

  # Print summary
  log_section "Web Test Results"
  local result name status
  for result in "${results[@]}"; do
    name="${result%%:*}"
    status="${result##*:}"
    if [[ "$status" == "0" ]]; then
      print_status "Web $name" "true"
    else
      print_status "Web $name" "false"
    fi
  done
  return $failed
}

# Everything: the PR-gating suite plus extended, non-blocking categories.
run_web_full() {
  log_section "Full Web Test Suite"
  log_info "Running all web tests including extended categories"
  local failed=0
  # PR-gating tests
  run_web_pr_gating || failed=1
  # Extended tests (never fail the suite)
  run_web_storybook_build || log_warn "Storybook build failed (non-blocking)"
  run_web_lighthouse || log_warn "Lighthouse audit failed (non-blocking)"
  return $failed
}
# =============================================================================
# EXPORTS
# =============================================================================
# export -f publishes these functions to child bash processes (e.g. sub-scripts
# or parallel runners spawned by the CI dispatcher that re-enter bash).
export -f check_web_dependencies
export -f install_web_dependencies
export -f ensure_web_dependencies
export -f run_web_unit_tests
export -f run_web_e2e_tests
export -f run_web_a11y_tests
export -f run_web_build
export -f run_web_storybook_build
export -f run_web_lighthouse
export -f run_web_smoke
export -f run_web_pr_gating
export -f run_web_full

View File

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling
# Prevent multiple sourcing (the guard is exported, so child shells that
# source this file again also skip re-definition).
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1
# ============================================================================
# Standard Exit Codes
# ============================================================================
# Other scripts fall back to these literal values (e.g. "${EXIT_VERIFY_FAILED:-64}"),
# so keep the numbers stable; add new codes, do not renumber existing ones.
# Success
export EXIT_SUCCESS=0
# General errors (1-9)
export EXIT_ERROR=1              # Generic error
export EXIT_USAGE=2              # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3       # Configuration error
export EXIT_NOT_FOUND=4          # File/resource not found
export EXIT_PERMISSION=5         # Permission denied
export EXIT_IO_ERROR=6           # I/O error
export EXIT_NETWORK_ERROR=7      # Network error
export EXIT_TIMEOUT=8            # Operation timed out
export EXIT_INTERRUPTED=9        # User interrupted (Ctrl+C)
# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10      # Required tool not installed
export EXIT_TOOL_ERROR=11        # Tool execution failed
export EXIT_VERSION_MISMATCH=12  # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13  # Dependency resolution failed
# Build errors (20-29)
export EXIT_BUILD_FAILED=20      # Build compilation failed
export EXIT_RESTORE_FAILED=21    # Package restore failed
export EXIT_PUBLISH_FAILED=22    # Publish failed
export EXIT_PACKAGING_FAILED=23  # Packaging failed
# Test errors (30-39)
export EXIT_TEST_FAILED=30       # Tests failed
export EXIT_TEST_TIMEOUT=31      # Test timed out
export EXIT_FIXTURE_ERROR=32     # Test fixture error
export EXIT_DETERMINISM_FAIL=33  # Determinism check failed
# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40     # Deployment failed
export EXIT_ROLLBACK_FAILED=41   # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43    # Container registry error
# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51      # Schema validation failed
export EXIT_LINT_ERROR=52        # Lint check failed
export EXIT_FORMAT_ERROR=53      # Format check failed
export EXIT_LICENSE_ERROR=54     # License compliance failed
# Security errors (60-69)
export EXIT_SECURITY_ERROR=60    # Security check failed
export EXIT_SECRETS_FOUND=61     # Secrets detected in code
export EXIT_VULN_FOUND=62        # Vulnerabilities found
export EXIT_SIGN_FAILED=63       # Signing failed
export EXIT_VERIFY_FAILED=64     # Verification failed
# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70         # Git operation failed
export EXIT_DIRTY_WORKTREE=71    # Uncommitted changes
export EXIT_MERGE_CONFLICT=72    # Merge conflict
export EXIT_BRANCH_ERROR=73      # Branch operation failed
# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80      # .NET specific error
export EXIT_DOCKER_ERROR=81      # Docker specific error
export EXIT_HELM_ERROR=82        # Helm specific error
export EXIT_KUBECTL_ERROR=83     # kubectl specific error
export EXIT_NPM_ERROR=84         # npm specific error
export EXIT_PYTHON_ERROR=85      # Python specific error
# Legacy compatibility (note: 69 sits inside the security range by accident)
export EXIT_TOOLCHAIN=69         # Tool not found (legacy, use EXIT_MISSING_TOOL)
# ============================================================================
# Helper Functions
# ============================================================================
# Get exit code name from number
exit_code_name() {
local code="${1:-}"
case "$code" in
0) echo "SUCCESS" ;;
1) echo "ERROR" ;;
2) echo "USAGE" ;;
3) echo "CONFIG_ERROR" ;;
4) echo "NOT_FOUND" ;;
5) echo "PERMISSION" ;;
6) echo "IO_ERROR" ;;
7) echo "NETWORK_ERROR" ;;
8) echo "TIMEOUT" ;;
9) echo "INTERRUPTED" ;;
10) echo "MISSING_TOOL" ;;
11) echo "TOOL_ERROR" ;;
12) echo "VERSION_MISMATCH" ;;
13) echo "DEPENDENCY_ERROR" ;;
20) echo "BUILD_FAILED" ;;
21) echo "RESTORE_FAILED" ;;
22) echo "PUBLISH_FAILED" ;;
23) echo "PACKAGING_FAILED" ;;
30) echo "TEST_FAILED" ;;
31) echo "TEST_TIMEOUT" ;;
32) echo "FIXTURE_ERROR" ;;
33) echo "DETERMINISM_FAIL" ;;
40) echo "DEPLOY_FAILED" ;;
41) echo "ROLLBACK_FAILED" ;;
42) echo "HEALTH_CHECK_FAIL" ;;
43) echo "REGISTRY_ERROR" ;;
50) echo "VALIDATION_FAILED" ;;
51) echo "SCHEMA_ERROR" ;;
52) echo "LINT_ERROR" ;;
53) echo "FORMAT_ERROR" ;;
54) echo "LICENSE_ERROR" ;;
60) echo "SECURITY_ERROR" ;;
61) echo "SECRETS_FOUND" ;;
62) echo "VULN_FOUND" ;;
63) echo "SIGN_FAILED" ;;
64) echo "VERIFY_FAILED" ;;
69) echo "TOOLCHAIN (legacy)" ;;
70) echo "GIT_ERROR" ;;
71) echo "DIRTY_WORKTREE" ;;
72) echo "MERGE_CONFLICT" ;;
73) echo "BRANCH_ERROR" ;;
80) echo "DOTNET_ERROR" ;;
81) echo "DOCKER_ERROR" ;;
82) echo "HELM_ERROR" ;;
83) echo "KUBECTL_ERROR" ;;
84) echo "NPM_ERROR" ;;
85) echo "PYTHON_ERROR" ;;
126) echo "COMMAND_NOT_EXECUTABLE" ;;
127) echo "COMMAND_NOT_FOUND" ;;
*)
if [[ $code -ge 128 ]] && [[ $code -le 255 ]]; then
local signal=$((code - 128))
echo "SIGNAL_${signal}"
else
echo "UNKNOWN_${code}"
fi
;;
esac
}
# Check if exit code indicates success (missing argument counts as failure)
is_success() {
    (( ${1:-1} == 0 ))
}
# Check if exit code indicates error (missing argument counts as success)
is_error() {
    (( ${1:-0} != 0 ))
}
# Print an optional message to stderr and exit with the given code.
# $1 - exit code (default 1); remaining args - message.
exit_with() {
    local rc="${1:-1}"
    shift
    if (( $# > 0 )); then
        printf '%s\n' "$*" >&2
    fi
    exit "$rc"
}

View File

@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
# Prevent multiple sourcing (guard exported so child shells skip it too)
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1
# Source sibling libraries; `|| true` keeps a missing file non-fatal, and
# functions below fall back to literal codes (e.g. "${EXIT_DIRTY_WORKTREE:-71}").
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Repository Information
# ============================================================================
# Get repository root directory; prints "." when not inside a repo.
git_root() {
    local root
    root=$(git rev-parse --show-toplevel 2>/dev/null) || root="."
    printf '%s\n' "$root"
}
# Check if the current directory is inside a git repository
is_git_repo() {
    git rev-parse --git-dir >/dev/null 2>&1
}
# Get current commit SHA (full)
git_sha() {
    git rev-parse HEAD 2>/dev/null
}
# Get current commit SHA (short)
git_sha_short() {
    git rev-parse --short HEAD 2>/dev/null
}
# Get current branch name
git_branch() {
    git rev-parse --abbrev-ref HEAD 2>/dev/null
}
# Get current tag if HEAD is exactly tagged, empty string otherwise
git_tag() {
    local tag
    tag=$(git describe --tags --exact-match HEAD 2>/dev/null) || tag=""
    printf '%s\n' "$tag"
}
# Get most recent tag reachable from HEAD, empty string if none
git_latest_tag() {
    local tag
    tag=$(git describe --tags --abbrev=0 2>/dev/null) || tag=""
    printf '%s\n' "$tag"
}
# Get remote URL ($1 - remote name, default "origin")
git_remote_url() {
    git remote get-url "${1:-origin}" 2>/dev/null
}
# Get repository name (basename without .git) from the remote URL
git_repo_name() {
    basename "$(git_remote_url "${1:-origin}")" .git
}
# ============================================================================
# Commit Information
# ============================================================================
# Internal helper: print a single git-log format field for one commit.
# $1 - format string, $2 - ref (default HEAD).
_git_log_field() {
    git log -1 --format="$1" "${2:-HEAD}" 2>/dev/null
}
# Get commit message (subject line)
git_commit_message() {
    _git_log_field "%s" "${1:-HEAD}"
}
# Get commit author name
git_commit_author() {
    _git_log_field "%an" "${1:-HEAD}"
}
# Get commit author email
git_commit_author_email() {
    _git_log_field "%ae" "${1:-HEAD}"
}
# Get commit author timestamp (strict ISO 8601)
git_commit_timestamp() {
    _git_log_field "%aI" "${1:-HEAD}"
}
# Get commit author timestamp (Unix epoch seconds)
git_commit_epoch() {
    _git_log_field "%at" "${1:-HEAD}"
}
# ============================================================================
# Working Tree State
# ============================================================================
# Check if the working tree has no staged, unstaged, or untracked changes
git_is_clean() {
    local changes
    changes=$(git status --porcelain 2>/dev/null)
    [[ -z "$changes" ]]
}
# Check if the working tree has any changes
git_is_dirty() {
    ! git_is_clean
}
# Get list of changed files (one path per line).
# Uses the fixed-width porcelain prefix ("XY ") instead of awk field
# splitting, which truncated any filename containing a space.
# NOTE(review): rename entries still print as "old -> new"; confirm callers
# tolerate that form before relying on this during renames.
git_changed_files() {
    git status --porcelain 2>/dev/null | cut -c4-
}
# Get list of staged (index) files, one path per line
git_staged_files() {
    git diff --cached --name-only 2>/dev/null
}
# Get list of untracked files, respecting .gitignore exclusions
git_untracked_files() {
    git ls-files --others --exclude-standard 2>/dev/null
}
# ============================================================================
# Diff and History
# ============================================================================
# Get files changed between two refs ($1 - from, default HEAD~1; $2 - to, default HEAD)
git_diff_files() {
    git diff --name-only "${1:-HEAD~1}" "${2:-HEAD}" 2>/dev/null
}
# Get files changed in the last N commits ($1 - count, default 1)
git_recent_files() {
    git diff --name-only "HEAD~${1:-1}" HEAD 2>/dev/null
}
# Check if a file was changed between two refs.
# $1 - file path (as recorded by git), $2 - from ref (default HEAD~1),
# $3 - to ref (default HEAD).
# The diff is already limited to the pathspec, so any output means
# "changed"; the previous `grep -q "$file"` treated the path as a regex
# and matched unrelated substrings (e.g. "a/b.c" matched "xa/bYc").
git_file_changed() {
    local file="$1"
    local from="${2:-HEAD~1}"
    local to="${3:-HEAD}"
    [[ -n "$(git diff --name-only "$from" "$to" -- "$file" 2>/dev/null)" ]]
}
# Get one-line summaries of commits in the range $1..$2
# ($1 default HEAD~10, $2 default HEAD)
git_commits_between() {
    git log --oneline "${1:-HEAD~10}..${2:-HEAD}" 2>/dev/null
}
# ============================================================================
# Tag Operations
# ============================================================================
# Create a tag; annotated when a message is supplied.
# $1 - tag name, $2 - optional annotation message.
git_create_tag() {
    local tag="$1"
    local message="${2:-}"
    if [[ -z "$message" ]]; then
        git tag "$tag"
    else
        git tag -a "$tag" -m "$message"
    fi
}
# Delete a local tag
git_delete_tag() {
    git tag -d "$1" 2>/dev/null
}
# Push a tag to a remote ($1 - tag, $2 - remote, default "origin")
git_push_tag() {
    git push "${2:-origin}" "$1"
}
# List tags matching a glob pattern (default: all tags)
git_list_tags() {
    git tag -l "${1:-*}" 2>/dev/null
}
# ============================================================================
# Branch Operations
# ============================================================================
# Check if a local branch exists
git_branch_exists() {
    git show-ref --verify --quiet "refs/heads/$1" 2>/dev/null
}
# Check if a remote-tracking branch exists ($1 - branch, $2 - remote)
git_remote_branch_exists() {
    git show-ref --verify --quiet "refs/remotes/${2:-origin}/$1" 2>/dev/null
}
# Get the default branch of a remote.
# NOTE(review): `git remote show` contacts the remote; an offline
# alternative is `git symbolic-ref refs/remotes/<remote>/HEAD` -- confirm
# network access is acceptable wherever this is called.
git_default_branch() {
    git remote show "${1:-origin}" 2>/dev/null | awk '/HEAD branch/ {print $NF}'
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Compute a version string for CI builds: the exact tag when HEAD is
# tagged, otherwise "<branch>-<shortsha>" with '/' in the branch name
# replaced by '-'.
git_ci_version() {
    local tag
    tag=$(git_tag)
    if [[ -z "$tag" ]]; then
        printf '%s-%s\n' "$(git_branch | tr '/' '-')" "$(git_sha_short)"
    else
        printf '%s\n' "$tag"
    fi
}
# Check if the current branch is the remote's default branch
git_is_default_branch() {
    [[ "$(git_branch)" == "$(git_default_branch)" ]]
}
# Check if running in a CI environment (generic CI, GitHub Actions, GitLab CI)
git_is_ci() {
    [[ -n "${CI:-}" || -n "${GITHUB_ACTIONS:-}" || -n "${GITLAB_CI:-}" ]]
}
# Fail with EXIT_DIRTY_WORKTREE unless the working tree is clean
git_require_clean() {
    if git_is_dirty; then
        log_error "Working tree is dirty. Commit or stash changes first."
        return "${EXIT_DIRTY_WORKTREE:-71}"
    fi
}

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
# Prevent multiple sourcing (guard exported so child shells skip it too)
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1
# Source sibling libraries; `|| true` keeps a missing file non-fatal, and
# functions below fall back to literal codes (e.g. "${EXIT_NOT_FOUND:-4}").
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Hash Computation
# ============================================================================
# Compute the SHA-256 digest of a file and print it as lowercase hex.
# Tries sha256sum, shasum, then openssl; returns EXIT_NOT_FOUND for a
# missing file and EXIT_MISSING_TOOL when no hashing tool is available.
compute_sha256() {
    local file="$1"
    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi
    local tool
    for tool in sha256sum shasum openssl; do
        command -v "$tool" >/dev/null 2>&1 || continue
        case "$tool" in
            sha256sum) sha256sum "$file" | awk '{print $1}' ;;
            shasum)    shasum -a 256 "$file" | awk '{print $1}' ;;
            openssl)   openssl dgst -sha256 "$file" | awk '{print $NF}' ;;
        esac
        return
    done
    log_error "No SHA-256 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
}
# Compute the SHA-512 digest of a file and print it as lowercase hex.
# Tries sha512sum, shasum, then openssl; returns EXIT_NOT_FOUND for a
# missing file and EXIT_MISSING_TOOL when no hashing tool is available.
compute_sha512() {
    local file="$1"
    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi
    local tool
    for tool in sha512sum shasum openssl; do
        command -v "$tool" >/dev/null 2>&1 || continue
        case "$tool" in
            sha512sum) sha512sum "$file" | awk '{print $1}' ;;
            shasum)    shasum -a 512 "$file" | awk '{print $1}' ;;
            openssl)   openssl dgst -sha512 "$file" | awk '{print $NF}' ;;
        esac
        return
    done
    log_error "No SHA-512 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
}
# Compute the MD5 digest of a file (compatibility checks only -- MD5 is
# not collision-resistant, so never use this for security decisions).
# Tries md5sum, BSD md5, then openssl.
compute_md5() {
    local file="$1"
    if [[ ! -f "$file" ]]; then
        log_error "File not found: $file"
        return "${EXIT_NOT_FOUND:-4}"
    fi
    local tool
    for tool in md5sum md5 openssl; do
        command -v "$tool" >/dev/null 2>&1 || continue
        case "$tool" in
            md5sum)  md5sum "$file" | awk '{print $1}' ;;
            md5)     md5 -q "$file" ;;
            openssl) openssl dgst -md5 "$file" | awk '{print $NF}' ;;
        esac
        return
    done
    log_error "No MD5 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
}
# Compute the hash of a string (not a file).
# $1 - string, $2 - algorithm: sha256 (default), sha512, or md5.
# BUG FIX: the original chained `... | tool | awk || fallback`; a
# pipeline's status is its LAST command (awk, which succeeds even on
# empty input), so the fallback never ran and a missing tool silently
# printed an empty hash. Select an available tool explicitly instead.
compute_string_hash() {
    local string="$1"
    local algorithm="${2:-sha256}"
    case "$algorithm" in
        sha256)
            if command -v sha256sum >/dev/null 2>&1; then
                printf '%s' "$string" | sha256sum | awk '{print $1}'
            elif command -v shasum >/dev/null 2>&1; then
                printf '%s' "$string" | shasum -a 256 | awk '{print $1}'
            else
                log_error "No SHA-256 tool available"
                return "${EXIT_MISSING_TOOL:-10}"
            fi
            ;;
        sha512)
            if command -v sha512sum >/dev/null 2>&1; then
                printf '%s' "$string" | sha512sum | awk '{print $1}'
            elif command -v shasum >/dev/null 2>&1; then
                printf '%s' "$string" | shasum -a 512 | awk '{print $1}'
            else
                log_error "No SHA-512 tool available"
                return "${EXIT_MISSING_TOOL:-10}"
            fi
            ;;
        md5)
            if command -v md5sum >/dev/null 2>&1; then
                printf '%s' "$string" | md5sum | awk '{print $1}'
            elif command -v md5 >/dev/null 2>&1; then
                printf '%s' "$string" | md5
            else
                log_error "No MD5 tool available"
                return "${EXIT_MISSING_TOOL:-10}"
            fi
            ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac
}
# ============================================================================
# Checksum Files
# ============================================================================
# Write a checksum file for a single file.
# $1 - file to hash
# $2 - output checksum file; note the default extension is always .sha256
#      even when a different algorithm is requested
# $3 - algorithm: sha256 (default), sha512, or md5
# Output format: "<hash> <basename>", one line.
write_checksum() {
    local file="$1"
    local checksum_file="${2:-${file}.sha256}"
    local algorithm="${3:-sha256}"
    local hash
    case "$algorithm" in
        sha256) hash=$(compute_sha256 "$file") ;;
        sha512) hash=$(compute_sha512 "$file") ;;
        md5) hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac
    # Empty hash means the compute_* helper failed (missing file/tool).
    if [[ -z "$hash" ]]; then
        return "${EXIT_ERROR:-1}"
    fi
    local basename
    basename=$(basename "$file")
    echo "$hash $basename" > "$checksum_file"
    log_debug "Wrote checksum to $checksum_file"
}
# Write a combined SHA-256 checksum file for multiple files.
# $1 - output file (truncated first); remaining args - files to hash.
# Missing input files are silently skipped.
# NOTE(review): the loop variable `file` is not declared local, so it
# leaks into the caller's scope -- confirm no caller depends on that.
write_checksums() {
    local output_file="$1"
    shift
    local files=("$@")
    : > "$output_file"
    for file in "${files[@]}"; do
        if [[ -f "$file" ]]; then
            local hash basename
            hash=$(compute_sha256 "$file")
            basename=$(basename "$file")
            echo "$hash $basename" >> "$output_file"
        fi
    done
    log_debug "Wrote checksums to $output_file"
}
# ============================================================================
# Checksum Verification
# ============================================================================
# Verify a single file against an expected hash.
# $1 - file, $2 - expected hex digest, $3 - algorithm (default sha256).
# Returns 0 on match, EXIT_VERIFY_FAILED on mismatch, EXIT_USAGE for an
# unknown algorithm.
verify_checksum() {
    local file="$1"
    local expected_hash="$2"
    local algorithm="${3:-sha256}"
    local actual_hash
    case "$algorithm" in
        sha256) actual_hash=$(compute_sha256 "$file") ;;
        sha512) actual_hash=$(compute_sha512 "$file") ;;
        md5)    actual_hash=$(compute_md5 "$file") ;;
        *)
            log_error "Unknown algorithm: $algorithm"
            return "${EXIT_USAGE:-2}"
            ;;
    esac
    if [[ "$actual_hash" != "$expected_hash" ]]; then
        log_error "Checksum mismatch for $file"
        log_error " Expected: $expected_hash"
        log_error " Actual: $actual_hash"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi
    log_debug "Checksum verified: $file"
    return 0
}
# Verify checksums listed in a file (sha256sum -c style: "<hash> <name>").
# $1 - checksum file, $2 - base directory the names are relative to.
# Returns EXIT_NOT_FOUND for a missing checksum file and
# EXIT_VERIFY_FAILED when any entry mismatches or its file is missing.
verify_checksums_file() {
    local checksum_file="$1"
    local base_dir="${2:-.}"
    if [[ ! -f "$checksum_file" ]]; then
        log_error "Checksum file not found: $checksum_file"
        return "${EXIT_NOT_FOUND:-4}"
    fi
    local failures=0
    local line hash filename filepath
    # `|| [[ -n "$line" ]]` also processes a final line without a newline.
    while IFS= read -r line || [[ -n "$line" ]]; do
        # Skip empty lines and comments
        [[ -z "$line" ]] && continue
        [[ "$line" == \#* ]] && continue
        # Split on the first whitespace run. Unlike the previous
        # awk '{print $2}', this keeps filenames containing spaces intact.
        hash=${line%%[[:space:]]*}
        filename=${line#"$hash"}
        filename=${filename#"${filename%%[![:space:]]*}"}
        # BUG FIX: the original interpolated "$(unknown)" here, executing a
        # nonexistent command instead of using the parsed filename.
        filepath="${base_dir}/${filename}"
        if [[ ! -f "$filepath" ]]; then
            log_error "File not found: $filepath"
            # Arithmetic assignment instead of ((failures++)), which
            # returns 1 on the first increment and trips `set -e`.
            failures=$((failures + 1))
            continue
        fi
        if ! verify_checksum "$filepath" "$hash"; then
            failures=$((failures + 1))
        fi
    done < "$checksum_file"
    if [[ $failures -gt 0 ]]; then
        log_error "$failures checksum verification(s) failed"
        return "${EXIT_VERIFY_FAILED:-64}"
    fi
    log_info "All checksums verified"
    return 0
}
# ============================================================================
# Helpers
# ============================================================================
# Check whether two files have identical content (compared via SHA-256).
# Returns 1 when either path is not a regular file.
files_identical() {
    [[ -f "$1" && -f "$2" ]] || return 1
    [[ "$(compute_sha256 "$1")" == "$(compute_sha256 "$2")" ]]
}
# Print a shortened hash prefix for display ($1 - hash, $2 - length, default 8)
short_hash() {
    printf '%s\n' "${1:0:${2:-8}}"
}
# Generate a deterministic 16-hex-character ID from all arguments
# (joined with spaces and hashed with SHA-256).
generate_id() {
    compute_string_hash "$*" sha256 | head -c 16
}

View File

@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)
# Prevent multiple sourcing (the guard is exported, so child shells that
# source this file again also skip re-definition).
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_LOGGING_LOADED=1
# Colors: enabled only when NO_COLOR is unset AND stdout is a terminal.
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
    export LOG_COLOR_RED='\033[0;31m'
    export LOG_COLOR_GREEN='\033[0;32m'
    export LOG_COLOR_YELLOW='\033[1;33m'
    export LOG_COLOR_BLUE='\033[0;34m'
    export LOG_COLOR_MAGENTA='\033[0;35m'
    export LOG_COLOR_CYAN='\033[0;36m'
    export LOG_COLOR_GRAY='\033[0;90m'
    export LOG_COLOR_RESET='\033[0m'
else
    # Empty strings so the same interpolation sites work without color.
    export LOG_COLOR_RED=''
    export LOG_COLOR_GREEN=''
    export LOG_COLOR_YELLOW=''
    export LOG_COLOR_BLUE=''
    export LOG_COLOR_MAGENTA=''
    export LOG_COLOR_CYAN=''
    export LOG_COLOR_GRAY=''
    export LOG_COLOR_RESET=''
fi
# Log level threshold (DEBUG, INFO, WARN, ERROR); defaults to INFO.
export LOG_LEVEL="${LOG_LEVEL:-INFO}"
# Map a log level name to a number for threshold comparison.
# Unknown names map to INFO's priority (1).
_log_level_to_num() {
    case "$1" in
        DEBUG) echo 0 ;;
        WARN)  echo 2 ;;
        ERROR) echo 3 ;;
        *)     echo 1 ;;  # INFO and anything unrecognized
    esac
}
# Check whether a message at level $1 clears the LOG_LEVEL threshold
_should_log() {
    [[ "$(_log_level_to_num "$1")" -ge "$(_log_level_to_num "${LOG_LEVEL:-INFO}")" ]]
}
# Print a UTC ISO-8601 timestamp, or nothing when LOG_TIMESTAMPS=false
_log_timestamp() {
    [[ "${LOG_TIMESTAMPS:-true}" != "true" ]] && return 0
    date -u +"%Y-%m-%dT%H:%M:%SZ"
}
# Core logging function.
# $1 - level tag, $2 - ANSI color for the tag, remaining args - message.
# Filters by LOG_LEVEL and optionally prefixes a gray UTC timestamp.
_log() {
    local level="$1"
    local color="$2"
    shift 2
    if ! _should_log "$level"; then
        return 0
    fi
    local timestamp
    timestamp=$(_log_timestamp)
    local prefix=""
    if [[ -n "$timestamp" ]]; then
        prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
    fi
    # echo -e expands escapes in the color variables AND in the message
    # itself -- callers should not pass untrusted backslash sequences.
    echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}
# Public logging functions (filtered through LOG_LEVEL by _log).
log_debug() {
    _log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}
log_info() {
    _log "INFO" "${LOG_COLOR_GREEN}" "$@"
}
log_warn() {
    _log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}
# Errors are written to stderr.
log_error() {
    _log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}
# Step logging (for workflow stages). "STEP" is not a real level name, so
# _should_log filters it at INFO priority (unknown levels map to INFO).
log_step() {
    _log "STEP" "${LOG_COLOR_BLUE}" "$@"
}
# Success message ("OK" tag; also filtered at INFO priority).
log_success() {
    _log "OK" "${LOG_COLOR_GREEN}" "$@"
}
# GitHub Actions workflow annotations; fall back to plain logging locally.
log_gh_notice() {
    if [[ -z "${GITHUB_ACTIONS:-}" ]]; then
        log_info "$@"
        return
    fi
    echo "::notice::$*"
}
log_gh_warning() {
    if [[ -z "${GITHUB_ACTIONS:-}" ]]; then
        log_warn "$@"
        return
    fi
    echo "::warning::$*"
}
log_gh_error() {
    if [[ -z "${GITHUB_ACTIONS:-}" ]]; then
        log_error "$@"
        return
    fi
    echo "::error::$*"
}
# Collapsible log groups (GitHub Actions); plain section header elsewhere.
log_group_start() {
    if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
        echo "::group::$1"
    else
        log_step "=== $1 ==="
    fi
}
log_group_end() {
    [[ -n "${GITHUB_ACTIONS:-}" ]] && echo "::endgroup::"
    return 0
}
# Register a value to be masked in GitHub Actions logs (no-op locally)
log_masked() {
    [[ -n "${GITHUB_ACTIONS:-}" ]] && echo "::add-mask::$1"
    return 0
}
# Print an error message and terminate the script with exit code 1
die() {
    log_error "$@"
    exit 1
}
# Evaluate a condition string and die with the message when it holds.
# NOTE(review): the condition goes through eval -- never pass untrusted input.
die_if() {
    local condition="$1"
    shift
    eval "$condition" && die "$@"
    return 0
}

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"
# Prevent multiple sourcing (guard exported so child shells skip it too)
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
    return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1
# Source sibling libraries; `|| true` keeps a missing file non-fatal, and
# functions below fall back to literal codes (e.g. "${EXIT_NOT_FOUND:-4}").
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Path Normalization
# ============================================================================
# Normalize a path (resolve ".", "..", doubled slashes; the path need not
# exist). Prefers realpath -m, then Python's os.path.normpath, then a
# best-effort sed cleanup.
normalize_path() {
    local path="$1"
    # Handle empty path
    if [[ -z "$path" ]]; then
        echo "."
        return 0
    fi
    if command -v realpath >/dev/null 2>&1; then
        realpath -m "$path" 2>/dev/null && return 0
    fi
    # SECURITY FIX: pass the path via argv. The original interpolated it
    # into the -c source ('$path'), which broke on quotes and let a
    # crafted path execute arbitrary Python.
    if command -v python3 >/dev/null 2>&1; then
        python3 -c 'import os, sys; print(os.path.normpath(sys.argv[1]))' "$path" 2>/dev/null && return 0
    fi
    # Last-resort textual cleanup (does not fully resolve "..")
    echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}
# Convert a path to an absolute, normalized path (relative paths are
# resolved against the current working directory).
absolute_path() {
    case "$1" in
        /*) normalize_path "$1" ;;
        *)  normalize_path "$(pwd)/$1" ;;
    esac
}
# Get the path of $2 expressed relative to directory $1.
# Falls back to the absolute path when no tool can compute a relative one.
relative_path() {
    local from="$1"
    local to="$2"
    if command -v realpath >/dev/null 2>&1; then
        realpath --relative-to="$from" "$to" 2>/dev/null && return 0
    fi
    # SECURITY FIX: pass paths via argv instead of interpolating them into
    # the -c source, which broke on quotes and allowed code execution.
    if command -v python3 >/dev/null 2>&1; then
        python3 -c 'import os.path, sys; print(os.path.relpath(sys.argv[1], sys.argv[2]))' "$to" "$from" 2>/dev/null && return 0
    fi
    # Fallback: just return the absolute path
    absolute_path "$to"
}
# ============================================================================
# Path Components
# ============================================================================
# Get the directory portion of a path
dir_name() {
    dirname "$1"
}
# Get the final component of a path
base_name() {
    basename "$1"
}
# Get the file extension (text after the last dot), empty when none
file_extension() {
    local base
    base=$(basename "$1")
    case "$base" in
        *.*) printf '%s\n' "${base##*.}" ;;
        *)   echo "" ;;
    esac
}
# Get the file name with its last extension removed
file_stem() {
    local base
    base=$(basename "$1")
    case "$base" in
        *.*) printf '%s\n' "${base%.*}" ;;
        *)   printf '%s\n' "$base" ;;
    esac
}
# ============================================================================
# Directory Operations
# ============================================================================
# Create the directory (and any parents) if it does not already exist
ensure_directory() {
    [[ -d "$1" ]] || mkdir -p "$1"
}
# Create a temporary directory under $TMPDIR ($1 - name prefix)
create_temp_dir() {
    mktemp -d "${TMPDIR:-/tmp}/${1:-stellaops}.XXXXXX"
}
# Create a temporary file ($1 - name prefix, $2 - optional suffix)
create_temp_file() {
    mktemp "${TMPDIR:-/tmp}/${1:-stellaops}.XXXXXX${2:-}"
}
# Remove a temporary directory.
# Guarded: only removes directories whose path contains "stellaops", so a
# bad argument cannot wipe an arbitrary tree.
clean_temp() {
    if [[ -d "$1" && "$1" == *stellaops* ]]; then
        rm -rf "$1"
    fi
}
# ============================================================================
# File Existence Checks
# ============================================================================
# Thin predicates over bash test operators; each returns the test's status.
# Check if path is a regular file
file_exists() {
    [[ -f "$1" ]]
}
# Check if path is a directory
dir_exists() {
    [[ -d "$1" ]]
}
# Check if path exists (file, directory, or any other filesystem object)
path_exists() {
    [[ -e "$1" ]]
}
# Check if file is readable by the current user
file_readable() {
    [[ -r "$1" ]]
}
# Check if file is writable by the current user
file_writable() {
    [[ -w "$1" ]]
}
# Check if file is executable by the current user
file_executable() {
    [[ -x "$1" ]]
}
# ============================================================================
# File Discovery
# ============================================================================
# Find regular files under a directory matching a name glob
# ($1 - dir, default "."; $2 - pattern, default "*")
find_files() {
    find "${1:-.}" -type f -name "${2:-*}" 2>/dev/null
}
# Find regular files by extension ($1 - dir; $2 - extension without the dot)
find_by_extension() {
    find "${1:-.}" -type f -name "*.${2:-}" 2>/dev/null
}
# Find build/project manifests (csproj, package.json, Cargo.toml, ...)
# while skipping dependency and build-output directories.
# FIX: the excludes are anchored to whole path components; the previous
# `grep -v bin` also dropped any path merely containing "bin" (e.g. a
# "binding/" directory) and `grep -v obj` any path containing "obj".
find_project_files() {
    local dir="${1:-.}"
    find "$dir" -type f \( \
        -name "*.csproj" -o \
        -name "*.fsproj" -o \
        -name "package.json" -o \
        -name "Cargo.toml" -o \
        -name "go.mod" -o \
        -name "pom.xml" -o \
        -name "build.gradle" \
        \) 2>/dev/null | grep -vE '/(node_modules|bin|obj)(/|$)'
}
# Find .NET test projects (*.Tests.csproj), skipping bin/obj output dirs
find_test_projects() {
    local dir="${1:-.}"
    find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -vE '/(bin|obj)(/|$)'
}
# ============================================================================
# Path Validation
# ============================================================================
# Check if a path is located under a directory (after normalizing both).
path_under() {
    local abs_path abs_dir
    abs_path=$(absolute_path "$1")
    abs_dir=$(absolute_path "$2")
    # FIX: require a component boundary -- a plain prefix check treated
    # /foo/barbaz as being under /foo/bar.
    [[ "$abs_path" == "$abs_dir" || "$abs_path" == "$abs_dir"/* ]]
}
# Validate that a path is safe to use relative to a base directory
# (no ".." traversal, not absolute, resolves under the base).
path_is_safe() {
    local path="$1"
    local base="${2:-.}"
    # FIX: reject absolute paths with an unquoted glob. The original
    # compared against the quoted string "/*", which only matched the
    # literal two-character path "/*", never real absolute paths.
    if [[ "$path" == *".."* || "$path" == /* ]]; then
        return 1
    fi
    # Verify the resolved path stays under the base
    path_under "$path" "$base"
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Print the artifact output directory ($1 - artifact name, default
# "artifacts"), rooted at GITHUB_WORKSPACE or the current directory.
get_artifact_dir() {
    printf '%s/out/%s\n' "${GITHUB_WORKSPACE:-$(pwd)}" "${1:-artifacts}"
}
# Print the test results directory
get_test_results_dir() {
    printf '%s/TestResults\n' "${GITHUB_WORKSPACE:-$(pwd)}"
}
# Create the artifact directory if needed, then print its path
ensure_artifact_dir() {
    local dir
    dir=$(get_artifact_dir "${1:-artifacts}")
    ensure_directory "$dir"
    echo "$dir"
}

264
devops/scripts/local-ci.ps1 Normal file
View File

@@ -0,0 +1,264 @@
<#
.SYNOPSIS
Local CI Runner for Windows
PowerShell wrapper for local-ci.sh
.DESCRIPTION
Unified local CI/CD testing runner for StellaOps on Windows.
This script wraps the Bash implementation via WSL2 or Git Bash.
.PARAMETER Mode
The testing mode to run:
- smoke : Quick smoke test (unit tests only, ~2 min)
- pr : Full PR-gating suite (all required checks, ~15 min)
- module : Module-specific tests (auto-detect or specified)
- workflow : Simulate specific workflow via act
- release : Release simulation (dry-run)
- full : All tests including extended categories (~45 min)
.PARAMETER Category
Specific test category to run (Unit, Architecture, Contract, Integration, Security, Golden)
.PARAMETER Module
Specific module to test (Scanner, Concelier, Authority, etc.)
.PARAMETER Workflow
Specific workflow to simulate (for workflow mode)
.PARAMETER Docker
Force Docker execution mode
.PARAMETER Native
Force native execution mode
.PARAMETER Act
Force act execution mode
.PARAMETER Parallel
Number of parallel test runners (default: auto-detect)
.PARAMETER Verbose
Enable verbose output
.PARAMETER DryRun
Show what would run without executing
.PARAMETER Rebuild
Force rebuild of CI Docker image
.PARAMETER NoServices
Skip starting CI services
.PARAMETER KeepServices
Don't stop services after tests
.EXAMPLE
.\local-ci.ps1 smoke
Quick validation before push
.EXAMPLE
.\local-ci.ps1 pr
Full PR check
.EXAMPLE
.\local-ci.ps1 module -Module Scanner
Test specific module
.EXAMPLE
.\local-ci.ps1 workflow -Workflow test-matrix
Simulate specific workflow
.NOTES
Requires WSL2 or Git Bash to execute the underlying Bash script.
For full feature support, use WSL2 with Ubuntu.
#>
[CmdletBinding()]
param(
    [Parameter(Position = 0)]
    [ValidateSet('smoke', 'pr', 'module', 'workflow', 'release', 'full')]
    [string]$Mode = 'smoke',
    [string]$Category,
    [string]$Module,
    [string]$Workflow,
    [switch]$Docker,
    [switch]$Native,
    [switch]$Act,
    [int]$Parallel,
    # FIX: no explicit $Verbose parameter. [CmdletBinding()] already
    # supplies the common -Verbose switch, and redeclaring it makes
    # PowerShell refuse to run the script ("A parameter with the name
    # 'Verbose' was defined multiple times"). Consumers should check
    # $VerbosePreference instead of a $Verbose variable.
    [switch]$DryRun,
    [switch]$Rebuild,
    [switch]$NoServices,
    [switch]$KeepServices,
    [switch]$Help
)
# Script location (repo root is two directory levels above devops/scripts)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)
# Show the comment-based help and stop when -Help was passed
if ($Help) {
    Get-Help $MyInvocation.MyCommand.Path -Detailed
    exit 0
}
# Write a message to the host in the given console color.
# FIX: use Write-Host's -ForegroundColor parameter instead of mutating
# $Host.UI.RawUI -- the original never restored the color if Write-Host
# threw, leaving the console stuck in the last color.
function Write-ColoredOutput {
    param(
        [string]$Message,
        [ConsoleColor]$Color = [ConsoleColor]::White
    )
    Write-Host $Message -ForegroundColor $Color
}
# Thin logging helpers with a uniform "[TAG] message" format.
# NOTE(review): Write-Warning and Write-Error shadow the built-in cmdlets
# of the same name for the rest of this script -- presumably intentional
# for consistent formatting, but the override loses the cmdlets'
# stream/ErrorRecord semantics; confirm that is acceptable.
function Write-Info { Write-ColoredOutput "[INFO] $args" -Color Cyan }
function Write-Success { Write-ColoredOutput "[OK] $args" -Color Green }
function Write-Warning { Write-ColoredOutput "[WARN] $args" -Color Yellow }
function Write-Error { Write-ColoredOutput "[ERROR] $args" -Color Red }
# Find Bash executable.
# Locates a Bash interpreter capable of running local-ci.sh and returns a
# hashtable @{ Type = 'wsl'|'gitbash'|'path'; Path = <exe> }, or $null
# when nothing suitable is installed.
function Find-BashExecutable {
    # Priority: WSL2 > Git Bash > Windows Subsystem for Linux (legacy)
    # Check for WSL
    $wsl = Get-Command wsl -ErrorAction SilentlyContinue
    if ($wsl) {
        # Verify WSL is actually functional; the captured output is not
        # used -- only $LASTEXITCODE matters here.
        $wslCheck = & wsl --status 2>&1
        if ($LASTEXITCODE -eq 0) {
            Write-Info "Using WSL2 for Bash execution"
            return @{ Type = 'wsl'; Path = 'wsl' }
        }
    }
    # Check for Git Bash at the usual install locations
    $gitBashPaths = @(
        "C:\Program Files\Git\bin\bash.exe",
        "C:\Program Files (x86)\Git\bin\bash.exe",
        "$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
    )
    foreach ($path in $gitBashPaths) {
        if (Test-Path $path) {
            Write-Info "Using Git Bash for execution"
            return @{ Type = 'gitbash'; Path = $path }
        }
    }
    # Fall back to any bash found on PATH
    $bashInPath = Get-Command bash -ErrorAction SilentlyContinue
    if ($bashInPath) {
        Write-Info "Using Bash from PATH"
        return @{ Type = 'path'; Path = $bashInPath.Source }
    }
    return $null
}
# Convert a Windows path to the WSL mount form (C:\x -> /mnt/c/x).
# Paths without a drive letter just have their backslashes flipped.
function Convert-ToUnixPath {
    param([string]$WindowsPath)
    if ($WindowsPath -match '^([A-Za-z]):(.*)$') {
        $driveLetter = $Matches[1].ToLower()
        $tail = $Matches[2].Replace('\', '/')
        return "/mnt/$driveLetter$tail"
    }
    return $WindowsPath.Replace('\', '/')
}
# Translate the script's bound parameters into local-ci.sh CLI arguments.
# Returns an array ready to splat onto the bash invocation.
function Build-Arguments {
    # FIX: accumulate into a dedicated list. The original assigned to
    # $args, which shadows PowerShell's automatic $args variable inside
    # functions.
    $argList = @($Mode)
    if ($Category) { $argList += "--category"; $argList += $Category }
    if ($Module) { $argList += "--module"; $argList += $Module }
    if ($Workflow) { $argList += "--workflow"; $argList += $Workflow }
    if ($Docker) { $argList += "--docker" }
    if ($Native) { $argList += "--native" }
    if ($Act) { $argList += "--act" }
    if ($Parallel) { $argList += "--parallel"; $argList += $Parallel }
    # -Verbose is the CmdletBinding common parameter; it surfaces as
    # $VerbosePreference rather than a $Verbose variable.
    if ($VerbosePreference -eq 'Continue') { $argList += "--verbose" }
    if ($DryRun) { $argList += "--dry-run" }
    if ($Rebuild) { $argList += "--rebuild" }
    if ($NoServices) { $argList += "--no-services" }
    if ($KeepServices) { $argList += "--keep-services" }
    return $argList
}
# ---------------------------------------------------------------------------
# Main execution: locate a Bash interpreter, resolve local-ci.sh, translate
# the PowerShell parameters to CLI arguments, run the script, and exit with
# its status code.
# ---------------------------------------------------------------------------
Write-Host ""
Write-Host "=========================================" -ForegroundColor Magenta
Write-Host " StellaOps Local CI Runner (Windows) " -ForegroundColor Magenta
Write-Host "=========================================" -ForegroundColor Magenta
Write-Host ""

# Locate Bash (WSL2 > Git Bash > PATH); bail out with install hints if none.
$bash = Find-BashExecutable
if (-not $bash) {
    Write-Error "Bash not found. Please install one of the following:"
    Write-Host " - WSL2: https://docs.microsoft.com/en-us/windows/wsl/install"
    Write-Host " - Git for Windows: https://git-scm.com/download/win"
    exit 1
}

# The bash implementation of the runner lives next to this wrapper.
$scriptPath = Join-Path $ScriptDir "local-ci.sh"
if (-not (Test-Path $scriptPath)) {
    Write-Error "Script not found: $scriptPath"
    exit 1
}

# Translate PowerShell parameters into local-ci.sh arguments.
$bashArgs = Build-Arguments
Write-Info "Mode: $Mode"
Write-Info "Bash: $($bash.Type)"
Write-Info "Repository: $RepoRoot"
Write-Host ""

# Execute via the detected Bash flavor. WSL needs the script path converted
# to its /mnt/<drive>/ form; Git Bash accepts forward-slash Windows paths.
try {
    switch ($bash.Type) {
        'wsl' {
            $unixScript = Convert-ToUnixPath $scriptPath
            Write-Info "Executing: wsl bash $unixScript $($bashArgs -join ' ')"
            & wsl bash $unixScript @bashArgs
        }
        'gitbash' {
            # Git Bash does its own Windows->POSIX path conversion; forward
            # slashes are enough here.
            $unixScript = $scriptPath -replace '\\', '/'
            Write-Info "Executing: $($bash.Path) $unixScript $($bashArgs -join ' ')"
            & $bash.Path $unixScript @bashArgs
        }
        'path' {
            Write-Info "Executing: bash $scriptPath $($bashArgs -join ' ')"
            & bash $scriptPath @bashArgs
        }
    }
    # $LASTEXITCODE reflects the exit status of the bash process above.
    $exitCode = $LASTEXITCODE
}
catch {
    Write-Error "Execution failed: $_"
    $exitCode = 1
}

# Report result and propagate the exit code to the caller.
Write-Host ""
if ($exitCode -eq 0) {
    Write-Success "Local CI completed successfully!"
} else {
    Write-Error "Local CI failed with exit code: $exitCode"
}
exit $exitCode

818
devops/scripts/local-ci.sh Normal file
View File

@@ -0,0 +1,818 @@
#!/usr/bin/env bash
# =============================================================================
# LOCAL CI RUNNER
# =============================================================================
# Unified local CI/CD testing runner for StellaOps.
#
# Usage:
# ./devops/scripts/local-ci.sh [mode] [options]
#
# Modes:
# smoke - Quick smoke test (unit tests only, ~2 min)
# pr - Full PR-gating suite (all required checks, ~15 min)
# module - Module-specific tests (auto-detect or specified)
# workflow - Simulate specific workflow via act
# release - Release simulation (dry-run)
# full - All tests including extended categories (~45 min)
#
# Options:
# --category <cat> Run specific test category
# --workflow <name> Specific workflow to simulate
# --module <name> Specific module to test
# --docker Force Docker execution
# --native Force native execution
# --act Force act execution
# --parallel <n> Parallel test runners (default: CPU count)
# --verbose Verbose output
# --dry-run Show what would run without executing
# --rebuild Force rebuild of CI Docker image
# --no-services Skip starting CI services
# --keep-services Don't stop services after tests
# --help Show this help message
#
# Examples:
# ./local-ci.sh smoke # Quick validation
# ./local-ci.sh pr # Full PR check
# ./local-ci.sh module --module Scanner # Test Scanner module
# ./local-ci.sh workflow --workflow test-matrix
# ./local-ci.sh release --dry-run
#
# =============================================================================
set -euo pipefail

# =============================================================================
# SCRIPT INITIALIZATION
# =============================================================================
# Resolve the repo root relative to this script so it works from any cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
export REPO_ROOT

# Shared helper libraries (logging, timers, docker/service management, web
# test helpers). ci-web.sh is optional: when absent, Web-specific runners are
# unavailable and callers probe for them with `type`.
source "$SCRIPT_DIR/lib/ci-common.sh"
source "$SCRIPT_DIR/lib/ci-docker.sh"
source "$SCRIPT_DIR/lib/ci-web.sh" 2>/dev/null || true # Web testing utilities (optional)

# =============================================================================
# CONSTANTS
# =============================================================================
# Run modes (see usage()).
MODE_SMOKE="smoke"
MODE_PR="pr"
MODE_MODULE="module"
MODE_WORKFLOW="workflow"
MODE_RELEASE="release"
MODE_FULL="full"

# Test categories: PR-gating categories are required checks; extended ones
# only run in `full` mode.
PR_GATING_CATEGORIES=(Unit Architecture Contract Integration Security Golden)
EXTENDED_CATEGORIES=(Performance Benchmark AirGap Chaos Determinism Resilience Observability)
ALL_CATEGORIES=("${PR_GATING_CATEGORIES[@]}" "${EXTENDED_CATEGORIES[@]}")

# Artifact output locations.
RESULTS_DIR="$REPO_ROOT/out/local-ci"
TRX_DIR="$RESULTS_DIR/trx"
LOGS_DIR="$RESULTS_DIR/logs"

# =============================================================================
# CONFIGURATION (populated by parse_args)
# =============================================================================
MODE=""
EXECUTION_ENGINE="" # docker, native, act
SPECIFIC_CATEGORY=""
SPECIFIC_MODULE=""
SPECIFIC_WORKFLOW=""
PARALLEL_JOBS=""
VERBOSE=false
DRY_RUN=false
REBUILD_IMAGE=false
SKIP_SERVICES=false
KEEP_SERVICES=false
# =============================================================================
# USAGE
# =============================================================================
# Print CLI help to stdout. The unquoted heredoc delimiter lets the category
# arrays expand, so the help always matches the configured categories.
usage() {
  cat <<EOF
Usage: $(basename "$0") [mode] [options]
Modes:
smoke Quick smoke test (unit tests only, ~2 min)
pr Full PR-gating suite (all required checks, ~15 min)
module Module-specific tests (auto-detect or specified)
workflow Simulate specific workflow via act
release Release simulation (dry-run)
full All tests including extended categories (~45 min)
Options:
--category <cat> Run specific test category (${ALL_CATEGORIES[*]})
--workflow <name> Specific workflow to simulate (for workflow mode)
--module <name> Specific module to test (for module mode)
--docker Force Docker execution
--native Force native execution
--act Force act execution
--parallel <n> Parallel test runners (default: auto-detect)
--verbose Verbose output
--dry-run Show what would run without executing
--rebuild Force rebuild of CI Docker image
--no-services Skip starting CI services
--keep-services Don't stop services after tests
--help Show this help message
Examples:
$(basename "$0") smoke # Quick validation before push
$(basename "$0") pr # Full PR check
$(basename "$0") pr --category Unit # Only run Unit tests
$(basename "$0") module # Auto-detect changed modules
$(basename "$0") module --module Scanner # Test specific module
$(basename "$0") workflow --workflow test-matrix
$(basename "$0") release --dry-run
$(basename "$0") pr --verbose --docker
Test Categories:
PR-Gating: ${PR_GATING_CATEGORIES[*]}
Extended: ${EXTENDED_CATEGORIES[*]}
EOF
}
# =============================================================================
# ARGUMENT PARSING
# =============================================================================
# Parse CLI arguments into the global configuration variables.
# Arguments: the script's "$@".
# Side effects: sets MODE, EXECUTION_ENGINE, SPECIFIC_*, PARALLEL_JOBS and
# the boolean flags; exports VERBOSE. Exits 1 (with usage) on bad input.
parse_args() {
  while [[ $# -gt 0 ]]; do
    case $1 in
      smoke|pr|module|workflow|release|full)
        MODE="$1"
        shift
        ;;
      --category|--workflow|--module|--parallel)
        # Fix: these options require a value. The original read "$2"
        # unconditionally, so e.g. `local-ci.sh pr --category` crashed
        # under `set -u` with an unhelpful "unbound variable" error.
        if [[ $# -lt 2 ]]; then
          log_error "Option $1 requires a value"
          usage
          exit 1
        fi
        case $1 in
          --category) SPECIFIC_CATEGORY="$2" ;;
          --workflow) SPECIFIC_WORKFLOW="$2" ;;
          --module)   SPECIFIC_MODULE="$2" ;;
          --parallel) PARALLEL_JOBS="$2" ;;
        esac
        shift 2
        ;;
      --docker)
        EXECUTION_ENGINE="docker"
        shift
        ;;
      --native)
        EXECUTION_ENGINE="native"
        shift
        ;;
      --act)
        EXECUTION_ENGINE="act"
        shift
        ;;
      --verbose|-v)
        VERBOSE=true
        shift
        ;;
      --dry-run)
        DRY_RUN=true
        shift
        ;;
      --rebuild)
        REBUILD_IMAGE=true
        shift
        ;;
      --no-services)
        SKIP_SERVICES=true
        shift
        ;;
      --keep-services)
        KEEP_SERVICES=true
        shift
        ;;
      --help|-h)
        usage
        exit 0
        ;;
      *)
        log_error "Unknown option: $1"
        usage
        exit 1
        ;;
    esac
  done
  # Default mode is smoke.
  if [[ -z "$MODE" ]]; then
    MODE="$MODE_SMOKE"
  fi
  # Default engine: act for workflow simulation, native otherwise.
  if [[ -z "$EXECUTION_ENGINE" ]]; then
    case "$MODE" in
      workflow)
        EXECUTION_ENGINE="act"
        ;;
      *)
        EXECUTION_ENGINE="native"
        ;;
    esac
  fi
  # Auto-detect parallelism (Linux nproc, macOS sysctl, fallback 4).
  if [[ -z "$PARALLEL_JOBS" ]]; then
    PARALLEL_JOBS=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4)
  fi
  export VERBOSE
}
# =============================================================================
# DEPENDENCY CHECKS
# =============================================================================
# Verify the tools required by the selected mode are installed.
# Returns: 0 when everything required is present, 1 otherwise.
# NOTE: require_command / check_docker / require_file come from ci-common.sh
# and ci-docker.sh (sourced above).
check_dependencies() {
  log_subsection "Checking Dependencies"
  local missing=0
  # dotnet and git are needed by every mode.
  if ! require_command "dotnet" "https://dot.net/download"; then
    missing=1
  else
    local dotnet_version
    dotnet_version=$(dotnet --version 2>/dev/null || echo "unknown")
    log_debug "dotnet version: $dotnet_version"
  fi
  if ! require_command "git"; then
    missing=1
  fi
  # Docker only matters when tests run inside the CI container.
  if [[ "$EXECUTION_ENGINE" == "docker" ]]; then
    if ! check_docker; then
      missing=1
    fi
  fi
  # act is needed to simulate workflows; a missing act only degrades the
  # workflow mode (warning), it is not a hard failure here.
  if [[ "$EXECUTION_ENGINE" == "act" ]] || [[ "$MODE" == "$MODE_WORKFLOW" ]]; then
    if ! require_command "act" "brew install act (macOS) or https://github.com/nektos/act"; then
      log_warn "act not found - workflow simulation will be limited"
    fi
  fi
  # The solution file must exist for any .NET build/test.
  if ! require_file "$REPO_ROOT/src/StellaOps.sln"; then
    missing=1
  fi
  return $missing
}
# =============================================================================
# RESULT INITIALIZATION
# =============================================================================
# Create the output directory tree and stamp this run with a timestamp-based
# identifier (exported as RUN_ID; used in artifact file names).
init_results() {
  ensure_dir "$RESULTS_DIR"
  ensure_dir "$TRX_DIR"
  ensure_dir "$LOGS_DIR"
  local stamp
  stamp=$(date +%Y%m%d_%H%M%S)
  export RUN_ID="$stamp"
  log_debug "Results directory: $RESULTS_DIR"
  log_debug "Run ID: $RUN_ID"
}
# =============================================================================
# TEST EXECUTION
# =============================================================================
# Run one `dotnet test` pass filtered to a single test category.
# Arguments: $1 - category name (used as a Category=<name> trait filter)
# Outputs:   TRX results under $TRX_DIR, full log under $LOGS_DIR
# Returns:   dotnet test's exit status (0 on success or dry run)
run_dotnet_tests() {
  local category="$1"
  local filter="Category=$category"
  log_subsection "Running $category Tests"
  local trx_file="$TRX_DIR/${category}-${RUN_ID}.trx"
  local log_file="$LOGS_DIR/${category}-${RUN_ID}.log"
  local test_cmd=(
    dotnet test "$REPO_ROOT/src/StellaOps.sln"
    --filter "$filter"
    --configuration Release
    --no-build
    --logger "trx;LogFileName=$trx_file"
    --results-directory "$TRX_DIR"
    --verbosity minimal
  )
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would execute: ${test_cmd[*]}"
    return 0
  fi
  local start_time
  start_time=$(start_timer)
  # Fix: capture the failure status with `|| result=$?`. The original ran the
  # command bare and read $? afterwards, but under `set -e` a failing test run
  # aborted the whole script (when this function is called outside a condition
  # context) before the result was logged or the timer reported.
  local result=0
  if [[ "$VERBOSE" == "true" ]]; then
    # pipefail (set at the top of the script) makes the pipeline status
    # reflect dotnet's exit code rather than tee's.
    "${test_cmd[@]}" 2>&1 | tee "$log_file" || result=$?
  else
    "${test_cmd[@]}" > "$log_file" 2>&1 || result=$?
  fi
  stop_timer "$start_time" "$category tests"
  if [[ $result -eq 0 ]]; then
    log_success "$category tests passed"
  else
    log_error "$category tests failed (see $log_file)"
  fi
  return $result
}
# Build the full solution in Release configuration.
# Returns: dotnet build's exit status (0 on success or dry run).
run_dotnet_build() {
  log_subsection "Building Solution"
  local build_cmd=(
    dotnet build "$REPO_ROOT/src/StellaOps.sln"
    --configuration Release
  )
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would execute: ${build_cmd[*]}"
    return 0
  fi
  local start_time
  start_time=$(start_timer)
  # Fix: `|| result=$?` keeps a build failure from aborting the script via
  # `set -e` before it can be logged (the original read $? after a bare call,
  # which only works when the caller wraps this function in a condition).
  local result=0
  "${build_cmd[@]}" || result=$?
  stop_timer "$start_time" "Build"
  if [[ $result -eq 0 ]]; then
    log_success "Build completed successfully"
  else
    log_error "Build failed"
  fi
  return $result
}
# =============================================================================
# MODE IMPLEMENTATIONS
# =============================================================================
# Quick validation: build the solution, then run only the Unit category.
# Returns: 0 on success, otherwise the failing step's status.
run_smoke_mode() {
  log_section "Smoke Test Mode"
  log_info "Running quick validation (Unit tests only)"
  local start_time
  start_time=$(start_timer)
  run_dotnet_build || return 1
  # Fix: capture the status instead of a bare call so a Unit-test failure
  # does not abort the script under `set -e` before the timer is reported.
  local result=0
  run_dotnet_tests "Unit" || result=$?
  stop_timer "$start_time" "Smoke test"
  return $result
}
# Full PR-gating suite: build, run every PR-gating category (or one specific
# category from --category), plus Web tests when src/Web changed. Prints a
# per-category pass/fail summary.
# Returns: 0 only when everything passed.
run_pr_mode() {
  log_section "PR-Gating Mode"
  log_info "Running full PR-gating suite"
  log_info "Categories: ${PR_GATING_CATEGORIES[*]}"
  local start_time
  start_time=$(start_timer)
  local failed=0
  local results=()
  # Detect Web changes (vs main) so the Angular suite only runs when needed.
  local web_changed=false
  local changed_files
  changed_files=$(get_changed_files main 2>/dev/null || echo "")
  if echo "$changed_files" | grep -q "^src/Web/"; then
    web_changed=true
    log_info "Web module changes detected - will run Web tests"
  fi
  # Start backing services unless suppressed (best-effort).
  if [[ "$SKIP_SERVICES" != "true" ]]; then
    start_ci_services postgres-ci valkey-ci || {
      log_warn "Failed to start services, continuing anyway..."
    }
  fi
  run_dotnet_build || return 1
  # Fix: all test invocations below record their status via `|| var=$?` so a
  # failure is accumulated instead of aborting the script under `set -e`
  # before the summary is printed. The --category branch also now sets
  # `failed` (the original always returned 0 for a single failing category).
  if [[ -n "$SPECIFIC_CATEGORY" ]]; then
    if [[ "$SPECIFIC_CATEGORY" == "Web" ]] || [[ "$SPECIFIC_CATEGORY" == "web" ]]; then
      # Run Web tests only (when the optional web library is loaded).
      if type run_web_pr_gating &>/dev/null; then
        local web_only=0
        run_web_pr_gating || web_only=$?
        results+=("Web:$web_only")
        if [[ $web_only -ne 0 ]]; then
          failed=1
        fi
      fi
    else
      local single=0
      run_dotnet_tests "$SPECIFIC_CATEGORY" || single=$?
      results+=("$SPECIFIC_CATEGORY:$single")
      if [[ $single -ne 0 ]]; then
        failed=1
      fi
    fi
  else
    local category
    for category in "${PR_GATING_CATEGORIES[@]}"; do
      local cat_result=0
      run_dotnet_tests "$category" || cat_result=$?
      results+=("$category:$cat_result")
      if [[ $cat_result -ne 0 ]]; then
        failed=1
      fi
    done
    # Run Web tests if the Web module changed.
    if [[ "$web_changed" == "true" ]]; then
      log_subsection "Web Module Tests"
      if type run_web_pr_gating &>/dev/null; then
        local web_result=0
        run_web_pr_gating || web_result=$?
        results+=("Web:$web_result")
        if [[ $web_result -ne 0 ]]; then
          failed=1
        fi
      else
        log_warn "Web testing library not loaded"
      fi
    fi
  fi
  # Tear services back down unless the caller asked to keep them.
  if [[ "$SKIP_SERVICES" != "true" ]] && [[ "$KEEP_SERVICES" != "true" ]]; then
    stop_ci_services
  fi
  # Summary from the accumulated "name:status" records.
  log_section "PR-Gating Results"
  local result
  for result in "${results[@]}"; do
    local name="${result%%:*}"
    local status="${result##*:}"
    if [[ "$status" == "0" ]]; then
      print_status "$name" "true"
    else
      print_status "$name" "false"
    fi
  done
  stop_timer "$start_time" "PR-gating suite"
  return $failed
}
# Test only the modules that changed (vs main), or one explicitly named
# module. Node.js modules (Web, DevPortal) go through npm; .NET modules run
# the *.Tests.csproj projects under each mapped path's __Tests directory.
# Globals: NODE_MODULES and MODULE_PATHS come from the sourced libraries --
# presumably NODE_MODULES lists npm-based module names and MODULE_PATHS maps
# module name -> space-separated source paths; confirm in ci-common.sh.
# Returns: 0 when all selected module tests pass.
run_module_mode() {
  log_section "Module-Specific Mode"
  local modules_to_test=()
  local has_dotnet_modules=false
  local has_node_modules=false
  if [[ -n "$SPECIFIC_MODULE" ]]; then
    modules_to_test=("$SPECIFIC_MODULE")
    log_info "Testing specified module: $SPECIFIC_MODULE"
  else
    log_info "Auto-detecting changed modules..."
    local detected
    detected=$(detect_changed_modules main)
    # Sentinels: ALL -> infrastructure changed, fall back to the full PR
    # suite; NONE -> nothing to do.
    if [[ "$detected" == "ALL" ]]; then
      log_info "Infrastructure changes detected - running all tests"
      run_pr_mode
      return $?
    elif [[ "$detected" == "NONE" ]]; then
      log_info "No module changes detected"
      return 0
    else
      read -ra modules_to_test <<< "$detected"
      log_info "Detected changed modules: ${modules_to_test[*]}"
    fi
  fi
  # Categorize modules so the .NET build only happens when needed.
  for module in "${modules_to_test[@]}"; do
    if [[ " ${NODE_MODULES[*]} " =~ " ${module} " ]]; then
      has_node_modules=true
    else
      has_dotnet_modules=true
    fi
  done
  local start_time
  start_time=$(start_timer)
  local failed=0
  # Build the .NET solution once up front if any .NET module is selected.
  if [[ "$has_dotnet_modules" == "true" ]]; then
    run_dotnet_build || return 1
  fi
  for module in "${modules_to_test[@]}"; do
    log_subsection "Testing Module: $module"
    # Node.js modules are tested via npm, not dotnet.
    if [[ " ${NODE_MODULES[*]} " =~ " ${module} " ]]; then
      log_info "Running Node.js tests for $module"
      case "$module" in
        Web)
          if type run_web_pr_gating &>/dev/null; then
            run_web_pr_gating || failed=1
          else
            # Fallback when the optional web library is not loaded.
            log_warn "Web testing library not loaded - running basic npm test"
            pushd "$REPO_ROOT/src/Web/StellaOps.Web" > /dev/null 2>&1 || continue
            npm ci --prefer-offline --no-audit 2>/dev/null || npm install
            npm run test:ci || failed=1
            popd > /dev/null
          fi
          ;;
        DevPortal)
          local portal_dir="$REPO_ROOT/src/DevPortal/StellaOps.DevPortal.Site"
          if [[ -d "$portal_dir" ]]; then
            pushd "$portal_dir" > /dev/null || continue
            npm ci --prefer-offline --no-audit 2>/dev/null || npm install
            # DevPortal tests are best-effort: absence only warns.
            npm test 2>/dev/null || log_warn "DevPortal tests not configured"
            popd > /dev/null
          fi
          ;;
      esac
      continue
    fi
    # .NET module: map the name to its source paths; unknown names warn.
    local test_paths="${MODULE_PATHS[$module]:-}"
    if [[ -z "$test_paths" ]]; then
      log_warn "Unknown module: $module"
      continue
    fi
    # Run every *.Tests.csproj under each path's __Tests directory.
    for path in $test_paths; do
      local test_dir="$REPO_ROOT/$path/__Tests"
      if [[ -d "$test_dir" ]]; then
        log_info "Running tests in: $test_dir"
        local test_projects
        test_projects=$(find "$test_dir" -name "*.Tests.csproj" -type f 2>/dev/null)
        for project in $test_projects; do
          log_debug "Testing: $project"
          dotnet test "$project" --configuration Release --no-build --verbosity minimal || {
            failed=1
          }
        done
      fi
    done
  done
  stop_timer "$start_time" "Module tests"
  return $failed
}
# Simulate a single Gitea workflow locally via `act`, running jobs inside the
# project CI image (CI_IMAGE from ci-docker.sh). Requires --workflow <name>.
# Returns: act's exit status, or 1 on missing prerequisites.
run_workflow_mode() {
  log_section "Workflow Simulation Mode"
  if [[ -z "$SPECIFIC_WORKFLOW" ]]; then
    log_error "No workflow specified. Use --workflow <name>"
    log_info "Example: --workflow test-matrix"
    return 1
  fi
  # Resolve the workflow file, tolerating names with or without .yml.
  local workflow_file="$REPO_ROOT/.gitea/workflows/${SPECIFIC_WORKFLOW}.yml"
  if [[ ! -f "$workflow_file" ]]; then
    workflow_file="$REPO_ROOT/.gitea/workflows/${SPECIFIC_WORKFLOW}"
    if [[ ! -f "$workflow_file" ]]; then
      log_error "Workflow not found: $SPECIFIC_WORKFLOW"
      log_info "Available workflows:"
      # Fix: enumerate with a glob instead of parsing `ls` output (fragile
      # with unusual names; `xargs -n1 basename` also misbehaves on empty
      # input). Listing stays capped at 20 entries.
      local wf
      for wf in "$REPO_ROOT/.gitea/workflows/"*.yml; do
        [[ -e "$wf" ]] || continue
        basename "$wf"
      done | head -20
      return 1
    fi
  fi
  log_info "Simulating workflow: $SPECIFIC_WORKFLOW"
  log_info "Workflow file: $workflow_file"
  if ! command -v act &>/dev/null; then
    log_error "act is required for workflow simulation"
    log_info "Install with: brew install act (macOS)"
    return 1
  fi
  # Ensure the CI runner image exists (rebuild when requested).
  if [[ "$REBUILD_IMAGE" == "true" ]] || ! ci_image_exists; then
    build_ci_image "$REBUILD_IMAGE" || return 1
  fi
  local event_file="$REPO_ROOT/devops/ci-local/events/pull-request.json"
  local act_args=(
    -W "$workflow_file"
    --platform "ubuntu-22.04=$CI_IMAGE"
    --platform "ubuntu-latest=$CI_IMAGE"
    --env "DOTNET_NOLOGO=1"
    --env "DOTNET_CLI_TELEMETRY_OPTOUT=1"
    --env "TZ=UTC"
    --bind
  )
  # Feed a canned pull-request event and local env overrides when present.
  if [[ -f "$event_file" ]]; then
    act_args+=(--eventpath "$event_file")
  fi
  if [[ -f "$REPO_ROOT/devops/ci-local/.env.local" ]]; then
    act_args+=(--env-file "$REPO_ROOT/devops/ci-local/.env.local")
  fi
  if [[ "$DRY_RUN" == "true" ]]; then
    act_args+=(-n)
  fi
  if [[ "$VERBOSE" == "true" ]]; then
    act_args+=(--verbose)
  fi
  log_info "Running: act ${act_args[*]}"
  act "${act_args[@]}"
}
# Release simulation: build everything, describe the CLI packaging that would
# happen, lint the Helm chart when helm is available, and list the release
# artifacts that would be produced. Always forces DRY_RUN — never releases.
run_release_mode() {
  log_section "Release Simulation Mode"
  log_info "Running release dry-run"
  if [[ "$DRY_RUN" != "true" ]]; then
    log_warn "Release mode always runs as dry-run for safety"
    DRY_RUN=true
  fi
  local t0
  t0=$(start_timer)
  # Build every module first.
  log_subsection "Building All Modules"
  run_dotnet_build || return 1
  # Describe CLI packaging targets (no packaging is performed).
  log_subsection "Packaging CLI"
  local cli_csproj="$REPO_ROOT/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj"
  if [[ -f "$cli_csproj" ]]; then
    log_info "[DRY-RUN] Would build CLI for: linux-x64, linux-arm64, osx-arm64, win-x64"
  fi
  # Helm chart validation is optional and best-effort.
  log_subsection "Validating Helm Chart"
  if ! command -v helm &>/dev/null; then
    log_info "helm not found - skipping chart validation"
  else
    local chart_dir="$REPO_ROOT/devops/helm/stellaops"
    if [[ -d "$chart_dir" ]]; then
      helm lint "$chart_dir" || log_warn "Helm lint warnings"
    fi
  fi
  # List artifacts a real release would produce.
  log_subsection "Release Manifest"
  log_info "[DRY-RUN] Would generate:"
  log_info " - Release notes"
  log_info " - Changelog"
  log_info " - Docker Compose files"
  log_info " - SBOM"
  log_info " - Checksums"
  stop_timer "$t0" "Release simulation"
  return 0
}
# Exhaustive sweep: build once, then run every PR-gating and extended test
# category. A category failure is recorded and the sweep continues.
run_full_mode() {
  log_section "Full Test Mode"
  log_info "Running all tests including extended categories"
  log_info "Categories: ${ALL_CATEGORIES[*]}"
  local t0
  t0=$(start_timer)
  local rc=0
  # Bring up the full CI service stack (best-effort) unless suppressed.
  if [[ "$SKIP_SERVICES" != "true" ]]; then
    start_ci_services || {
      log_warn "Failed to start services, continuing anyway..."
    }
  fi
  run_dotnet_build || return 1
  for category in "${ALL_CATEGORIES[@]}"; do
    run_dotnet_tests "$category" || {
      rc=1
      log_warn "Continuing after $category failure..."
    }
  done
  # Tear services down again unless asked to keep them running.
  if [[ "$SKIP_SERVICES" != "true" && "$KEEP_SERVICES" != "true" ]]; then
    stop_ci_services
  fi
  stop_timer "$t0" "Full test suite"
  return $rc
}
# =============================================================================
# MAIN
# =============================================================================
# Entry point: parse arguments, validate the environment, dispatch to the
# selected mode, and print a final summary.
# Returns: the selected mode's exit status.
main() {
  parse_args "$@"
  log_section "StellaOps Local CI Runner"
  log_info "Mode: $MODE"
  log_info "Engine: $EXECUTION_ENGINE"
  log_info "Parallel: $PARALLEL_JOBS jobs"
  log_info "Repository: $REPO_ROOT"
  if [[ "$DRY_RUN" == "true" ]]; then
    log_warn "DRY-RUN MODE - No changes will be made"
  fi
  check_dependencies || exit 1
  init_results
  # Optional local overrides (service ports, credentials, ...).
  load_env_file "$REPO_ROOT/devops/ci-local/.env.local" || true
  # Fix: capture each mode's status with `|| result=$?`. The original called
  # the mode functions bare and read $? after the case statement, so under
  # `set -e` a failing mode aborted the script before the summary ran.
  local result=0
  case "$MODE" in
    "$MODE_SMOKE")
      run_smoke_mode || result=$?
      ;;
    "$MODE_PR")
      run_pr_mode || result=$?
      ;;
    "$MODE_MODULE")
      run_module_mode || result=$?
      ;;
    "$MODE_WORKFLOW")
      run_workflow_mode || result=$?
      ;;
    "$MODE_RELEASE")
      run_release_mode || result=$?
      ;;
    "$MODE_FULL")
      run_full_mode || result=$?
      ;;
    *)
      log_error "Unknown mode: $MODE"
      usage
      exit 1
      ;;
  esac
  log_section "Summary"
  log_info "Results saved to: $RESULTS_DIR"
  if [[ $result -eq 0 ]]; then
    log_success "All tests passed!"
  else
    log_error "Some tests failed"
  fi
  return $result
}
# Run main only when the script is executed directly; sourcing it (e.g. to
# reuse its functions from tests or other tooling) does not trigger a run.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi

View File

@@ -0,0 +1,244 @@
-- ============================================================================
-- StellaOps Migration Reset Script for Pre-1.0 Deployments
-- ============================================================================
-- This script updates schema_migrations tables to recognize the 1.0.0 compacted
-- migrations for deployments that upgraded from pre-1.0 versions.
--
-- Run via: psql -f migrations-reset-pre-1.0.sql
-- Or with connection: psql -h <host> -U <user> -d <db> -f migrations-reset-pre-1.0.sql
--
-- NOTE(review): the whole reset runs in a single transaction and assumes that
-- every module schema (and its schema_migrations table) already exists. Run
-- against a database missing any listed schema, the script aborts and nothing
-- is changed -- confirm all listed modules are always deployed together.
-- Each section follows the same pattern: DELETE the pre-1.0 migration rows,
-- then register the single compacted 001_initial_schema row (idempotent via
-- ON CONFLICT DO NOTHING, so re-running the script is safe).
-- ============================================================================
BEGIN;

-- ============================================================================
-- Authority Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_mongo_store_equivalents, 003_enable_rls,
--           004_offline_kit_audit, 005_verdict_manifests
-- New: 001_initial_schema (compacted)
DELETE FROM authority.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_mongo_store_equivalents.sql',
    '003_enable_rls.sql',
    '004_offline_kit_audit.sql',
    '005_verdict_manifests.sql'
);
INSERT INTO authority.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Scheduler Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_graph_jobs, 003_runs_policy,
--           010_generated_columns_runs, 011_enable_rls, 012_partition_audit,
--           012b_migrate_audit_data
-- New: 001_initial_schema (compacted)
DELETE FROM scheduler.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_graph_jobs.sql',
    '003_runs_policy.sql',
    '010_generated_columns_runs.sql',
    '011_enable_rls.sql',
    '012_partition_audit.sql',
    '012b_migrate_audit_data.sql'
);
INSERT INTO scheduler.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Scanner Module Reset
-- ============================================================================
-- Original: 001-034 plus various numbered files (27 total)
-- New: 001_initial_schema (compacted)
DELETE FROM scanner.schema_migrations
WHERE migration_name IN (
    '001_create_tables.sql',
    '002_proof_spine_tables.sql',
    '003_classification_history.sql',
    '004_scan_metrics.sql',
    '005_smart_diff_tables.sql',
    '006_score_replay_tables.sql',
    '007_unknowns_ranking_containment.sql',
    '008_epss_integration.sql',
    '0059_scans_table.sql',
    '0065_unknowns_table.sql',
    '0075_scan_findings_table.sql',
    '020_call_graph_tables.sql',
    '021_smart_diff_tables_search_path.sql',
    '022_reachability_drift_tables.sql',
    '023_scanner_api_ingestion.sql',
    '024_smart_diff_priority_score_widen.sql',
    '025_epss_raw_layer.sql',
    '026_epss_signal_layer.sql',
    '027_witness_storage.sql',
    '028_epss_triage_columns.sql',
    '029_vuln_surfaces.sql',
    '030_vuln_surface_triggers_update.sql',
    '031_reach_cache.sql',
    '032_idempotency_keys.sql',
    '033_binary_evidence.sql',
    '034_func_proof_tables.sql',
    'DM001_rename_scanner_migrations.sql'
);
INSERT INTO scanner.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Policy Module Reset
-- ============================================================================
-- Original: 001-013 (14 files, includes duplicate 010 prefix)
-- New: 001_initial_schema (compacted)
DELETE FROM policy.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_cvss_receipts.sql',
    '003_snapshots_violations.sql',
    '004_epss_risk_scores.sql',
    '005_cvss_multiversion.sql',
    '006_enable_rls.sql',
    '007_unknowns_registry.sql',
    '008_exception_objects.sql',
    '009_exception_applications.sql',
    '010_recheck_evidence.sql',
    '010_unknowns_blast_radius_containment.sql',
    '011_unknowns_reason_codes.sql',
    '012_budget_ledger.sql',
    '013_exception_approval.sql'
);
INSERT INTO policy.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Notify Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 010_enable_rls, 011_partition_deliveries,
--           011b_migrate_deliveries_data
-- New: 001_initial_schema (compacted)
DELETE FROM notify.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '010_enable_rls.sql',
    '011_partition_deliveries.sql',
    '011b_migrate_deliveries_data.sql'
);
INSERT INTO notify.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Concelier Module Reset
-- ============================================================================
-- Original: 17 migration files
-- New: 001_initial_schema (compacted)
-- NOTE(review): unlike the other modules (explicit name lists), Concelier
-- deletes by pattern -- any NNN_*.sql name -- so renamed or stray numbered
-- migrations are cleared as well; confirm the broader match is intended.
DELETE FROM concelier.schema_migrations
WHERE migration_name ~ '^[0-9]{3}_.*\.sql$';
INSERT INTO concelier.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Attestor Module Reset (proofchain + attestor schemas)
-- ============================================================================
-- Original: 20251214000001_AddProofChainSchema.sql, 20251216_001_create_rekor_submission_queue.sql
-- New: 001_initial_schema (compacted)
DELETE FROM proofchain.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddProofChainSchema.sql',
    '20251214000002_RollbackProofChainSchema.sql',
    '20251216_001_create_rekor_submission_queue.sql'
);
INSERT INTO proofchain.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Signer Module Reset
-- ============================================================================
-- Original: 20251214000001_AddKeyManagementSchema.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signer.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddKeyManagementSchema.sql'
);
INSERT INTO signer.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Signals Module Reset
-- ============================================================================
-- Original: V0000_001__extensions.sql, V1102_001__unknowns_scoring_schema.sql,
--           V1105_001__deploy_refs_graph_metrics.sql, V3102_001__callgraph_relational_tables.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signals.schema_migrations
WHERE migration_name IN (
    'V0000_001__extensions.sql',
    'V1102_001__unknowns_scoring_schema.sql',
    'V1105_001__deploy_refs_graph_metrics.sql',
    'V3102_001__callgraph_relational_tables.sql'
);
INSERT INTO signals.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;

-- ============================================================================
-- Verification
-- ============================================================================
-- Display current migration status per module via RAISE NOTICE. %I quoting in
-- format() keeps the dynamically-built schema identifier safe.
DO $$
DECLARE
    v_module TEXT;
    v_count INT;
BEGIN
    FOR v_module IN SELECT unnest(ARRAY['authority', 'scheduler', 'scanner', 'policy', 'notify', 'concelier', 'proofchain', 'signer', 'signals']) LOOP
        EXECUTE format('SELECT COUNT(*) FROM %I.schema_migrations', v_module) INTO v_count;
        RAISE NOTICE '% module: % migrations registered', v_module, v_count;
    END LOOP;
END $$;

COMMIT;

-- ============================================================================
-- Post-Reset Notes
-- ============================================================================
-- After running this script:
-- 1. All modules should show exactly 1 migration registered
-- 2. The schema structure should be identical to a fresh 1.0.0 deployment
-- 3. Future migrations (002+) will apply normally
--
-- To verify manually:
-- SELECT * FROM authority.schema_migrations;
-- SELECT * FROM scheduler.schema_migrations;
-- SELECT * FROM scanner.schema_migrations;
-- SELECT * FROM policy.schema_migrations;
-- SELECT * FROM notify.schema_migrations;
-- SELECT * FROM concelier.schema_migrations;
-- SELECT * FROM proofchain.schema_migrations;
-- SELECT * FROM signer.schema_migrations;
-- SELECT * FROM signals.schema_migrations;
-- ============================================================================

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env pwsh
# regenerate-solution.ps1 - Regenerate StellaOps.sln without duplicate projects
#
# This script:
# 1. Backs up the existing solution
# 2. Creates a new solution
# 3. Adds all .csproj files, skipping duplicates
# 4. Preserves solution folders where possible
param(
    # Solution file to regenerate, relative to the working directory.
    [string]$SolutionPath = "src/StellaOps.sln",
    # Report planned changes without modifying anything.
    [switch]$DryRun
)

$ErrorActionPreference = "Stop"
# Canonical locations for test projects (in priority order).
# Later entries win when there are duplicates.
# NOTE(review): this table is not referenced by Get-CanonicalPath below, which
# encodes its own hard-coded regex preferences -- confirm whether the table is
# consumed elsewhere in this script or is dead configuration.
$canonicalPatterns = @(
    # Module-local tests (highest priority)
    "src/*/__Tests/*/*.csproj",
    "src/*/__Libraries/__Tests/*/*.csproj",
    "src/__Libraries/__Tests/*/*.csproj",
    # Cross-module integration tests
    "src/__Tests/Integration/*/*.csproj",
    "src/__Tests/__Libraries/*/*.csproj",
    # Category-based cross-module tests
    "src/__Tests/chaos/*/*.csproj",
    "src/__Tests/security/*/*.csproj",
    "src/__Tests/interop/*/*.csproj",
    "src/__Tests/parity/*/*.csproj",
    "src/__Tests/reachability/*/*.csproj",
    # Single global tests
    "src/__Tests/*/*.csproj"
)
Write-Host "=== Solution Regeneration Script ===" -ForegroundColor Cyan
Write-Host "Solution: $SolutionPath"
Write-Host "Dry Run: $DryRun"
Write-Host ""

# Enumerate every project file under src/, excluding build output folders.
# NOTE(review): the \\obj\\ / \\bin\\ filters assume backslash separators; on
# pwsh for Linux/macOS they will not exclude obj/bin -- confirm whether
# cross-platform runs are expected.
Write-Host "Finding all project files..." -ForegroundColor Yellow
$allProjects = Get-ChildItem -Path "src" -Filter "*.csproj" -Recurse |
    Where-Object { $_.FullName -notmatch "\\obj\\" -and $_.FullName -notmatch "\\bin\\" }
Write-Host "Found $($allProjects.Count) project files"

# Group projects by base name so duplicates (same project name living in
# several locations) can be detected.
$projectMap = @{}
foreach ($proj in $allProjects) {
    $name = $proj.BaseName
    if (-not $projectMap.ContainsKey($name)) {
        $projectMap[$name] = @()
    }
    $projectMap[$name] += $proj.FullName
}

# Report every project name that appears in more than one location.
$duplicates = $projectMap.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 }
Write-Host ""
Write-Host "Found $($duplicates.Count) projects with duplicate names:" -ForegroundColor Yellow
foreach ($dup in $duplicates) {
    Write-Host " $($dup.Key):" -ForegroundColor Red
    foreach ($path in $dup.Value) {
        Write-Host " - $path"
    }
}
# Select canonical path for each project
function Get-CanonicalPath {
    <#
    .SYNOPSIS
    Picks the canonical project path out of a set of duplicate candidates.
    .DESCRIPTION
    Preference order: module-local __Tests, then __Libraries/__Tests, then any
    __Tests location, then the first path as a fallback.
    Fix: the original regexes matched only backslash separators, so under pwsh
    on Linux/macOS (forward-slash paths) every preference rule silently fell
    through to "first path". Patterns now accept both separators. Results are
    also wrapped in @() so .Count is reliable when only one path matches.
    #>
    param([string[]]$Paths)
    # 1. Module-local __Tests (src/<Module>/__Tests/...; [^_] skips src/__Tests).
    $moduleTests = @($Paths | Where-Object { $_ -match 'src[\\/][^_][^\\/]+[\\/]__Tests[\\/]' })
    if ($moduleTests.Count -gt 0) { return $moduleTests[0] }
    # 2. Library tests under __Libraries/__Tests.
    $libTests = @($Paths | Where-Object { $_ -match '__Libraries[\\/]__Tests[\\/]' })
    if ($libTests.Count -gt 0) { return $libTests[0] }
    # 3. Any __Tests location beats a non-__Tests one.
    $testsPath = @($Paths | Where-Object { $_ -match '[\\/]__Tests[\\/]' })
    if ($testsPath.Count -gt 0) { return $testsPath[0] }
    # 4. Fallback: first candidate.
    return $Paths[0]
}
# Build final project list
# One canonical path per distinct project name; order follows hashtable enumeration.
$finalProjects = @()
foreach ($entry in $projectMap.GetEnumerator()) {
$canonical = Get-CanonicalPath -Paths $entry.Value
$finalProjects += $canonical
}
Write-Host ""
Write-Host "Final project count: $($finalProjects.Count)" -ForegroundColor Green
# Dry-run mode: report what would be added, change nothing, exit success.
if ($DryRun) {
Write-Host ""
Write-Host "=== DRY RUN - No changes made ===" -ForegroundColor Magenta
Write-Host "Would add the following projects to solution:"
$finalProjects | ForEach-Object { Write-Host " $_" }
exit 0
}
# Backup existing solution
# A single .bak next to the solution; overwritten on every run.
$backupPath = "$SolutionPath.bak"
if (Test-Path $SolutionPath) {
Copy-Item $SolutionPath $backupPath -Force
Write-Host "Backed up existing solution to $backupPath" -ForegroundColor Gray
}
# Create new solution
Write-Host ""
Write-Host "Creating new solution..." -ForegroundColor Yellow
$slnDir = Split-Path $SolutionPath -Parent
$slnName = [System.IO.Path]::GetFileNameWithoutExtension($SolutionPath)
# Remove old solution
if (Test-Path $SolutionPath) {
Remove-Item $SolutionPath -Force
}
# Create fresh solution
# Output of `dotnet new` is suppressed; a failure here surfaces when projects
# fail to add below.
Push-Location $slnDir
dotnet new sln -n $slnName --force 2>$null
Pop-Location
# Add projects in batches (dotnet sln add can handle multiple)
# NOTE(review): projects are actually added one per invocation in this loop;
# passing many paths to a single `dotnet sln add` would be much faster.
Write-Host "Adding projects to solution..." -ForegroundColor Yellow
$added = 0
$failed = 0
foreach ($proj in $finalProjects) {
try {
$result = dotnet sln $SolutionPath add $proj 2>&1
if ($LASTEXITCODE -eq 0) {
$added++
# Progress heartbeat every 50 projects so long runs show life.
if ($added % 50 -eq 0) {
Write-Host " Added $added projects..." -ForegroundColor Gray
}
} else {
Write-Host " Failed to add: $proj" -ForegroundColor Red
$failed++
}
} catch {
Write-Host " Error adding: $proj - $_" -ForegroundColor Red
$failed++
}
}
Write-Host ""
Write-Host "=== Summary ===" -ForegroundColor Cyan
Write-Host "Projects added: $added" -ForegroundColor Green
Write-Host "Projects failed: $failed" -ForegroundColor $(if ($failed -gt 0) { "Red" } else { "Green" })
Write-Host ""
Write-Host "Solution regenerated at: $SolutionPath"
# Verify
# Best-effort verification build; failures only warn, they do not fail the script.
Write-Host ""
Write-Host "Verifying solution..." -ForegroundColor Yellow
$verifyResult = dotnet build $SolutionPath --no-restore -t:ValidateSolutionConfiguration 2>&1
if ($LASTEXITCODE -eq 0) {
Write-Host "Solution validation passed!" -ForegroundColor Green
} else {
Write-Host "Solution validation had issues - check manually" -ForegroundColor Yellow
}

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env pwsh
# remove-stale-refs.ps1 - Remove stale project references that don't exist
# Rewrites the .sln file in place. Run from the repository root.
param([string]$SlnPath = "src/StellaOps.sln")
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Stale project paths (relative from solution location)
$staleProjects = @(
"__Tests\AirGap\StellaOps.AirGap.Controller.Tests",
"__Tests\AirGap\StellaOps.AirGap.Importer.Tests",
"__Tests\AirGap\StellaOps.AirGap.Time.Tests",
"__Tests\StellaOps.Gateway.WebService.Tests",
"__Tests\Graph\StellaOps.Graph.Indexer.Tests",
"Scanner\StellaOps.Scanner.Analyzers.Native",
"__Libraries\__Tests\StellaOps.Signals.Tests",
"__Tests\StellaOps.Audit.ReplayToken.Tests",
"__Tests\StellaOps.Router.Gateway.Tests",
"__Libraries\StellaOps.Cryptography"
)
# GUIDs of removed projects; used in pass 2 to scrub GlobalSection entries.
$staleGuids = @()
$newLines = @()
$skipNext = $false
# Pass 1: drop each stale Project(...) line plus the line after it.
# NOTE(review): assumes the matching EndProject is always on the very next
# line (standard single-line .sln project entries) -- confirm no project
# sections with nested content exist in this solution.
for ($i = 0; $i -lt $lines.Count; $i++) {
$line = $lines[$i]
if ($skipNext) {
$skipNext = $false
continue
}
$isStale = $false
foreach ($stalePath in $staleProjects) {
if ($line -like "*$stalePath*") {
# Extract GUID
# PowerShell -match is case-insensitive by default, so lowercase
# GUID digits also match the [A-F0-9-] class.
if ($line -match '\{([A-F0-9-]+)\}"?$') {
$staleGuids += $Matches[1]
}
Write-Host "Removing stale: $stalePath"
$isStale = $true
$skipNext = $true
break
}
}
if (-not $isStale) {
$newLines += $line
}
}
# Remove GlobalSection references to stale GUIDs
# Pass 2: drop configuration/nested-project lines that mention a removed GUID.
$finalLines = @()
foreach ($line in $newLines) {
$skip = $false
foreach ($guid in $staleGuids) {
if ($line -match $guid) {
$skip = $true
break
}
}
if (-not $skip) {
$finalLines += $line
}
}
# Write back with CRLF joins and no trailing newline, matching .sln conventions.
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host "Removed $($staleGuids.Count) stale project references"

View File

@@ -0,0 +1,61 @@
# Restore deleted test files from commit parent
# Maps old locations to new locations and copies file content out of git
# history into the new directory layout, never overwriting existing files.
$ErrorActionPreference = "Stop"

# Parent of the commit that moved/deleted the test projects; files are read
# from this tree. The trailing ^ selects the first parent.
$parentCommit = "74c7aa250c401ee9ac332686832b256159efa604^"

# Mapping: old path (as recorded in $parentCommit) -> new path (current layout)
$mappings = @{
    "src/__Tests/AirGap/StellaOps.AirGap.Importer.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Importer.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Controller.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Controller.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Time.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Time.Tests"
    "src/__Tests/StellaOps.Gateway.WebService.Tests" = "src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests"
    "src/__Tests/Replay/StellaOps.Replay.Core.Tests" = "src/Replay/__Tests/StellaOps.Replay.Core.Tests"
    "src/__Tests/Provenance/StellaOps.Provenance.Attestation.Tests" = "src/Provenance/__Tests/StellaOps.Provenance.Attestation.Tests"
    "src/__Tests/Policy/StellaOps.Policy.Scoring.Tests" = "src/Policy/__Tests/StellaOps.Policy.Scoring.Tests"
}

# NOTE(review): hard-coded, machine-specific repo root; consider deriving it
# from $PSScriptRoot or taking it as a parameter before sharing this script.
Set-Location "E:\dev\git.stella-ops.org"

foreach ($mapping in $mappings.GetEnumerator()) {
    $oldPath = $mapping.Key
    $newPath = $mapping.Value
    Write-Host "`nProcessing: $oldPath -> $newPath" -ForegroundColor Cyan

    # List every file under the old path as recorded in the parent commit.
    $files = git ls-tree -r --name-only "$parentCommit" -- $oldPath 2>$null
    if (-not $files) {
        Write-Host " No files found at old path" -ForegroundColor Yellow
        continue
    }

    foreach ($file in $files) {
        # git paths always use '/', so a simple prefix strip yields the
        # path relative to the old project folder.
        $relativePath = $file.Substring($oldPath.Length + 1)
        $newFilePath = Join-Path $newPath $relativePath

        # Create directory if needed
        $newDir = Split-Path $newFilePath -Parent
        if (-not (Test-Path $newDir)) {
            New-Item -ItemType Directory -Path $newDir -Force | Out-Null
        }

        # Never overwrite a file that already exists at the new location.
        if (Test-Path $newFilePath) {
            Write-Host " Exists: $relativePath" -ForegroundColor DarkGray
            continue
        }

        # Restore the file content. Use Out-File with an explicit UTF-8
        # encoding: the bare '>' redirection used previously writes
        # UTF-16LE with a BOM under Windows PowerShell 5.1, corrupting
        # restored source files. (Under pwsh 7 this writes UTF-8 with BOM
        # in 5.1 -- acceptable for C# sources; binary assets would need a
        # raw-byte copy instead.)
        git show "${parentCommit}:${file}" 2>$null | Out-File -FilePath $newFilePath -Encoding utf8
        if ($LASTEXITCODE -eq 0) {
            Write-Host " Restored: $relativePath" -ForegroundColor Green
        } else {
            Write-Host " Failed: $relativePath" -ForegroundColor Red
        }
    }
}
Write-Host "`nDone!" -ForegroundColor Cyan

View File

@@ -0,0 +1,176 @@
<#
.SYNOPSIS
Pre-Commit Validation Script for Windows
.DESCRIPTION
Run this script before committing to ensure all CI checks will pass.
Wraps the Bash validation script via WSL2 or Git Bash.
.PARAMETER Level
Validation level:
- quick : Smoke test only (~2 min)
- pr : Full PR-gating suite (~15 min) [default]
- full : All tests including extended (~45 min)
.EXAMPLE
.\validate-before-commit.ps1
Run PR-gating validation
.EXAMPLE
.\validate-before-commit.ps1 quick
Run quick smoke test only
.EXAMPLE
.\validate-before-commit.ps1 full
Run full test suite
#>
[CmdletBinding()]
param(
[Parameter(Position = 0)]
[ValidateSet('quick', 'pr', 'full')]
[string]$Level = 'pr',
[switch]$Help
)
# Script location
# This script lives in devops/scripts/, so the repository root is two levels up.
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)
# -Help renders the comment-based help block at the top of this file.
if ($Help) {
Get-Help $MyInvocation.MyCommand.Path -Detailed
exit 0
}
# Colors
function Write-ColoredOutput {
    <#
    .SYNOPSIS
        Writes a message to the host in the given foreground color.
    .PARAMETER Message
        Text to write.
    .PARAMETER Color
        Console color to use (default: White).
    #>
    param(
        [string]$Message,
        [ConsoleColor]$Color = [ConsoleColor]::White
    )
    # Pass the color directly to Write-Host instead of mutating
    # $Host.UI.RawUI.ForegroundColor: RawUI is not available in every host
    # (e.g. some non-console runspaces), where touching it throws, and the
    # save/restore dance is unnecessary.
    Write-Host $Message -ForegroundColor $Color
}
function Write-Header {
    # Prints a cyan three-line banner framing the supplied title, with a
    # blank line above and below.
    param([string]$Message)
    $rule = "============================================="
    Write-Host ""
    Write-ColoredOutput $rule -Color Cyan
    Write-ColoredOutput " $Message" -Color Cyan
    Write-ColoredOutput $rule -Color Cyan
    Write-Host ""
}
# One-line status writers; each prefixes the message with its level tag.
# An explicit -Message parameter replaces the previous $args usage, which
# silently joined multiple arguments with spaces and kept these from being
# proper parameterized functions.
function Write-Step { param([string]$Message) Write-ColoredOutput ">>> $Message" -Color Blue }
function Write-Pass { param([string]$Message) Write-ColoredOutput "[PASS] $Message" -Color Green }
function Write-Fail { param([string]$Message) Write-ColoredOutput "[FAIL] $Message" -Color Red }
function Write-Warn { param([string]$Message) Write-ColoredOutput "[WARN] $Message" -Color Yellow }
function Write-Info { param([string]$Message) Write-ColoredOutput "[INFO] $Message" -Color Cyan }
# Find Bash
function Find-BashExecutable {
# Locates a usable bash host, preferring WSL over Git Bash.
# Returns a hashtable @{ Type = 'wsl'|'gitbash'; Path = <executable> },
# or $null when neither is available.
# Check WSL
$wsl = Get-Command wsl -ErrorAction SilentlyContinue
if ($wsl) {
# `wsl --status` exits 0 only when a distro is installed and usable.
# NOTE(review): --status requires a reasonably recent WSL build --
# confirm behaviour on older installations.
$wslCheck = & wsl --status 2>&1
if ($LASTEXITCODE -eq 0) {
return @{ Type = 'wsl'; Path = 'wsl' }
}
}
# Check Git Bash
# Probed in order; the first existing executable wins.
$gitBashPaths = @(
"C:\Program Files\Git\bin\bash.exe",
"C:\Program Files (x86)\Git\bin\bash.exe",
"$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
)
foreach ($path in $gitBashPaths) {
if (Test-Path $path) {
return @{ Type = 'gitbash'; Path = $path }
}
}
# Neither WSL nor Git Bash was found.
return $null
}
function Convert-ToUnixPath {
    # Translates a Windows path (e.g. 'C:\repo\x') into its WSL mount form
    # ('/mnt/c/repo/x'). Paths without a drive prefix only get their
    # backslashes converted to forward slashes.
    param([string]$WindowsPath)
    if ($WindowsPath -notmatch '^([A-Za-z]):(.*)$') {
        return ($WindowsPath -replace '\\', '/')
    }
    $driveLetter = $Matches[1].ToLower()
    $tail = $Matches[2] -replace '\\', '/'
    return "/mnt/$driveLetter$tail"
}
# Main
# Entry flow: print banner, locate a bash host, delegate to the .sh validator,
# then mirror its exit status.
Write-Header "Pre-Commit Validation (Windows)"
Write-Info "Level: $Level"
Write-Info "Repository: $RepoRoot"
$bash = Find-BashExecutable
if (-not $bash) {
Write-Fail "Bash not found. Install WSL2 or Git for Windows."
exit 1
}
Write-Info "Using: $($bash.Type)"
# All real validation work happens in the sibling Bash script.
$scriptPath = Join-Path $ScriptDir "validate-before-commit.sh"
if (-not (Test-Path $scriptPath)) {
Write-Fail "Script not found: $scriptPath"
exit 1
}
$startTime = Get-Date
try {
switch ($bash.Type) {
'wsl' {
# WSL needs the script path translated into /mnt/<drive>/... form.
$unixScript = Convert-ToUnixPath $scriptPath
& wsl bash $unixScript $Level
}
'gitbash' {
# Git Bash accepts Windows paths with forward slashes.
$unixScript = $scriptPath -replace '\\', '/'
& $bash.Path $unixScript $Level
}
}
$exitCode = $LASTEXITCODE
}
catch {
Write-Fail "Execution failed: $_"
$exitCode = 1
}
# Wall-clock duration for the summary banner.
$duration = (Get-Date) - $startTime
$minutes = [math]::Floor($duration.TotalMinutes)
$seconds = $duration.Seconds
Write-Header "Summary"
Write-Info "Duration: ${minutes}m ${seconds}s"
if ($exitCode -eq 0) {
Write-Host ""
Write-ColoredOutput "=============================================" -Color Green
Write-ColoredOutput " ALL CHECKS PASSED - Ready to commit!" -Color Green
Write-ColoredOutput "=============================================" -Color Green
Write-Host ""
Write-Host "Next steps:"
Write-Host " git add -A"
Write-Host ' git commit -m "Your commit message"'
Write-Host ""
} else {
Write-Host ""
Write-ColoredOutput "=============================================" -Color Red
Write-ColoredOutput " VALIDATION FAILED - Do not commit!" -Color Red
Write-ColoredOutput "=============================================" -Color Red
Write-Host ""
Write-Host "Check the logs in: out/local-ci/logs/"
Write-Host ""
}
# Propagate the Bash script's exit code to the caller (e.g. a git hook).
exit $exitCode

View File

@@ -0,0 +1,318 @@
#!/usr/bin/env bash
# =============================================================================
# PRE-COMMIT VALIDATION SCRIPT
# =============================================================================
# Run this script before committing to ensure all CI checks will pass.
#
# Usage:
# ./devops/scripts/validate-before-commit.sh [level]
#
# Levels:
# quick - Smoke test only (~2 min)
# pr - Full PR-gating suite (~15 min) [default]
# full - All tests including extended (~45 min)
#
# Examples:
# ./devops/scripts/validate-before-commit.sh # PR-gating
# ./devops/scripts/validate-before-commit.sh quick # Smoke only
# ./devops/scripts/validate-before-commit.sh full # Everything
#
# =============================================================================
# Strict mode: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail
# Directory of this script and the repository root (two levels up).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Colors
# ANSI escape sequences; NC ("no color") resets the terminal.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
# Validation level
# First positional argument; defaults to the PR-gating suite.
LEVEL="${1:-pr}"
# =============================================================================
# UTILITIES
# =============================================================================
print_header() {
  # Render a cyan banner framing the given title, with a blank line
  # above and below (5 output lines total).
  local title=$1
  local rule="============================================="
  printf '\n%b\n%b\n%b\n\n' \
    "${CYAN}${rule}${NC}" \
    "${CYAN} ${title}${NC}" \
    "${CYAN}${rule}${NC}"
}
# One-line status printers; each tags its message with a colored level prefix.
# %b interprets the embedded ANSI escape variables, matching echo -e semantics.
print_step()    { printf '%b\n' "${BLUE}>>> $1${NC}"; }
print_success() { printf '%b\n' "${GREEN}[PASS] $1${NC}"; }
print_fail()    { printf '%b\n' "${RED}[FAIL] $1${NC}"; }
print_warn()    { printf '%b\n' "${YELLOW}[WARN] $1${NC}"; }
print_info()    { printf '%b\n' "${CYAN}[INFO] $1${NC}"; }
# =============================================================================
# CHECKS
# =============================================================================
check_git_status() {
# Informational only: reports working-tree state, never fails the run.
print_step "Checking git status..."
# Check for uncommitted changes
# NOTE(review): `git diff --quiet` only sees unstaged tracked changes;
# staged-but-uncommitted edits (git diff --cached) go unreported -- confirm
# whether that is intended.
if ! git diff --quiet 2>/dev/null; then
print_warn "You have unstaged changes"
fi
# Check for untracked files
# NOTE(review): under `set -e -o pipefail`, a failing `git ls-files` (e.g.
# not a git repo) aborts the whole script here rather than warning.
local untracked
untracked=$(git ls-files --others --exclude-standard 2>/dev/null | wc -l)
if [[ "$untracked" -gt 0 ]]; then
print_warn "You have $untracked untracked file(s)"
fi
# Show current branch
local branch
branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
print_info "Current branch: $branch"
}
check_dependencies() {
  # Verifies required tooling is present. Hard requirements: .NET SDK, git.
  # Docker is optional (some tests need it) and only produces warnings.
  # Returns non-zero when any hard requirement is missing.
  print_step "Checking dependencies..."
  local missing=0

  # .NET SDK (required)
  if command -v dotnet &>/dev/null; then
    local version
    version=$(dotnet --version)
    print_success ".NET SDK: $version"
  else
    print_fail ".NET SDK not found"
    missing=1
  fi

  # Docker (optional)
  if ! command -v docker &>/dev/null; then
    print_warn "Docker not found (some tests may fail)"
  elif docker info &>/dev/null; then
    print_success "Docker: running"
  else
    print_warn "Docker: not running"
  fi

  # Git (required)
  if command -v git &>/dev/null; then
    print_success "Git: installed"
  else
    print_fail "Git not found"
    missing=1
  fi

  return $missing
}
# Suite runners ---------------------------------------------------------------
# The four public wrappers share one driver so the step/pass/fail wording
# stays consistent and cannot drift between copies.
#
# _run_suite SUITE STEP_LABEL RESULT_LABEL
#   SUITE        - argument forwarded to local-ci.sh
#   STEP_LABEL   - lowercase text used in the "Running ..." step line
#   RESULT_LABEL - capitalized text used in the pass/fail line
# Returns 0 when local-ci.sh succeeds, 1 otherwise.
_run_suite() {
  local suite=$1 step_label=$2 result_label=$3
  print_step "Running ${step_label}..."
  if "$SCRIPT_DIR/local-ci.sh" "$suite"; then
    print_success "${result_label} passed"
    return 0
  else
    print_fail "${result_label} failed"
    return 1
  fi
}

run_smoke_tests()  { _run_suite smoke  "smoke tests"     "Smoke tests"; }
run_pr_tests()     { _run_suite pr     "PR-gating suite" "PR-gating suite"; }
run_full_tests()   { _run_suite full   "full test suite" "Full test suite"; }
run_module_tests() { _run_suite module "module tests"    "Module tests"; }
validate_helm() {
  # Lint the Helm chart when helm is installed. Advisory only: every path
  # through this function returns 0, and lint problems just warn.
  command -v helm &>/dev/null || return 0
  print_step "Validating Helm chart..."
  local chart_dir="$REPO_ROOT/devops/helm/stellaops"
  [[ -d "$chart_dir" ]] || return 0
  if helm lint "$chart_dir" &>/dev/null; then
    print_success "Helm chart valid"
  else
    print_warn "Helm chart has warnings"
  fi
}
validate_compose() {
  # Syntax-check the CI compose file via `docker compose config`.
  # Advisory only: problems warn, the function always returns 0.
  print_step "Validating Docker Compose..."
  local compose_file="$REPO_ROOT/devops/compose/docker-compose.ci.yaml"
  [[ -f "$compose_file" ]] || return 0
  if docker compose -f "$compose_file" config &>/dev/null; then
    print_success "Docker Compose valid"
  else
    print_warn "Docker Compose has issues"
  fi
}
# =============================================================================
# MAIN
# =============================================================================
main() {
# Orchestrates the whole validation run; exits 0 on success, 1 on failure.
print_header "Pre-Commit Validation"
print_info "Level: $LEVEL"
print_info "Repository: $REPO_ROOT"
local start_time
start_time=$(date +%s)
local failed=0
# Always run these checks
# Git status is informational; missing hard dependencies abort immediately.
check_git_status
check_dependencies || failed=1
if [[ $failed -eq 1 ]]; then
print_fail "Dependency check failed"
exit 1
fi
# Run appropriate test level
# Stages short-circuit: a later suite only runs while everything before it
# has passed. Helm/compose validation is advisory and never sets $failed.
case "$LEVEL" in
quick|smoke)
run_smoke_tests || failed=1
;;
pr|default)
run_smoke_tests || failed=1
if [[ $failed -eq 0 ]]; then
run_module_tests || failed=1
fi
if [[ $failed -eq 0 ]]; then
run_pr_tests || failed=1
fi
validate_helm
validate_compose
;;
full|all)
run_smoke_tests || failed=1
if [[ $failed -eq 0 ]]; then
run_full_tests || failed=1
fi
validate_helm
validate_compose
;;
*)
print_fail "Unknown level: $LEVEL"
echo "Valid levels: quick, pr, full"
exit 1
;;
esac
# Calculate duration
local end_time
end_time=$(date +%s)
local duration=$((end_time - start_time))
local minutes=$((duration / 60))
local seconds=$((duration % 60))
# Final summary
print_header "Summary"
print_info "Duration: ${minutes}m ${seconds}s"
if [[ $failed -eq 0 ]]; then
echo ""
echo -e "${GREEN}=============================================${NC}"
echo -e "${GREEN} ALL CHECKS PASSED - Ready to commit!${NC}"
echo -e "${GREEN}=============================================${NC}"
echo ""
echo "Next steps:"
echo " git add -A"
echo " git commit -m \"Your commit message\""
echo ""
exit 0
else
echo ""
echo -e "${RED}=============================================${NC}"
echo -e "${RED} VALIDATION FAILED - Do not commit!${NC}"
echo -e "${RED}=============================================${NC}"
echo ""
echo "Check the logs in: out/local-ci/logs/"
echo ""
exit 1
fi
}
# Show usage if --help
# Handled before calling main so `script --help` never runs dependency checks.
# The unquoted EOF delimiter is intentional: $(basename "$0") is expanded so
# the usage text shows the actual script name.
if [[ "${1:-}" == "--help" ]] || [[ "${1:-}" == "-h" ]]; then
cat <<EOF
Pre-Commit Validation Script
Usage: $(basename "$0") [level]
Levels:
quick Smoke test only (~2 min)
pr Full PR-gating suite (~15 min) [default]
full All tests including extended (~45 min)
Examples:
$(basename "$0") # Run PR-gating validation
$(basename "$0") quick # Quick smoke test only
$(basename "$0") full # Run everything
What each level validates:
quick: Build + Unit tests
pr: Build + Unit + Architecture + Contract + Integration + Security + Golden
full: All PR-gating + Performance + Benchmark + AirGap + Chaos + Determinism
EOF
exit 0
fi
# Entry point: all top-level work lives in main().
main "$@"