save progress

This commit is contained in:
StellaOps Bot
2025-12-26 22:03:32 +02:00
parent 9a4cd2e0f7
commit e6c47c8f50
3634 changed files with 253222 additions and 56632 deletions

View File

@@ -0,0 +1,93 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models for all StellaOps modules.
.DESCRIPTION
Iterates through all configured modules and runs Scaffold-Module.ps1 for each.
Use this after schema changes or for initial setup.
.PARAMETER SkipMissing
Skip modules whose projects don't exist yet (default: true)
.EXAMPLE
.\Scaffold-AllModules.ps1
.EXAMPLE
.\Scaffold-AllModules.ps1 -SkipMissing:$false
#>
param(
    [bool]$SkipMissing = $true
)

$ErrorActionPreference = "Stop"

# Module definitions: Module name -> PostgreSQL schema name
$modules = @(
    @{ Module = "Unknowns";      Schema = "unknowns" },
    @{ Module = "PacksRegistry"; Schema = "packs" },
    @{ Module = "Authority";     Schema = "authority" },
    @{ Module = "Scanner";       Schema = "scanner" },
    @{ Module = "Scheduler";     Schema = "scheduler" },
    @{ Module = "TaskRunner";    Schema = "taskrunner" },
    @{ Module = "Policy";        Schema = "policy" },
    @{ Module = "Notify";        Schema = "notify" },
    @{ Module = "Concelier";     Schema = "vuln" },
    @{ Module = "Excititor";     Schema = "vex" },
    @{ Module = "Signals";       Schema = "signals" },
    @{ Module = "Attestor";      Schema = "proofchain" },
    @{ Module = "Signer";        Schema = "signer" }
)

$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
# This script lives three directory levels below the repository root.
$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.Parent.FullName

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for All Modules" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host ""

$successCount = 0
$skipCount = 0
$failCount = 0

foreach ($m in $modules) {
    $projectPath = Join-Path $RepoRoot "src" $m.Module "__Libraries" "StellaOps.$($m.Module).Persistence.EfCore"

    # Cross-platform wildcard check (the original "$path\*.csproj" used a
    # Windows-only separator).
    if (-not (Test-Path (Join-Path $projectPath "*.csproj"))) {
        if ($SkipMissing) {
            Write-Host "SKIP: $($m.Module) - Project not found" -ForegroundColor DarkGray
            $skipCount++
            continue
        } else {
            Write-Host "FAIL: $($m.Module) - Project not found at: $projectPath" -ForegroundColor Red
            $failCount++
            continue
        }
    }

    Write-Host ""
    Write-Host ">>> Scaffolding $($m.Module)..." -ForegroundColor Magenta
    try {
        # Reset the exit code first: a child that returns without calling
        # 'exit' leaves $LASTEXITCODE at its previous, stale value.
        $global:LASTEXITCODE = 0
        & "$ScriptDir\Scaffold-Module.ps1" -Module $m.Module -Schema $m.Schema
        # Scaffold-Module.ps1 reports tool failures via 'exit 1', which does
        # NOT raise a terminating error — without this check those failures
        # would be counted as successes by the catch-based accounting.
        if ($LASTEXITCODE -ne 0) {
            throw "Scaffold-Module.ps1 exited with code $LASTEXITCODE"
        }
        $successCount++
    }
    catch {
        Write-Host "FAIL: $($m.Module) - $($_.Exception.Message)" -ForegroundColor Red
        $failCount++
    }
}

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Summary" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Success: $successCount"
Write-Host " Skipped: $skipCount"
Write-Host " Failed:  $failCount"
Write-Host ""

# Non-zero exit so CI callers can detect partial failure.
if ($failCount -gt 0) {
    exit 1
}

View File

@@ -0,0 +1,162 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models from PostgreSQL schema.
.DESCRIPTION
This script performs database-first scaffolding for a StellaOps module:
 1. Cleans existing generated files (Entities, CompiledModels, DbContext)
 2. Scaffolds DbContext and entities from live PostgreSQL schema
 3. Generates compiled models for startup performance
.PARAMETER Module
The module name (e.g., Unknowns, PacksRegistry, Authority)
.PARAMETER Schema
The PostgreSQL schema name (defaults to lowercase module name)
.PARAMETER ConnectionString
PostgreSQL connection string. If not provided, uses default dev connection.
.PARAMETER ProjectPath
Optional custom project path. Defaults to src/{Module}/__Libraries/StellaOps.{Module}.Persistence.EfCore
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns
.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns -Schema unknowns -ConnectionString "Host=localhost;Database=stellaops_platform;Username=unknowns_user;Password=unknowns_dev"
.EXAMPLE
.\Scaffold-Module.ps1 -Module PacksRegistry -Schema packs
#>
param(
    [Parameter(Mandatory=$true)]
    [string]$Module,
    [string]$Schema,
    [string]$ConnectionString,
    [string]$ProjectPath
)

$ErrorActionPreference = "Stop"

# Resolve repository root (this script lives three levels below it).
$RepoRoot = (Get-Item $PSScriptRoot).Parent.Parent.Parent.FullName

# Default schema to lowercase module name
if (-not $Schema) {
    $Schema = $Module.ToLower()
}

# Default dev connection string follows the <schema>_user / <schema>_dev
# naming convention used by the local development database.
if (-not $ConnectionString) {
    $user = "${Schema}_user"
    $password = "${Schema}_dev"
    $ConnectionString = "Host=localhost;Port=5432;Database=stellaops_platform;Username=$user;Password=$password;SearchPath=$Schema"
}

# Default project path (multi-segment Join-Path requires PowerShell 7+).
if (-not $ProjectPath) {
    $ProjectPath = Join-Path $RepoRoot "src" $Module "__Libraries" "StellaOps.$Module.Persistence.EfCore"
}

$ContextDir = "Context"
$EntitiesDir = "Entities"
$CompiledModelsDir = "CompiledModels"

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for Module: $Module" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Schema:     $Schema"
Write-Host " Project:    $ProjectPath"
# Deliberately prints the connection WITHOUT the password.
Write-Host " Connection: Host=localhost;Database=stellaops_platform;Username=${Schema}_user;..."
Write-Host ""

# Verify project exists (cross-platform wildcard check).
if (-not (Test-Path (Join-Path $ProjectPath "*.csproj"))) {
    Write-Error "Project not found at: $ProjectPath"
    Write-Host "Create the project first with: dotnet new classlib -n StellaOps.$Module.Persistence.EfCore"
    exit 1
}

# Step 1: Clean existing generated files so stale entities never survive a
# schema change.
Write-Host "[1/4] Cleaning existing generated files..." -ForegroundColor Yellow
$paths = @(
    (Join-Path $ProjectPath $EntitiesDir),
    (Join-Path $ProjectPath $CompiledModelsDir),
    (Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs")
)
foreach ($path in $paths) {
    if (Test-Path $path) {
        Remove-Item -Recurse -Force $path
        Write-Host "  Removed: $path" -ForegroundColor DarkGray
    }
}
# Recreate directories
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $EntitiesDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $CompiledModelsDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $ContextDir) | Out-Null

# Step 2: Scaffold DbContext and entities.
# Invoked directly (& dotnet @args) instead of Start-Process: the call
# operator passes each array element as its own argument, so no manual
# backtick-quote escaping is needed, tool output streams to the console,
# and $LASTEXITCODE is set reliably.
Write-Host "[2/4] Scaffolding DbContext and entities from schema '$Schema'..." -ForegroundColor Yellow
$scaffoldArgs = @(
    "ef", "dbcontext", "scaffold",
    $ConnectionString,
    "Npgsql.EntityFrameworkCore.PostgreSQL",
    "--project", $ProjectPath,
    "--schema", $Schema,
    "--context", "${Module}DbContext",
    "--context-dir", $ContextDir,
    "--output-dir", $EntitiesDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.Entities",
    "--context-namespace", "StellaOps.$Module.Persistence.EfCore.Context",
    "--data-annotations",
    "--no-onconfiguring",
    "--force"
)
& dotnet @scaffoldArgs
if ($LASTEXITCODE -ne 0) {
    Write-Error "Scaffold failed with exit code: $LASTEXITCODE"
    exit 1
}
Write-Host "  Scaffolded entities to: $EntitiesDir" -ForegroundColor DarkGray

# Step 3: Generate compiled models (faster DbContext startup).
Write-Host "[3/4] Generating compiled models..." -ForegroundColor Yellow
$optimizeArgs = @(
    "ef", "dbcontext", "optimize",
    "--project", $ProjectPath,
    "--context", "StellaOps.$Module.Persistence.EfCore.Context.${Module}DbContext",
    "--output-dir", $CompiledModelsDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.CompiledModels"
)
& dotnet @optimizeArgs
if ($LASTEXITCODE -ne 0) {
    Write-Error "Compiled model generation failed with exit code: $LASTEXITCODE"
    exit 1
}
Write-Host "  Generated compiled models to: $CompiledModelsDir" -ForegroundColor DarkGray

# Step 4: Summary
Write-Host "[4/4] Scaffolding complete!" -ForegroundColor Green
Write-Host ""
Write-Host "Generated files:" -ForegroundColor Cyan
$contextFile = Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs"
$entityFiles = Get-ChildItem -Path (Join-Path $ProjectPath $EntitiesDir) -Filter "*.cs" -ErrorAction SilentlyContinue
$compiledFiles = Get-ChildItem -Path (Join-Path $ProjectPath $CompiledModelsDir) -Filter "*.cs" -ErrorAction SilentlyContinue
Write-Host "  Context:         $(if (Test-Path $contextFile) { $contextFile } else { 'Not found' })"
Write-Host "  Entities:        $($entityFiles.Count) files"
Write-Host "  Compiled Models: $($compiledFiles.Count) files"
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Yellow
Write-Host "  1. Review generated entities for any customization needs"
Write-Host "  2. Create repository implementations in Repositories/"
Write-Host "  3. Add DI registration in Extensions/"
Write-Host ""

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env bash
# ============================================================================
# EF Core Scaffolding for All StellaOps Modules
# ============================================================================
# Iterates through all configured modules and runs scaffold-module.sh for each.
# Use this after schema changes or for initial setup.
#
# Usage: ./scaffold-all-modules.sh [--no-skip-missing]
#   --no-skip-missing  count modules without a project as failures instead of
#                      silently skipping them.
# ============================================================================
set -euo pipefail

SKIP_MISSING=true
if [ "${1:-}" = "--no-skip-missing" ]; then
  SKIP_MISSING=false
fi

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Module definitions: "Module:Schema"
MODULES=(
  "Unknowns:unknowns"
  "PacksRegistry:packs"
  "Authority:authority"
  "Scanner:scanner"
  "Scheduler:scheduler"
  "TaskRunner:taskrunner"
  "Policy:policy"
  "Notify:notify"
  "Concelier:vuln"
  "Excititor:vex"
  "Signals:signals"
  "Attestor:proofchain"
  "Signer:signer"
)

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for All Modules"
echo "============================================================================"
echo ""

SUCCESS_COUNT=0
SKIP_COUNT=0
FAIL_COUNT=0

for entry in "${MODULES[@]}"; do
  MODULE="${entry%%:*}"
  SCHEMA="${entry##*:}"
  PROJECT_PATH="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"

  # A glob inside [ -f ... ] errors out ("too many arguments") when more than
  # one .csproj matches; compgen -G tests whether the pattern matches at all.
  if ! compgen -G "$PROJECT_PATH/*.csproj" > /dev/null; then
    if [ "$SKIP_MISSING" = true ]; then
      echo "SKIP: $MODULE - Project not found"
      # NB: ((SKIP_COUNT++)) returns status 1 when the old value is 0, which
      # aborts the whole script under 'set -e' — use arithmetic assignment.
      SKIP_COUNT=$((SKIP_COUNT + 1))
      continue
    else
      echo "FAIL: $MODULE - Project not found at: $PROJECT_PATH"
      FAIL_COUNT=$((FAIL_COUNT + 1))
      continue
    fi
  fi

  echo ""
  echo ">>> Scaffolding $MODULE..."
  if "$SCRIPT_DIR/scaffold-module.sh" "$MODULE" "$SCHEMA"; then
    SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
  else
    echo "FAIL: $MODULE - Scaffolding failed"
    FAIL_COUNT=$((FAIL_COUNT + 1))
  fi
done

echo ""
echo "============================================================================"
echo " Summary"
echo "============================================================================"
echo " Success: $SUCCESS_COUNT"
echo " Skipped: $SKIP_COUNT"
echo " Failed:  $FAIL_COUNT"
echo ""

# Non-zero exit so CI callers can detect partial failure.
if [ "$FAIL_COUNT" -gt 0 ]; then
  exit 1
fi

View File

@@ -0,0 +1,113 @@
#!/usr/bin/env bash
# ============================================================================
# EF Core Scaffolding Script for StellaOps Modules
# ============================================================================
# Usage: ./scaffold-module.sh <Module> [Schema] [ConnectionString]
#
# Examples:
#   ./scaffold-module.sh Unknowns
#   ./scaffold-module.sh Unknowns unknowns
#   ./scaffold-module.sh PacksRegistry packs "Host=localhost;..."
# ============================================================================
set -euo pipefail

MODULE=${1:-}
# Schema defaults to the lowercased module name.
SCHEMA=${2:-$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')}
CONNECTION_STRING=${3:-}

if [ -z "$MODULE" ]; then
  echo "Usage: $0 <Module> [Schema] [ConnectionString]"
  echo ""
  echo "Examples:"
  echo "  $0 Unknowns"
  echo "  $0 Unknowns unknowns"
  echo "  $0 PacksRegistry packs \"Host=localhost;Database=stellaops_platform;Username=packs_user;Password=packs_dev\""
  exit 1
fi

# Resolve repository root (this script lives three levels below it).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Default dev connection string follows the <schema>_user / <schema>_dev
# convention. DB_USER avoids clobbering the standard $USER environment var.
if [ -z "$CONNECTION_STRING" ]; then
  DB_USER="${SCHEMA}_user"
  DB_PASSWORD="${SCHEMA}_dev"
  CONNECTION_STRING="Host=localhost;Port=5432;Database=stellaops_platform;Username=$DB_USER;Password=$DB_PASSWORD;SearchPath=$SCHEMA"
fi

PROJECT_DIR="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
CONTEXT_DIR="Context"
ENTITIES_DIR="Entities"
COMPILED_DIR="CompiledModels"

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for Module: $MODULE"
echo "============================================================================"
echo " Schema:     $SCHEMA"
echo " Project:    $PROJECT_DIR"
# Deliberately prints the connection WITHOUT the password.
echo " Connection: Host=localhost;Database=stellaops_platform;Username=${SCHEMA}_user;..."
echo ""

# Verify the project exists. A glob inside [ -f ... ] errors out when more
# than one .csproj matches; compgen -G tests the pattern safely.
if ! compgen -G "$PROJECT_DIR/*.csproj" > /dev/null; then
  echo "ERROR: Project not found at: $PROJECT_DIR" >&2
  echo "Create the project first with: dotnet new classlib -n StellaOps.$MODULE.Persistence.EfCore"
  exit 1
fi

# Step 1: Clean existing generated files so stale entities never survive a
# schema change.
echo "[1/4] Cleaning existing generated files..."
rm -rf "$PROJECT_DIR/$ENTITIES_DIR"
rm -rf "$PROJECT_DIR/$COMPILED_DIR"
rm -f "$PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
mkdir -p "$PROJECT_DIR/$ENTITIES_DIR"
mkdir -p "$PROJECT_DIR/$COMPILED_DIR"
mkdir -p "$PROJECT_DIR/$CONTEXT_DIR"
echo "  Cleaned: $ENTITIES_DIR, $COMPILED_DIR, ${MODULE}DbContext.cs"

# Step 2: Scaffold DbContext and entities ('set -e' aborts on tool failure).
echo "[2/4] Scaffolding DbContext and entities from schema '$SCHEMA'..."
dotnet ef dbcontext scaffold \
  "$CONNECTION_STRING" \
  Npgsql.EntityFrameworkCore.PostgreSQL \
  --project "$PROJECT_DIR" \
  --schema "$SCHEMA" \
  --context "${MODULE}DbContext" \
  --context-dir "$CONTEXT_DIR" \
  --output-dir "$ENTITIES_DIR" \
  --namespace "StellaOps.$MODULE.Persistence.EfCore.Entities" \
  --context-namespace "StellaOps.$MODULE.Persistence.EfCore.Context" \
  --data-annotations \
  --no-onconfiguring \
  --force
echo "  Scaffolded entities to: $ENTITIES_DIR"

# Step 3: Generate compiled models (faster DbContext startup).
echo "[3/4] Generating compiled models..."
dotnet ef dbcontext optimize \
  --project "$PROJECT_DIR" \
  --context "StellaOps.$MODULE.Persistence.EfCore.Context.${MODULE}DbContext" \
  --output-dir "$COMPILED_DIR" \
  --namespace "StellaOps.$MODULE.Persistence.EfCore.CompiledModels"
echo "  Generated compiled models to: $COMPILED_DIR"

# Step 4: Summary
echo "[4/4] Scaffolding complete!"
echo ""
echo "Generated files:"
echo "  Context:         $PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
echo "  Entities:        $(ls -1 "$PROJECT_DIR/$ENTITIES_DIR"/*.cs 2>/dev/null | wc -l) files"
echo "  Compiled Models: $(ls -1 "$PROJECT_DIR/$COMPILED_DIR"/*.cs 2>/dev/null | wc -l) files"
echo ""
echo "Next steps:"
echo "  1. Review generated entities for any customization needs"
echo "  2. Create repository implementations in Repositories/"
echo "  3. Add DI registration in Extensions/"
echo ""

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env pwsh
# fix-duplicate-packages.ps1 - Remove duplicate PackageReference items from test projects
# These are already provided by Directory.Build.props
#
# Usage: ./fix-duplicate-packages.ps1 [-DryRun]
#   -DryRun: report which projects would change without writing anything.
param([switch]$DryRun)
# Test packages supplied centrally by Directory.Build.props; per-project
# copies of these are duplicates and are stripped below.
$packagesToRemove = @(
    "coverlet.collector",
    "Microsoft.NET.Test.Sdk",
    "Microsoft.AspNetCore.Mvc.Testing",
    "xunit",
    "xunit.runner.visualstudio",
    "Microsoft.Extensions.TimeProvider.Testing"
)
# SharpCompress is only deduplicated in corpus projects (second loop below).
$sharpCompressPackage = "SharpCompress"
# Find all test project files
# NOTE(review): "src" is a relative path — assumes the script is invoked from
# the repository root; confirm before wiring into automation.
$testProjects = Get-ChildItem -Path "src" -Filter "*.Tests.csproj" -Recurse
$corpusProjects = Get-ChildItem -Path "src" -Filter "*.Corpus.*.csproj" -Recurse
Write-Host "=== Fix Duplicate Package References ===" -ForegroundColor Cyan
Write-Host "Found $($testProjects.Count) test projects" -ForegroundColor Yellow
Write-Host "Found $($corpusProjects.Count) corpus projects (SharpCompress)" -ForegroundColor Yellow
$fixedCount = 0
foreach ($proj in $testProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false
    # Skip projects that opt out of common test infrastructure
    if ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") {
        Write-Host "  Skipped (UseConcelierTestInfra=false): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }
    foreach ($pkg in $packagesToRemove) {
        # Match PackageReference for this package (various formats):
        # self-closing (<PackageReference ... />) and an open/close pair with
        # an empty body. Trailing \r?\n? also swallows the line break.
        $patterns = @(
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
        )
        foreach ($pattern in $patterns) {
            if ($content -match $pattern) {
                # -replace removes every occurrence of the pattern.
                $content = $content -replace $pattern, ""
                $modified = $true
            }
        }
    }
    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    # Clean up ItemGroups with only whitespace/comments
    # NOTE(review): [^-]* stops at the first '-', so comments containing a
    # hyphen are not matched — confirm that is acceptable.
    $content = $content -replace "(?s)<ItemGroup>\s*<!--[^-]*-->\s*</ItemGroup>", ""
    if ($modified) {
        $fixedCount++
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            # NOTE(review): empty-ItemGroup cleanup alone does not set
            # $modified, so files changed only by cleanup are not rewritten.
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}
# Fix SharpCompress in corpus projects (same mechanics as above, single package).
foreach ($proj in $corpusProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false
    $patterns = @(
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
    )
    foreach ($pattern in $patterns) {
        if ($content -match $pattern) {
            $content = $content -replace $pattern, ""
            $modified = $true
        }
    }
    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    if ($modified) {
        $fixedCount++
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
if ($DryRun) {
    Write-Host "(Dry run - no changes made)" -ForegroundColor Yellow
}

View File

@@ -0,0 +1,55 @@
# Fix duplicate "using StellaOps.TestKit;" statements in C# files
# The pattern shows files have this statement both at top (correct) and in middle (wrong)
# This script removes all occurrences AFTER the first one
$ErrorActionPreference = "Stop"
# src/ resolved relative to this script's location (two levels below repo root).
$srcPath = Join-Path $PSScriptRoot "..\..\src"
# The exact using-directive being deduplicated (compared after Trim()).
$pattern = "using StellaOps.TestKit;"
# Find all .cs files containing the pattern
# (Each candidate file is read fully once here and line-by-line again below;
# acceptable for a one-off maintenance pass.)
$files = Get-ChildItem -Path $srcPath -Recurse -Filter "*.cs" |
    Where-Object { (Get-Content $_.FullName -Raw) -match [regex]::Escape($pattern) }
Write-Host "Found $($files.Count) files with 'using StellaOps.TestKit;'" -ForegroundColor Cyan
$fixedCount = 0
$errorCount = 0
foreach ($file in $files) {
    try {
        # Single pass over the lines: keep the first exact occurrence of the
        # directive, drop every later one, copy all other lines unchanged.
        $lines = Get-Content $file.FullName
        $newLines = @()
        $foundFirst = $false
        $removedAny = $false
        foreach ($line in $lines) {
            if ($line.Trim() -eq $pattern) {
                if (-not $foundFirst) {
                    # Keep the first occurrence
                    $newLines += $line
                    $foundFirst = $true
                } else {
                    # Skip subsequent occurrences
                    $removedAny = $true
                }
            } else {
                $newLines += $line
            }
        }
        # Only rewrite files that actually lost a line.
        if ($removedAny) {
            # NOTE(review): -Encoding UTF8 writes a BOM under Windows
            # PowerShell 5.1 — confirm that is acceptable for these sources.
            $newLines | Set-Content -Path $file.FullName -Encoding UTF8
            Write-Host "Fixed: $($file.Name)" -ForegroundColor Green
            $fixedCount++
        }
    } catch {
        # Keep going on per-file failures; totals are reported at the end.
        Write-Host "Error processing $($file.FullName): $_" -ForegroundColor Red
        $errorCount++
    }
}
Write-Host ""
Write-Host "Summary:" -ForegroundColor Cyan
Write-Host "  Files fixed: $fixedCount" -ForegroundColor Green
Write-Host "  Errors: $errorCount" -ForegroundColor $(if ($errorCount -gt 0) { "Red" } else { "Green" })

View File

@@ -0,0 +1,51 @@
# Fix projects with UseConcelierTestInfra=false that don't have xunit
# These projects relied on TestKit for xunit, but now need their own reference
$ErrorActionPreference = "Stop"
# Resolve src/ relative to this script (two levels below the repo root),
# matching the sibling scripts, instead of a machine-specific absolute path.
$srcPath = Join-Path $PSScriptRoot "..\..\src"
# Find test projects with UseConcelierTestInfra=false that lack both
# xunit.v3 and a plain xunit reference.
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        (-not ($content -match "xunit\.v3")) -and # Skip xunit.v3 projects
        (-not ($content -match '<PackageReference\s+Include="xunit"')) # Skip projects that already have xunit
    }
Write-Host "Found $($projects.Count) projects needing xunit" -ForegroundColor Cyan
# Package block to inject (versions mirror the common test infrastructure).
$xunitPackages = @'
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
'@
$fixedCount = 0
foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw
    # Check if it has an ItemGroup with PackageReference
    if ($content -match '(<ItemGroup>[\s\S]*?<PackageReference)') {
        # Insert after the FIRST such ItemGroup opening only. The -replace
        # operator rewrites EVERY match, which would duplicate the packages
        # in projects with several PackageReference ItemGroups, so use
        # [regex]::Replace with a replacement count of 1.
        $insertRegex = [regex]'(<ItemGroup>\s*\r?\n)(\s*<PackageReference)'
        $newContent = $insertRegex.Replace($content, "`$1$xunitPackages`n`$2", 1)
    } else {
        # No PackageReference ItemGroup; add one before the (single) </Project>.
        $itemGroup = @"
  <ItemGroup>
$xunitPackages
  </ItemGroup>
"@
        $newContent = $content -replace '</Project>', "$itemGroup`n</Project>"
    }
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,44 @@
# Fix project references in src/__Tests/** that point to wrong relative paths
# Pattern: ../../<Module>/... should be ../../../<Module>/...
$ErrorActionPreference = "Stop"
# Resolve the tests folder relative to this script (two levels below the repo
# root), matching the sibling scripts, instead of a machine-specific path.
$testsPath = Join-Path $PSScriptRoot "..\..\src\__Tests"
# Known module prefixes that exist at src/<Module>/
$modules = @("Signals", "Scanner", "Concelier", "Scheduler", "Authority", "Attestor",
    "BinaryIndex", "EvidenceLocker", "Excititor", "ExportCenter", "Gateway",
    "Graph", "IssuerDirectory", "Notify", "Orchestrator", "Policy", "AirGap",
    "Provenance", "Replay", "RiskEngine", "SbomService", "Signer", "TaskRunner",
    "Telemetry", "TimelineIndexer", "Unknowns", "VexHub", "VexLens", "VulnExplorer",
    "Zastava", "Cli", "Aoc", "Web", "Bench", "Cryptography", "PacksRegistry",
    "Notifier", "Findings")
$fixedCount = 0
Get-ChildItem -Path $testsPath -Recurse -Filter "*.csproj" | ForEach-Object {
    $proj = $_
    $content = Get-Content $proj.FullName -Raw
    $originalContent = $content
    foreach ($module in $modules) {
        # Fix ../../<Module>/ to ../../../<Module>/
        # (already-correct ../../../<Module>/ refs cannot match the pattern,
        # since the character after Include="../../ is '.' there, not the
        # module name — so the replace cannot double-fix them)
        # NOTE(review): the -notmatch guard skips the whole file for this
        # module whenever ANY correct ../../../<Module>/ ref is present,
        # leaving mixed files partially unfixed — confirm that is intended.
        $pattern = "Include=`"../../$module/"
        $replacement = "Include=`"../../../$module/"
        if ($content -match [regex]::Escape($pattern) -and $content -notmatch [regex]::Escape("Include=`"../../../$module/")) {
            $content = $content -replace [regex]::Escape($pattern), $replacement
        }
    }
    # Fix __Libraries references that are one level short
    $content = $content -replace 'Include="../../__Libraries/', 'Include="../../../__Libraries/'
    if ($content -ne $originalContent) {
        Set-Content -Path $proj.FullName -Value $content -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,68 @@
#!/usr/bin/env pwsh
# fix-sln-duplicates.ps1 - Remove duplicate project entries from solution file
#
# A project counts as a duplicate when its NAME has been seen before: the
# first declaration wins; later ones are dropped together with every
# GlobalSection line that mentions their GUID.
param(
    [string]$SlnPath = "src/StellaOps.sln"
)
$ErrorActionPreference = "Stop"
Write-Host "=== Solution Duplicate Cleanup ===" -ForegroundColor Cyan
Write-Host "Solution: $SlnPath"
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Track seen project names
$seenProjects = @{}
$duplicateGuids = @()
$newLines = @()
$skipNext = $false
for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]
    if ($skipNext) {
        $skipNext = $false
        continue
    }
    # Check for project declaration
    # NOTE(review): assumes uppercase GUIDs ([A-F0-9-]) and that the matching
    # 'EndProject' is the very next line (i.e. duplicate entries contain no
    # ProjectSection blocks) — verify against the actual .sln before running.
    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]
        if ($seenProjects.ContainsKey($name)) {
            Write-Host "Removing duplicate: $name ($guid)" -ForegroundColor Yellow
            $duplicateGuids += $guid
            # Skip this line and the next EndProject line
            $skipNext = $true
            continue
        } else {
            $seenProjects[$name] = $true
        }
    }
    $newLines += $line
}
# Remove GlobalSection references to duplicate GUIDs
# (configuration-platform and nested-project mappings, etc.)
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        # Plain -match is safe here: GUID text has no regex metacharacters
        # ('-' is literal outside a character class).
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}
# Write back with CRLF line endings (the .sln convention).
# NOTE(review): -Encoding UTF8 emits a BOM under Windows PowerShell 5.1 —
# confirm that matches the existing file's encoding.
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host ""
Write-Host "Removed $($duplicateGuids.Count) duplicate projects" -ForegroundColor Green

View File

@@ -0,0 +1,40 @@
# Add <Using Include="Xunit" /> to test projects with UseConcelierTestInfra=false
# that have xunit but don't have the global using
$ErrorActionPreference = "Stop"
# Resolve src/ relative to this script (two levels below the repo root),
# matching the sibling scripts, instead of a machine-specific absolute path.
$srcPath = Join-Path $PSScriptRoot "..\..\src"
# Find test projects with UseConcelierTestInfra=false that have xunit but no Using Include="Xunit"
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        ($content -match '<PackageReference\s+Include="xunit"') -and
        (-not ($content -match '<Using\s+Include="Xunit"'))
    }
Write-Host "Found $($projects.Count) projects needing Xunit using" -ForegroundColor Cyan
$fixedCount = 0
foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw
    # Insert the <Using> ItemGroup before the FIRST ProjectReference
    # ItemGroup. The -replace operator rewrites EVERY match, which would
    # insert the block repeatedly when several ItemGroups hold
    # ProjectReferences, so use [regex]::Replace with a count of 1.
    if ($content -match '(<ItemGroup>\s*\r?\n\s*<ProjectReference)') {
        $usingBlock = "  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n`n"
        $insertRegex = [regex]'(\s*)(<ItemGroup>\s*\r?\n\s*<ProjectReference)'
        $newContent = $insertRegex.Replace($content, "$usingBlock`$1`$2", 1)
    } else {
        # No ProjectReference ItemGroup; append before the single </Project>.
        $usingBlock = "`n  <ItemGroup>`n    <Using Include=`"Xunit`" />`n  </ItemGroup>`n"
        $newContent = $content -replace '</Project>', "$usingBlock</Project>"
    }
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,37 @@
# Fix xunit.v3 projects that conflict with Directory.Build.props xunit 2.x
# Add UseConcelierTestInfra=false to exclude them from common test infrastructure
$ErrorActionPreference = "Stop"
$srcPath = Join-Path $PSScriptRoot "..\..\src"
# Find all csproj files that reference xunit.v3
$xunitV3Projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object { (Get-Content $_.FullName -Raw) -match "xunit\.v3" }
Write-Host "Found $($xunitV3Projects.Count) projects with xunit.v3" -ForegroundColor Cyan
$fixedCount = 0
foreach ($proj in $xunitV3Projects) {
    $content = Get-Content $proj.FullName -Raw
    # Check if already has UseConcelierTestInfra set (idempotent rerun)
    if ($content -match "<UseConcelierTestInfra>") {
        Write-Host "  Skipped (already configured): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }
    # Insert UseConcelierTestInfra=false after the FIRST <PropertyGroup>
    # only. csproj files routinely contain several PropertyGroups and the
    # -replace operator rewrites ALL of them, which would add the property
    # repeatedly — [regex]::Replace with a count of 1 limits it to the first.
    $newContent = ([regex]'<PropertyGroup>').Replace(
        $content,
        "<PropertyGroup>`n    <UseConcelierTestInfra>false</UseConcelierTestInfra>",
        1)
    # Only write if changed
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "  Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}
Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan

View File

@@ -0,0 +1,247 @@
<#
.SYNOPSIS
Generates plugin configuration files for StellaOps modules.
.DESCRIPTION
This script generates plugin.json manifests and config.yaml files for all
plugins based on the plugin catalog definition.
.PARAMETER RepoRoot
Path to the repository root. Defaults to the parent of the devops folder.
.PARAMETER OutputDir
Output directory for generated configs. Defaults to etc/plugins/.
.PARAMETER Force
Overwrite existing configuration files.
.EXAMPLE
.\generate-plugin-configs.ps1
.\generate-plugin-configs.ps1 -Force
#>
param(
[string]$RepoRoot = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
[string]$OutputDir = "",
[switch]$Force
)
if (-not $OutputDir) {
$OutputDir = Join-Path $RepoRoot "etc/plugins"
}
# Plugin catalog - defines all plugins and their metadata
$PluginCatalog = @{
# Router transports
"router/transports" = @{
category = "router.transports"
plugins = @(
@{ id = "tcp"; name = "TCP Transport"; assembly = "StellaOps.Router.Transport.Tcp.dll"; enabled = $true; priority = 50 }
@{ id = "tls"; name = "TLS Transport"; assembly = "StellaOps.Router.Transport.Tls.dll"; enabled = $true; priority = 60 }
@{ id = "udp"; name = "UDP Transport"; assembly = "StellaOps.Router.Transport.Udp.dll"; enabled = $false; priority = 40 }
@{ id = "rabbitmq"; name = "RabbitMQ Transport"; assembly = "StellaOps.Router.Transport.RabbitMq.dll"; enabled = $false; priority = 30 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Router.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
# Excititor connectors
"excititor" = @{
category = "excititor.connectors"
plugins = @(
@{ id = "redhat-csaf"; name = "Red Hat CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.RedHat.CSAF.dll"; enabled = $true; priority = 100; vendor = "Red Hat" }
@{ id = "cisco-csaf"; name = "Cisco CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Cisco.CSAF.dll"; enabled = $false; priority = 90; vendor = "Cisco" }
@{ id = "msrc-csaf"; name = "Microsoft CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.MSRC.CSAF.dll"; enabled = $false; priority = 85; vendor = "Microsoft" }
@{ id = "oracle-csaf"; name = "Oracle CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Oracle.CSAF.dll"; enabled = $false; priority = 80; vendor = "Oracle" }
@{ id = "ubuntu-csaf"; name = "Ubuntu CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.dll"; enabled = $false; priority = 75; vendor = "Canonical" }
@{ id = "suse-rancher"; name = "SUSE Rancher VEX Hub"; assembly = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.dll"; enabled = $false; priority = 70; vendor = "SUSE" }
@{ id = "oci-openvex"; name = "OCI OpenVEX Connector"; assembly = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.dll"; enabled = $false; priority = 60 }
)
}
# Scanner language analyzers
"scanner/analyzers/lang" = @{
category = "scanner.analyzers.lang"
plugins = @(
@{ id = "dotnet"; name = ".NET Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.DotNet.dll"; enabled = $true; priority = 100 }
@{ id = "go"; name = "Go Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Go.dll"; enabled = $true; priority = 95 }
@{ id = "node"; name = "Node.js Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Node.dll"; enabled = $true; priority = 90 }
@{ id = "python"; name = "Python Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll"; enabled = $true; priority = 85 }
@{ id = "java"; name = "Java Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Java.dll"; enabled = $true; priority = 80 }
@{ id = "rust"; name = "Rust Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Rust.dll"; enabled = $false; priority = 75 }
@{ id = "ruby"; name = "Ruby Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Ruby.dll"; enabled = $false; priority = 70 }
@{ id = "php"; name = "PHP Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Php.dll"; enabled = $false; priority = 65 }
@{ id = "swift"; name = "Swift Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Swift.dll"; enabled = $false; priority = 60 }
@{ id = "cpp"; name = "C/C++ Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Cpp.dll"; enabled = $false; priority = 55 }
)
}
# Scanner OS analyzers
"scanner/analyzers/os" = @{
category = "scanner.analyzers.os"
plugins = @(
@{ id = "apk"; name = "Alpine APK Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Apk.dll"; enabled = $true; priority = 100 }
@{ id = "dpkg"; name = "Debian DPKG Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Dpkg.dll"; enabled = $true; priority = 95 }
@{ id = "rpm"; name = "RPM Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Rpm.dll"; enabled = $true; priority = 90 }
@{ id = "pacman"; name = "Arch Pacman Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Pacman.dll"; enabled = $false; priority = 80 }
@{ id = "homebrew"; name = "Homebrew Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Homebrew.dll"; enabled = $false; priority = 70 }
@{ id = "chocolatey"; name = "Chocolatey Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Chocolatey.dll"; enabled = $false; priority = 65 }
)
}
# Notify channels
"notify" = @{
category = "notify.channels"
plugins = @(
@{ id = "email"; name = "Email Notifier"; assembly = "StellaOps.Notify.Connectors.Email.dll"; enabled = $true; priority = 100 }
@{ id = "slack"; name = "Slack Notifier"; assembly = "StellaOps.Notify.Connectors.Slack.dll"; enabled = $true; priority = 90 }
@{ id = "webhook"; name = "Webhook Notifier"; assembly = "StellaOps.Notify.Connectors.Webhook.dll"; enabled = $true; priority = 80 }
@{ id = "teams"; name = "Microsoft Teams Notifier"; assembly = "StellaOps.Notify.Connectors.Teams.dll"; enabled = $false; priority = 85 }
@{ id = "pagerduty"; name = "PagerDuty Notifier"; assembly = "StellaOps.Notify.Connectors.PagerDuty.dll"; enabled = $false; priority = 75 }
@{ id = "opsgenie"; name = "OpsGenie Notifier"; assembly = "StellaOps.Notify.Connectors.OpsGenie.dll"; enabled = $false; priority = 70 }
@{ id = "telegram"; name = "Telegram Notifier"; assembly = "StellaOps.Notify.Connectors.Telegram.dll"; enabled = $false; priority = 65 }
@{ id = "discord"; name = "Discord Notifier"; assembly = "StellaOps.Notify.Connectors.Discord.dll"; enabled = $false; priority = 60 }
)
}
# Messaging transports
"messaging" = @{
category = "messaging.transports"
plugins = @(
@{ id = "valkey"; name = "Valkey Transport"; assembly = "StellaOps.Messaging.Transport.Valkey.dll"; enabled = $true; priority = 100 }
@{ id = "postgres"; name = "PostgreSQL Transport"; assembly = "StellaOps.Messaging.Transport.Postgres.dll"; enabled = $false; priority = 90 }
@{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Messaging.Transport.InMemory.dll"; enabled = $false; priority = 10 }
)
}
}
# Build a v2 plugin manifest (plugin.json content) as a JSON string for one
# catalog entry.
function New-PluginManifest {
    param(
        [string]$ModulePath,   # module directory key; currently unused, kept for caller compatibility
        [hashtable]$Plugin,    # catalog entry: id, name, assembly, enabled, priority, optional vendor
        [string]$Category      # dotted category, e.g. "scanner.analyzers.lang"
    )
    # Canonical id: "stellaops.<category-with-dashes>.<plugin-id>".
    # Note the chained replaces: '/' -> '.' first, then every '.' -> '-'.
    $fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"
    $manifest = @{
        '$schema' = "https://schema.stella-ops.org/plugin-manifest/v2.json"
        schemaVersion = "2.0"
        id = $fullId
        name = $Plugin.name
        version = "1.0.0"
        assembly = @{
            path = $Plugin.assembly
        }
        capabilities = @()
        platforms = @("linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64")
        compliance = @("NIST")
        jurisdiction = "world"
        priority = $Plugin.priority
        enabled = $Plugin.enabled
        metadata = @{
            author = "StellaOps"
            license = "AGPL-3.0-or-later"
        }
    }
    # Vendor is optional; only emit it when the catalog entry defines one.
    if ($Plugin.vendor) {
        $manifest.metadata["vendor"] = $Plugin.vendor
    }
    # NOTE(review): @{} does not preserve key order, so JSON key order may vary
    # between runs — switch to [ordered]@{} if deterministic output is required.
    return $manifest | ConvertTo-Json -Depth 10
}
# Render the per-plugin config.yaml stub for one catalog entry.
function New-PluginConfig {
    param(
        [string]$ModulePath,   # module directory key; currently unused, kept for caller compatibility
        [hashtable]$Plugin,
        [string]$Category
    )
    # Same id derivation as New-PluginManifest so manifest and config agree.
    $fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"
    # Here-string: the closing "@ must be at the start of its line.
    $config = @"
id: $fullId
name: $($Plugin.name)
enabled: $($Plugin.enabled.ToString().ToLower())
priority: $($Plugin.priority)
config:
  # Plugin-specific configuration
  # Add settings here as needed
"@
    return $config
}
# Render registry.yaml for one category: per-plugin enabled/priority flags plus
# a pointer to each plugin's config.yaml.
function New-RegistryFile {
    param(
        [string]$Category,
        [array]$Plugins
    )
    # One indented YAML mapping per plugin, keyed by plugin id.
    $entries = $Plugins | ForEach-Object {
        "  $($_.id):`n    enabled: $($_.enabled.ToString().ToLower())`n    priority: $($_.priority)`n    config: $($_.id)/config.yaml"
    }
    $registry = @"
version: "1.0"
category: $Category
defaults:
  enabled: false
  timeout: "00:05:00"
plugins:
$($entries -join "`n")
"@
    return $registry
}
# ----------------------------------------------------------------------------
# Main generation logic: for every module in $PluginCatalog, emit
# registry.yaml plus per-plugin plugin.json and config.yaml under $OutputDir.
# Existing files are preserved unless $Force is set.
# ----------------------------------------------------------------------------
Write-Host "Generating plugin configurations to: $OutputDir" -ForegroundColor Cyan
foreach ($modulePath in $PluginCatalog.Keys) {
    $moduleConfig = $PluginCatalog[$modulePath]
    $moduleDir = Join-Path $OutputDir $modulePath
    Write-Host "Processing module: $modulePath" -ForegroundColor Yellow
    # Create module directory
    if (-not (Test-Path $moduleDir)) {
        New-Item -ItemType Directory -Path $moduleDir -Force | Out-Null
    }
    # Generate registry.yaml (one per module/category)
    $registryPath = Join-Path $moduleDir "registry.yaml"
    if ($Force -or -not (Test-Path $registryPath)) {
        $registryContent = New-RegistryFile -Category $moduleConfig.category -Plugins $moduleConfig.plugins
        Set-Content -Path $registryPath -Value $registryContent -Encoding utf8
        Write-Host "  Created: registry.yaml" -ForegroundColor Green
    }
    # Generate per-plugin manifest + config files
    foreach ($plugin in $moduleConfig.plugins) {
        $pluginDir = Join-Path $moduleDir $plugin.id
        if (-not (Test-Path $pluginDir)) {
            New-Item -ItemType Directory -Path $pluginDir -Force | Out-Null
        }
        # plugin.json
        $manifestPath = Join-Path $pluginDir "plugin.json"
        if ($Force -or -not (Test-Path $manifestPath)) {
            $manifestContent = New-PluginManifest -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $manifestPath -Value $manifestContent -Encoding utf8
            Write-Host "  Created: $($plugin.id)/plugin.json" -ForegroundColor Green
        }
        # config.yaml
        $configPath = Join-Path $pluginDir "config.yaml"
        if ($Force -or -not (Test-Path $configPath)) {
            $configContent = New-PluginConfig -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
            Set-Content -Path $configPath -Value $configContent -Encoding utf8
            Write-Host "  Created: $($plugin.id)/config.yaml" -ForegroundColor Green
        }
    }
}
Write-Host "`nPlugin configuration generation complete!" -ForegroundColor Cyan

View File

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Shared Exit Codes Registry
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard exit codes for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/exit-codes.sh"
#
# Exit codes follow POSIX conventions (0-125)
# 126-127 reserved for shell errors
# 128+ reserved for signal handling
# Prevent multiple sourcing (return is valid here because this file is meant
# to be sourced, not executed).
if [[ -n "${__STELLAOPS_EXIT_CODES_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_EXIT_CODES_LOADED=1
# ============================================================================
# Standard Exit Codes
# ============================================================================
# Success
export EXIT_SUCCESS=0
# General errors (1-9)
export EXIT_ERROR=1              # Generic error
export EXIT_USAGE=2              # Invalid usage/arguments
export EXIT_CONFIG_ERROR=3       # Configuration error
export EXIT_NOT_FOUND=4          # File/resource not found
export EXIT_PERMISSION=5         # Permission denied
export EXIT_IO_ERROR=6           # I/O error
export EXIT_NETWORK_ERROR=7      # Network error
export EXIT_TIMEOUT=8            # Operation timed out
export EXIT_INTERRUPTED=9        # User interrupted (Ctrl+C)
# Tool/dependency errors (10-19)
export EXIT_MISSING_TOOL=10      # Required tool not installed
export EXIT_TOOL_ERROR=11        # Tool execution failed
export EXIT_VERSION_MISMATCH=12  # Wrong tool version
export EXIT_DEPENDENCY_ERROR=13  # Dependency resolution failed
# Build errors (20-29)
export EXIT_BUILD_FAILED=20      # Build compilation failed
export EXIT_RESTORE_FAILED=21    # Package restore failed
export EXIT_PUBLISH_FAILED=22    # Publish failed
export EXIT_PACKAGING_FAILED=23  # Packaging failed
# Test errors (30-39)
export EXIT_TEST_FAILED=30       # Tests failed
export EXIT_TEST_TIMEOUT=31      # Test timed out
export EXIT_FIXTURE_ERROR=32     # Test fixture error
export EXIT_DETERMINISM_FAIL=33  # Determinism check failed
# Deployment errors (40-49)
export EXIT_DEPLOY_FAILED=40     # Deployment failed
export EXIT_ROLLBACK_FAILED=41   # Rollback failed
export EXIT_HEALTH_CHECK_FAIL=42 # Health check failed
export EXIT_REGISTRY_ERROR=43    # Container registry error
# Validation errors (50-59)
export EXIT_VALIDATION_FAILED=50 # General validation failed
export EXIT_SCHEMA_ERROR=51      # Schema validation failed
export EXIT_LINT_ERROR=52        # Lint check failed
export EXIT_FORMAT_ERROR=53      # Format check failed
export EXIT_LICENSE_ERROR=54     # License compliance failed
# Security errors (60-69)
export EXIT_SECURITY_ERROR=60    # Security check failed
export EXIT_SECRETS_FOUND=61     # Secrets detected in code
export EXIT_VULN_FOUND=62        # Vulnerabilities found
export EXIT_SIGN_FAILED=63       # Signing failed
export EXIT_VERIFY_FAILED=64     # Verification failed
# Git/VCS errors (70-79)
export EXIT_GIT_ERROR=70         # Git operation failed
export EXIT_DIRTY_WORKTREE=71    # Uncommitted changes
export EXIT_MERGE_CONFLICT=72    # Merge conflict
export EXIT_BRANCH_ERROR=73      # Branch operation failed
# Reserved for specific tools (80-99)
export EXIT_DOTNET_ERROR=80      # .NET specific error
export EXIT_DOCKER_ERROR=81      # Docker specific error
export EXIT_HELM_ERROR=82        # Helm specific error
export EXIT_KUBECTL_ERROR=83     # kubectl specific error
export EXIT_NPM_ERROR=84         # npm specific error
export EXIT_PYTHON_ERROR=85      # Python specific error
# Legacy compatibility (NB: 69 overlaps the security range above by design)
export EXIT_TOOLCHAIN=69         # Tool not found (legacy, use EXIT_MISSING_TOOL)
# ============================================================================
# Helper Functions
# ============================================================================
# Translate a numeric exit status into its symbolic name.
# Codes 128-255 decode to SIGNAL_<n>; anything unmapped becomes UNKNOWN_<code>.
exit_code_name() {
  local code="${1:-}"
  local label=""
  case "$code" in
    0) label="SUCCESS" ;;
    1) label="ERROR" ;;
    2) label="USAGE" ;;
    3) label="CONFIG_ERROR" ;;
    4) label="NOT_FOUND" ;;
    5) label="PERMISSION" ;;
    6) label="IO_ERROR" ;;
    7) label="NETWORK_ERROR" ;;
    8) label="TIMEOUT" ;;
    9) label="INTERRUPTED" ;;
    10) label="MISSING_TOOL" ;;
    11) label="TOOL_ERROR" ;;
    12) label="VERSION_MISMATCH" ;;
    13) label="DEPENDENCY_ERROR" ;;
    20) label="BUILD_FAILED" ;;
    21) label="RESTORE_FAILED" ;;
    22) label="PUBLISH_FAILED" ;;
    23) label="PACKAGING_FAILED" ;;
    30) label="TEST_FAILED" ;;
    31) label="TEST_TIMEOUT" ;;
    32) label="FIXTURE_ERROR" ;;
    33) label="DETERMINISM_FAIL" ;;
    40) label="DEPLOY_FAILED" ;;
    41) label="ROLLBACK_FAILED" ;;
    42) label="HEALTH_CHECK_FAIL" ;;
    43) label="REGISTRY_ERROR" ;;
    50) label="VALIDATION_FAILED" ;;
    51) label="SCHEMA_ERROR" ;;
    52) label="LINT_ERROR" ;;
    53) label="FORMAT_ERROR" ;;
    54) label="LICENSE_ERROR" ;;
    60) label="SECURITY_ERROR" ;;
    61) label="SECRETS_FOUND" ;;
    62) label="VULN_FOUND" ;;
    63) label="SIGN_FAILED" ;;
    64) label="VERIFY_FAILED" ;;
    69) label="TOOLCHAIN (legacy)" ;;
    70) label="GIT_ERROR" ;;
    71) label="DIRTY_WORKTREE" ;;
    72) label="MERGE_CONFLICT" ;;
    73) label="BRANCH_ERROR" ;;
    80) label="DOTNET_ERROR" ;;
    81) label="DOCKER_ERROR" ;;
    82) label="HELM_ERROR" ;;
    83) label="KUBECTL_ERROR" ;;
    84) label="NPM_ERROR" ;;
    85) label="PYTHON_ERROR" ;;
    126) label="COMMAND_NOT_EXECUTABLE" ;;
    127) label="COMMAND_NOT_FOUND" ;;
  esac
  if [[ -z "$label" ]]; then
    # 128+N conventionally means "terminated by signal N".
    if [[ "$code" -ge 128 ]] && [[ "$code" -le 255 ]]; then
      label="SIGNAL_$((code - 128))"
    else
      label="UNKNOWN_${code}"
    fi
  fi
  printf '%s\n' "$label"
}
# Predicate: status indicates success (0). Missing argument counts as failure.
is_success() {
  (( "${1:-1}" == 0 ))
}
# Predicate: status indicates failure (non-zero). Missing argument counts as success.
is_error() {
  (( "${1:-0}" != 0 ))
}
# Print an optional message to stderr, then exit with the given status.
# Usage: exit_with <code> [message...]
exit_with() {
  local status="${1:-1}"
  shift || true
  if (( $# > 0 )); then
    printf '%s\n' "$*" >&2
  fi
  exit "$status"
}

View File

@@ -0,0 +1,262 @@
#!/usr/bin/env bash
# Shared Git Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Common git operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/git-utils.sh"
# Prevent multiple sourcing (return is valid because this library is sourced).
if [[ -n "${__STELLAOPS_GIT_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_GIT_UTILS_LOADED=1
# Source dependencies (best effort; functions degrade gracefully without them).
# NOTE(review): SCRIPT_DIR is a global shared by sibling libs — last sourced wins.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Repository Information
# ============================================================================
# Get repository root directory (falls back to "." outside a repo).
git_root() {
  git rev-parse --show-toplevel 2>/dev/null || echo "."
}
# Check if current directory is inside a git repository.
is_git_repo() {
  git rev-parse --git-dir >/dev/null 2>&1
}
# Get current commit SHA (full).
git_sha() {
  git rev-parse HEAD 2>/dev/null
}
# Get current commit SHA (short).
git_sha_short() {
  git rev-parse --short HEAD 2>/dev/null
}
# Get current branch name (prints "HEAD" when detached).
git_branch() {
  git rev-parse --abbrev-ref HEAD 2>/dev/null
}
# Get current tag if HEAD is exactly tagged, else empty string.
git_tag() {
  git describe --tags --exact-match HEAD 2>/dev/null || echo ""
}
# Get latest reachable tag, else empty string.
git_latest_tag() {
  git describe --tags --abbrev=0 2>/dev/null || echo ""
}
# Get remote URL (default remote: origin).
git_remote_url() {
  local remote="${1:-origin}"
  git remote get-url "$remote" 2>/dev/null
}
# Get repository name derived from the remote URL (strips a ".git" suffix).
git_repo_name() {
  local url
  url=$(git_remote_url "${1:-origin}")
  basename "$url" .git
}
# ============================================================================
# Commit Information
# ============================================================================
# Get commit subject line (default ref: HEAD).
git_commit_message() {
  local sha="${1:-HEAD}"
  git log -1 --format="%s" "$sha" 2>/dev/null
}
# Get commit author name.
git_commit_author() {
  local sha="${1:-HEAD}"
  git log -1 --format="%an" "$sha" 2>/dev/null
}
# Get commit author email.
git_commit_author_email() {
  local sha="${1:-HEAD}"
  git log -1 --format="%ae" "$sha" 2>/dev/null
}
# Get commit author timestamp (strict ISO 8601, %aI).
git_commit_timestamp() {
  local sha="${1:-HEAD}"
  git log -1 --format="%aI" "$sha" 2>/dev/null
}
# Get commit author timestamp (Unix epoch, %at).
git_commit_epoch() {
  local sha="${1:-HEAD}"
  git log -1 --format="%at" "$sha" 2>/dev/null
}
# ============================================================================
# Working Tree State
# ============================================================================
# Check if working tree is clean (no staged, unstaged, or untracked changes).
git_is_clean() {
  [[ -z "$(git status --porcelain 2>/dev/null)" ]]
}
# Check if working tree has any changes.
git_is_dirty() {
  ! git_is_clean
}
# Get list of changed files (staged, unstaged, and untracked), one per line.
# Fix: `awk '{print $2}'` truncated paths containing spaces and mangled rename
# entries ("R  old -> new"); strip the fixed 3-character status prefix instead.
# NOTE(review): paths with special characters may still appear quoted by git.
git_changed_files() {
  git status --porcelain 2>/dev/null | cut -c4-
}
# Get list of staged files.
git_staged_files() {
  git diff --cached --name-only 2>/dev/null
}
# Get list of untracked files (respects .gitignore).
git_untracked_files() {
  git ls-files --others --exclude-standard 2>/dev/null
}
# ============================================================================
# Diff and History
# ============================================================================
# Get files changed between two refs (defaults: HEAD~1..HEAD).
git_diff_files() {
  local from="${1:-HEAD~1}"
  local to="${2:-HEAD}"
  git diff --name-only "$from" "$to" 2>/dev/null
}
# Get files changed in the last N commits (default: 1).
git_recent_files() {
  local count="${1:-1}"
  git diff --name-only "HEAD~${count}" HEAD 2>/dev/null
}
# Check if a file was changed between two refs (defaults: HEAD~1..HEAD).
# Fix: the old `| grep -q "$file"` treated the path as a regex AND matched
# substrings (e.g. "a.c" matched "data.c"). The diff is already restricted to
# the given pathspec, so a non-empty result is the correct test.
git_file_changed() {
  local file="$1"
  local from="${2:-HEAD~1}"
  local to="${3:-HEAD}"
  [[ -n "$(git diff --name-only "$from" "$to" -- "$file" 2>/dev/null)" ]]
}
# Get commits between two refs as one-line summaries (defaults: HEAD~10..HEAD).
git_commits_between() {
  local from="${1:-HEAD~10}"
  local to="${2:-HEAD}"
  git log --oneline "$from".."$to" 2>/dev/null
}
# ============================================================================
# Tag Operations
# ============================================================================
# Create a tag; annotated (-a) when a message is given, lightweight otherwise.
git_create_tag() {
  local tag="$1"
  local message="${2:-}"
  if [[ -n "$message" ]]; then
    git tag -a "$tag" -m "$message"
  else
    git tag "$tag"
  fi
}
# Delete a local tag (errors suppressed; a missing tag is not fatal).
git_delete_tag() {
  local tag="$1"
  git tag -d "$tag" 2>/dev/null
}
# Push a tag to a remote (default: origin).
git_push_tag() {
  local tag="$1"
  local remote="${2:-origin}"
  git push "$remote" "$tag"
}
# List tags matching a glob pattern (default: all tags).
git_list_tags() {
  local pattern="${1:-*}"
  git tag -l "$pattern" 2>/dev/null
}
# ============================================================================
# Branch Operations
# ============================================================================
# Check if a local branch exists.
git_branch_exists() {
  local branch="$1"
  git show-ref --verify --quiet "refs/heads/$branch" 2>/dev/null
}
# Check if a remote-tracking branch exists (default remote: origin).
git_remote_branch_exists() {
  local branch="$1"
  local remote="${2:-origin}"
  git show-ref --verify --quiet "refs/remotes/$remote/$branch" 2>/dev/null
}
# Get the remote's default branch name.
# NOTE(review): `git remote show` contacts the remote — slow/offline-hostile.
git_default_branch() {
  local remote="${1:-origin}"
  git remote show "$remote" 2>/dev/null | grep "HEAD branch" | awk '{print $NF}'
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get version string for CI builds: the exact tag when HEAD is tagged,
# otherwise "<branch-with-dashes>-<short-sha>".
git_ci_version() {
  local tag
  tag=$(git_tag)
  if [[ -n "$tag" ]]; then
    echo "$tag"
  else
    local branch sha
    branch=$(git_branch | tr '/' '-')
    sha=$(git_sha_short)
    echo "${branch}-${sha}"
  fi
}
# Check if the current branch is the remote's default branch.
git_is_default_branch() {
  local current default
  current=$(git_branch)
  default=$(git_default_branch)
  [[ "$current" == "$default" ]]
}
# Check if running in a CI environment (generic CI, GitHub Actions, GitLab CI).
git_is_ci() {
  [[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]] || [[ -n "${GITLAB_CI:-}" ]]
}
# Ensure a clean worktree or fail with EXIT_DIRTY_WORKTREE.
# Uses `return` (not exit) so callers decide whether to abort.
git_require_clean() {
  if git_is_dirty; then
    log_error "Working tree is dirty. Commit or stash changes first."
    return "${EXIT_DIRTY_WORKTREE:-71}"
  fi
}

View File

@@ -0,0 +1,266 @@
#!/usr/bin/env bash
# Shared Hash/Checksum Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Cryptographic hash and checksum operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/hash-utils.sh"
# Prevent multiple sourcing (return is valid because this library is sourced).
if [[ -n "${__STELLAOPS_HASH_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_HASH_UTILS_LOADED=1
# Source dependencies (best effort; logging/exit-code helpers may be absent).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Hash Computation
# ============================================================================
# Compute the SHA-256 digest of a file, printed as lowercase hex on stdout.
# Tries sha256sum (GNU), shasum (BSD/macOS), then openssl, in that order.
compute_sha256() {
  local target="$1"
  if [[ ! -f "$target" ]]; then
    log_error "File not found: $target"
    return "${EXIT_NOT_FOUND:-4}"
  fi
  local tool
  for tool in sha256sum shasum openssl; do
    if command -v "$tool" >/dev/null 2>&1; then
      case "$tool" in
        sha256sum) sha256sum "$target" | awk '{print $1}' ;;
        shasum)    shasum -a 256 "$target" | awk '{print $1}' ;;
        openssl)   openssl dgst -sha256 "$target" | awk '{print $NF}' ;;
      esac
      return
    fi
  done
  log_error "No SHA-256 tool available"
  return "${EXIT_MISSING_TOOL:-10}"
}
# Compute the SHA-512 digest of a file, printed as lowercase hex on stdout.
# Tries sha512sum (GNU), shasum (BSD/macOS), then openssl, in that order.
compute_sha512() {
  local target="$1"
  if [[ ! -f "$target" ]]; then
    log_error "File not found: $target"
    return "${EXIT_NOT_FOUND:-4}"
  fi
  local tool
  for tool in sha512sum shasum openssl; do
    if command -v "$tool" >/dev/null 2>&1; then
      case "$tool" in
        sha512sum) sha512sum "$target" | awk '{print $1}' ;;
        shasum)    shasum -a 512 "$target" | awk '{print $1}' ;;
        openssl)   openssl dgst -sha512 "$target" | awk '{print $NF}' ;;
      esac
      return
    fi
  done
  log_error "No SHA-512 tool available"
  return "${EXIT_MISSING_TOOL:-10}"
}
# Compute MD5 hash of a file (for compatibility/caching only — NOT for security).
compute_md5() {
  local file="$1"
  if [[ ! -f "$file" ]]; then
    log_error "File not found: $file"
    return "${EXIT_NOT_FOUND:-4}"
  fi
  # Prefer GNU md5sum, then BSD/macOS md5 (-q prints only the digest), then openssl.
  if command -v md5sum >/dev/null 2>&1; then
    md5sum "$file" | awk '{print $1}'
  elif command -v md5 >/dev/null 2>&1; then
    md5 -q "$file"
  elif command -v openssl >/dev/null 2>&1; then
    openssl dgst -md5 "$file" | awk '{print $NF}'
  else
    log_error "No MD5 tool available"
    return "${EXIT_MISSING_TOOL:-10}"
  fi
}
# Compute the hash of a string (hashed without a trailing newline).
# Arguments: $1 string, $2 algorithm (sha256|sha512|md5, default sha256)
# Fix: the old `cmd 2>/dev/null | awk ... || fallback` never fell back when
# the primary tool was missing — the pipeline's status is awk's (0 even with
# no input), so the function silently printed nothing. Probe tools with
# `command -v` instead. Also uses printf '%s' rather than the non-portable
# `echo -n`.
compute_string_hash() {
  local string="$1"
  local algorithm="${2:-sha256}"
  case "$algorithm" in
    sha256)
      if command -v sha256sum >/dev/null 2>&1; then
        printf '%s' "$string" | sha256sum | awk '{print $1}'
      elif command -v shasum >/dev/null 2>&1; then
        printf '%s' "$string" | shasum -a 256 | awk '{print $1}'
      else
        log_error "No SHA-256 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    sha512)
      if command -v sha512sum >/dev/null 2>&1; then
        printf '%s' "$string" | sha512sum | awk '{print $1}'
      elif command -v shasum >/dev/null 2>&1; then
        printf '%s' "$string" | shasum -a 512 | awk '{print $1}'
      else
        log_error "No SHA-512 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    md5)
      if command -v md5sum >/dev/null 2>&1; then
        printf '%s' "$string" | md5sum | awk '{print $1}'
      elif command -v md5 >/dev/null 2>&1; then
        printf '%s' "$string" | md5
      else
        log_error "No MD5 tool available"
        return "${EXIT_MISSING_TOOL:-10}"
      fi
      ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac
}
# ============================================================================
# Checksum Files
# ============================================================================
# Write a checksum file for a single file.
# Arguments: $1 file, $2 output path (default "<file>.sha256"), $3 algorithm.
# NOTE(review): output separates hash and name with a single space; GNU
# `sha256sum -c` expects two — confirm the intended consumer before changing.
write_checksum() {
  local file="$1"
  local checksum_file="${2:-${file}.sha256}"
  local algorithm="${3:-sha256}"
  local hash
  case "$algorithm" in
    sha256) hash=$(compute_sha256 "$file") ;;
    sha512) hash=$(compute_sha512 "$file") ;;
    md5) hash=$(compute_md5 "$file") ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac
  # An empty hash means the compute helper failed (it already logged why).
  if [[ -z "$hash" ]]; then
    return "${EXIT_ERROR:-1}"
  fi
  local basename
  basename=$(basename "$file")
  echo "$hash $basename" > "$checksum_file"
  log_debug "Wrote checksum to $checksum_file"
}
# Write SHA-256 checksums for multiple files into one manifest.
# Missing files are silently skipped; the output file is truncated first.
write_checksums() {
  local output_file="$1"
  shift
  local files=("$@")
  : > "$output_file"
  for file in "${files[@]}"; do
    if [[ -f "$file" ]]; then
      local hash basename
      hash=$(compute_sha256 "$file")
      basename=$(basename "$file")
      echo "$hash $basename" >> "$output_file"
    fi
  done
  log_debug "Wrote checksums to $output_file"
}
# ============================================================================
# Checksum Verification
# ============================================================================
# Verify a file against an expected hash.
# Arguments: $1 file, $2 expected hash, $3 algorithm (default sha256)
# Returns: 0 on match, EXIT_VERIFY_FAILED (64) on mismatch.
verify_checksum() {
  local file="$1"
  local expected_hash="$2"
  local algorithm="${3:-sha256}"
  local actual_hash
  case "$algorithm" in
    sha256) actual_hash=$(compute_sha256 "$file") ;;
    sha512) actual_hash=$(compute_sha512 "$file") ;;
    md5) actual_hash=$(compute_md5 "$file") ;;
    *)
      log_error "Unknown algorithm: $algorithm"
      return "${EXIT_USAGE:-2}"
      ;;
  esac
  if [[ "$actual_hash" == "$expected_hash" ]]; then
    log_debug "Checksum verified: $file"
    return 0
  else
    log_error "Checksum mismatch for $file"
    log_error "  Expected: $expected_hash"
    log_error "  Actual: $actual_hash"
    return "${EXIT_VERIFY_FAILED:-64}"
  fi
}
# Verify checksums from a manifest file ("<hash> <filename>" per line,
# sha256sum-style; blank lines and '#' comments are skipped).
# Arguments: $1 checksum file, $2 base directory for the listed files (default ".")
# Fixes:
#  - the file path was built from a stray `$(unknown)` command substitution
#    instead of the parsed filename, so verification always failed;
#  - `((failures++))` returns non-zero when the counter is 0, which aborts
#    callers running under `set -e`; use plain assignment arithmetic instead;
#  - parse with `read` so filenames containing spaces keep their remainder
#    (the old awk '{print $2}' truncated them).
verify_checksums_file() {
  local checksum_file="$1"
  local base_dir="${2:-.}"
  if [[ ! -f "$checksum_file" ]]; then
    log_error "Checksum file not found: $checksum_file"
    return "${EXIT_NOT_FOUND:-4}"
  fi
  local failures=0
  local line hash filename
  while IFS= read -r line || [[ -n "$line" ]]; do
    # Skip empty lines and comments
    [[ -z "$line" ]] && continue
    [[ "$line" == \#* ]] && continue
    # First field is the hash; the remainder is the filename.
    read -r hash filename <<<"$line"
    local filepath="${base_dir}/${filename}"
    if [[ ! -f "$filepath" ]]; then
      log_error "File not found: $filepath"
      failures=$((failures + 1))
      continue
    fi
    if ! verify_checksum "$filepath" "$hash"; then
      failures=$((failures + 1))
    fi
  done < "$checksum_file"
  if [[ $failures -gt 0 ]]; then
    log_error "$failures checksum verification(s) failed"
    return "${EXIT_VERIFY_FAILED:-64}"
  fi
  log_info "All checksums verified"
  return 0
}
# ============================================================================
# Helpers
# ============================================================================
# Check if two files have identical content (compares their SHA-256 digests).
# Returns 1 when either path is not a regular file.
files_identical() {
  local file1="$1"
  local file2="$2"
  [[ -f "$file1" ]] && [[ -f "$file2" ]] || return 1
  local hash1 hash2
  hash1=$(compute_sha256 "$file1")
  hash2=$(compute_sha256 "$file2")
  [[ "$hash1" == "$hash2" ]]
}
# Truncate a hash to a display-friendly prefix (default: first 8 characters).
short_hash() {
  local digest="$1"
  local width="${2:-8}"
  printf '%s\n' "${digest:0:width}"
}
# Generate a deterministic 16-hex-character ID from the given inputs
# (the first 16 chars of the SHA-256 of the space-joined arguments).
generate_id() {
  local inputs="$*"
  compute_string_hash "$inputs" sha256 | head -c 16
}

View File

@@ -0,0 +1,181 @@
#!/usr/bin/env bash
# Shared Logging Library
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Standard logging functions for all CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/logging.sh"
#
# Log Levels: DEBUG, INFO, WARN, ERROR
# Set LOG_LEVEL environment variable to control verbosity (default: INFO)
# Prevent multiple sourcing (return is valid because this file is sourced,
# not executed).
if [[ -n "${__STELLAOPS_LOGGING_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_LOGGING_LOADED=1
# Colors (disable with NO_COLOR=1). Decided once at source time based on
# whether stdout is a terminal; stored as literal escape strings for echo -e.
if [[ -z "${NO_COLOR:-}" ]] && [[ -t 1 ]]; then
  export LOG_COLOR_RED='\033[0;31m'
  export LOG_COLOR_GREEN='\033[0;32m'
  export LOG_COLOR_YELLOW='\033[1;33m'
  export LOG_COLOR_BLUE='\033[0;34m'
  export LOG_COLOR_MAGENTA='\033[0;35m'
  export LOG_COLOR_CYAN='\033[0;36m'
  export LOG_COLOR_GRAY='\033[0;90m'
  export LOG_COLOR_RESET='\033[0m'
else
  export LOG_COLOR_RED=''
  export LOG_COLOR_GREEN=''
  export LOG_COLOR_YELLOW=''
  export LOG_COLOR_BLUE=''
  export LOG_COLOR_MAGENTA=''
  export LOG_COLOR_CYAN=''
  export LOG_COLOR_GRAY=''
  export LOG_COLOR_RESET=''
fi
# Log level configuration (DEBUG|INFO|WARN|ERROR; default INFO)
export LOG_LEVEL="${LOG_LEVEL:-INFO}"
# Map a level name to its numeric rank for threshold comparison.
# Unknown names rank as INFO (1).
_log_level_to_num() {
  local level="$1"
  local rank=1
  case "$level" in
    DEBUG) rank=0 ;;
    INFO)  rank=1 ;;
    WARN)  rank=2 ;;
    ERROR) rank=3 ;;
  esac
  printf '%s\n' "$rank"
}
# Check if a message should be logged: message rank must be >= the threshold
# rank derived from $LOG_LEVEL.
_should_log() {
  local msg_level="$1"
  local current_level="${LOG_LEVEL:-INFO}"
  local msg_num current_num
  msg_num=$(_log_level_to_num "$msg_level")
  current_num=$(_log_level_to_num "$current_level")
  [[ $msg_num -ge $current_num ]]
}
# Format timestamp (UTC, ISO 8601). Prints nothing when LOG_TIMESTAMPS=false,
# which suppresses the prefix entirely in _log.
_log_timestamp() {
  if [[ "${LOG_TIMESTAMPS:-true}" == "true" ]]; then
    date -u +"%Y-%m-%dT%H:%M:%SZ"
  fi
}
# Core logging function.
# Arguments: $1 level name, $2 ANSI color escape, remaining: message words.
# Output: "<timestamp> [LEVEL] message" via echo -e (colors are escape strings).
_log() {
  local level="$1"
  local color="$2"
  shift 2
  # Drop the message entirely when below the configured threshold.
  if ! _should_log "$level"; then
    return 0
  fi
  local timestamp
  timestamp=$(_log_timestamp)
  local prefix=""
  if [[ -n "$timestamp" ]]; then
    prefix="${LOG_COLOR_GRAY}${timestamp}${LOG_COLOR_RESET} "
  fi
  echo -e "${prefix}${color}[${level}]${LOG_COLOR_RESET} $*"
}
# Public logging functions — one per level; all route through _log.
log_debug() {
  _log "DEBUG" "${LOG_COLOR_GRAY}" "$@"
}
log_info() {
  _log "INFO" "${LOG_COLOR_GREEN}" "$@"
}
log_warn() {
  _log "WARN" "${LOG_COLOR_YELLOW}" "$@"
}
# Errors go to stderr.
log_error() {
  _log "ERROR" "${LOG_COLOR_RED}" "$@" >&2
}
# Step logging (for workflow stages). "STEP" is not a filter level, so
# _log_level_to_num ranks it as INFO for threshold purposes.
log_step() {
  _log "STEP" "${LOG_COLOR_BLUE}" "$@"
}
# Success message ("OK" also ranks as INFO for threshold purposes).
log_success() {
  _log "OK" "${LOG_COLOR_GREEN}" "$@"
}
# GitHub Actions annotations: emit workflow commands when running under GHA,
# otherwise fall back to plain log output.
log_gh_notice() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::notice::$*"
  else
    log_info "$@"
  fi
}
log_gh_warning() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::warning::$*"
  else
    log_warn "$@"
  fi
}
log_gh_error() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::error::$*"
  else
    log_error "$@"
  fi
}
# Group logging: collapsible log sections under GitHub Actions,
# a plain STEP banner elsewhere (where there is no matching "end" marker).
log_group_start() {
  local title="$1"
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::group::$title"
  else
    log_step "=== $title ==="
  fi
}
log_group_end() {
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::endgroup::"
  fi
}
# Masked logging (for secrets): registers the value with GHA's log masker.
# Outside GHA this is a no-op — it deliberately does NOT print the value.
log_masked() {
  local value="$1"
  if [[ -n "${GITHUB_ACTIONS:-}" ]]; then
    echo "::add-mask::$value"
  fi
}
# Die with error message (always exit 1).
die() {
  log_error "$@"
  exit 1
}
# Conditional die: evaluates $1 as a shell expression and dies if it succeeds.
# NOTE(review): uses eval — never pass untrusted input as the condition string.
die_if() {
  local condition="$1"
  shift
  if eval "$condition"; then
    die "$@"
  fi
}

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
# Shared Path Utilities
# Sprint: CI/CD Enhancement - Script Consolidation
#
# Purpose: Path manipulation and file operations for CI/CD scripts
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/lib/path-utils.sh"
# Prevent multiple sourcing (return is valid because this library is sourced).
if [[ -n "${__STELLAOPS_PATH_UTILS_LOADED:-}" ]]; then
  return 0
fi
export __STELLAOPS_PATH_UTILS_LOADED=1
# Source dependencies (best effort; functions degrade gracefully without them).
# NOTE(review): SCRIPT_DIR is a global shared by sibling libs — last sourced wins.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/logging.sh" 2>/dev/null || true
source "${SCRIPT_DIR}/exit-codes.sh" 2>/dev/null || true
# ============================================================================
# Path Normalization
# ============================================================================
# Normalize a path: resolve ".", "..", doubled slashes (realpath -m does not
# require the path to exist). An empty input normalizes to ".".
# Fix: the Python fallback interpolated $path directly into the program text,
# which broke — or allowed code injection — for paths containing quotes or
# backslashes; pass the path via argv instead.
normalize_path() {
  local path="$1"
  # Handle empty path
  if [[ -z "$path" ]]; then
    echo "."
    return 0
  fi
  # Try realpath first (most reliable)
  if command -v realpath >/dev/null 2>&1; then
    realpath -m "$path" 2>/dev/null && return 0
  fi
  # Fallback to Python; the path is passed as an argument, never interpolated.
  if command -v python3 >/dev/null 2>&1; then
    python3 -c 'import os, sys; print(os.path.normpath(sys.argv[1]))' "$path" 2>/dev/null && return 0
  fi
  # Manual normalization (basic; single pass — nested "a/b/../../c" may remain)
  echo "$path" | sed 's|/\./|/|g' | sed 's|/[^/]*/\.\./|/|g' | sed 's|//|/|g'
}
# Get absolute path: anchored at $PWD when the input is relative.
absolute_path() {
  local path="$1"
  if [[ "$path" == /* ]]; then
    normalize_path "$path"
  else
    normalize_path "$(pwd)/$path"
  fi
}
# Get the relative path from $1 to $2.
# Fix: the Python fallback interpolated both paths into the program text;
# pass them via argv so quotes/backslashes in paths cannot break or inject code.
relative_path() {
  local from="$1"
  local to="$2"
  if command -v realpath >/dev/null 2>&1; then
    realpath --relative-to="$from" "$to" 2>/dev/null && return 0
  fi
  if command -v python3 >/dev/null 2>&1; then
    python3 -c 'import os.path, sys; print(os.path.relpath(sys.argv[1], sys.argv[2]))' "$to" "$from" 2>/dev/null && return 0
  fi
  # Fallback: just return the absolute path of the target
  absolute_path "$to"
}
# ============================================================================
# Path Components
# ============================================================================
# Get directory name (thin wrapper over dirname).
dir_name() {
  dirname "$1"
}
# Get base name (thin wrapper over basename).
base_name() {
  basename "$1"
}
# Get a file's extension: text after the last dot; empty when there is no dot.
# Note: dotfiles like ".bashrc" report "bashrc", matching historical behavior.
file_extension() {
  local name
  name=$(basename "$1")
  case "$name" in
    *.*) printf '%s\n' "${name##*.}" ;;
    *)   echo "" ;;
  esac
}
# Get the file name with its final extension removed ("a.tar.gz" -> "a.tar").
# Names without a dot are returned unchanged.
file_stem() {
  local name
  name=$(basename "$1")
  case "$name" in
    *.*) printf '%s\n' "${name%.*}" ;;
    *)   printf '%s\n' "$name" ;;
  esac
}
# ============================================================================
# Directory Operations
# ============================================================================
# Ensure directory exists (mkdir -p; no error if already present).
ensure_directory() {
  local dir="$1"
  if [[ ! -d "$dir" ]]; then
    mkdir -p "$dir"
  fi
}
# Create a temporary directory under $TMPDIR (default /tmp); prints its path.
create_temp_dir() {
  local prefix="${1:-stellaops}"
  mktemp -d "${TMPDIR:-/tmp}/${prefix}.XXXXXX"
}
# Create a temporary file; the optional suffix ($2) follows the random part.
create_temp_file() {
  local prefix="${1:-stellaops}"
  local suffix="${2:-}"
  mktemp "${TMPDIR:-/tmp}/${prefix}.XXXXXX${suffix}"
}
# Clean a temporary directory. Safety guard: only removes directories whose
# path contains "stellaops", so arbitrary trees cannot be deleted by mistake.
clean_temp() {
  local path="$1"
  if [[ -d "$path" ]] && [[ "$path" == *stellaops* ]]; then
    rm -rf "$path"
  fi
}
# ============================================================================
# File Existence Checks
# ============================================================================
# Filesystem predicates; each mirrors the corresponding test(1) flag.
file_exists()     { test -f "$1"; }   # regular file
dir_exists()      { test -d "$1"; }   # directory
path_exists()     { test -e "$1"; }   # any path (file or directory)
file_readable()   { test -r "$1"; }   # readable by current user
file_writable()   { test -w "$1"; }   # writable by current user
file_executable() { test -x "$1"; }   # executable by current user
# ============================================================================
# File Discovery
# ============================================================================
# Find files by name pattern (default: all files under $1).
find_files() {
  local dir="${1:-.}"
  local pattern="${2:-*}"
  find "$dir" -type f -name "$pattern" 2>/dev/null
}
# Find files by extension (pass "cs", not "*.cs").
find_by_extension() {
  local dir="${1:-.}"
  local ext="${2:-}"
  find "$dir" -type f -name "*.${ext}" 2>/dev/null
}
# Find project files (csproj, package.json, etc.), excluding build output.
# NOTE(review): `grep -v bin` filters any path *containing* "bin"/"obj"
# (e.g. a "binding/" directory would be excluded) — confirm this is intended.
find_project_files() {
  local dir="${1:-.}"
  find "$dir" -type f \( \
    -name "*.csproj" -o \
    -name "*.fsproj" -o \
    -name "package.json" -o \
    -name "Cargo.toml" -o \
    -name "go.mod" -o \
    -name "pom.xml" -o \
    -name "build.gradle" \
    \) 2>/dev/null | grep -v node_modules | grep -v bin | grep -v obj
}
# Find .NET test projects (*.Tests.csproj), excluding build output.
find_test_projects() {
  local dir="${1:-.}"
  find "$dir" -type f -name "*.Tests.csproj" 2>/dev/null | grep -v bin | grep -v obj
}
# ============================================================================
# Path Validation
# ============================================================================
# Check if a path is under a directory (prefix match on normalized absolutes).
# NOTE(review): a plain string-prefix match means "/a/bc" counts as under
# "/a/b" — confirm whether a trailing-slash-aware check is needed.
path_under() {
  local path="$1"
  local dir="$2"
  local abs_path abs_dir
  abs_path=$(absolute_path "$path")
  abs_dir=$(absolute_path "$dir")
  [[ "$abs_path" == "$abs_dir"* ]]
}
# Validate that a path is safe: no "..", not absolute, and resolves under base.
# Fix: `[[ "$path" == "/*" ]]` compared against the *literal* two-character
# string "/*" (quoting a [[ ]] pattern disables globbing), so absolute paths
# were never rejected. Use the unquoted pattern /* instead.
path_is_safe() {
  local path="$1"
  local base="${2:-.}"
  # Reject obvious traversal attempts and absolute paths outright
  if [[ "$path" == *".."* ]] || [[ "$path" == /* ]]; then
    return 1
  fi
  # Verify resolved path is under base
  path_under "$path" "$base"
}
# ============================================================================
# CI/CD Helpers
# ============================================================================
# Get the artifact output directory: <workspace>/out/<name>.
get_artifact_dir() {
  local name="${1:-artifacts}"
  local base="${GITHUB_WORKSPACE:-$(pwd)}"
  echo "${base}/out/${name}"
}
# Get the test results directory: <workspace>/TestResults.
get_test_results_dir() {
  local base="${GITHUB_WORKSPACE:-$(pwd)}"
  echo "${base}/TestResults"
}
# Ensure the artifact directory exists and print its path on stdout.
ensure_artifact_dir() {
  local name="${1:-artifacts}"
  local dir
  dir=$(get_artifact_dir "$name")
  ensure_directory "$dir"
  echo "$dir"
}

View File

@@ -0,0 +1,244 @@
-- ============================================================================
-- StellaOps Migration Reset Script for Pre-1.0 Deployments
-- ============================================================================
-- This script updates schema_migrations tables to recognize the 1.0.0 compacted
-- migrations for deployments that upgraded from pre-1.0 versions.
--
-- Run via: psql -f migrations-reset-pre-1.0.sql
-- Or with connection: psql -h <host> -U <user> -d <db> -f migrations-reset-pre-1.0.sql
--
-- Pattern per module: delete the historical migration ledger rows, then
-- register the single compacted 001_initial_schema.sql with the sentinel
-- checksum 'compacted_1.0.0'. ON CONFLICT DO NOTHING keeps the script
-- idempotent when re-run. The whole script runs in ONE transaction, so a
-- failure in any section rolls back every module's reset.
-- ============================================================================
BEGIN;
-- ============================================================================
-- Authority Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_mongo_store_equivalents, 003_enable_rls,
--           004_offline_kit_audit, 005_verdict_manifests
-- New: 001_initial_schema (compacted)
DELETE FROM authority.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_mongo_store_equivalents.sql',
    '003_enable_rls.sql',
    '004_offline_kit_audit.sql',
    '005_verdict_manifests.sql'
);
INSERT INTO authority.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Scheduler Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 002_graph_jobs, 003_runs_policy,
--           010_generated_columns_runs, 011_enable_rls, 012_partition_audit,
--           012b_migrate_audit_data
-- New: 001_initial_schema (compacted)
DELETE FROM scheduler.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_graph_jobs.sql',
    '003_runs_policy.sql',
    '010_generated_columns_runs.sql',
    '011_enable_rls.sql',
    '012_partition_audit.sql',
    '012b_migrate_audit_data.sql'
);
INSERT INTO scheduler.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Scanner Module Reset
-- ============================================================================
-- Original: 001-034 plus various numbered files (27 total)
-- New: 001_initial_schema (compacted)
-- NOTE(review): unlike Concelier below, this list is enumerated explicitly;
-- any scanner migration file not named here is intentionally left registered.
DELETE FROM scanner.schema_migrations
WHERE migration_name IN (
    '001_create_tables.sql',
    '002_proof_spine_tables.sql',
    '003_classification_history.sql',
    '004_scan_metrics.sql',
    '005_smart_diff_tables.sql',
    '006_score_replay_tables.sql',
    '007_unknowns_ranking_containment.sql',
    '008_epss_integration.sql',
    '0059_scans_table.sql',
    '0065_unknowns_table.sql',
    '0075_scan_findings_table.sql',
    '020_call_graph_tables.sql',
    '021_smart_diff_tables_search_path.sql',
    '022_reachability_drift_tables.sql',
    '023_scanner_api_ingestion.sql',
    '024_smart_diff_priority_score_widen.sql',
    '025_epss_raw_layer.sql',
    '026_epss_signal_layer.sql',
    '027_witness_storage.sql',
    '028_epss_triage_columns.sql',
    '029_vuln_surfaces.sql',
    '030_vuln_surface_triggers_update.sql',
    '031_reach_cache.sql',
    '032_idempotency_keys.sql',
    '033_binary_evidence.sql',
    '034_func_proof_tables.sql',
    'DM001_rename_scanner_migrations.sql'
);
INSERT INTO scanner.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Policy Module Reset
-- ============================================================================
-- Original: 001-013 (14 files, includes duplicate 010 prefix)
-- New: 001_initial_schema (compacted)
DELETE FROM policy.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '002_cvss_receipts.sql',
    '003_snapshots_violations.sql',
    '004_epss_risk_scores.sql',
    '005_cvss_multiversion.sql',
    '006_enable_rls.sql',
    '007_unknowns_registry.sql',
    '008_exception_objects.sql',
    '009_exception_applications.sql',
    '010_recheck_evidence.sql',
    '010_unknowns_blast_radius_containment.sql',
    '011_unknowns_reason_codes.sql',
    '012_budget_ledger.sql',
    '013_exception_approval.sql'
);
INSERT INTO policy.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Notify Module Reset
-- ============================================================================
-- Original: 001_initial_schema, 010_enable_rls, 011_partition_deliveries,
--           011b_migrate_deliveries_data
-- New: 001_initial_schema (compacted)
DELETE FROM notify.schema_migrations
WHERE migration_name IN (
    '001_initial_schema.sql',
    '010_enable_rls.sql',
    '011_partition_deliveries.sql',
    '011b_migrate_deliveries_data.sql'
);
INSERT INTO notify.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Concelier Module Reset
-- ============================================================================
-- Original: 17 migration files
-- New: 001_initial_schema (compacted)
-- The regex matches every NNN_*.sql entry, including 001_initial_schema.sql
-- itself; the subsequent INSERT re-registers it with the compacted checksum.
DELETE FROM concelier.schema_migrations
WHERE migration_name ~ '^[0-9]{3}_.*\.sql$';
INSERT INTO concelier.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Attestor Module Reset (proofchain + attestor schemas)
-- ============================================================================
-- Original: 20251214000001_AddProofChainSchema.sql, 20251216_001_create_rekor_submission_queue.sql
-- New: 001_initial_schema (compacted)
DELETE FROM proofchain.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddProofChainSchema.sql',
    '20251214000002_RollbackProofChainSchema.sql',
    '20251216_001_create_rekor_submission_queue.sql'
);
INSERT INTO proofchain.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Signer Module Reset
-- ============================================================================
-- Original: 20251214000001_AddKeyManagementSchema.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signer.schema_migrations
WHERE migration_name IN (
    '20251214000001_AddKeyManagementSchema.sql'
);
INSERT INTO signer.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Signals Module Reset
-- ============================================================================
-- Original: V0000_001__extensions.sql, V1102_001__unknowns_scoring_schema.sql,
--           V1105_001__deploy_refs_graph_metrics.sql, V3102_001__callgraph_relational_tables.sql
-- New: 001_initial_schema (compacted)
DELETE FROM signals.schema_migrations
WHERE migration_name IN (
    'V0000_001__extensions.sql',
    'V1102_001__unknowns_scoring_schema.sql',
    'V1105_001__deploy_refs_graph_metrics.sql',
    'V3102_001__callgraph_relational_tables.sql'
);
INSERT INTO signals.schema_migrations (migration_name, category, checksum, applied_at)
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
ON CONFLICT (migration_name) DO NOTHING;
-- ============================================================================
-- Verification
-- ============================================================================
-- Display current migration status per module.
-- Uses dynamic SQL via format('%I', ...) so each schema name is identifier-
-- quoted; RAISE NOTICE output appears on the psql console, not in results.
DO $$
DECLARE
    v_module TEXT;
    v_count INT;
BEGIN
    FOR v_module IN SELECT unnest(ARRAY['authority', 'scheduler', 'scanner', 'policy', 'notify', 'concelier', 'proofchain', 'signer', 'signals']) LOOP
        EXECUTE format('SELECT COUNT(*) FROM %I.schema_migrations', v_module) INTO v_count;
        RAISE NOTICE '% module: % migrations registered', v_module, v_count;
    END LOOP;
END $$;
COMMIT;
-- ============================================================================
-- Post-Reset Notes
-- ============================================================================
-- After running this script:
-- 1. All modules should show exactly 1 migration registered
-- 2. The schema structure should be identical to a fresh 1.0.0 deployment
-- 3. Future migrations (002+) will apply normally
--
-- To verify manually:
--   SELECT * FROM authority.schema_migrations;
--   SELECT * FROM scheduler.schema_migrations;
--   SELECT * FROM scanner.schema_migrations;
--   SELECT * FROM policy.schema_migrations;
--   SELECT * FROM notify.schema_migrations;
--   SELECT * FROM concelier.schema_migrations;
--   SELECT * FROM proofchain.schema_migrations;
--   SELECT * FROM signer.schema_migrations;
--   SELECT * FROM signals.schema_migrations;
-- ============================================================================

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env pwsh
# regenerate-solution.ps1 - Regenerate StellaOps.sln without duplicate projects
#
# This script:
# 1. Backs up the existing solution
# 2. Creates a new solution
# 3. Adds all .csproj files, skipping duplicates
# 4. Preserves solution folders where possible
param(
    [string]$SolutionPath = "src/StellaOps.sln",
    [switch]$DryRun
)
$ErrorActionPreference = "Stop"
# Canonical locations for test projects (in priority order)
# Later entries win when there are duplicates
# NOTE(review): $canonicalPatterns is never referenced in the visible code;
# duplicate resolution is done by Get-CanonicalPath's regexes instead.
# Confirm whether this list is dead or consumed elsewhere.
$canonicalPatterns = @(
    # Module-local tests (highest priority)
    "src/*/__Tests/*/*.csproj",
    "src/*/__Libraries/__Tests/*/*.csproj",
    "src/__Libraries/__Tests/*/*.csproj",
    # Cross-module integration tests
    "src/__Tests/Integration/*/*.csproj",
    "src/__Tests/__Libraries/*/*.csproj",
    # Category-based cross-module tests
    "src/__Tests/chaos/*/*.csproj",
    "src/__Tests/security/*/*.csproj",
    "src/__Tests/interop/*/*.csproj",
    "src/__Tests/parity/*/*.csproj",
    "src/__Tests/reachability/*/*.csproj",
    # Single global tests
    "src/__Tests/*/*.csproj"
)
Write-Host "=== Solution Regeneration Script ===" -ForegroundColor Cyan
Write-Host "Solution: $SolutionPath"
Write-Host "Dry Run: $DryRun"
Write-Host ""
# Find all .csproj files, excluding build output.
# NOTE(review): the \\obj\\ and \\bin\\ patterns assume Windows '\' separators;
# under pwsh on Linux/macOS FullName uses '/', so bin/obj would not be filtered.
Write-Host "Finding all project files..." -ForegroundColor Yellow
$allProjects = Get-ChildItem -Path "src" -Filter "*.csproj" -Recurse |
    Where-Object { $_.FullName -notmatch "\\obj\\" -and $_.FullName -notmatch "\\bin\\" }
Write-Host "Found $($allProjects.Count) project files"
# Build a map of project name -> list of full paths sharing that base name.
$projectMap = @{}
foreach ($proj in $allProjects) {
    $name = $proj.BaseName
    if (-not $projectMap.ContainsKey($name)) {
        $projectMap[$name] = @()
    }
    $projectMap[$name] += $proj.FullName
}
# Find and report duplicates (same project name in more than one location).
$duplicates = $projectMap.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 }
Write-Host ""
Write-Host "Found $($duplicates.Count) projects with duplicate names:" -ForegroundColor Yellow
foreach ($dup in $duplicates) {
    Write-Host "  $($dup.Key):" -ForegroundColor Red
    foreach ($path in $dup.Value) {
        Write-Host "    - $path"
    }
}
# Select the canonical path for a project whose name exists in multiple
# locations. Preference order: module-local __Tests, then __Libraries/__Tests,
# then any __Tests directory, otherwise the first path given.
# Fix: the original regexes hardcoded '\\' separators, so on pwsh under
# Linux/macOS (where paths use '/') every tier failed and the first path
# always won. $sep matches either separator. Results are wrapped in @() so
# .Count is reliable even when Where-Object yields a single scalar.
function Get-CanonicalPath {
    param([string[]]$Paths)
    $sep = '[\\/]'
    # Prefer module-local __Tests over global __Tests
    $moduleTests = @($Paths | Where-Object { $_ -match "src${sep}[^_][^\\/]+${sep}__Tests${sep}" })
    if ($moduleTests.Count -gt 0) { return $moduleTests[0] }
    # Prefer __Libraries/__Tests
    $libTests = @($Paths | Where-Object { $_ -match "__Libraries${sep}__Tests${sep}" })
    if ($libTests.Count -gt 0) { return $libTests[0] }
    # Prefer any __Tests location over a non-__Tests location
    $testsPath = @($Paths | Where-Object { $_ -match "${sep}__Tests${sep}" })
    if ($testsPath.Count -gt 0) { return $testsPath[0] }
    # Otherwise, take first
    return $Paths[0]
}
# Build the final de-duplicated project list, one canonical path per name.
$finalProjects = @()
foreach ($entry in $projectMap.GetEnumerator()) {
    $canonical = Get-CanonicalPath -Paths $entry.Value
    $finalProjects += $canonical
}
Write-Host ""
Write-Host "Final project count: $($finalProjects.Count)" -ForegroundColor Green
# In dry-run mode, report what would be added and exit without touching disk.
if ($DryRun) {
    Write-Host ""
    Write-Host "=== DRY RUN - No changes made ===" -ForegroundColor Magenta
    Write-Host "Would add the following projects to solution:"
    $finalProjects | ForEach-Object { Write-Host "  $_" }
    exit 0
}
# Backup existing solution before destroying it (single .bak, overwritten each run).
$backupPath = "$SolutionPath.bak"
if (Test-Path $SolutionPath) {
    Copy-Item $SolutionPath $backupPath -Force
    Write-Host "Backed up existing solution to $backupPath" -ForegroundColor Gray
}
# Create new solution
Write-Host ""
Write-Host "Creating new solution..." -ForegroundColor Yellow
$slnDir = Split-Path $SolutionPath -Parent
$slnName = [System.IO.Path]::GetFileNameWithoutExtension($SolutionPath)
# Remove old solution
if (Test-Path $SolutionPath) {
    Remove-Item $SolutionPath -Force
}
# Create fresh solution in the solution's own directory.
# NOTE(review): stderr from 'dotnet new' is discarded (2>$null); a failure
# here would only surface later when 'dotnet sln add' starts failing.
Push-Location $slnDir
dotnet new sln -n $slnName --force 2>$null
Pop-Location
# Add projects one at a time so individual failures can be counted and logged.
Write-Host "Adding projects to solution..." -ForegroundColor Yellow
$added = 0
$failed = 0
foreach ($proj in $finalProjects) {
    try {
        $result = dotnet sln $SolutionPath add $proj 2>&1
        if ($LASTEXITCODE -eq 0) {
            $added++
            # Progress heartbeat every 50 projects.
            if ($added % 50 -eq 0) {
                Write-Host "  Added $added projects..." -ForegroundColor Gray
            }
        } else {
            Write-Host "  Failed to add: $proj" -ForegroundColor Red
            $failed++
        }
    } catch {
        Write-Host "  Error adding: $proj - $_" -ForegroundColor Red
        $failed++
    }
}
Write-Host ""
Write-Host "=== Summary ===" -ForegroundColor Cyan
Write-Host "Projects added: $added" -ForegroundColor Green
Write-Host "Projects failed: $failed" -ForegroundColor $(if ($failed -gt 0) { "Red" } else { "Green" })
Write-Host ""
Write-Host "Solution regenerated at: $SolutionPath"
# Verify the regenerated solution builds a valid configuration; non-fatal on failure.
Write-Host ""
Write-Host "Verifying solution..." -ForegroundColor Yellow
$verifyResult = dotnet build $SolutionPath --no-restore -t:ValidateSolutionConfiguration 2>&1
if ($LASTEXITCODE -eq 0) {
    Write-Host "Solution validation passed!" -ForegroundColor Green
} else {
    Write-Host "Solution validation had issues - check manually" -ForegroundColor Yellow
}

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env pwsh
# remove-stale-refs.ps1 - Remove stale project references that don't exist
#
# Two-pass line surgery on the .sln text:
#   1. Drop each Project(...) line matching a stale path, plus the line after
#      it, and remember the project GUID.
#   2. Drop every remaining line that mentions one of those GUIDs
#      (nested-project and configuration GlobalSection entries).
param([string]$SlnPath = "src/StellaOps.sln")
$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"
# Stale project paths (relative from solution location)
$staleProjects = @(
    "__Tests\AirGap\StellaOps.AirGap.Controller.Tests",
    "__Tests\AirGap\StellaOps.AirGap.Importer.Tests",
    "__Tests\AirGap\StellaOps.AirGap.Time.Tests",
    "__Tests\StellaOps.Gateway.WebService.Tests",
    "__Tests\Graph\StellaOps.Graph.Indexer.Tests",
    "Scanner\StellaOps.Scanner.Analyzers.Native",
    "__Libraries\__Tests\StellaOps.Signals.Tests",
    "__Tests\StellaOps.Audit.ReplayToken.Tests",
    "__Tests\StellaOps.Router.Gateway.Tests",
    "__Libraries\StellaOps.Cryptography"
)
$staleGuids = @()
$newLines = @()
$skipNext = $false
for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]
    if ($skipNext) {
        $skipNext = $false
        continue
    }
    $isStale = $false
    foreach ($stalePath in $staleProjects) {
        if ($line -like "*$stalePath*") {
            # Extract the project GUID at the end of the Project(...) line.
            # NOTE(review): assumes uppercase hex GUIDs as written by VS/dotnet;
            # a lowercase GUID would not be captured. Confirm the .sln format.
            if ($line -match '\{([A-F0-9-]+)\}"?$') {
                $staleGuids += $Matches[1]
            }
            Write-Host "Removing stale: $stalePath"
            $isStale = $true
            # Skip the following line too — assumed to be the matching
            # 'EndProject' on the very next line.
            $skipNext = $true
            break
        }
    }
    if (-not $isStale) {
        $newLines += $line
    }
}
# Second pass: remove GlobalSection references to stale GUIDs.
# NOTE(review): '$line -match $guid' treats the GUID as a regex; safe here
# because GUID characters (hex digits and '-') carry no regex metacharacters.
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $staleGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}
# Rewrite with CRLF line endings (the conventional .sln format), no trailing newline.
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
Write-Host "Removed $($staleGuids.Count) stale project references"

View File

@@ -0,0 +1,61 @@
# Restore deleted test files from the parent of a known commit, copying each
# file from its old repository location into its new module-local location.
# Existing files at the destination are left untouched.
$ErrorActionPreference = "Stop"
# Parent of the commit that deleted/moved the test projects.
$parentCommit = "74c7aa250c401ee9ac332686832b256159efa604^"
# Mapping: old path -> new path
$mappings = @{
    "src/__Tests/AirGap/StellaOps.AirGap.Importer.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Importer.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Controller.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Controller.Tests"
    "src/__Tests/AirGap/StellaOps.AirGap.Time.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Time.Tests"
    "src/__Tests/StellaOps.Gateway.WebService.Tests" = "src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests"
    "src/__Tests/Replay/StellaOps.Replay.Core.Tests" = "src/Replay/__Tests/StellaOps.Replay.Core.Tests"
    "src/__Tests/Provenance/StellaOps.Provenance.Attestation.Tests" = "src/Provenance/__Tests/StellaOps.Provenance.Attestation.Tests"
    "src/__Tests/Policy/StellaOps.Policy.Scoring.Tests" = "src/Policy/__Tests/StellaOps.Policy.Scoring.Tests"
}
# NOTE(review): hardcoded, machine-specific repo root — this one-off script
# only works on the original author's machine; parameterize before reuse.
Set-Location "E:\dev\git.stella-ops.org"
foreach ($mapping in $mappings.GetEnumerator()) {
    $oldPath = $mapping.Key
    $newPath = $mapping.Value
    Write-Host "`nProcessing: $oldPath -> $newPath" -ForegroundColor Cyan
    # Get list of files from old location in git (recursive, names only).
    $files = git ls-tree -r --name-only "$parentCommit" -- $oldPath 2>$null
    if (-not $files) {
        Write-Host "  No files found at old path" -ForegroundColor Yellow
        continue
    }
    foreach ($file in $files) {
        # Calculate the path relative to the old root, then its new location.
        $relativePath = $file.Substring($oldPath.Length + 1)
        $newFilePath = Join-Path $newPath $relativePath
        # Create destination directory if needed
        $newDir = Split-Path $newFilePath -Parent
        if (-not (Test-Path $newDir)) {
            New-Item -ItemType Directory -Path $newDir -Force | Out-Null
        }
        # Never overwrite a file that already exists at the new location.
        if (Test-Path $newFilePath) {
            Write-Host "  Exists: $relativePath" -ForegroundColor DarkGray
            continue
        }
        # Restore file contents from git history.
        # NOTE(review): PowerShell's '>' redirection re-encodes output (UTF-16
        # in Windows PowerShell 5); binary or non-ASCII test fixtures may be
        # corrupted — 'git show ... | Set-Content -AsByteStream' would be
        # byte-exact. Confirm all restored files are text.
        git show "${parentCommit}:${file}" > $newFilePath 2>$null
        if ($LASTEXITCODE -eq 0) {
            Write-Host "  Restored: $relativePath" -ForegroundColor Green
        } else {
            Write-Host "  Failed: $relativePath" -ForegroundColor Red
        }
    }
}
Write-Host "`nDone!" -ForegroundColor Cyan