devops folders consolidate
@@ -1,45 +0,0 @@
# add-test-projects.ps1 - Add all test projects to StellaOps.Tests.sln
# Sprint: SPRINT_20251226_007_CICD

$ErrorActionPreference = 'Continue'

$slnPath = "src\StellaOps.Tests.sln"
$srcPath = "src"

Write-Host "=== Adding test projects to StellaOps.Tests.sln ==="
Write-Host "Solution: $slnPath"

# Find all test project files
$testProjects = Get-ChildItem -Path $srcPath -Recurse -Filter "*Tests.csproj" |
    Where-Object {
        $_.Name -notlike "*Testing.csproj" -and
        $_.Name -notlike "*TestKit.csproj" -and
        $_.FullName -notlike "*node_modules*" -and
        $_.FullName -notlike "*bin*" -and
        $_.FullName -notlike "*obj*"
    }

Write-Host "Found $($testProjects.Count) test projects"

$added = 0
$failed = 0

foreach ($proj in $testProjects) {
    $relativePath = $proj.FullName.Replace((Get-Location).Path + "\", "")
    Write-Host "Adding: $relativePath"

    $result = dotnet sln $slnPath add $proj.FullName 2>&1

    if ($LASTEXITCODE -eq 0) {
        $added++
    } else {
        Write-Host " Failed: $result" -ForegroundColor Yellow
        $failed++
    }
}

Write-Host ""
Write-Host "=== Summary ==="
Write-Host "Added: $added"
Write-Host "Failed: $failed"
Write-Host "Total: $($testProjects.Count)"
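# Note: `dotnet sln <solution> add` also accepts multiple project paths in one invocation,
# so the loop above could be collapsed to a single call such as (illustrative):
#   dotnet sln $slnPath add $testProjects.FullName
# The per-project loop keeps individual success/failure reporting.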
@@ -1,130 +0,0 @@
#!/usr/bin/env python3
"""
Adds StellaOps.TestKit ProjectReference to test projects that use TestCategories
but are missing the reference.
"""
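# Illustration only (the Include path below is hypothetical; the real path is computed
# per project by get_relative_path_to_testkit): the inserted reference is shaped like
#
#   <ItemGroup>
#     <ProjectReference Include="../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
#   </ItemGroup>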

import os
import re
import sys
from pathlib import Path


def get_relative_path_to_testkit(csproj_path: Path) -> str:
    """Calculate relative path from csproj to TestKit project."""
    # TestKit is at src/__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj
    csproj_dir = csproj_path.parent
    src_root = None

    # Walk up to find src directory
    current = csproj_dir
    depth = 0
    while current.name != 'src' and depth < 10:
        current = current.parent
        depth += 1

    if current.name == 'src':
        src_root = current
    else:
        return None

    # Calculate relative path from csproj to src/__Libraries/StellaOps.TestKit
    rel_path = os.path.relpath(
        src_root / '__Libraries' / 'StellaOps.TestKit' / 'StellaOps.TestKit.csproj',
        csproj_dir
    )
    # Normalize to forward slashes for XML
    return rel_path.replace('\\', '/')


def project_uses_testkit(csproj_dir: Path) -> bool:
    """Check if any .cs file in the project directory uses TestCategories."""
    for cs_file in csproj_dir.rglob('*.cs'):
        if '/obj/' in str(cs_file) or '/bin/' in str(cs_file):
            continue
        try:
            content = cs_file.read_text(encoding='utf-8-sig', errors='ignore')
            if 'TestCategories.' in content:
                return True
        except:
            pass
    return False


def project_has_testkit_reference(content: str) -> bool:
    """Check if csproj already references TestKit."""
    return 'StellaOps.TestKit' in content


def add_testkit_reference(csproj_path: Path, dry_run: bool = False) -> bool:
    """Add TestKit reference to csproj if needed."""
    try:
        content = csproj_path.read_text(encoding='utf-8')
    except Exception as e:
        print(f" Error reading {csproj_path}: {e}", file=sys.stderr)
        return False

    if project_has_testkit_reference(content):
        return False

    if not project_uses_testkit(csproj_path.parent):
        return False

    rel_path = get_relative_path_to_testkit(csproj_path)
    if not rel_path:
        print(f" Could not determine path to TestKit from {csproj_path}", file=sys.stderr)
        return False

    # Find a good place to insert the reference - look for existing ProjectReference
    if '<ProjectReference' in content:
        # Insert before the last </ItemGroup> that contains ProjectReference
        pattern = r'( <ProjectReference [^>]+/>\s*\n)( </ItemGroup>)'
        replacement = f'\\1 <ProjectReference Include="{rel_path}" />\n\\2'
        fixed = re.sub(pattern, replacement, content, count=1)
    else:
        # No ProjectReference, add a new ItemGroup before </Project>
        pattern = r'(</Project>)'
        new_item_group = f''' <ItemGroup>
    <ProjectReference Include="{rel_path}" />
  </ItemGroup>
\\1'''
        fixed = re.sub(pattern, new_item_group, content)

    if fixed == content:
        print(f" Could not find insertion point in {csproj_path}", file=sys.stderr)
        return False

    if not dry_run:
        csproj_path.write_text(fixed, encoding='utf-8')

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Add TestKit reference to test projects')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0

    # Find all test project files
    for csproj in root.rglob('*.Tests.csproj'):
        if add_testkit_reference(csproj, dry_run=args.dry_run):
            print(f"{'Would add' if args.dry_run else 'Added'} TestKit reference to: {csproj}")
            fixed_count += 1

    # Also check *UnitTests, *SmokeTests, etc.
    for pattern in ['*UnitTests.csproj', '*IntegrationTests.csproj', '*SmokeTests.csproj', '*FixtureTests.csproj']:
        for csproj in root.rglob(pattern):
            if add_testkit_reference(csproj, dry_run=args.dry_run):
                print(f"{'Would add' if args.dry_run else 'Added'} TestKit reference to: {csproj}")
                fixed_count += 1

    print(f"\nAdded TestKit reference to: {fixed_count} projects")


if __name__ == '__main__':
    main()
@@ -1,93 +0,0 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models for all StellaOps modules.

.DESCRIPTION
Iterates through all configured modules and runs Scaffold-Module.ps1 for each.
Use this after schema changes or for initial setup.

.PARAMETER SkipMissing
Skip modules whose projects don't exist yet (default: true)

.EXAMPLE
.\Scaffold-AllModules.ps1

.EXAMPLE
.\Scaffold-AllModules.ps1 -SkipMissing:$false
#>
param(
    [bool]$SkipMissing = $true
)

$ErrorActionPreference = "Stop"

# Module definitions: Module name -> Schema name
$modules = @(
    @{ Module = "Unknowns"; Schema = "unknowns" },
    @{ Module = "PacksRegistry"; Schema = "packs" },
    @{ Module = "Authority"; Schema = "authority" },
    @{ Module = "Scanner"; Schema = "scanner" },
    @{ Module = "Scheduler"; Schema = "scheduler" },
    @{ Module = "TaskRunner"; Schema = "taskrunner" },
    @{ Module = "Policy"; Schema = "policy" },
    @{ Module = "Notify"; Schema = "notify" },
    @{ Module = "Concelier"; Schema = "vuln" },
    @{ Module = "Excititor"; Schema = "vex" },
    @{ Module = "Signals"; Schema = "signals" },
    @{ Module = "Attestor"; Schema = "proofchain" },
    @{ Module = "Signer"; Schema = "signer" }
)

$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.Parent.FullName

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for All Modules" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host ""

$successCount = 0
$skipCount = 0
$failCount = 0

foreach ($m in $modules) {
    $projectPath = Join-Path $RepoRoot "src" $m.Module "__Libraries" "StellaOps.$($m.Module).Persistence.EfCore"

    if (-not (Test-Path "$projectPath\*.csproj")) {
        if ($SkipMissing) {
            Write-Host "SKIP: $($m.Module) - Project not found" -ForegroundColor DarkGray
            $skipCount++
            continue
        } else {
            Write-Host "FAIL: $($m.Module) - Project not found at: $projectPath" -ForegroundColor Red
            $failCount++
            continue
        }
    }

    Write-Host ""
    Write-Host ">>> Scaffolding $($m.Module)..." -ForegroundColor Magenta

    try {
        & "$ScriptDir\Scaffold-Module.ps1" -Module $m.Module -Schema $m.Schema
        $successCount++
    }
    catch {
        Write-Host "FAIL: $($m.Module) - $($_.Exception.Message)" -ForegroundColor Red
        $failCount++
    }
}

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Summary" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Success: $successCount"
Write-Host " Skipped: $skipCount"
Write-Host " Failed: $failCount"
Write-Host ""

if ($failCount -gt 0) {
    exit 1
}
@@ -1,162 +0,0 @@
<#
.SYNOPSIS
Scaffolds EF Core DbContext, entities, and compiled models from PostgreSQL schema.

.DESCRIPTION
This script performs database-first scaffolding for a StellaOps module:
1. Cleans existing generated files (Entities, CompiledModels, DbContext)
2. Scaffolds DbContext and entities from live PostgreSQL schema
3. Generates compiled models for startup performance

.PARAMETER Module
The module name (e.g., Unknowns, PacksRegistry, Authority)

.PARAMETER Schema
The PostgreSQL schema name (defaults to lowercase module name)

.PARAMETER ConnectionString
PostgreSQL connection string. If not provided, uses default dev connection.

.PARAMETER ProjectPath
Optional custom project path. Defaults to src/{Module}/__Libraries/StellaOps.{Module}.Persistence.EfCore

.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns

.EXAMPLE
.\Scaffold-Module.ps1 -Module Unknowns -Schema unknowns -ConnectionString "Host=localhost;Database=stellaops_platform;Username=unknowns_user;Password=unknowns_dev"

.EXAMPLE
.\Scaffold-Module.ps1 -Module PacksRegistry -Schema packs
#>
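# Orientation (illustrative, assembled from the argument lists below): for `-Module Unknowns`
# with defaults, the script effectively runs
#   dotnet ef dbcontext scaffold "<connection string>" Npgsql.EntityFrameworkCore.PostgreSQL `
#     --project <project path> --schema unknowns --context UnknownsDbContext `
#     --context-dir Context --output-dir Entities `
#     --namespace StellaOps.Unknowns.Persistence.EfCore.Entities `
#     --context-namespace StellaOps.Unknowns.Persistence.EfCore.Context `
#     --data-annotations --no-onconfiguring --force
# followed by `dotnet ef dbcontext optimize` to produce the compiled models.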
param(
    [Parameter(Mandatory=$true)]
    [string]$Module,

    [string]$Schema,

    [string]$ConnectionString,

    [string]$ProjectPath
)

$ErrorActionPreference = "Stop"

# Resolve repository root
$RepoRoot = (Get-Item $PSScriptRoot).Parent.Parent.Parent.FullName

# Default schema to lowercase module name
if (-not $Schema) {
    $Schema = $Module.ToLower()
}

# Default connection string
if (-not $ConnectionString) {
    $user = "${Schema}_user"
    $password = "${Schema}_dev"
    $ConnectionString = "Host=localhost;Port=5432;Database=stellaops_platform;Username=$user;Password=$password;SearchPath=$Schema"
}

# Default project path
if (-not $ProjectPath) {
    $ProjectPath = Join-Path $RepoRoot "src" $Module "__Libraries" "StellaOps.$Module.Persistence.EfCore"
}

$ContextDir = "Context"
$EntitiesDir = "Entities"
$CompiledModelsDir = "CompiledModels"

Write-Host ""
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " EF Core Scaffolding for Module: $Module" -ForegroundColor Cyan
Write-Host "============================================================================" -ForegroundColor Cyan
Write-Host " Schema: $Schema"
Write-Host " Project: $ProjectPath"
Write-Host " Connection: Host=localhost;Database=stellaops_platform;Username=${Schema}_user;..."
Write-Host ""

# Verify project exists
if (-not (Test-Path "$ProjectPath\*.csproj")) {
    Write-Error "Project not found at: $ProjectPath"
    Write-Host "Create the project first with: dotnet new classlib -n StellaOps.$Module.Persistence.EfCore"
    exit 1
}

# Step 1: Clean existing generated files
Write-Host "[1/4] Cleaning existing generated files..." -ForegroundColor Yellow
$paths = @(
    (Join-Path $ProjectPath $EntitiesDir),
    (Join-Path $ProjectPath $CompiledModelsDir),
    (Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs")
)
foreach ($path in $paths) {
    if (Test-Path $path) {
        Remove-Item -Recurse -Force $path
        Write-Host " Removed: $path" -ForegroundColor DarkGray
    }
}

# Recreate directories
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $EntitiesDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $CompiledModelsDir) | Out-Null
New-Item -ItemType Directory -Force -Path (Join-Path $ProjectPath $ContextDir) | Out-Null

# Step 2: Scaffold DbContext and entities
Write-Host "[2/4] Scaffolding DbContext and entities from schema '$Schema'..." -ForegroundColor Yellow
$scaffoldArgs = @(
    "ef", "dbcontext", "scaffold",
    "`"$ConnectionString`"",
    "Npgsql.EntityFrameworkCore.PostgreSQL",
    "--project", "`"$ProjectPath`"",
    "--schema", $Schema,
    "--context", "${Module}DbContext",
    "--context-dir", $ContextDir,
    "--output-dir", $EntitiesDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.Entities",
    "--context-namespace", "StellaOps.$Module.Persistence.EfCore.Context",
    "--data-annotations",
    "--no-onconfiguring",
    "--force"
)

$process = Start-Process -FilePath "dotnet" -ArgumentList $scaffoldArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
    Write-Error "Scaffold failed with exit code: $($process.ExitCode)"
    exit 1
}
Write-Host " Scaffolded entities to: $EntitiesDir" -ForegroundColor DarkGray

# Step 3: Generate compiled models
Write-Host "[3/4] Generating compiled models..." -ForegroundColor Yellow
$optimizeArgs = @(
    "ef", "dbcontext", "optimize",
    "--project", "`"$ProjectPath`"",
    "--context", "StellaOps.$Module.Persistence.EfCore.Context.${Module}DbContext",
    "--output-dir", $CompiledModelsDir,
    "--namespace", "StellaOps.$Module.Persistence.EfCore.CompiledModels"
)

$process = Start-Process -FilePath "dotnet" -ArgumentList $optimizeArgs -Wait -PassThru -NoNewWindow
if ($process.ExitCode -ne 0) {
    Write-Error "Compiled model generation failed with exit code: $($process.ExitCode)"
    exit 1
}
Write-Host " Generated compiled models to: $CompiledModelsDir" -ForegroundColor DarkGray

# Step 4: Summary
Write-Host "[4/4] Scaffolding complete!" -ForegroundColor Green
Write-Host ""
Write-Host "Generated files:" -ForegroundColor Cyan
$contextFile = Join-Path $ProjectPath $ContextDir "${Module}DbContext.cs"
$entityFiles = Get-ChildItem -Path (Join-Path $ProjectPath $EntitiesDir) -Filter "*.cs" -ErrorAction SilentlyContinue
$compiledFiles = Get-ChildItem -Path (Join-Path $ProjectPath $CompiledModelsDir) -Filter "*.cs" -ErrorAction SilentlyContinue

Write-Host " Context: $(if (Test-Path $contextFile) { $contextFile } else { 'Not found' })"
Write-Host " Entities: $($entityFiles.Count) files"
Write-Host " Compiled Models: $($compiledFiles.Count) files"
Write-Host ""
Write-Host "Next steps:" -ForegroundColor Yellow
Write-Host " 1. Review generated entities for any customization needs"
Write-Host " 2. Create repository implementations in Repositories/"
Write-Host " 3. Add DI registration in Extensions/"
Write-Host ""
@@ -1,88 +0,0 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding for All StellaOps Modules
# ============================================================================
# Iterates through all configured modules and runs scaffold-module.sh for each.
# Use this after schema changes or for initial setup.
#
# Usage: ./scaffold-all-modules.sh [--no-skip-missing]
# ============================================================================

set -e

SKIP_MISSING=true
if [ "$1" = "--no-skip-missing" ]; then
    SKIP_MISSING=false
fi

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Module definitions: "Module:Schema"
MODULES=(
    "Unknowns:unknowns"
    "PacksRegistry:packs"
    "Authority:authority"
    "Scanner:scanner"
    "Scheduler:scheduler"
    "TaskRunner:taskrunner"
    "Policy:policy"
    "Notify:notify"
    "Concelier:vuln"
    "Excititor:vex"
    "Signals:signals"
    "Attestor:proofchain"
    "Signer:signer"
)

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for All Modules"
echo "============================================================================"
echo ""

SUCCESS_COUNT=0
SKIP_COUNT=0
FAIL_COUNT=0

for entry in "${MODULES[@]}"; do
    MODULE="${entry%%:*}"
    SCHEMA="${entry##*:}"

    PROJECT_PATH="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"

    if [ ! -f "$PROJECT_PATH"/*.csproj ]; then
        if [ "$SKIP_MISSING" = true ]; then
            echo "SKIP: $MODULE - Project not found"
            # Plain arithmetic assignment: under `set -e`, ((VAR++)) would abort the
            # script when the pre-increment value is 0.
            SKIP_COUNT=$((SKIP_COUNT + 1))
            continue
        else
            echo "FAIL: $MODULE - Project not found at: $PROJECT_PATH"
            FAIL_COUNT=$((FAIL_COUNT + 1))
            continue
        fi
    fi

    echo ""
    echo ">>> Scaffolding $MODULE..."

    if "$SCRIPT_DIR/scaffold-module.sh" "$MODULE" "$SCHEMA"; then
        SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
    else
        echo "FAIL: $MODULE - Scaffolding failed"
        FAIL_COUNT=$((FAIL_COUNT + 1))
    fi
done

echo ""
echo "============================================================================"
echo " Summary"
echo "============================================================================"
echo " Success: $SUCCESS_COUNT"
echo " Skipped: $SKIP_COUNT"
echo " Failed: $FAIL_COUNT"
echo ""

if [ "$FAIL_COUNT" -gt 0 ]; then
    exit 1
fi
@@ -1,113 +0,0 @@
#!/bin/bash
# ============================================================================
# EF Core Scaffolding Script for StellaOps Modules
# ============================================================================
# Usage: ./scaffold-module.sh <Module> [Schema] [ConnectionString]
#
# Examples:
# ./scaffold-module.sh Unknowns
# ./scaffold-module.sh Unknowns unknowns
# ./scaffold-module.sh PacksRegistry packs "Host=localhost;..."
# ============================================================================

set -e

MODULE=$1
SCHEMA=${2:-$(echo "$MODULE" | tr '[:upper:]' '[:lower:]')}
CONNECTION_STRING=$3

if [ -z "$MODULE" ]; then
    echo "Usage: $0 <Module> [Schema] [ConnectionString]"
    echo ""
    echo "Examples:"
    echo " $0 Unknowns"
    echo " $0 Unknowns unknowns"
    echo " $0 PacksRegistry packs \"Host=localhost;Database=stellaops_platform;Username=packs_user;Password=packs_dev\""
    exit 1
fi

# Resolve repository root
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Default connection string
if [ -z "$CONNECTION_STRING" ]; then
    USER="${SCHEMA}_user"
    PASSWORD="${SCHEMA}_dev"
    CONNECTION_STRING="Host=localhost;Port=5432;Database=stellaops_platform;Username=$USER;Password=$PASSWORD;SearchPath=$SCHEMA"
fi

PROJECT_DIR="$REPO_ROOT/src/$MODULE/__Libraries/StellaOps.$MODULE.Persistence.EfCore"
CONTEXT_DIR="Context"
ENTITIES_DIR="Entities"
COMPILED_DIR="CompiledModels"

echo ""
echo "============================================================================"
echo " EF Core Scaffolding for Module: $MODULE"
echo "============================================================================"
echo " Schema: $SCHEMA"
echo " Project: $PROJECT_DIR"
echo " Connection: Host=localhost;Database=stellaops_platform;Username=${SCHEMA}_user;..."
echo ""

# Verify project exists
if [ ! -f "$PROJECT_DIR"/*.csproj ]; then
    echo "ERROR: Project not found at: $PROJECT_DIR"
    echo "Create the project first with: dotnet new classlib -n StellaOps.$MODULE.Persistence.EfCore"
    exit 1
fi

# Step 1: Clean existing generated files
echo "[1/4] Cleaning existing generated files..."
rm -rf "$PROJECT_DIR/$ENTITIES_DIR"
rm -rf "$PROJECT_DIR/$COMPILED_DIR"
rm -f "$PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"

mkdir -p "$PROJECT_DIR/$ENTITIES_DIR"
mkdir -p "$PROJECT_DIR/$COMPILED_DIR"
mkdir -p "$PROJECT_DIR/$CONTEXT_DIR"

echo " Cleaned: $ENTITIES_DIR, $COMPILED_DIR, ${MODULE}DbContext.cs"

# Step 2: Scaffold DbContext and entities
echo "[2/4] Scaffolding DbContext and entities from schema '$SCHEMA'..."
dotnet ef dbcontext scaffold \
    "$CONNECTION_STRING" \
    Npgsql.EntityFrameworkCore.PostgreSQL \
    --project "$PROJECT_DIR" \
    --schema "$SCHEMA" \
    --context "${MODULE}DbContext" \
    --context-dir "$CONTEXT_DIR" \
    --output-dir "$ENTITIES_DIR" \
    --namespace "StellaOps.$MODULE.Persistence.EfCore.Entities" \
    --context-namespace "StellaOps.$MODULE.Persistence.EfCore.Context" \
    --data-annotations \
    --no-onconfiguring \
    --force

echo " Scaffolded entities to: $ENTITIES_DIR"

# Step 3: Generate compiled models
echo "[3/4] Generating compiled models..."
dotnet ef dbcontext optimize \
    --project "$PROJECT_DIR" \
    --context "StellaOps.$MODULE.Persistence.EfCore.Context.${MODULE}DbContext" \
    --output-dir "$COMPILED_DIR" \
    --namespace "StellaOps.$MODULE.Persistence.EfCore.CompiledModels"

echo " Generated compiled models to: $COMPILED_DIR"

# Step 4: Summary
echo "[4/4] Scaffolding complete!"
echo ""
echo "Generated files:"
echo " Context: $PROJECT_DIR/$CONTEXT_DIR/${MODULE}DbContext.cs"
echo " Entities: $(ls -1 "$PROJECT_DIR/$ENTITIES_DIR"/*.cs 2>/dev/null | wc -l) files"
echo " Compiled Models: $(ls -1 "$PROJECT_DIR/$COMPILED_DIR"/*.cs 2>/dev/null | wc -l) files"
echo ""
echo "Next steps:"
echo " 1. Review generated entities for any customization needs"
echo " 2. Create repository implementations in Repositories/"
echo " 3. Add DI registration in Extensions/"
echo ""
@@ -1,100 +0,0 @@
#!/usr/bin/env pwsh
# fix-duplicate-packages.ps1 - Remove duplicate PackageReference items from test projects
# These are already provided by Directory.Build.props

param([switch]$DryRun)

$packagesToRemove = @(
    "coverlet.collector",
    "Microsoft.NET.Test.Sdk",
    "Microsoft.AspNetCore.Mvc.Testing",
    "xunit",
    "xunit.runner.visualstudio",
    "Microsoft.Extensions.TimeProvider.Testing"
)

$sharpCompressPackage = "SharpCompress"

# Find all test project files
$testProjects = Get-ChildItem -Path "src" -Filter "*.Tests.csproj" -Recurse
$corpusProjects = Get-ChildItem -Path "src" -Filter "*.Corpus.*.csproj" -Recurse

Write-Host "=== Fix Duplicate Package References ===" -ForegroundColor Cyan
Write-Host "Found $($testProjects.Count) test projects" -ForegroundColor Yellow
Write-Host "Found $($corpusProjects.Count) corpus projects (SharpCompress)" -ForegroundColor Yellow

$fixedCount = 0

foreach ($proj in $testProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false

    # Skip projects that opt out of common test infrastructure
    if ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") {
        Write-Host " Skipped (UseConcelierTestInfra=false): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }

    foreach ($pkg in $packagesToRemove) {
        # Match PackageReference for this package (various formats)
        $patterns = @(
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
            "(?s)\s*<PackageReference\s+Include=`"$pkg`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
        )

        foreach ($pattern in $patterns) {
            if ($content -match $pattern) {
                $content = $content -replace $pattern, ""
                $modified = $true
            }
        }
    }

    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""
    # Clean up ItemGroups with only whitespace/comments
    $content = $content -replace "(?s)<ItemGroup>\s*<!--[^-]*-->\s*</ItemGroup>", ""

    if ($modified) {
        $fixedCount++
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}

# Fix SharpCompress in corpus projects
foreach ($proj in $corpusProjects) {
    $content = Get-Content $proj.FullName -Raw
    $modified = $false

    $patterns = @(
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*/>\r?\n?",
        "(?s)\s*<PackageReference\s+Include=`"$sharpCompressPackage`"\s+Version=`"[^`"]+`"\s*>\s*</PackageReference>\r?\n?"
    )

    foreach ($pattern in $patterns) {
        if ($content -match $pattern) {
            $content = $content -replace $pattern, ""
            $modified = $true
        }
    }

    # Clean up empty ItemGroups
    $content = $content -replace "(?s)\s*<ItemGroup>\s*</ItemGroup>", ""

    if ($modified) {
        $fixedCount++
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        if (-not $DryRun) {
            $content | Set-Content $proj.FullName -NoNewline
        }
    }
}

Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
if ($DryRun) {
    Write-Host "(Dry run - no changes made)" -ForegroundColor Yellow
}
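# Illustration of what the patterns above match and strip (version number is hypothetical):
#   <PackageReference Include="xunit" Version="2.9.2" />
# Such entries are redundant because Directory.Build.props already supplies these packages.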
@@ -1,55 +0,0 @@
#!/usr/bin/env pwsh
# fix-duplicate-projects.ps1 - Remove duplicate project entries from solution file

param(
    [string]$SlnPath = "src/StellaOps.sln"
)

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

$projectNames = @{}
$duplicateGuids = @()
$newLines = @()
$skipNextEndProject = $false

foreach ($line in $lines) {
    if ($skipNextEndProject -and $line -eq "EndProject") {
        $skipNextEndProject = $false
        continue
    }

    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]

        if ($projectNames.ContainsKey($name)) {
            $duplicateGuids += $guid
            Write-Host "Removing duplicate: $name ($guid)"
            $skipNextEndProject = $true
            continue
        } else {
            $projectNames[$name] = $true
        }
    }

    $newLines += $line
}

# Also remove duplicate GUIDs from GlobalSection
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

# Join with CRLF before writing: piping the array straight to Out-File -NoNewline
# would concatenate the lines without separators and corrupt the solution file.
($finalLines -join "`r`n") | Out-File -FilePath $SlnPath -Encoding UTF8 -NoNewline
Write-Host "`nRemoved $($duplicateGuids.Count) duplicate projects"
@@ -1,55 +0,0 @@
# Fix duplicate "using StellaOps.TestKit;" statements in C# files
# The pattern shows files have this statement both at top (correct) and in middle (wrong)
# This script removes all occurrences AFTER the first one

$ErrorActionPreference = "Stop"

$srcPath = Join-Path $PSScriptRoot "..\..\src"
$pattern = "using StellaOps.TestKit;"

# Find all .cs files containing the pattern
$files = Get-ChildItem -Path $srcPath -Recurse -Filter "*.cs" |
    Where-Object { (Get-Content $_.FullName -Raw) -match [regex]::Escape($pattern) }

Write-Host "Found $($files.Count) files with 'using StellaOps.TestKit;'" -ForegroundColor Cyan

$fixedCount = 0
$errorCount = 0

foreach ($file in $files) {
    try {
        $lines = Get-Content $file.FullName
        $newLines = @()
        $foundFirst = $false
        $removedAny = $false

        foreach ($line in $lines) {
            if ($line.Trim() -eq $pattern) {
                if (-not $foundFirst) {
                    # Keep the first occurrence
                    $newLines += $line
                    $foundFirst = $true
                } else {
                    # Skip subsequent occurrences
                    $removedAny = $true
                }
            } else {
                $newLines += $line
            }
        }

        if ($removedAny) {
            $newLines | Set-Content -Path $file.FullName -Encoding UTF8
            Write-Host "Fixed: $($file.Name)" -ForegroundColor Green
            $fixedCount++
        }
    } catch {
        Write-Host "Error processing $($file.FullName): $_" -ForegroundColor Red
        $errorCount++
    }
}

Write-Host ""
Write-Host "Summary:" -ForegroundColor Cyan
Write-Host " Files fixed: $fixedCount" -ForegroundColor Green
Write-Host " Errors: $errorCount" -ForegroundColor $(if ($errorCount -gt 0) { "Red" } else { "Green" })
@@ -1,69 +0,0 @@
#!/usr/bin/env pwsh
<#
.SYNOPSIS
Fixes misplaced 'using StellaOps.TestKit;' statements in test files.

.DESCRIPTION
The validate-test-traits.py --fix script has a bug that inserts
'using StellaOps.TestKit;' after 'using var' statements inside methods,
causing compilation errors.

This script:
1. Finds all affected .cs files
2. Removes the misplaced 'using StellaOps.TestKit;' lines
3. Ensures 'using StellaOps.TestKit;' exists at the top of the file
#>
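# Illustration of the pattern being repaired (the test code below is hypothetical):
#   broken - the directive was injected inside a method, right after a 'using var' statement:
#       using var scope = factory.Services.CreateScope();
#       using StellaOps.TestKit;   // misplaced, does not compile
#   fixed - the misplaced line is removed and, if needed, the directive is added to the
#   using block at the top of the file.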
param(
    [string]$Path = "src",
    [switch]$DryRun
)

$ErrorActionPreference = "Stop"

# Pattern to find misplaced using statements (after 'using var' in method body)
$brokenPattern = "(?m)^(\s*using var .+;\s*\r?\n)(using StellaOps\.TestKit;\s*\r?\n)"

# Counter for fixed files
$fixedCount = 0
$checkedCount = 0

# Get all .cs test files
$files = Get-ChildItem -Path $Path -Recurse -Include "*.cs" |
    Where-Object { $_.FullName -match "Tests?" }

foreach ($file in $files) {
    $checkedCount++
    $content = Get-Content -Path $file.FullName -Raw -Encoding UTF8

    # Check if file has the broken pattern
    if ($content -match $brokenPattern) {
        Write-Host "Fixing: $($file.FullName)" -ForegroundColor Yellow

        # Remove all misplaced 'using StellaOps.TestKit;' lines
        $fixed = $content -replace $brokenPattern, '$1'

        # Check if 'using StellaOps.TestKit;' exists at the top of the file (in the using block)
        $hasTopUsing = $fixed -match "(?m)^using StellaOps\.TestKit;\s*$"

        if (-not $hasTopUsing) {
            # Find the last 'using' statement at the top of the file and add after it
            $fixed = $fixed -replace "(?m)(^using [^;]+;\s*\r?\n)(?!using)", "`$1using StellaOps.TestKit;`r`n"
        }

        if (-not $DryRun) {
            # Preserve BOM if original file had one
            $encoding = [System.Text.UTF8Encoding]::new($true)
            [System.IO.File]::WriteAllText($file.FullName, $fixed, $encoding)
        }

        $fixedCount++
    }
}

Write-Host "`nChecked: $checkedCount files" -ForegroundColor Cyan
Write-Host "Fixed: $fixedCount files" -ForegroundColor Green

if ($DryRun) {
    Write-Host "`n(Dry run - no files were modified)" -ForegroundColor Magenta
}
@@ -1,109 +0,0 @@
#!/usr/bin/env python3
"""
Fixes misplaced 'using StellaOps.TestKit;' statements in test files.

The validate-test-traits.py --fix script has a bug that inserts
'using StellaOps.TestKit;' after 'using var' statements inside methods,
causing CS1001 compilation errors.

This script:
1. Finds all affected .cs files
2. Removes the misplaced 'using StellaOps.TestKit;' lines (inside methods)
3. Ensures 'using StellaOps.TestKit;' exists at the top of the file
"""

import os
import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Fix a single file by removing misplaced using statements."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')  # Handle BOM
    except Exception as e:
        print(f" Error reading {file_path}: {e}", file=sys.stderr)
        return False

    original = content

    # Pattern to find 'using var' followed by 'using StellaOps.TestKit;' (bug)
    # This matches the broken pattern inside method bodies
    broken_pattern = re.compile(
        r'(using var [^;]+;\s*\n)(using StellaOps\.TestKit;\s*\n)',
        re.MULTILINE
    )

    # Check if file has the broken pattern
    if not broken_pattern.search(content):
        return False

    # Remove all misplaced 'using StellaOps.TestKit;' lines after 'using var'
    fixed = broken_pattern.sub(r'\1', content)

    # Check if 'using StellaOps.TestKit;' exists at top of file (before namespace)
    namespace_match = re.search(r'^namespace\s+\w+', fixed, re.MULTILINE)
    if namespace_match:
        top_section = fixed[:namespace_match.start()]
        has_top_using = 'using StellaOps.TestKit;' in top_section

        if not has_top_using:
            # Find the last 'using' statement before namespace and add after it
            last_using = None
            for match in re.finditer(r'^using [^;]+;\s*$', top_section, re.MULTILINE):
                last_using = match

            if last_using:
                insert_pos = last_using.end()
                fixed = fixed[:insert_pos] + '\nusing StellaOps.TestKit;' + fixed[insert_pos:]

    if fixed != original:
        if not dry_run:
            # Preserve UTF-8 BOM if present
            encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
            file_path.write_text(fixed, encoding=encoding)
        return True

    return False


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Fix misplaced using statements')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    if not root.exists():
        print(f"Path not found: {root}", file=sys.stderr)
        sys.exit(1)

    fixed_count = 0
    checked_count = 0

    # Find all test .cs files
    for file_path in root.rglob('*.cs'):
        # Skip non-test files
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue
        if 'Test' not in str(file_path):
            continue

        checked_count += 1
        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would fix' if args.dry_run else 'Fixed'}: {file_path}")
            fixed_count += 1

    print(f"\nChecked: {checked_count} files")
    print(f"Fixed: {fixed_count} files")

    if args.dry_run:
        print("\n(Dry run - no files were modified)")


if __name__ == '__main__':
    main()
@@ -1,82 +0,0 @@
#!/usr/bin/env python3
"""
Adds 'using StellaOps.TestKit;' to files that use TestCategories but are missing the import.
"""

import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Add using StellaOps.TestKit; to files that need it."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')
    except Exception as e:
        print(f" Error reading {file_path}: {e}", file=sys.stderr)
        return False

    # Check if file uses TestCategories
    if 'TestCategories.' not in content:
        return False

    # Check if 'using StellaOps.TestKit;' exists anywhere in the file
    if 'using StellaOps.TestKit;' in content:
        return False

    # Find the namespace declaration
    namespace_match = re.search(r'^namespace\s+[\w.]+', content, re.MULTILINE)
    if not namespace_match:
        print(f" No namespace found in {file_path}", file=sys.stderr)
        return False

    # Find the last 'using' statement before the namespace
    top_section = content[:namespace_match.start()]
    last_using = None
    for match in re.finditer(r'^using [^;]+;\s*$', top_section, re.MULTILINE):
        last_using = match

    if last_using:
        insert_pos = last_using.end()
        fixed = content[:insert_pos] + '\nusing StellaOps.TestKit;' + content[insert_pos:]
    else:
        # No using statements, add at the beginning
        fixed = 'using StellaOps.TestKit;\n' + content

    if not dry_run:
        encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
        file_path.write_text(fixed, encoding=encoding)

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Add missing using StellaOps.TestKit statements')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0
    checked_count = 0

    for file_path in root.rglob('*.cs'):
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue
        if 'Test' not in str(file_path):
            continue

        checked_count += 1
        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would add' if args.dry_run else 'Added'} using to: {file_path}")
            fixed_count += 1

    print(f"\nChecked: {checked_count} files")
    print(f"Fixed: {fixed_count} files")


if __name__ == '__main__':
    main()
@@ -1,51 +0,0 @@
# Fix projects with UseConcelierTestInfra=false that don't have xunit
# These projects relied on TestKit for xunit, but now need their own reference

$ErrorActionPreference = "Stop"
$srcPath = "E:\dev\git.stella-ops.org\src"

# Find test projects with UseConcelierTestInfra=false
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        (-not ($content -match "xunit\.v3")) -and # Skip xunit.v3 projects
        (-not ($content -match '<PackageReference\s+Include="xunit"')) # Skip projects that already have xunit
    }

Write-Host "Found $($projects.Count) projects needing xunit" -ForegroundColor Cyan

$xunitPackages = @'
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
'@

$fixedCount = 0

foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw

    # Check if it has an ItemGroup with PackageReference
    if ($content -match '(<ItemGroup>[\s\S]*?<PackageReference)') {
        # Add xunit packages after first PackageReference ItemGroup opening
        $newContent = $content -replace '(<ItemGroup>\s*\r?\n)(\s*<PackageReference)', "`$1$xunitPackages`n`$2"
    } else {
        # No PackageReference ItemGroup, add one before </Project>
        $itemGroup = @"

  <ItemGroup>
$xunitPackages
  </ItemGroup>
"@
        $newContent = $content -replace '</Project>', "$itemGroup`n</Project>"
    }

    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
@@ -1,44 +0,0 @@
# Fix project references in src/__Tests/** that point to wrong relative paths
# Pattern: ../../<Module>/... should be ../../../<Module>/...

$ErrorActionPreference = "Stop"
$testsPath = "E:\dev\git.stella-ops.org\src\__Tests"

# Known module prefixes that exist at src/<Module>/
$modules = @("Signals", "Scanner", "Concelier", "Scheduler", "Authority", "Attestor",
    "BinaryIndex", "EvidenceLocker", "Excititor", "ExportCenter", "Gateway",
    "Graph", "IssuerDirectory", "Notify", "Orchestrator", "Policy", "AirGap",
    "Provenance", "Replay", "RiskEngine", "SbomService", "Signer", "TaskRunner",
    "Telemetry", "TimelineIndexer", "Unknowns", "VexHub", "VexLens", "VulnExplorer",
    "Zastava", "Cli", "Aoc", "Web", "Bench", "Cryptography", "PacksRegistry",
    "Notifier", "Findings")

$fixedCount = 0

Get-ChildItem -Path $testsPath -Recurse -Filter "*.csproj" | ForEach-Object {
    $proj = $_
    $content = Get-Content $proj.FullName -Raw
    $originalContent = $content

    foreach ($module in $modules) {
        # Fix ../../<Module>/ to ../../../<Module>/
        # But not ../../../<Module> (already correct)
        $pattern = "Include=`"../../$module/"
        $replacement = "Include=`"../../../$module/"

        if ($content -match [regex]::Escape($pattern) -and $content -notmatch [regex]::Escape("Include=`"../../../$module/")) {
            $content = $content -replace [regex]::Escape($pattern), $replacement
        }
    }

    # Fix __Libraries references that are one level short
    $content = $content -replace 'Include="../../__Libraries/', 'Include="../../../__Libraries/'

    if ($content -ne $originalContent) {
        Set-Content -Path $proj.FullName -Value $content -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
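# Illustration (the project path below is hypothetical): a reference such as
#   <ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
# is rewritten to
#   <ProjectReference Include="../../../Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />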
@@ -1,68 +0,0 @@
#!/usr/bin/env pwsh
# fix-sln-duplicates.ps1 - Remove duplicate project entries from solution file

param(
    [string]$SlnPath = "src/StellaOps.sln"
)

$ErrorActionPreference = "Stop"

Write-Host "=== Solution Duplicate Cleanup ===" -ForegroundColor Cyan
Write-Host "Solution: $SlnPath"

$content = Get-Content $SlnPath -Raw
$lines = $content -split "`r?`n"

# Track seen project names
$seenProjects = @{}
$duplicateGuids = @()
$newLines = @()
$skipNext = $false

for ($i = 0; $i -lt $lines.Count; $i++) {
    $line = $lines[$i]

    if ($skipNext) {
        $skipNext = $false
        continue
    }

    # Check for project declaration
    if ($line -match 'Project\(.+\) = "([^"]+)",.*\{([A-F0-9-]+)\}"?$') {
        $name = $Matches[1]
        $guid = $Matches[2]

        if ($seenProjects.ContainsKey($name)) {
            Write-Host "Removing duplicate: $name ($guid)" -ForegroundColor Yellow
            $duplicateGuids += $guid
            # Skip this line and the next EndProject line
            $skipNext = $true
            continue
        } else {
            $seenProjects[$name] = $true
        }
    }

    $newLines += $line
}

# Remove GlobalSection references to duplicate GUIDs
$finalLines = @()
foreach ($line in $newLines) {
    $skip = $false
    foreach ($guid in $duplicateGuids) {
        if ($line -match $guid) {
            $skip = $true
            break
        }
    }
    if (-not $skip) {
        $finalLines += $line
    }
}

# Write back
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline

Write-Host ""
Write-Host "Removed $($duplicateGuids.Count) duplicate projects" -ForegroundColor Green
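# Illustration of the .sln shape the regex above targets (project name and GUIDs are hypothetical):
#   Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Example.Tests", "Example\StellaOps.Example.Tests.csproj", "{11111111-2222-3333-4444-555555555555}"
#   EndProject
# A repeated Project entry with the same name is dropped together with its EndProject line
# and any GlobalSection rows that mention its GUID.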
@@ -1,57 +0,0 @@
#!/usr/bin/env python3
"""
Fixes missing newline between 'using StellaOps.TestKit;' and 'namespace'.
"""

import re
import sys
from pathlib import Path


def fix_file(file_path: Path, dry_run: bool = False) -> bool:
    """Add newline between using StellaOps.TestKit; and namespace."""
    try:
        content = file_path.read_text(encoding='utf-8-sig')
    except Exception as e:
        print(f" Error reading {file_path}: {e}", file=sys.stderr)
        return False

    # Pattern: using StellaOps.TestKit;namespace
    if 'TestKit;namespace' not in content:
        return False

    # Fix: Add newline between them
    fixed = content.replace('TestKit;namespace', 'TestKit;\nnamespace')

    if not dry_run:
        encoding = 'utf-8-sig' if content.startswith('\ufeff') else 'utf-8'
        file_path.write_text(fixed, encoding=encoding)

    return True


def main():
    import argparse
    parser = argparse.ArgumentParser(description='Fix missing newline between using and namespace')
    parser.add_argument('--path', default='src', help='Path to scan')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be fixed')
    args = parser.parse_args()

    root = Path(args.path)
    fixed_count = 0

    for file_path in root.rglob('*.cs'):
        if '/obj/' in str(file_path) or '/bin/' in str(file_path):
            continue
        if 'node_modules' in str(file_path):
            continue

        if fix_file(file_path, dry_run=args.dry_run):
            print(f"{'Would fix' if args.dry_run else 'Fixed'}: {file_path}")
            fixed_count += 1

    print(f"\nFixed: {fixed_count} files")


if __name__ == '__main__':
    main()
@@ -1,40 +0,0 @@
# Add <Using Include="Xunit" /> to test projects with UseConcelierTestInfra=false
# that have xunit but don't have the global using

$ErrorActionPreference = "Stop"
$srcPath = "E:\dev\git.stella-ops.org\src"

# Find test projects with UseConcelierTestInfra=false that have xunit but no Using Include="Xunit"
$projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object {
        $content = Get-Content $_.FullName -Raw
        ($content -match "<UseConcelierTestInfra>\s*false\s*</UseConcelierTestInfra>") -and
        ($content -match '<PackageReference\s+Include="xunit"') -and
        (-not ($content -match '<Using\s+Include="Xunit"'))
    }

Write-Host "Found $($projects.Count) projects needing Xunit using" -ForegroundColor Cyan

$fixedCount = 0

foreach ($proj in $projects) {
    $content = Get-Content $proj.FullName -Raw

    # Add Using Include="Xunit" before first ProjectReference ItemGroup or at end
    if ($content -match '(<ItemGroup>\s*\r?\n\s*<ProjectReference)') {
        $usingBlock = " <ItemGroup>`n <Using Include=`"Xunit`" />`n </ItemGroup>`n`n"
        $newContent = $content -replace '(\s*)(<ItemGroup>\s*\r?\n\s*<ProjectReference)', "$usingBlock`$1`$2"
    } else {
        # Add before </Project>
        $usingBlock = "`n <ItemGroup>`n <Using Include=`"Xunit`" />`n </ItemGroup>`n"
        $newContent = $content -replace '</Project>', "$usingBlock</Project>"
    }

    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host "Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host "`nFixed $fixedCount projects" -ForegroundColor Cyan
@@ -1,37 +0,0 @@
# Fix xunit.v3 projects that conflict with Directory.Build.props xunit 2.x
# Add UseConcelierTestInfra=false to exclude them from common test infrastructure

$ErrorActionPreference = "Stop"

$srcPath = Join-Path $PSScriptRoot "..\..\src"

# Find all csproj files that reference xunit.v3
$xunitV3Projects = Get-ChildItem -Path $srcPath -Recurse -Filter "*.csproj" |
    Where-Object { (Get-Content $_.FullName -Raw) -match "xunit\.v3" }

Write-Host "Found $($xunitV3Projects.Count) projects with xunit.v3" -ForegroundColor Cyan

$fixedCount = 0

foreach ($proj in $xunitV3Projects) {
    $content = Get-Content $proj.FullName -Raw

    # Check if already has UseConcelierTestInfra set
    if ($content -match "<UseConcelierTestInfra>") {
        Write-Host " Skipped (already configured): $($proj.Name)" -ForegroundColor DarkGray
        continue
    }

    # Add UseConcelierTestInfra=false after the first <PropertyGroup>
    $newContent = $content -replace "(<PropertyGroup>)", "`$1`n <UseConcelierTestInfra>false</UseConcelierTestInfra>"

    # Only write if changed
    if ($newContent -ne $content) {
        Set-Content -Path $proj.FullName -Value $newContent -NoNewline
        Write-Host " Fixed: $($proj.Name)" -ForegroundColor Green
        $fixedCount++
    }
}

Write-Host ""
Write-Host "Fixed $fixedCount projects" -ForegroundColor Cyan
@@ -1,247 +0,0 @@
<#
.SYNOPSIS
Generates plugin configuration files for StellaOps modules.

.DESCRIPTION
This script generates plugin.json manifests and config.yaml files for all
plugins based on the plugin catalog definition.

.PARAMETER RepoRoot
Path to the repository root. Defaults to the parent of the devops folder.

.PARAMETER OutputDir
Output directory for generated configs. Defaults to etc/plugins/.

.PARAMETER Force
Overwrite existing configuration files.

.EXAMPLE
.\generate-plugin-configs.ps1
.\generate-plugin-configs.ps1 -Force
#>

param(
    [string]$RepoRoot = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
    [string]$OutputDir = "",
    [switch]$Force
)

if (-not $OutputDir) {
    $OutputDir = Join-Path $RepoRoot "etc/plugins"
}

# Plugin catalog - defines all plugins and their metadata
$PluginCatalog = @{
    # Router transports
    "router/transports" = @{
        category = "router.transports"
        plugins = @(
            @{ id = "tcp"; name = "TCP Transport"; assembly = "StellaOps.Router.Transport.Tcp.dll"; enabled = $true; priority = 50 }
            @{ id = "tls"; name = "TLS Transport"; assembly = "StellaOps.Router.Transport.Tls.dll"; enabled = $true; priority = 60 }
            @{ id = "udp"; name = "UDP Transport"; assembly = "StellaOps.Router.Transport.Udp.dll"; enabled = $false; priority = 40 }
            @{ id = "rabbitmq"; name = "RabbitMQ Transport"; assembly = "StellaOps.Router.Transport.RabbitMq.dll"; enabled = $false; priority = 30 }
            @{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Router.Transport.InMemory.dll"; enabled = $false; priority = 10 }
        )
    }

    # Excititor connectors
    "excititor" = @{
        category = "excititor.connectors"
        plugins = @(
            @{ id = "redhat-csaf"; name = "Red Hat CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.RedHat.CSAF.dll"; enabled = $true; priority = 100; vendor = "Red Hat" }
            @{ id = "cisco-csaf"; name = "Cisco CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Cisco.CSAF.dll"; enabled = $false; priority = 90; vendor = "Cisco" }
            @{ id = "msrc-csaf"; name = "Microsoft CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.MSRC.CSAF.dll"; enabled = $false; priority = 85; vendor = "Microsoft" }
            @{ id = "oracle-csaf"; name = "Oracle CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Oracle.CSAF.dll"; enabled = $false; priority = 80; vendor = "Oracle" }
            @{ id = "ubuntu-csaf"; name = "Ubuntu CSAF Connector"; assembly = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.dll"; enabled = $false; priority = 75; vendor = "Canonical" }
            @{ id = "suse-rancher"; name = "SUSE Rancher VEX Hub"; assembly = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.dll"; enabled = $false; priority = 70; vendor = "SUSE" }
            @{ id = "oci-openvex"; name = "OCI OpenVEX Connector"; assembly = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.dll"; enabled = $false; priority = 60 }
        )
    }

    # Scanner language analyzers
    "scanner/analyzers/lang" = @{
        category = "scanner.analyzers.lang"
        plugins = @(
            @{ id = "dotnet"; name = ".NET Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.DotNet.dll"; enabled = $true; priority = 100 }
            @{ id = "go"; name = "Go Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Go.dll"; enabled = $true; priority = 95 }
            @{ id = "node"; name = "Node.js Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Node.dll"; enabled = $true; priority = 90 }
            @{ id = "python"; name = "Python Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Python.dll"; enabled = $true; priority = 85 }
            @{ id = "java"; name = "Java Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Java.dll"; enabled = $true; priority = 80 }
            @{ id = "rust"; name = "Rust Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Rust.dll"; enabled = $false; priority = 75 }
            @{ id = "ruby"; name = "Ruby Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Ruby.dll"; enabled = $false; priority = 70 }
            @{ id = "php"; name = "PHP Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Php.dll"; enabled = $false; priority = 65 }
            @{ id = "swift"; name = "Swift Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Swift.dll"; enabled = $false; priority = 60 }
            @{ id = "cpp"; name = "C/C++ Analyzer"; assembly = "StellaOps.Scanner.Analyzers.Lang.Cpp.dll"; enabled = $false; priority = 55 }
        )
    }

    # Scanner OS analyzers
    "scanner/analyzers/os" = @{
        category = "scanner.analyzers.os"
        plugins = @(
            @{ id = "apk"; name = "Alpine APK Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Apk.dll"; enabled = $true; priority = 100 }
            @{ id = "dpkg"; name = "Debian DPKG Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Dpkg.dll"; enabled = $true; priority = 95 }
            @{ id = "rpm"; name = "RPM Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Rpm.dll"; enabled = $true; priority = 90 }
            @{ id = "pacman"; name = "Arch Pacman Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Pacman.dll"; enabled = $false; priority = 80 }
            @{ id = "homebrew"; name = "Homebrew Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Homebrew.dll"; enabled = $false; priority = 70 }
            @{ id = "chocolatey"; name = "Chocolatey Analyzer"; assembly = "StellaOps.Scanner.Analyzers.OS.Chocolatey.dll"; enabled = $false; priority = 65 }
        )
    }

    # Notify channels
    "notify" = @{
        category = "notify.channels"
        plugins = @(
            @{ id = "email"; name = "Email Notifier"; assembly = "StellaOps.Notify.Connectors.Email.dll"; enabled = $true; priority = 100 }
            @{ id = "slack"; name = "Slack Notifier"; assembly = "StellaOps.Notify.Connectors.Slack.dll"; enabled = $true; priority = 90 }
            @{ id = "webhook"; name = "Webhook Notifier"; assembly = "StellaOps.Notify.Connectors.Webhook.dll"; enabled = $true; priority = 80 }
            @{ id = "teams"; name = "Microsoft Teams Notifier"; assembly = "StellaOps.Notify.Connectors.Teams.dll"; enabled = $false; priority = 85 }
            @{ id = "pagerduty"; name = "PagerDuty Notifier"; assembly = "StellaOps.Notify.Connectors.PagerDuty.dll"; enabled = $false; priority = 75 }
            @{ id = "opsgenie"; name = "OpsGenie Notifier"; assembly = "StellaOps.Notify.Connectors.OpsGenie.dll"; enabled = $false; priority = 70 }
            @{ id = "telegram"; name = "Telegram Notifier"; assembly = "StellaOps.Notify.Connectors.Telegram.dll"; enabled = $false; priority = 65 }
            @{ id = "discord"; name = "Discord Notifier"; assembly = "StellaOps.Notify.Connectors.Discord.dll"; enabled = $false; priority = 60 }
        )
    }

    # Messaging transports
    "messaging" = @{
        category = "messaging.transports"
        plugins = @(
            @{ id = "valkey"; name = "Valkey Transport"; assembly = "StellaOps.Messaging.Transport.Valkey.dll"; enabled = $true; priority = 100 }
            @{ id = "postgres"; name = "PostgreSQL Transport"; assembly = "StellaOps.Messaging.Transport.Postgres.dll"; enabled = $false; priority = 90 }
            @{ id = "inmemory"; name = "In-Memory Transport"; assembly = "StellaOps.Messaging.Transport.InMemory.dll"; enabled = $false; priority = 10 }
        )
    }
}

function New-PluginManifest {
    param(
        [string]$ModulePath,
        [hashtable]$Plugin,
        [string]$Category
    )

    $fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"

    $manifest = @{
        '$schema' = "https://schema.stella-ops.org/plugin-manifest/v2.json"
        schemaVersion = "2.0"
        id = $fullId
        name = $Plugin.name
        version = "1.0.0"
        assembly = @{
            path = $Plugin.assembly
        }
        capabilities = @()
        platforms = @("linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64")
        compliance = @("NIST")
        jurisdiction = "world"
        priority = $Plugin.priority
        enabled = $Plugin.enabled
        metadata = @{
author = "StellaOps"
|
||||
license = "BUSL-1.1"
|
||||
}
|
||||
}
|
||||
|
||||
if ($Plugin.vendor) {
|
||||
$manifest.metadata["vendor"] = $Plugin.vendor
|
||||
}
|
||||
|
||||
return $manifest | ConvertTo-Json -Depth 10
|
||||
}
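
# Illustrative manifest produced for the "redhat-csaf" entry above (key order may
# differ, since ConvertTo-Json follows hashtable enumeration order):
#   {
#     "$schema": "https://schema.stella-ops.org/plugin-manifest/v2.json",
#     "schemaVersion": "2.0",
#     "id": "stellaops.excititor-connectors.redhat-csaf",
#     "name": "Red Hat CSAF Connector",
#     "version": "1.0.0",
#     "assembly": { "path": "StellaOps.Excititor.Connectors.RedHat.CSAF.dll" },
#     "capabilities": [],
#     "platforms": ["linux-x64", "linux-arm64", "win-x64", "osx-x64", "osx-arm64"],
#     "compliance": ["NIST"],
#     "jurisdiction": "world",
#     "priority": 100,
#     "enabled": true,
#     "metadata": { "author": "StellaOps", "license": "BUSL-1.1", "vendor": "Red Hat" }
#   }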
|
||||
|
||||
function New-PluginConfig {
|
||||
param(
|
||||
[string]$ModulePath,
|
||||
[hashtable]$Plugin,
|
||||
[string]$Category
|
||||
)
|
||||
|
||||
$fullId = "stellaops.$($Category.Replace('/', '.').Replace('.', '-')).$($Plugin.id)"
|
||||
|
||||
$config = @"
|
||||
id: $fullId
|
||||
name: $($Plugin.name)
|
||||
enabled: $($Plugin.enabled.ToString().ToLower())
|
||||
priority: $($Plugin.priority)
|
||||
config:
|
||||
# Plugin-specific configuration
|
||||
# Add settings here as needed
|
||||
"@
|
||||
|
||||
return $config
|
||||
}
|
||||
|
||||
function New-RegistryFile {
|
||||
param(
|
||||
[string]$Category,
|
||||
[array]$Plugins
|
||||
)
|
||||
|
||||
$entries = $Plugins | ForEach-Object {
|
||||
" $($_.id):`n enabled: $($_.enabled.ToString().ToLower())`n priority: $($_.priority)`n config: $($_.id)/config.yaml"
|
||||
}
|
||||
|
||||
$registry = @"
|
||||
version: "1.0"
|
||||
category: $Category
|
||||
defaults:
|
||||
enabled: false
|
||||
timeout: "00:05:00"
|
||||
plugins:
|
||||
$($entries -join "`n")
|
||||
"@
|
||||
|
||||
return $registry
|
||||
}
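
# Illustrative registry.yaml for the "messaging" module defined above:
#   version: "1.0"
#   category: messaging.transports
#   defaults:
#     enabled: false
#     timeout: "00:05:00"
#   plugins:
#     valkey:
#       enabled: true
#       priority: 100
#       config: valkey/config.yaml
#     postgres:
#       enabled: false
#       priority: 90
#       config: postgres/config.yaml
#     inmemory:
#       enabled: false
#       priority: 10
#       config: inmemory/config.yaml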
|
||||
|
||||
# Main generation logic
|
||||
Write-Host "Generating plugin configurations to: $OutputDir" -ForegroundColor Cyan
|
||||
|
||||
foreach ($modulePath in $PluginCatalog.Keys) {
|
||||
$moduleConfig = $PluginCatalog[$modulePath]
|
||||
$moduleDir = Join-Path $OutputDir $modulePath
|
||||
|
||||
Write-Host "Processing module: $modulePath" -ForegroundColor Yellow
|
||||
|
||||
# Create module directory
|
||||
if (-not (Test-Path $moduleDir)) {
|
||||
New-Item -ItemType Directory -Path $moduleDir -Force | Out-Null
|
||||
}
|
||||
|
||||
# Generate registry.yaml
|
||||
$registryPath = Join-Path $moduleDir "registry.yaml"
|
||||
if ($Force -or -not (Test-Path $registryPath)) {
|
||||
$registryContent = New-RegistryFile -Category $moduleConfig.category -Plugins $moduleConfig.plugins
|
||||
Set-Content -Path $registryPath -Value $registryContent -Encoding utf8
|
||||
Write-Host " Created: registry.yaml" -ForegroundColor Green
|
||||
}
|
||||
|
||||
# Generate plugin configs
|
||||
foreach ($plugin in $moduleConfig.plugins) {
|
||||
$pluginDir = Join-Path $moduleDir $plugin.id
|
||||
|
||||
if (-not (Test-Path $pluginDir)) {
|
||||
New-Item -ItemType Directory -Path $pluginDir -Force | Out-Null
|
||||
}
|
||||
|
||||
# plugin.json
|
||||
$manifestPath = Join-Path $pluginDir "plugin.json"
|
||||
if ($Force -or -not (Test-Path $manifestPath)) {
|
||||
$manifestContent = New-PluginManifest -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
|
||||
Set-Content -Path $manifestPath -Value $manifestContent -Encoding utf8
|
||||
Write-Host " Created: $($plugin.id)/plugin.json" -ForegroundColor Green
|
||||
}
|
||||
|
||||
# config.yaml
|
||||
$configPath = Join-Path $pluginDir "config.yaml"
|
||||
if ($Force -or -not (Test-Path $configPath)) {
|
||||
$configContent = New-PluginConfig -ModulePath $modulePath -Plugin $plugin -Category $moduleConfig.category
|
||||
Set-Content -Path $configPath -Value $configContent -Encoding utf8
|
||||
Write-Host " Created: $($plugin.id)/config.yaml" -ForegroundColor Green
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "`nPlugin configuration generation complete!" -ForegroundColor Cyan
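
# Example invocation (the script path is illustrative; -OutputDir and -Force are the
# parameters this script reads, presumably declared in its param block):
#   pwsh devops/scripts/generate-plugin-configs.ps1 -OutputDir etc/plugins -Force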
|
||||
@@ -17,13 +17,13 @@ _CI_DOCKER_LOADED=1
|
||||
# CONFIGURATION
|
||||
# =============================================================================
|
||||
|
||||
-CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.ci.yaml}"
+CI_COMPOSE_FILE="${CI_COMPOSE_FILE:-devops/compose/docker-compose.testing.yml}"
CI_IMAGE="${CI_IMAGE:-stellaops-ci:local}"
CI_DOCKERFILE="${CI_DOCKERFILE:-devops/docker/Dockerfile.ci}"
CI_PROJECT_NAME="${CI_PROJECT_NAME:-stellaops-ci}"

-# Service names from docker-compose.ci.yaml
-CI_SERVICES=(postgres-ci valkey-ci nats-ci mock-registry minio-ci)
+# Service names from docker-compose.testing.yml
+CI_SERVICES=(postgres-test valkey-test rustfs-test mock-registry)
|
||||
|
||||
# =============================================================================
|
||||
# DOCKER CHECK
|
||||
|
||||
@@ -1,318 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Local CI Runner for Windows
|
||||
PowerShell wrapper for local-ci.sh
|
||||
|
||||
.DESCRIPTION
|
||||
Unified local CI/CD testing runner for StellaOps on Windows.
|
||||
This script wraps the Bash implementation via WSL2 or Git Bash.
|
||||
|
||||
.PARAMETER Mode
|
||||
The testing mode to run:
|
||||
- smoke : Quick smoke test (unit tests only, ~2 min)
|
||||
- pr : Full PR-gating suite (all required checks, ~15 min)
|
||||
- module : Module-specific tests (auto-detect or specified)
|
||||
- workflow : Simulate specific workflow via act
|
||||
- release : Release simulation (dry-run)
|
||||
- full : All tests including extended categories (~45 min)
|
||||
|
||||
.PARAMETER Category
|
||||
Specific test category to run (Unit, Architecture, Contract, Integration, Security, Golden)
|
||||
|
||||
.PARAMETER Module
|
||||
Specific module to test (Scanner, Concelier, Authority, etc.)
|
||||
|
||||
.PARAMETER Workflow
|
||||
Specific workflow to simulate (for workflow mode)
|
||||
|
||||
.PARAMETER SmokeStep
|
||||
Smoke step (smoke mode only): build, unit, unit-split
|
||||
|
||||
.PARAMETER TestTimeout
|
||||
Per-test timeout (e.g., 5m) using --blame-hang (bash runner)
|
||||
|
||||
.PARAMETER ProgressInterval
|
||||
Progress heartbeat in seconds during long test runs
|
||||
|
||||
.PARAMETER ProjectStart
|
||||
Start index (1-based) for unit-split slicing
|
||||
|
||||
.PARAMETER ProjectCount
|
||||
Limit number of projects for unit-split slicing
|
||||
|
||||
.PARAMETER Docker
|
||||
Force Docker execution mode
|
||||
|
||||
.PARAMETER Native
|
||||
Force native execution mode
|
||||
|
||||
.PARAMETER Act
|
||||
Force act execution mode
|
||||
|
||||
.PARAMETER Parallel
|
||||
Number of parallel test runners (default: auto-detect)
|
||||
|
||||
.PARAMETER Verbose
|
||||
Enable verbose output
|
||||
|
||||
.PARAMETER DryRun
|
||||
Show what would run without executing
|
||||
|
||||
.PARAMETER Rebuild
|
||||
Force rebuild of CI Docker image
|
||||
|
||||
.PARAMETER NoServices
|
||||
Skip starting CI services
|
||||
|
||||
.PARAMETER KeepServices
|
||||
Don't stop services after tests
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 smoke
|
||||
Quick validation before push
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 smoke -SmokeStep unit-split
|
||||
Run Unit tests per project to isolate hangs
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 smoke -SmokeStep unit-split -TestTimeout 5m -ProgressInterval 60
|
||||
Add hang detection and progress heartbeat
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 smoke -SmokeStep unit-split -ProjectStart 1 -ProjectCount 50
|
||||
Run unit-split in chunks to narrow the slow/hanging project
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 pr
|
||||
Full PR check
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 module -Module Scanner
|
||||
Test specific module
|
||||
|
||||
.EXAMPLE
|
||||
.\local-ci.ps1 workflow -Workflow test-matrix
|
||||
Simulate specific workflow
|
||||
|
||||
.NOTES
|
||||
Requires WSL2 or Git Bash to execute the underlying Bash script.
|
||||
For full feature support, use WSL2 with Ubuntu.
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Position = 0)]
|
||||
[ValidateSet('smoke', 'pr', 'module', 'workflow', 'release', 'full')]
|
||||
[string]$Mode = 'smoke',
|
||||
|
||||
[string]$Category,
|
||||
[string]$Module,
|
||||
[string]$Workflow,
|
||||
[ValidateSet('build', 'unit', 'unit-split')]
|
||||
[string]$SmokeStep,
|
||||
[string]$TestTimeout,
|
||||
[int]$ProgressInterval,
|
||||
[int]$ProjectStart,
|
||||
[int]$ProjectCount,
|
||||
|
||||
[switch]$Docker,
|
||||
[switch]$Native,
|
||||
[switch]$Act,
|
||||
|
||||
[int]$Parallel,
|
||||
[switch]$DryRun,
|
||||
[switch]$Rebuild,
|
||||
[switch]$NoServices,
|
||||
[switch]$KeepServices,
|
||||
|
||||
[switch]$Help
|
||||
)
|
||||
|
||||
$isVerbose = $PSBoundParameters.ContainsKey('Verbose')
|
||||
|
||||
# Script location
|
||||
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
|
||||
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)
|
||||
|
||||
# Show help if requested
|
||||
if ($Help) {
|
||||
Get-Help $MyInvocation.MyCommand.Path -Detailed
|
||||
exit 0
|
||||
}
|
||||
|
||||
function Write-ColoredOutput {
|
||||
param(
|
||||
[string]$Message,
|
||||
[ConsoleColor]$Color = [ConsoleColor]::White
|
||||
)
|
||||
$originalColor = $Host.UI.RawUI.ForegroundColor
|
||||
$Host.UI.RawUI.ForegroundColor = $Color
|
||||
Write-Host $Message
|
||||
$Host.UI.RawUI.ForegroundColor = $originalColor
|
||||
}
|
||||
|
||||
function Write-Info { Write-ColoredOutput "[INFO] $args" -Color Cyan }
|
||||
function Write-Success { Write-ColoredOutput "[OK] $args" -Color Green }
|
||||
function Write-Warning { Write-ColoredOutput "[WARN] $args" -Color Yellow }
|
||||
function Write-Error { Write-ColoredOutput "[ERROR] $args" -Color Red }
|
||||
|
||||
# Find Bash executable
|
||||
function Find-BashExecutable {
|
||||
# Priority: WSL2 > Git Bash > Windows Subsystem for Linux (legacy)
|
||||
|
||||
# Check for WSL
|
||||
$wsl = Get-Command wsl -ErrorAction SilentlyContinue
|
||||
if ($wsl) {
|
||||
# Verify WSL is working
|
||||
$wslCheck = & wsl --status 2>&1
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
$wslDotnetInfo = & wsl dotnet --info 2>&1
|
||||
if ($LASTEXITCODE -eq 0 -and $wslDotnetInfo -match 'OS Name:\s+Windows') {
|
||||
Write-Warning "WSL dotnet is Windows-based; falling back to Git Bash for path-safe execution"
|
||||
} elseif ($LASTEXITCODE -eq 0) {
|
||||
Write-Info "Using WSL2 for Bash execution"
|
||||
return @{ Type = 'wsl'; Path = 'wsl' }
|
||||
} else {
|
||||
Write-Warning "WSL dotnet not available; falling back to Git Bash"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Check for Git Bash
|
||||
$gitBashPaths = @(
|
||||
"C:\Program Files\Git\bin\bash.exe",
|
||||
"C:\Program Files (x86)\Git\bin\bash.exe",
|
||||
"$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
|
||||
)
|
||||
|
||||
foreach ($path in $gitBashPaths) {
|
||||
if (Test-Path $path) {
|
||||
Write-Info "Using Git Bash for execution"
|
||||
return @{ Type = 'gitbash'; Path = $path }
|
||||
}
|
||||
}
|
||||
|
||||
# Check PATH for bash
|
||||
$bashInPath = Get-Command bash -ErrorAction SilentlyContinue
|
||||
if ($bashInPath) {
|
||||
Write-Info "Using Bash from PATH"
|
||||
return @{ Type = 'path'; Path = $bashInPath.Source }
|
||||
}
|
||||
|
||||
return $null
|
||||
}
|
||||
|
||||
# Convert Windows path to Unix path for WSL
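#   e.g. "E:\dev\repo\devops\scripts\local-ci.sh" -> "/mnt/e/dev/repo/devops/scripts/local-ci.sh"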
|
||||
function Convert-ToUnixPath {
|
||||
param([string]$WindowsPath)
|
||||
|
||||
if ($WindowsPath -match '^([A-Za-z]):(.*)$') {
|
||||
$drive = $Matches[1].ToLower()
|
||||
$rest = $Matches[2] -replace '\\', '/'
|
||||
return "/mnt/$drive$rest"
|
||||
}
|
||||
return $WindowsPath -replace '\\', '/'
|
||||
}
|
||||
|
||||
function Quote-ForBash {
|
||||
param([string]$Value)
|
||||
|
||||
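# Builds the literal sequence '"'"' so single quotes inside a value survive the bash -lc invocation below.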
$replacement = "'" + '"' + "'" + '"' + "'"
|
||||
return "'" + ($Value -replace "'", $replacement) + "'"
|
||||
}
|
||||
|
||||
# Build argument list
|
||||
function Build-Arguments {
|
||||
$args = @($Mode)
|
||||
|
||||
if ($Category) { $args += "--category"; $args += $Category }
|
||||
if ($Module) { $args += "--module"; $args += $Module }
|
||||
if ($Workflow) { $args += "--workflow"; $args += $Workflow }
|
||||
if ($SmokeStep) { $args += "--smoke-step"; $args += $SmokeStep }
|
||||
if ($TestTimeout) { $args += "--test-timeout"; $args += $TestTimeout }
|
||||
if ($ProgressInterval) { $args += "--progress-interval"; $args += $ProgressInterval }
|
||||
if ($ProjectStart) { $args += "--project-start"; $args += $ProjectStart }
|
||||
if ($ProjectCount) { $args += "--project-count"; $args += $ProjectCount }
|
||||
if ($Docker) { $args += "--docker" }
|
||||
if ($Native) { $args += "--native" }
|
||||
if ($Act) { $args += "--act" }
|
||||
if ($Parallel) { $args += "--parallel"; $args += $Parallel }
|
||||
if ($isVerbose) { $args += "--verbose" }
|
||||
if ($DryRun) { $args += "--dry-run" }
|
||||
if ($Rebuild) { $args += "--rebuild" }
|
||||
if ($NoServices) { $args += "--no-services" }
|
||||
if ($KeepServices) { $args += "--keep-services" }
|
||||
|
||||
return $args
|
||||
}
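
# For example, ".\local-ci.ps1 smoke -SmokeStep unit-split -TestTimeout 5m" (see .EXAMPLE above)
# yields the argument list: smoke --smoke-step unit-split --test-timeout 5m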
|
||||
|
||||
# Main execution
|
||||
Write-Host ""
|
||||
Write-Host "=========================================" -ForegroundColor Magenta
|
||||
Write-Host " StellaOps Local CI Runner (Windows) " -ForegroundColor Magenta
|
||||
Write-Host "=========================================" -ForegroundColor Magenta
|
||||
Write-Host ""
|
||||
|
||||
# Find Bash
|
||||
$bash = Find-BashExecutable
|
||||
if (-not $bash) {
|
||||
Write-Error "Bash not found. Please install one of the following:"
|
||||
Write-Host " - WSL2: https://docs.microsoft.com/en-us/windows/wsl/install"
|
||||
Write-Host " - Git for Windows: https://git-scm.com/download/win"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Build script path
|
||||
$scriptPath = Join-Path $ScriptDir "local-ci.sh"
|
||||
if (-not (Test-Path $scriptPath)) {
|
||||
Write-Error "Script not found: $scriptPath"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Build arguments
|
||||
$bashArgs = Build-Arguments
|
||||
|
||||
Write-Info "Mode: $Mode"
|
||||
Write-Info "Bash: $($bash.Type)"
|
||||
Write-Info "Repository: $RepoRoot"
|
||||
Write-Host ""
|
||||
|
||||
# Execute based on Bash type
|
||||
try {
|
||||
switch ($bash.Type) {
|
||||
'wsl' {
|
||||
$unixScript = Convert-ToUnixPath $scriptPath
|
||||
Write-Info "Executing: wsl bash $unixScript $($bashArgs -join ' ')"
|
||||
& wsl bash $unixScript @bashArgs
|
||||
}
|
||||
'gitbash' {
|
||||
# Git Bash uses its own path conversion
|
||||
$unixScript = $scriptPath -replace '\\', '/'
|
||||
$commandArgs = @($unixScript) + $bashArgs
|
||||
$commandLine = ($commandArgs | ForEach-Object { Quote-ForBash $_ }) -join ' '
|
||||
Write-Info "Executing: $($bash.Path) -lc $commandLine"
|
||||
& $bash.Path -lc $commandLine
|
||||
}
|
||||
'path' {
|
||||
Write-Info "Executing: bash $scriptPath $($bashArgs -join ' ')"
|
||||
& bash $scriptPath @bashArgs
|
||||
}
|
||||
}
|
||||
|
||||
$exitCode = $LASTEXITCODE
|
||||
}
|
||||
catch {
|
||||
Write-Error "Execution failed: $_"
|
||||
$exitCode = 1
|
||||
}
|
||||
|
||||
# Report result
|
||||
Write-Host ""
|
||||
if ($exitCode -eq 0) {
|
||||
Write-Success "Local CI completed successfully!"
|
||||
} else {
|
||||
Write-Error "Local CI failed with exit code: $exitCode"
|
||||
}
|
||||
|
||||
exit $exitCode
|
||||
@@ -1,244 +0,0 @@
|
||||
-- ============================================================================
|
||||
-- StellaOps Migration Reset Script for Pre-1.0 Deployments
|
||||
-- ============================================================================
|
||||
-- This script updates schema_migrations tables to recognize the 1.0.0 compacted
|
||||
-- migrations for deployments that upgraded from pre-1.0 versions.
|
||||
--
|
||||
-- Run via: psql -f migrations-reset-pre-1.0.sql
|
||||
-- Or with connection: psql -h <host> -U <user> -d <db> -f migrations-reset-pre-1.0.sql
|
||||
-- ============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Authority Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 001_initial_schema, 002_mongo_store_equivalents, 003_enable_rls,
|
||||
-- 004_offline_kit_audit, 005_verdict_manifests
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM authority.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'001_initial_schema.sql',
|
||||
'002_mongo_store_equivalents.sql',
|
||||
'003_enable_rls.sql',
|
||||
'004_offline_kit_audit.sql',
|
||||
'005_verdict_manifests.sql'
|
||||
);
|
||||
|
||||
INSERT INTO authority.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Scheduler Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 001_initial_schema, 002_graph_jobs, 003_runs_policy,
|
||||
-- 010_generated_columns_runs, 011_enable_rls, 012_partition_audit,
|
||||
-- 012b_migrate_audit_data
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM scheduler.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'001_initial_schema.sql',
|
||||
'002_graph_jobs.sql',
|
||||
'003_runs_policy.sql',
|
||||
'010_generated_columns_runs.sql',
|
||||
'011_enable_rls.sql',
|
||||
'012_partition_audit.sql',
|
||||
'012b_migrate_audit_data.sql'
|
||||
);
|
||||
|
||||
INSERT INTO scheduler.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Scanner Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 001-034 plus various numbered files (27 total)
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM scanner.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'001_create_tables.sql',
|
||||
'002_proof_spine_tables.sql',
|
||||
'003_classification_history.sql',
|
||||
'004_scan_metrics.sql',
|
||||
'005_smart_diff_tables.sql',
|
||||
'006_score_replay_tables.sql',
|
||||
'007_unknowns_ranking_containment.sql',
|
||||
'008_epss_integration.sql',
|
||||
'0059_scans_table.sql',
|
||||
'0065_unknowns_table.sql',
|
||||
'0075_scan_findings_table.sql',
|
||||
'020_call_graph_tables.sql',
|
||||
'021_smart_diff_tables_search_path.sql',
|
||||
'022_reachability_drift_tables.sql',
|
||||
'023_scanner_api_ingestion.sql',
|
||||
'024_smart_diff_priority_score_widen.sql',
|
||||
'025_epss_raw_layer.sql',
|
||||
'026_epss_signal_layer.sql',
|
||||
'027_witness_storage.sql',
|
||||
'028_epss_triage_columns.sql',
|
||||
'029_vuln_surfaces.sql',
|
||||
'030_vuln_surface_triggers_update.sql',
|
||||
'031_reach_cache.sql',
|
||||
'032_idempotency_keys.sql',
|
||||
'033_binary_evidence.sql',
|
||||
'034_func_proof_tables.sql',
|
||||
'DM001_rename_scanner_migrations.sql'
|
||||
);
|
||||
|
||||
INSERT INTO scanner.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Policy Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 001-013 (14 files, includes duplicate 010 prefix)
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM policy.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'001_initial_schema.sql',
|
||||
'002_cvss_receipts.sql',
|
||||
'003_snapshots_violations.sql',
|
||||
'004_epss_risk_scores.sql',
|
||||
'005_cvss_multiversion.sql',
|
||||
'006_enable_rls.sql',
|
||||
'007_unknowns_registry.sql',
|
||||
'008_exception_objects.sql',
|
||||
'009_exception_applications.sql',
|
||||
'010_recheck_evidence.sql',
|
||||
'010_unknowns_blast_radius_containment.sql',
|
||||
'011_unknowns_reason_codes.sql',
|
||||
'012_budget_ledger.sql',
|
||||
'013_exception_approval.sql'
|
||||
);
|
||||
|
||||
INSERT INTO policy.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Notify Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 001_initial_schema, 010_enable_rls, 011_partition_deliveries,
|
||||
-- 011b_migrate_deliveries_data
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM notify.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'001_initial_schema.sql',
|
||||
'010_enable_rls.sql',
|
||||
'011_partition_deliveries.sql',
|
||||
'011b_migrate_deliveries_data.sql'
|
||||
);
|
||||
|
||||
INSERT INTO notify.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Concelier Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 17 migration files
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM concelier.schema_migrations
|
||||
WHERE migration_name ~ '^[0-9]{3}_.*\.sql$';
|
||||
|
||||
INSERT INTO concelier.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Attestor Module Reset (proofchain + attestor schemas)
|
||||
-- ============================================================================
|
||||
-- Original: 20251214000001_AddProofChainSchema.sql, 20251216_001_create_rekor_submission_queue.sql
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM proofchain.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'20251214000001_AddProofChainSchema.sql',
|
||||
'20251214000002_RollbackProofChainSchema.sql',
|
||||
'20251216_001_create_rekor_submission_queue.sql'
|
||||
);
|
||||
|
||||
INSERT INTO proofchain.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Signer Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: 20251214000001_AddKeyManagementSchema.sql
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM signer.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'20251214000001_AddKeyManagementSchema.sql'
|
||||
);
|
||||
|
||||
INSERT INTO signer.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Signals Module Reset
|
||||
-- ============================================================================
|
||||
-- Original: V0000_001__extensions.sql, V1102_001__unknowns_scoring_schema.sql,
|
||||
-- V1105_001__deploy_refs_graph_metrics.sql, V3102_001__callgraph_relational_tables.sql
|
||||
-- New: 001_initial_schema (compacted)
|
||||
|
||||
DELETE FROM signals.schema_migrations
|
||||
WHERE migration_name IN (
|
||||
'V0000_001__extensions.sql',
|
||||
'V1102_001__unknowns_scoring_schema.sql',
|
||||
'V1105_001__deploy_refs_graph_metrics.sql',
|
||||
'V3102_001__callgraph_relational_tables.sql'
|
||||
);
|
||||
|
||||
INSERT INTO signals.schema_migrations (migration_name, category, checksum, applied_at)
|
||||
VALUES ('001_initial_schema.sql', 'startup', 'compacted_1.0.0', NOW())
|
||||
ON CONFLICT (migration_name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Verification
|
||||
-- ============================================================================
|
||||
-- Display current migration status per module
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
v_module TEXT;
|
||||
v_count INT;
|
||||
BEGIN
|
||||
FOR v_module IN SELECT unnest(ARRAY['authority', 'scheduler', 'scanner', 'policy', 'notify', 'concelier', 'proofchain', 'signer', 'signals']) LOOP
|
||||
EXECUTE format('SELECT COUNT(*) FROM %I.schema_migrations', v_module) INTO v_count;
|
||||
RAISE NOTICE '% module: % migrations registered', v_module, v_count;
|
||||
END LOOP;
|
||||
END $$;
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- ============================================================================
|
||||
-- Post-Reset Notes
|
||||
-- ============================================================================
|
||||
-- After running this script:
|
||||
-- 1. All modules should show exactly 1 migration registered
|
||||
-- 2. The schema structure should be identical to a fresh 1.0.0 deployment
|
||||
-- 3. Future migrations (002+) will apply normally
|
||||
--
|
||||
-- To verify manually:
|
||||
-- SELECT * FROM authority.schema_migrations;
|
||||
-- SELECT * FROM scheduler.schema_migrations;
|
||||
-- SELECT * FROM scanner.schema_migrations;
|
||||
-- SELECT * FROM policy.schema_migrations;
|
||||
-- SELECT * FROM notify.schema_migrations;
|
||||
-- SELECT * FROM concelier.schema_migrations;
|
||||
-- SELECT * FROM proofchain.schema_migrations;
|
||||
-- SELECT * FROM signer.schema_migrations;
|
||||
-- SELECT * FROM signals.schema_migrations;
|
||||
-- ============================================================================
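
-- Illustrative one-shot verification (same nine module schemas as above; an
-- alternative to running the per-module SELECTs individually):
-- SELECT 'authority' AS module, count(*) AS migrations FROM authority.schema_migrations
-- UNION ALL SELECT 'scheduler',  count(*) FROM scheduler.schema_migrations
-- UNION ALL SELECT 'scanner',    count(*) FROM scanner.schema_migrations
-- UNION ALL SELECT 'policy',     count(*) FROM policy.schema_migrations
-- UNION ALL SELECT 'notify',     count(*) FROM notify.schema_migrations
-- UNION ALL SELECT 'concelier',  count(*) FROM concelier.schema_migrations
-- UNION ALL SELECT 'proofchain', count(*) FROM proofchain.schema_migrations
-- UNION ALL SELECT 'signer',     count(*) FROM signer.schema_migrations
-- UNION ALL SELECT 'signals',    count(*) FROM signals.schema_migrations
-- ORDER BY module;
-- Each row should report exactly 1 migration after a successful reset.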
|
||||
@@ -1,169 +0,0 @@
|
||||
#!/usr/bin/env pwsh
|
||||
# regenerate-solution.ps1 - Regenerate StellaOps.sln without duplicate projects
|
||||
#
|
||||
# This script:
|
||||
# 1. Backs up the existing solution
|
||||
# 2. Creates a new solution
|
||||
# 3. Adds all .csproj files, skipping duplicates
|
||||
# 4. Preserves solution folders where possible
|
||||
|
||||
param(
|
||||
[string]$SolutionPath = "src/StellaOps.sln",
|
||||
[switch]$DryRun
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# Canonical locations for test projects (in priority order)
|
||||
# Later entries win when there are duplicates
|
||||
$canonicalPatterns = @(
|
||||
# Module-local tests (highest priority)
|
||||
"src/*/__Tests/*/*.csproj",
|
||||
"src/*/__Libraries/__Tests/*/*.csproj",
|
||||
"src/__Libraries/__Tests/*/*.csproj",
|
||||
# Cross-module integration tests
|
||||
"src/__Tests/Integration/*/*.csproj",
|
||||
"src/__Tests/__Libraries/*/*.csproj",
|
||||
# Category-based cross-module tests
|
||||
"src/__Tests/chaos/*/*.csproj",
|
||||
"src/__Tests/security/*/*.csproj",
|
||||
"src/__Tests/interop/*/*.csproj",
|
||||
"src/__Tests/parity/*/*.csproj",
|
||||
"src/__Tests/reachability/*/*.csproj",
|
||||
# Single global tests
|
||||
"src/__Tests/*/*.csproj"
|
||||
)
|
||||
|
||||
Write-Host "=== Solution Regeneration Script ===" -ForegroundColor Cyan
|
||||
Write-Host "Solution: $SolutionPath"
|
||||
Write-Host "Dry Run: $DryRun"
|
||||
Write-Host ""
|
||||
|
||||
# Find all .csproj files
|
||||
Write-Host "Finding all project files..." -ForegroundColor Yellow
|
||||
$allProjects = Get-ChildItem -Path "src" -Filter "*.csproj" -Recurse |
|
||||
Where-Object { $_.FullName -notmatch "\\obj\\" -and $_.FullName -notmatch "\\bin\\" }
|
||||
|
||||
Write-Host "Found $($allProjects.Count) project files"
|
||||
|
||||
# Build a map of project name -> list of paths
|
||||
$projectMap = @{}
|
||||
foreach ($proj in $allProjects) {
|
||||
$name = $proj.BaseName
|
||||
if (-not $projectMap.ContainsKey($name)) {
|
||||
$projectMap[$name] = @()
|
||||
}
|
||||
$projectMap[$name] += $proj.FullName
|
||||
}
|
||||
|
||||
# Find duplicates
|
||||
$duplicates = $projectMap.GetEnumerator() | Where-Object { $_.Value.Count -gt 1 }
|
||||
Write-Host ""
|
||||
Write-Host "Found $($duplicates.Count) projects with duplicate names:" -ForegroundColor Yellow
|
||||
foreach ($dup in $duplicates) {
|
||||
Write-Host " $($dup.Key):" -ForegroundColor Red
|
||||
foreach ($path in $dup.Value) {
|
||||
Write-Host " - $path"
|
||||
}
|
||||
}
|
||||
|
||||
# Select canonical path for each project
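# Example: if StellaOps.AirGap.Time.Tests exists under both src\__Tests\AirGap\ and
# src\AirGap\__Tests\, the module-local src\AirGap\__Tests\ copy wins.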
|
||||
function Get-CanonicalPath {
|
||||
param([string[]]$Paths)
|
||||
|
||||
# Prefer module-local __Tests over global __Tests
|
||||
$moduleTests = $Paths | Where-Object { $_ -match "src\\[^_][^\\]+\\__Tests\\" }
|
||||
if ($moduleTests.Count -gt 0) { return $moduleTests[0] }
|
||||
|
||||
# Prefer __Libraries/__Tests
|
||||
$libTests = $Paths | Where-Object { $_ -match "__Libraries\\__Tests\\" }
|
||||
if ($libTests.Count -gt 0) { return $libTests[0] }
|
||||
|
||||
# Prefer __Tests over non-__Tests location in same parent
|
||||
$testsPath = $Paths | Where-Object { $_ -match "\\__Tests\\" }
|
||||
if ($testsPath.Count -gt 0) { return $testsPath[0] }
|
||||
|
||||
# Otherwise, take first
|
||||
return $Paths[0]
|
||||
}
|
||||
|
||||
# Build final project list
|
||||
$finalProjects = @()
|
||||
foreach ($entry in $projectMap.GetEnumerator()) {
|
||||
$canonical = Get-CanonicalPath -Paths $entry.Value
|
||||
$finalProjects += $canonical
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "Final project count: $($finalProjects.Count)" -ForegroundColor Green
|
||||
|
||||
if ($DryRun) {
|
||||
Write-Host ""
|
||||
Write-Host "=== DRY RUN - No changes made ===" -ForegroundColor Magenta
|
||||
Write-Host "Would add the following projects to solution:"
|
||||
$finalProjects | ForEach-Object { Write-Host " $_" }
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Backup existing solution
|
||||
$backupPath = "$SolutionPath.bak"
|
||||
if (Test-Path $SolutionPath) {
|
||||
Copy-Item $SolutionPath $backupPath -Force
|
||||
Write-Host "Backed up existing solution to $backupPath" -ForegroundColor Gray
|
||||
}
|
||||
|
||||
# Create new solution
|
||||
Write-Host ""
|
||||
Write-Host "Creating new solution..." -ForegroundColor Yellow
|
||||
$slnDir = Split-Path $SolutionPath -Parent
|
||||
$slnName = [System.IO.Path]::GetFileNameWithoutExtension($SolutionPath)
|
||||
|
||||
# Remove old solution
|
||||
if (Test-Path $SolutionPath) {
|
||||
Remove-Item $SolutionPath -Force
|
||||
}
|
||||
|
||||
# Create fresh solution
|
||||
Push-Location $slnDir
|
||||
dotnet new sln -n $slnName --force 2>$null
|
||||
Pop-Location
|
||||
|
||||
# Add projects in batches (dotnet sln add can handle multiple)
|
||||
Write-Host "Adding projects to solution..." -ForegroundColor Yellow
|
||||
$added = 0
|
||||
$failed = 0
|
||||
|
||||
foreach ($proj in $finalProjects) {
|
||||
try {
|
||||
$result = dotnet sln $SolutionPath add $proj 2>&1
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
$added++
|
||||
if ($added % 50 -eq 0) {
|
||||
Write-Host " Added $added projects..." -ForegroundColor Gray
|
||||
}
|
||||
} else {
|
||||
Write-Host " Failed to add: $proj" -ForegroundColor Red
|
||||
$failed++
|
||||
}
|
||||
} catch {
|
||||
Write-Host " Error adding: $proj - $_" -ForegroundColor Red
|
||||
$failed++
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "=== Summary ===" -ForegroundColor Cyan
|
||||
Write-Host "Projects added: $added" -ForegroundColor Green
|
||||
Write-Host "Projects failed: $failed" -ForegroundColor $(if ($failed -gt 0) { "Red" } else { "Green" })
|
||||
Write-Host ""
|
||||
Write-Host "Solution regenerated at: $SolutionPath"
|
||||
|
||||
# Verify
|
||||
Write-Host ""
|
||||
Write-Host "Verifying solution..." -ForegroundColor Yellow
|
||||
$verifyResult = dotnet build $SolutionPath --no-restore -t:ValidateSolutionConfiguration 2>&1
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
Write-Host "Solution validation passed!" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host "Solution validation had issues - check manually" -ForegroundColor Yellow
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
#!/usr/bin/env pwsh
|
||||
# remove-stale-refs.ps1 - Remove stale project references that don't exist
|
||||
|
||||
param([string]$SlnPath = "src/StellaOps.sln")
|
||||
|
||||
$content = Get-Content $SlnPath -Raw
|
||||
$lines = $content -split "`r?`n"
|
||||
|
||||
# Stale project paths (relative from solution location)
|
||||
$staleProjects = @(
|
||||
"__Tests\AirGap\StellaOps.AirGap.Controller.Tests",
|
||||
"__Tests\AirGap\StellaOps.AirGap.Importer.Tests",
|
||||
"__Tests\AirGap\StellaOps.AirGap.Time.Tests",
|
||||
"__Tests\StellaOps.Gateway.WebService.Tests",
|
||||
"__Tests\Graph\StellaOps.Graph.Indexer.Tests",
|
||||
"Scanner\StellaOps.Scanner.Analyzers.Native",
|
||||
"__Libraries\__Tests\StellaOps.Signals.Tests",
|
||||
"__Tests\StellaOps.Audit.ReplayToken.Tests",
|
||||
"__Tests\StellaOps.Router.Gateway.Tests",
|
||||
"__Libraries\StellaOps.Cryptography"
|
||||
)
|
||||
|
||||
$staleGuids = @()
|
||||
$newLines = @()
|
||||
$skipNext = $false
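# Each Project("...") entry in a .sln is followed by an EndProject line;
# $skipNext drops that companion line whenever the Project line itself is removed.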
|
||||
|
||||
for ($i = 0; $i -lt $lines.Count; $i++) {
|
||||
$line = $lines[$i]
|
||||
|
||||
if ($skipNext) {
|
||||
$skipNext = $false
|
||||
continue
|
||||
}
|
||||
|
||||
$isStale = $false
|
||||
foreach ($stalePath in $staleProjects) {
|
||||
if ($line -like "*$stalePath*") {
|
||||
# Extract GUID
|
||||
if ($line -match '\{([A-F0-9-]+)\}"?$') {
|
||||
$staleGuids += $Matches[1]
|
||||
}
|
||||
Write-Host "Removing stale: $stalePath"
|
||||
$isStale = $true
|
||||
$skipNext = $true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (-not $isStale) {
|
||||
$newLines += $line
|
||||
}
|
||||
}
|
||||
|
||||
# Remove GlobalSection references to stale GUIDs
|
||||
$finalLines = @()
|
||||
foreach ($line in $newLines) {
|
||||
$skip = $false
|
||||
foreach ($guid in $staleGuids) {
|
||||
if ($line -match $guid) {
|
||||
$skip = $true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (-not $skip) {
|
||||
$finalLines += $line
|
||||
}
|
||||
}
|
||||
|
||||
$finalLines -join "`r`n" | Set-Content $SlnPath -Encoding UTF8 -NoNewline
|
||||
Write-Host "Removed $($staleGuids.Count) stale project references"
|
||||
@@ -1,61 +0,0 @@
|
||||
# Restore deleted test files from commit parent
|
||||
# Maps old locations to new locations
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
$parentCommit = "74c7aa250c401ee9ac332686832b256159efa604^"
|
||||
|
||||
# Mapping: old path -> new path
|
||||
$mappings = @{
|
||||
"src/__Tests/AirGap/StellaOps.AirGap.Importer.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Importer.Tests"
|
||||
"src/__Tests/AirGap/StellaOps.AirGap.Controller.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Controller.Tests"
|
||||
"src/__Tests/AirGap/StellaOps.AirGap.Time.Tests" = "src/AirGap/__Tests/StellaOps.AirGap.Time.Tests"
|
||||
"src/__Tests/StellaOps.Gateway.WebService.Tests" = "src/Gateway/__Tests/StellaOps.Gateway.WebService.Tests"
|
||||
"src/__Tests/Replay/StellaOps.Replay.Core.Tests" = "src/Replay/__Tests/StellaOps.Replay.Core.Tests"
|
||||
"src/__Tests/Provenance/StellaOps.Provenance.Attestation.Tests" = "src/Provenance/__Tests/StellaOps.Provenance.Attestation.Tests"
|
||||
"src/__Tests/Policy/StellaOps.Policy.Scoring.Tests" = "src/Policy/__Tests/StellaOps.Policy.Scoring.Tests"
|
||||
}
|
||||
|
||||
Set-Location "E:\dev\git.stella-ops.org"
|
||||
|
||||
foreach ($mapping in $mappings.GetEnumerator()) {
|
||||
$oldPath = $mapping.Key
|
||||
$newPath = $mapping.Value
|
||||
|
||||
Write-Host "`nProcessing: $oldPath -> $newPath" -ForegroundColor Cyan
|
||||
|
||||
# Get list of files from old location in git
|
||||
$files = git ls-tree -r --name-only "$parentCommit" -- $oldPath 2>$null
|
||||
|
||||
if (-not $files) {
|
||||
Write-Host " No files found at old path" -ForegroundColor Yellow
|
||||
continue
|
||||
}
|
||||
|
||||
foreach ($file in $files) {
|
||||
# Calculate relative path and new file path
|
||||
$relativePath = $file.Substring($oldPath.Length + 1)
|
||||
$newFilePath = Join-Path $newPath $relativePath
|
||||
|
||||
# Create directory if needed
|
||||
$newDir = Split-Path $newFilePath -Parent
|
||||
if (-not (Test-Path $newDir)) {
|
||||
New-Item -ItemType Directory -Path $newDir -Force | Out-Null
|
||||
}
|
||||
|
||||
# Check if file exists
|
||||
if (Test-Path $newFilePath) {
|
||||
Write-Host " Exists: $relativePath" -ForegroundColor DarkGray
|
||||
continue
|
||||
}
|
||||
|
||||
# Restore file
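# Note: ">" goes through Out-File, so the written encoding depends on the PowerShell
# edition (UTF-16LE under Windows PowerShell 5.1, BOM-less UTF-8 under PowerShell 7+).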
|
||||
git show "${parentCommit}:${file}" > $newFilePath 2>$null
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
Write-Host " Restored: $relativePath" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host " Failed: $relativePath" -ForegroundColor Red
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "`nDone!" -ForegroundColor Cyan
|
||||
@@ -1,176 +0,0 @@
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Pre-Commit Validation Script for Windows
|
||||
|
||||
.DESCRIPTION
|
||||
Run this script before committing to ensure all CI checks will pass.
|
||||
Wraps the Bash validation script via WSL2 or Git Bash.
|
||||
|
||||
.PARAMETER Level
|
||||
Validation level:
|
||||
- quick : Smoke test only (~2 min)
|
||||
- pr : Full PR-gating suite (~15 min) [default]
|
||||
- full : All tests including extended (~45 min)
|
||||
|
||||
.EXAMPLE
|
||||
.\validate-before-commit.ps1
|
||||
Run PR-gating validation
|
||||
|
||||
.EXAMPLE
|
||||
.\validate-before-commit.ps1 quick
|
||||
Run quick smoke test only
|
||||
|
||||
.EXAMPLE
|
||||
.\validate-before-commit.ps1 full
|
||||
Run full test suite
|
||||
#>
|
||||
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Position = 0)]
|
||||
[ValidateSet('quick', 'pr', 'full')]
|
||||
[string]$Level = 'pr',
|
||||
|
||||
[switch]$Help
|
||||
)
|
||||
|
||||
# Script location
|
||||
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
|
||||
$RepoRoot = Split-Path -Parent (Split-Path -Parent $ScriptDir)
|
||||
|
||||
if ($Help) {
|
||||
Get-Help $MyInvocation.MyCommand.Path -Detailed
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Colors
|
||||
function Write-ColoredOutput {
|
||||
param(
|
||||
[string]$Message,
|
||||
[ConsoleColor]$Color = [ConsoleColor]::White
|
||||
)
|
||||
$originalColor = $Host.UI.RawUI.ForegroundColor
|
||||
$Host.UI.RawUI.ForegroundColor = $Color
|
||||
Write-Host $Message
|
||||
$Host.UI.RawUI.ForegroundColor = $originalColor
|
||||
}
|
||||
|
||||
function Write-Header {
|
||||
param([string]$Message)
|
||||
Write-Host ""
|
||||
Write-ColoredOutput "=============================================" -Color Cyan
|
||||
Write-ColoredOutput " $Message" -Color Cyan
|
||||
Write-ColoredOutput "=============================================" -Color Cyan
|
||||
Write-Host ""
|
||||
}
|
||||
|
||||
function Write-Step { Write-ColoredOutput ">>> $args" -Color Blue }
|
||||
function Write-Pass { Write-ColoredOutput "[PASS] $args" -Color Green }
|
||||
function Write-Fail { Write-ColoredOutput "[FAIL] $args" -Color Red }
|
||||
function Write-Warn { Write-ColoredOutput "[WARN] $args" -Color Yellow }
|
||||
function Write-Info { Write-ColoredOutput "[INFO] $args" -Color Cyan }
|
||||
|
||||
# Find Bash
|
||||
function Find-BashExecutable {
|
||||
# Check WSL
|
||||
$wsl = Get-Command wsl -ErrorAction SilentlyContinue
|
||||
if ($wsl) {
|
||||
$wslCheck = & wsl --status 2>&1
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
return @{ Type = 'wsl'; Path = 'wsl' }
|
||||
}
|
||||
}
|
||||
|
||||
# Check Git Bash
|
||||
$gitBashPaths = @(
|
||||
"C:\Program Files\Git\bin\bash.exe",
|
||||
"C:\Program Files (x86)\Git\bin\bash.exe",
|
||||
"$env:LOCALAPPDATA\Programs\Git\bin\bash.exe"
|
||||
)
|
||||
|
||||
foreach ($path in $gitBashPaths) {
|
||||
if (Test-Path $path) {
|
||||
return @{ Type = 'gitbash'; Path = $path }
|
||||
}
|
||||
}
|
||||
|
||||
return $null
|
||||
}
|
||||
|
||||
function Convert-ToUnixPath {
|
||||
param([string]$WindowsPath)
|
||||
if ($WindowsPath -match '^([A-Za-z]):(.*)$') {
|
||||
$drive = $Matches[1].ToLower()
|
||||
$rest = $Matches[2] -replace '\\', '/'
|
||||
return "/mnt/$drive$rest"
|
||||
}
|
||||
return $WindowsPath -replace '\\', '/'
|
||||
}
|
||||
|
||||
# Main
|
||||
Write-Header "Pre-Commit Validation (Windows)"
|
||||
Write-Info "Level: $Level"
|
||||
Write-Info "Repository: $RepoRoot"
|
||||
|
||||
$bash = Find-BashExecutable
|
||||
if (-not $bash) {
|
||||
Write-Fail "Bash not found. Install WSL2 or Git for Windows."
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Info "Using: $($bash.Type)"
|
||||
|
||||
$scriptPath = Join-Path $ScriptDir "validate-before-commit.sh"
|
||||
if (-not (Test-Path $scriptPath)) {
|
||||
Write-Fail "Script not found: $scriptPath"
|
||||
exit 1
|
||||
}
|
||||
|
||||
$startTime = Get-Date
|
||||
|
||||
try {
|
||||
switch ($bash.Type) {
|
||||
'wsl' {
|
||||
$unixScript = Convert-ToUnixPath $scriptPath
|
||||
& wsl bash $unixScript $Level
|
||||
}
|
||||
'gitbash' {
|
||||
$unixScript = $scriptPath -replace '\\', '/'
|
||||
& $bash.Path $unixScript $Level
|
||||
}
|
||||
}
|
||||
$exitCode = $LASTEXITCODE
|
||||
}
|
||||
catch {
|
||||
Write-Fail "Execution failed: $_"
|
||||
$exitCode = 1
|
||||
}
|
||||
|
||||
$duration = (Get-Date) - $startTime
|
||||
$minutes = [math]::Floor($duration.TotalMinutes)
|
||||
$seconds = $duration.Seconds
|
||||
|
||||
Write-Header "Summary"
|
||||
Write-Info "Duration: ${minutes}m ${seconds}s"
|
||||
|
||||
if ($exitCode -eq 0) {
|
||||
Write-Host ""
|
||||
Write-ColoredOutput "=============================================" -Color Green
|
||||
Write-ColoredOutput " ALL CHECKS PASSED - Ready to commit!" -Color Green
|
||||
Write-ColoredOutput "=============================================" -Color Green
|
||||
Write-Host ""
|
||||
Write-Host "Next steps:"
|
||||
Write-Host " git add -A"
|
||||
Write-Host ' git commit -m "Your commit message"'
|
||||
Write-Host ""
|
||||
} else {
|
||||
Write-Host ""
|
||||
Write-ColoredOutput "=============================================" -Color Red
|
||||
Write-ColoredOutput " VALIDATION FAILED - Do not commit!" -Color Red
|
||||
Write-ColoredOutput "=============================================" -Color Red
|
||||
Write-Host ""
|
||||
Write-Host "Check the logs in: out/local-ci/logs/"
|
||||
Write-Host ""
|
||||
}
|
||||
|
||||
exit $exitCode
|
||||
@@ -1,343 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Validate and report on test Category traits across the codebase.
|
||||
|
||||
Sprint: SPRINT_20251226_007_CICD
|
||||
|
||||
This script scans all test files in the codebase and reports:
|
||||
1. Test files with Category traits
|
||||
2. Test files missing Category traits
|
||||
3. Coverage percentage by module
|
||||
|
||||
Usage:
|
||||
python devops/scripts/validate-test-traits.py [--fix] [--module <name>]
|
||||
|
||||
Options:
|
||||
--fix Attempt to add default Unit trait to tests without categories
|
||||
--module Only process tests in the specified module
|
||||
--verbose Show detailed output
|
||||
--json Output as JSON for CI consumption
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Dict, Set, Optional
|
||||
|
||||
|
||||
VALID_CATEGORIES = {
|
||||
"Unit",
|
||||
"Integration",
|
||||
"Architecture",
|
||||
"Contract",
|
||||
"Security",
|
||||
"Golden",
|
||||
"Performance",
|
||||
"Benchmark",
|
||||
"AirGap",
|
||||
"Chaos",
|
||||
"Determinism",
|
||||
"Resilience",
|
||||
"Observability",
|
||||
"Property",
|
||||
"Snapshot",
|
||||
"Live",
|
||||
}
|
||||
|
||||
# Patterns to identify test methods and classes
|
||||
FACT_PATTERN = re.compile(r'\[Fact[^\]]*\]')
|
||||
THEORY_PATTERN = re.compile(r'\[Theory[^\]]*\]')
|
||||
# Match both string literals and TestCategories.Xxx constants
|
||||
# Also match inline format like [Fact, Trait("Category", ...)]
|
||||
TRAIT_CATEGORY_PATTERN = re.compile(
|
||||
r'Trait\s*\(\s*["\']Category["\']\s*,\s*(?:["\'](\w+)["\']|TestCategories\.(\w+))\s*\)'
|
||||
)
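# Matches e.g. [Trait("Category", "Unit")] and [Trait("Category", TestCategories.Integration)]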
|
||||
TEST_CLASS_PATTERN = re.compile(r'public\s+(?:sealed\s+)?class\s+\w+.*Tests?\b')
|
||||
|
||||
|
||||
@dataclass
|
||||
class TestFileAnalysis:
|
||||
path: str
|
||||
has_facts: bool = False
|
||||
has_theories: bool = False
|
||||
has_category_traits: bool = False
|
||||
categories_found: Set[str] = field(default_factory=set)
|
||||
test_method_count: int = 0
|
||||
categorized_test_count: int = 0
|
||||
|
||||
|
||||
def analyze_test_file(file_path: Path) -> TestFileAnalysis:
|
||||
"""Analyze a single test file for Category traits."""
|
||||
analysis = TestFileAnalysis(path=str(file_path))
|
||||
|
||||
try:
|
||||
content = file_path.read_text(encoding='utf-8', errors='ignore')
|
||||
except Exception as e:
|
||||
print(f"Warning: Could not read {file_path}: {e}", file=sys.stderr)
|
||||
return analysis
|
||||
|
||||
# Check for test methods
|
||||
facts = FACT_PATTERN.findall(content)
|
||||
theories = THEORY_PATTERN.findall(content)
|
||||
|
||||
analysis.has_facts = len(facts) > 0
|
||||
analysis.has_theories = len(theories) > 0
|
||||
analysis.test_method_count = len(facts) + len(theories)
|
||||
|
||||
# Check for Category traits
|
||||
category_matches = TRAIT_CATEGORY_PATTERN.findall(content)
|
||||
if category_matches:
|
||||
analysis.has_category_traits = True
|
||||
# Pattern has two capture groups - one for string literal, one for constant
|
||||
# Extract non-empty values from tuples
|
||||
categories = set()
|
||||
for match in category_matches:
|
||||
cat = match[0] or match[1] # First non-empty group
|
||||
if cat:
|
||||
categories.add(cat)
|
||||
analysis.categories_found = categories
|
||||
analysis.categorized_test_count = len(category_matches)
|
||||
|
||||
return analysis
|
||||
|
||||
|
||||
def get_module_from_path(file_path: Path) -> str:
|
||||
"""Extract module name from file path."""
|
||||
parts = file_path.parts
|
||||
|
||||
# Look for src/<Module> pattern
|
||||
for i, part in enumerate(parts):
|
||||
if part == 'src' and i + 1 < len(parts):
|
||||
next_part = parts[i + 1]
|
||||
# e.g., Scanner, __Tests, __Libraries
return next_part
|
||||
|
||||
return "Unknown"
|
||||
|
||||
|
||||
def find_test_files(root_path: Path, module_filter: Optional[str] = None) -> List[Path]:
|
||||
"""Find all test files in the codebase."""
|
||||
test_files = []
|
||||
|
||||
for pattern in ['**/*.Tests.cs', '**/*Test.cs', '**/*Tests/*.cs']:
|
||||
for file_path in root_path.glob(pattern):
|
||||
# Skip generated files
|
||||
if '/obj/' in str(file_path) or '/bin/' in str(file_path):
|
||||
continue
|
||||
if 'node_modules' in str(file_path):
|
||||
continue
|
||||
|
||||
# Apply module filter if specified
|
||||
if module_filter:
|
||||
module = get_module_from_path(file_path)
|
||||
if module.lower() != module_filter.lower():
|
||||
continue
|
||||
|
||||
test_files.append(file_path)
|
||||
|
||||
# De-duplicate: a file can match more than one of the glob patterns above.
return sorted(set(test_files))
|
||||
|
||||
|
||||
def generate_report(analyses: List[TestFileAnalysis], verbose: bool = False) -> Dict:
|
||||
"""Generate a summary report from analyses."""
|
||||
total_files = len(analyses)
|
||||
files_with_tests = [a for a in analyses if a.has_facts or a.has_theories]
|
||||
files_with_traits = [a for a in analyses if a.has_category_traits]
|
||||
files_missing_traits = [a for a in files_with_tests if not a.has_category_traits]
|
||||
|
||||
# Group by module
|
||||
by_module: Dict[str, Dict] = {}
|
||||
for analysis in analyses:
|
||||
module = get_module_from_path(Path(analysis.path))
|
||||
if module not in by_module:
|
||||
by_module[module] = {
|
||||
'total': 0,
|
||||
'with_tests': 0,
|
||||
'with_traits': 0,
|
||||
'missing_traits': 0,
|
||||
'files_missing': []
|
||||
}
|
||||
|
||||
by_module[module]['total'] += 1
|
||||
if analysis.has_facts or analysis.has_theories:
|
||||
by_module[module]['with_tests'] += 1
|
||||
if analysis.has_category_traits:
|
||||
by_module[module]['with_traits'] += 1
|
||||
else:
|
||||
if analysis.has_facts or analysis.has_theories:
|
||||
by_module[module]['missing_traits'] += 1
|
||||
if verbose:
|
||||
by_module[module]['files_missing'].append(analysis.path)
|
||||
|
||||
# Calculate coverage
|
||||
coverage = (len(files_with_traits) / len(files_with_tests) * 100) if files_with_tests else 0
|
||||
|
||||
# Collect all categories found
|
||||
all_categories: Set[str] = set()
|
||||
for analysis in analyses:
|
||||
all_categories.update(analysis.categories_found)
|
||||
|
||||
return {
|
||||
'summary': {
|
||||
'total_test_files': total_files,
|
||||
'files_with_tests': len(files_with_tests),
|
||||
'files_with_category_traits': len(files_with_traits),
|
||||
'files_missing_traits': len(files_missing_traits),
|
||||
'coverage_percent': round(coverage, 1),
|
||||
'categories_used': sorted(all_categories),
|
||||
'valid_categories': sorted(VALID_CATEGORIES),
|
||||
},
|
||||
'by_module': by_module,
|
||||
'files_missing_traits': [a.path for a in files_missing_traits] if verbose else []
|
||||
}
|
||||
|
||||
|
||||
def add_default_trait(file_path: Path, default_category: str = "Unit") -> bool:
|
||||
"""Add default Category trait to test methods missing traits."""
|
||||
try:
|
||||
content = file_path.read_text(encoding='utf-8')
|
||||
original = content
|
||||
|
||||
# Pattern to find [Fact] or [Theory] not preceded by Category trait
|
||||
# This is a simplified approach - adds trait after [Fact] or [Theory]
|
||||
|
||||
# Check if file already has Category traits
|
||||
if TRAIT_CATEGORY_PATTERN.search(content):
|
||||
return False # Already has some traits, skip
|
||||
|
||||
# Add using statement if not present
|
||||
if 'using StellaOps.TestKit;' not in content:
|
||||
# Find last using statement and add after it
|
||||
using_pattern = re.compile(r'(using [^;]+;\s*\n)(?!using)')
|
||||
match = list(using_pattern.finditer(content))
|
||||
if match:
|
||||
last_using = match[-1]
|
||||
insert_pos = last_using.end()
|
||||
content = content[:insert_pos] + 'using StellaOps.TestKit;\n' + content[insert_pos:]
|
||||
|
||||
# Add Trait to [Fact] attributes
|
||||
content = re.sub(
|
||||
r'(\[Fact\])',
|
||||
f'[Trait("Category", TestCategories.{default_category})]\n \\1',
|
||||
content
|
||||
)
|
||||
|
||||
# Add Trait to [Theory] attributes
|
||||
content = re.sub(
|
||||
r'(\[Theory\])',
|
||||
f'[Trait("Category", TestCategories.{default_category})]\n \\1',
|
||||
content
|
||||
)
|
||||
|
||||
if content != original:
|
||||
file_path.write_text(content, encoding='utf-8')
|
||||
return True
|
||||
|
||||
return False
|
||||
except Exception as e:
|
||||
print(f"Error processing {file_path}: {e}", file=sys.stderr)
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Validate test Category traits')
|
||||
parser.add_argument('--fix', action='store_true', help='Add default Unit trait to tests without categories')
|
||||
parser.add_argument('--module', type=str, help='Only process tests in the specified module')
|
||||
parser.add_argument('--verbose', '-v', action='store_true', help='Show detailed output')
|
||||
parser.add_argument('--json', action='store_true', help='Output as JSON')
|
||||
parser.add_argument('--category', type=str, default='Unit', help='Default category for --fix (default: Unit)')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Find repository root
|
||||
script_path = Path(__file__).resolve()
|
||||
repo_root = script_path.parent.parent.parent
|
||||
src_path = repo_root / 'src'
|
||||
|
||||
if not src_path.exists():
|
||||
print(f"Error: src directory not found at {src_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find all test files
|
||||
test_files = find_test_files(src_path, args.module)
|
||||
|
||||
if not args.json:
|
||||
print(f"Found {len(test_files)} test files to analyze...")
|
||||
|
||||
# Analyze each file
|
||||
analyses = [analyze_test_file(f) for f in test_files]
|
||||
|
||||
# Generate report
|
||||
report = generate_report(analyses, args.verbose)
|
||||
|
||||
if args.json:
|
||||
print(json.dumps(report, indent=2))
|
||||
else:
|
||||
# Print summary
|
||||
summary = report['summary']
|
||||
print("\n" + "=" * 60)
|
||||
print("TEST CATEGORY TRAIT COVERAGE REPORT")
|
||||
print("=" * 60)
|
||||
print(f"Total test files: {summary['total_test_files']}")
|
||||
print(f"Files with test methods: {summary['files_with_tests']}")
|
||||
print(f"Files with Category trait: {summary['files_with_category_traits']}")
|
||||
print(f"Files missing traits: {summary['files_missing_traits']}")
|
||||
print(f"Coverage: {summary['coverage_percent']}%")
|
||||
print(f"\nCategories in use: {', '.join(summary['categories_used']) or 'None'}")
|
||||
print(f"Valid categories: {', '.join(summary['valid_categories'])}")
|
||||
|
||||
# Print by module
|
||||
print("\n" + "-" * 60)
|
||||
print("BY MODULE")
|
||||
print("-" * 60)
|
||||
print(f"{'Module':<25} {'With Tests':<12} {'With Traits':<12} {'Missing':<10}")
|
||||
print("-" * 60)
|
||||
|
||||
for module, data in sorted(report['by_module'].items()):
|
||||
if data['with_tests'] > 0:
|
||||
print(f"{module:<25} {data['with_tests']:<12} {data['with_traits']:<12} {data['missing_traits']:<10}")
|
||||
|
||||
# Show files missing traits if verbose
|
||||
if args.verbose and report['files_missing_traits']:
|
||||
print("\n" + "-" * 60)
|
||||
print("FILES MISSING CATEGORY TRAITS")
|
||||
print("-" * 60)
|
||||
for f in sorted(report['files_missing_traits'])[:50]: # Limit to first 50
|
||||
print(f" {f}")
|
||||
if len(report['files_missing_traits']) > 50:
|
||||
print(f" ... and {len(report['files_missing_traits']) - 50} more")
|
||||
|
||||
# Fix mode
|
||||
if args.fix:
|
||||
files_to_fix = [Path(a.path) for a in analyses
|
||||
if (a.has_facts or a.has_theories) and not a.has_category_traits]
|
||||
|
||||
if not args.json:
|
||||
print(f"\n{'=' * 60}")
|
||||
print(f"FIXING {len(files_to_fix)} FILES WITH DEFAULT CATEGORY: {args.category}")
|
||||
print("=" * 60)
|
||||
|
||||
fixed_count = 0
|
||||
for file_path in files_to_fix:
|
||||
if add_default_trait(file_path, args.category):
|
||||
fixed_count += 1
|
||||
if not args.json:
|
||||
print(f" Fixed: {file_path}")
|
||||
|
||||
if not args.json:
|
||||
print(f"\nFixed {fixed_count} files")
|
||||
|
||||
# Exit with error code if coverage is below threshold
|
||||
if report['summary']['coverage_percent'] < 80:
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()