Add tests and implement StubBearer authentication for Signer endpoints
- Created SignerEndpointsTests to validate the SignDsse and VerifyReferrers endpoints.
- Implemented StubBearerAuthenticationDefaults and StubBearerAuthenticationHandler for token-based authentication.
- Developed ConcelierExporterClient for managing Trivy DB settings and export operations.
- Added TrivyDbSettingsPageComponent for UI interactions with Trivy DB settings, including form handling and export triggering.
- Implemented styles and HTML structure for the Trivy DB settings page.
- Created NotifySmokeCheck tool for validating Redis event streams and Notify deliveries.
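The StubBearer pieces named in the message are not part of the hunks shown below. As a rough sketch only (type names mirror the commit message; the claim contents and registration details are illustrative, not the commit's actual implementation), a test-only bearer handler in ASP.NET Core generally looks like this:

```csharp
using System;
using System.Security.Claims;
using System.Text.Encodings.Web;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

// Illustrative stand-in for the StubBearerAuthenticationDefaults mentioned in the commit message.
internal static class StubBearerAuthenticationDefaults
{
    public const string AuthenticationScheme = "StubBearer";
}

// Test-only handler: accepts any "Bearer <token>" header and turns the token into a principal,
// so Signer endpoint tests can exercise authorization without a real token issuer.
internal sealed class StubBearerAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions>
{
    public StubBearerAuthenticationHandler(
        IOptionsMonitor<AuthenticationSchemeOptions> options,
        ILoggerFactory logger,
        UrlEncoder encoder)
        : base(options, logger, encoder)
    {
    }

    protected override Task<AuthenticateResult> HandleAuthenticateAsync()
    {
        string? header = Request.Headers.Authorization;
        if (string.IsNullOrEmpty(header) || !header.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
        {
            return Task.FromResult(AuthenticateResult.NoResult());
        }

        var token = header["Bearer ".Length..].Trim();
        var identity = new ClaimsIdentity(
            new[] { new Claim(ClaimTypes.NameIdentifier, token) },
            Scheme.Name);
        var ticket = new AuthenticationTicket(new ClaimsPrincipal(identity), Scheme.Name);
        return Task.FromResult(AuthenticateResult.Success(ticket));
    }
}
```

In tests it would be registered against the test host with `AddAuthentication(StubBearerAuthenticationDefaults.AuthenticationScheme).AddScheme<AuthenticationSchemeOptions, StubBearerAuthenticationHandler>(StubBearerAuthenticationDefaults.AuthenticationScheme, _ => { })`, and requests to SignDsse and VerifyReferrers would carry an `Authorization: Bearer <token>` header.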
		| @@ -2,35 +2,41 @@ | ||||
|  | ||||
| The bench harness exercises the language analyzers against representative filesystem layouts so that regressions are caught before they ship. | ||||
|  | ||||
| ## Layout | ||||
| - `run-bench.js` – Node.js script that traverses the sample `node_modules/` and `site-packages/` trees, replicating the package discovery work performed by the upcoming analyzers. | ||||
| - `config.json` – Declarative list of scenarios the harness executes. Each scenario points at a directory in `samples/`. | ||||
| - `baseline.csv` – Reference numbers captured on the 4 vCPU warm rig described in `docs/12_PERFORMANCE_WORKBOOK.md`. CI publishes fresh CSVs so perf trends stay visible. | ||||
|  | ||||
| ## Running locally | ||||
|  | ||||
| ```bash | ||||
| cd bench/Scanner.Analyzers | ||||
| node run-bench.js --out baseline.csv --samples ../.. | ||||
| ``` | ||||
|  | ||||
| The harness prints a table to stdout and writes the CSV (if `--out` is specified) with the following headers: | ||||
|  | ||||
| ``` | ||||
| scenario,iterations,sample_count,mean_ms,p95_ms,max_ms | ||||
| ``` | ||||
|  | ||||
| Use `--iterations` to override the default (5 passes per scenario) and `--threshold-ms` to customize the failure budget. Budgets default to 5 000 ms, aligned with the SBOM compose objective. | ||||
|  | ||||
| ## Adding scenarios | ||||
| 1. Drop the fixture tree under `samples/<area>/...`. | ||||
| 2. Append a new scenario entry to `config.json` describing: | ||||
|    - `id` – snake_case scenario name (also used in CSV). | ||||
|    - `label` – human-friendly description shown in logs. | ||||
|    - `root` – path to the directory that will be scanned. | ||||
|    - `matcher` – glob describing files that will be parsed (POSIX `**` patterns). | ||||
|    - `parser` – `node` or `python` to choose the metadata reader. | ||||
| 3. Re-run `node run-bench.js --out baseline.csv`. | ||||
| 4. Commit both the fixture and updated baseline. | ||||
|  | ||||
| The harness is intentionally dependency-free to remain runnable inside minimal CI runners. | ||||
| ## Layout | ||||
| - `StellaOps.Bench.ScannerAnalyzers/` – .NET 10 console harness that executes the real language analyzers (and fallback metadata walks for ecosystems whose analyzers are still in development). | ||||
| - `config.json` – Declarative list of scenarios the harness executes. Each scenario points at a directory in `samples/`. | ||||
| - `baseline.csv` – Reference numbers captured on the 4 vCPU warm rig described in `docs/12_PERFORMANCE_WORKBOOK.md`. CI publishes fresh CSVs so perf trends stay visible. | ||||
|  | ||||
| ## Current scenarios | ||||
| - `node_monorepo_walk` → runs the Node analyzer across `samples/runtime/npm-monorepo`. | ||||
| - `java_demo_archive` → runs the Java analyzer against `samples/runtime/java-demo/libs/demo.jar`. | ||||
| - `python_site_packages_walk` → temporary metadata walk over `samples/runtime/python-venv` until the Python analyzer lands. | ||||
|  | ||||
| ## Running locally | ||||
|  | ||||
| ```bash | ||||
| dotnet run \ | ||||
|   --project bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \ | ||||
|   -- \ | ||||
|   --repo-root . \ | ||||
|   --out bench/Scanner.Analyzers/baseline.csv | ||||
| ``` | ||||
|  | ||||
| The harness prints a table to stdout and writes the CSV (if `--out` is specified) with the following headers: | ||||
|  | ||||
| ``` | ||||
| scenario,iterations,sample_count,mean_ms,p95_ms,max_ms | ||||
| ``` | ||||
|  | ||||
| Use `--iterations` to override the default (5 passes per scenario) and `--threshold-ms` to customize the failure budget. Budgets default to 5 000 ms (or per-scenario overrides in `config.json`), aligned with the SBOM compose objective. | ||||
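For example, an illustrative run that increases the pass count and tightens the budget looks like this (flags as parsed by `Program.cs`):

```bash
dotnet run \
  --project bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \
  -- \
  --repo-root . \
  --iterations 10 \
  --threshold-ms 2500 \
  --out bench/Scanner.Analyzers/baseline.csv
```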
|  | ||||
| ## Adding scenarios | ||||
| 1. Drop the fixture tree under `samples/<area>/...`. | ||||
| 2. Append a new scenario entry to `config.json` (see the example entries after this list) describing: | ||||
|    - `id` – snake_case scenario name (also used in CSV). | ||||
|    - `label` – human-friendly description shown in logs. | ||||
|    - `root` – path to the directory that will be scanned. | ||||
|    - For analyzer-backed scenarios, set `analyzers` to the list of language analyzer ids (for example, `["node"]`). | ||||
|    - For temporary metadata walks (used until the analyzer ships), provide `parser` (`node` or `python`) and the `matcher` glob describing files to parse. | ||||
| 3. Re-run the harness (`dotnet run … --out baseline.csv`). | ||||
| 4. Commit both the fixture and updated baseline. | ||||
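For reference, an analyzer-backed entry and a metadata-walk entry from the current `config.json` look like this:

```json
{
  "id": "java_demo_archive",
  "label": "Java analyzer on demo jar",
  "root": "samples/runtime/java-demo",
  "analyzers": ["java"]
},
{
  "id": "python_site_packages_walk",
  "label": "Python site-packages dist-info crawl",
  "root": "samples/runtime/python-venv/lib/python3.11/site-packages",
  "matcher": "**/*.dist-info/METADATA",
  "parser": "python"
}
```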
|   | ||||
| @@ -0,0 +1,104 @@ | ||||
| using System.Text.Json; | ||||
| using System.Text.Json.Serialization; | ||||
|  | ||||
| namespace StellaOps.Bench.ScannerAnalyzers; | ||||
|  | ||||
| internal sealed record BenchmarkConfig | ||||
| { | ||||
|     [JsonPropertyName("iterations")] | ||||
|     public int? Iterations { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("thresholdMs")] | ||||
|     public double? ThresholdMs { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("scenarios")] | ||||
|     public List<BenchmarkScenarioConfig> Scenarios { get; init; } = new(); | ||||
|  | ||||
|     public static async Task<BenchmarkConfig> LoadAsync(string path) | ||||
|     { | ||||
|         if (string.IsNullOrWhiteSpace(path)) | ||||
|         { | ||||
|             throw new ArgumentException("Config path is required.", nameof(path)); | ||||
|         } | ||||
|  | ||||
|         await using var stream = File.OpenRead(path); | ||||
|         var config = await JsonSerializer.DeserializeAsync<BenchmarkConfig>(stream, SerializerOptions).ConfigureAwait(false); | ||||
|         if (config is null) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Failed to parse benchmark config '{path}'."); | ||||
|         } | ||||
|  | ||||
|         if (config.Scenarios.Count == 0) | ||||
|         { | ||||
|             throw new InvalidOperationException("config.scenarios must declare at least one scenario."); | ||||
|         } | ||||
|  | ||||
|         foreach (var scenario in config.Scenarios) | ||||
|         { | ||||
|             scenario.Validate(); | ||||
|         } | ||||
|  | ||||
|         return config; | ||||
|     } | ||||
|  | ||||
|     private static JsonSerializerOptions SerializerOptions => new() | ||||
|     { | ||||
|         PropertyNameCaseInsensitive = true, | ||||
|         ReadCommentHandling = JsonCommentHandling.Skip, | ||||
|         AllowTrailingCommas = true, | ||||
|     }; | ||||
| } | ||||
|  | ||||
| internal sealed record BenchmarkScenarioConfig | ||||
| { | ||||
|     [JsonPropertyName("id")] | ||||
|     public string? Id { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("label")] | ||||
|     public string? Label { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("root")] | ||||
|     public string? Root { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("analyzers")] | ||||
|     public List<string>? Analyzers { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("matcher")] | ||||
|     public string? Matcher { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("parser")] | ||||
|     public string? Parser { get; init; } | ||||
|  | ||||
|     [JsonPropertyName("thresholdMs")] | ||||
|     public double? ThresholdMs { get; init; } | ||||
|  | ||||
|     public bool HasAnalyzers => Analyzers is { Count: > 0 }; | ||||
|  | ||||
|     public void Validate() | ||||
|     { | ||||
|         if (string.IsNullOrWhiteSpace(Id)) | ||||
|         { | ||||
|             throw new InvalidOperationException("scenario.id is required."); | ||||
|         } | ||||
|  | ||||
|         if (string.IsNullOrWhiteSpace(Root)) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Scenario '{Id}' must specify a root path."); | ||||
|         } | ||||
|  | ||||
|         if (HasAnalyzers) | ||||
|         { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (string.IsNullOrWhiteSpace(Parser)) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Scenario '{Id}' must specify parser or analyzers."); | ||||
|         } | ||||
|  | ||||
|         if (string.IsNullOrWhiteSpace(Matcher)) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Scenario '{Id}' must specify matcher when parser is used."); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,302 @@ | ||||
| using System.Globalization; | ||||
| using StellaOps.Bench.ScannerAnalyzers.Scenarios; | ||||
|  | ||||
| namespace StellaOps.Bench.ScannerAnalyzers; | ||||
|  | ||||
| internal static class Program | ||||
| { | ||||
|     public static async Task<int> Main(string[] args) | ||||
|     { | ||||
|         try | ||||
|         { | ||||
|             var options = ProgramOptions.Parse(args); | ||||
|             var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); | ||||
|  | ||||
|             var iterations = options.Iterations ?? config.Iterations ?? 5; | ||||
|             var thresholdMs = options.ThresholdMs ?? config.ThresholdMs ?? 5000; | ||||
|             var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath); | ||||
|  | ||||
|             var results = new List<ScenarioResult>(); | ||||
|             var failures = new List<string>(); | ||||
|  | ||||
|             foreach (var scenario in config.Scenarios) | ||||
|             { | ||||
|                 var runner = ScenarioRunnerFactory.Create(scenario); | ||||
|                 var scenarioRoot = ResolveScenarioRoot(repoRoot, scenario.Root!); | ||||
|  | ||||
|                 var execution = await runner.ExecuteAsync(scenarioRoot, iterations, CancellationToken.None).ConfigureAwait(false); | ||||
|                 var stats = ScenarioStatistics.FromDurations(execution.Durations); | ||||
|                 var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs; | ||||
|  | ||||
|                 results.Add(new ScenarioResult( | ||||
|                     scenario.Id!, | ||||
|                     scenario.Label ?? scenario.Id!, | ||||
|                     execution.SampleCount, | ||||
|                     stats.MeanMs, | ||||
|                     stats.P95Ms, | ||||
|                     stats.MaxMs, | ||||
|                     iterations)); | ||||
|  | ||||
|                 if (stats.MaxMs > scenarioThreshold) | ||||
|                 { | ||||
|                     failures.Add($"{scenario.Id} exceeded threshold: {stats.MaxMs:F2} ms > {scenarioThreshold:F2} ms"); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             TablePrinter.Print(results); | ||||
|  | ||||
|             if (!string.IsNullOrWhiteSpace(options.OutPath)) | ||||
|             { | ||||
|                 CsvWriter.Write(options.OutPath!, results); | ||||
|             } | ||||
|  | ||||
|             if (failures.Count > 0) | ||||
|             { | ||||
|                 Console.Error.WriteLine(); | ||||
|                 Console.Error.WriteLine("Performance threshold exceeded:"); | ||||
|                 foreach (var failure in failures) | ||||
|                 { | ||||
|                     Console.Error.WriteLine($" - {failure}"); | ||||
|                 } | ||||
|  | ||||
|                 return 1; | ||||
|             } | ||||
|  | ||||
|             return 0; | ||||
|         } | ||||
|         catch (Exception ex) | ||||
|         { | ||||
|             Console.Error.WriteLine(ex.Message); | ||||
|             return 1; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private static string ResolveRepoRoot(string? overridePath, string configPath) | ||||
|     { | ||||
|         if (!string.IsNullOrWhiteSpace(overridePath)) | ||||
|         { | ||||
|             return Path.GetFullPath(overridePath); | ||||
|         } | ||||
|  | ||||
|         var configDirectory = Path.GetDirectoryName(configPath); | ||||
|         if (string.IsNullOrWhiteSpace(configDirectory)) | ||||
|         { | ||||
|             return Directory.GetCurrentDirectory(); | ||||
|         } | ||||
|  | ||||
|         return Path.GetFullPath(Path.Combine(configDirectory, "..", "..")); | ||||
|     } | ||||
|  | ||||
|     private static string ResolveScenarioRoot(string repoRoot, string relativeRoot) | ||||
|     { | ||||
|         if (string.IsNullOrWhiteSpace(relativeRoot)) | ||||
|         { | ||||
|             throw new InvalidOperationException("Scenario root is required."); | ||||
|         } | ||||
|  | ||||
|         var combined = Path.GetFullPath(Path.Combine(repoRoot, relativeRoot)); | ||||
|         if (!PathUtilities.IsWithinRoot(repoRoot, combined)) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Scenario root '{relativeRoot}' escapes repository root '{repoRoot}'."); | ||||
|         } | ||||
|  | ||||
|         if (!Directory.Exists(combined)) | ||||
|         { | ||||
|             throw new DirectoryNotFoundException($"Scenario root '{combined}' does not exist."); | ||||
|         } | ||||
|  | ||||
|         return combined; | ||||
|     } | ||||
|  | ||||
|     private sealed record ProgramOptions(string ConfigPath, int? Iterations, double? ThresholdMs, string? OutPath, string? RepoRoot) | ||||
|     { | ||||
|         public static ProgramOptions Parse(string[] args) | ||||
|         { | ||||
|             var configPath = DefaultConfigPath(); | ||||
|             int? iterations = null; | ||||
|             double? thresholdMs = null; | ||||
|             string? outPath = null; | ||||
|             string? repoRoot = null; | ||||
|  | ||||
|             for (var index = 0; index < args.Length; index++) | ||||
|             { | ||||
|                 var current = args[index]; | ||||
|                 switch (current) | ||||
|                 { | ||||
|                     case "--config": | ||||
|                         EnsureNext(args, index); | ||||
|                         configPath = Path.GetFullPath(args[++index]); | ||||
|                         break; | ||||
|                     case "--iterations": | ||||
|                         EnsureNext(args, index); | ||||
|                         iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); | ||||
|                         break; | ||||
|                     case "--threshold-ms": | ||||
|                         EnsureNext(args, index); | ||||
|                         thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); | ||||
|                         break; | ||||
|                     case "--out": | ||||
|                         EnsureNext(args, index); | ||||
|                         outPath = args[++index]; | ||||
|                         break; | ||||
|                     case "--repo-root": | ||||
|                     case "--samples": | ||||
|                         EnsureNext(args, index); | ||||
|                         repoRoot = args[++index]; | ||||
|                         break; | ||||
|                     default: | ||||
|                         throw new ArgumentException($"Unknown argument: {current}", nameof(args)); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             return new ProgramOptions(configPath, iterations, thresholdMs, outPath, repoRoot); | ||||
|         } | ||||
|  | ||||
|         private static string DefaultConfigPath() | ||||
|         { | ||||
|             var binaryDir = AppContext.BaseDirectory; | ||||
|             var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); | ||||
|             var configDirectory = Path.GetFullPath(Path.Combine(projectRoot, "..")); | ||||
|             return Path.Combine(configDirectory, "config.json"); | ||||
|         } | ||||
|  | ||||
|         private static void EnsureNext(string[] args, int index) | ||||
|         { | ||||
|             if (index + 1 >= args.Length) | ||||
|             { | ||||
|                 throw new ArgumentException("Missing value for argument.", nameof(args)); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private sealed record ScenarioResult( | ||||
|         string Id, | ||||
|         string Label, | ||||
|         int SampleCount, | ||||
|         double MeanMs, | ||||
|         double P95Ms, | ||||
|         double MaxMs, | ||||
|         int Iterations); | ||||
|  | ||||
|     private sealed record ScenarioStatistics(double MeanMs, double P95Ms, double MaxMs) | ||||
|     { | ||||
|         public static ScenarioStatistics FromDurations(IReadOnlyList<double> durations) | ||||
|         { | ||||
|             if (durations.Count == 0) | ||||
|             { | ||||
|                 return new ScenarioStatistics(0, 0, 0); | ||||
|             } | ||||
|  | ||||
|             var sorted = durations.ToArray(); | ||||
|             Array.Sort(sorted); | ||||
|  | ||||
|             var total = 0d; | ||||
|             foreach (var value in durations) | ||||
|             { | ||||
|                 total += value; | ||||
|             } | ||||
|  | ||||
|             var mean = total / durations.Count; | ||||
|             var p95 = Percentile(sorted, 95); | ||||
|             var max = sorted[^1]; | ||||
|  | ||||
|             return new ScenarioStatistics(mean, p95, max); | ||||
|         } | ||||
|  | ||||
|         private static double Percentile(IReadOnlyList<double> sorted, double percentile) | ||||
|         { | ||||
|             if (sorted.Count == 0) | ||||
|             { | ||||
|                 return 0; | ||||
|             } | ||||
|  | ||||
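|             // Interpolate linearly between the two nearest ranks: rank = p/100 * (n - 1). | ||||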
|             var rank = (percentile / 100d) * (sorted.Count - 1); | ||||
|             var lower = (int)Math.Floor(rank); | ||||
|             var upper = (int)Math.Ceiling(rank); | ||||
|             var weight = rank - lower; | ||||
|  | ||||
|             if (upper >= sorted.Count) | ||||
|             { | ||||
|                 return sorted[lower]; | ||||
|             } | ||||
|  | ||||
|             return sorted[lower] + weight * (sorted[upper] - sorted[lower]); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private static class TablePrinter | ||||
|     { | ||||
|         public static void Print(IEnumerable<ScenarioResult> results) | ||||
|         { | ||||
|             Console.WriteLine("Scenario                     | Count |   Mean(ms) |    P95(ms) |     Max(ms)"); | ||||
|             Console.WriteLine("---------------------------- | ----- | --------- | --------- | ----------"); | ||||
|             foreach (var row in results) | ||||
|             { | ||||
|                 Console.WriteLine(FormatRow(row)); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         private static string FormatRow(ScenarioResult row) | ||||
|         { | ||||
|             var idColumn = row.Id.Length <= 28 | ||||
|                 ? row.Id.PadRight(28) | ||||
|                 : row.Id[..28]; | ||||
|  | ||||
|             return string.Join(" | ", new[] | ||||
|             { | ||||
|                 idColumn, | ||||
|                 row.SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5), | ||||
|                 row.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9), | ||||
|                 row.P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9), | ||||
|                 row.MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10), | ||||
|             }); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private static class CsvWriter | ||||
|     { | ||||
|         public static void Write(string path, IEnumerable<ScenarioResult> results) | ||||
|         { | ||||
|             var resolvedPath = Path.GetFullPath(path); | ||||
|             var directory = Path.GetDirectoryName(resolvedPath); | ||||
|             if (!string.IsNullOrEmpty(directory)) | ||||
|             { | ||||
|                 Directory.CreateDirectory(directory); | ||||
|             } | ||||
|  | ||||
|             using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); | ||||
|             using var writer = new StreamWriter(stream); | ||||
|             writer.WriteLine("scenario,iterations,sample_count,mean_ms,p95_ms,max_ms"); | ||||
|  | ||||
|             foreach (var row in results) | ||||
|             { | ||||
|                 writer.Write(row.Id); | ||||
|                 writer.Write(','); | ||||
|                 writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); | ||||
|                 writer.Write(','); | ||||
|                 writer.Write(row.SampleCount.ToString(CultureInfo.InvariantCulture)); | ||||
|                 writer.Write(','); | ||||
|                 writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); | ||||
|                 writer.Write(','); | ||||
|                 writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); | ||||
|                 writer.Write(','); | ||||
|                 writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); | ||||
|                 writer.WriteLine(); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     internal static class PathUtilities | ||||
|     { | ||||
|         public static bool IsWithinRoot(string root, string candidate) | ||||
|         { | ||||
|             var relative = Path.GetRelativePath(root, candidate); | ||||
|             if (string.IsNullOrEmpty(relative) || relative == ".") | ||||
|             { | ||||
|                 return true; | ||||
|             } | ||||
|  | ||||
|             return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative); | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,279 @@ | ||||
| using System.Diagnostics; | ||||
| using System.Text; | ||||
| using System.Linq; | ||||
| using System.Text.Json; | ||||
| using System.Text.RegularExpressions; | ||||
| using StellaOps.Scanner.Analyzers.Lang; | ||||
| using StellaOps.Scanner.Analyzers.Lang.Java; | ||||
| using StellaOps.Scanner.Analyzers.Lang.Node; | ||||
|  | ||||
| namespace StellaOps.Bench.ScannerAnalyzers.Scenarios; | ||||
|  | ||||
| internal interface IScenarioRunner | ||||
| { | ||||
|     Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken); | ||||
| } | ||||
|  | ||||
| internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount); | ||||
|  | ||||
| internal static class ScenarioRunnerFactory | ||||
| { | ||||
|     public static IScenarioRunner Create(BenchmarkScenarioConfig scenario) | ||||
|     { | ||||
|         if (scenario.HasAnalyzers) | ||||
|         { | ||||
|             return new LanguageAnalyzerScenarioRunner(scenario.Analyzers!); | ||||
|         } | ||||
|  | ||||
|         if (string.IsNullOrWhiteSpace(scenario.Parser) || string.IsNullOrWhiteSpace(scenario.Matcher)) | ||||
|         { | ||||
|             throw new InvalidOperationException($"Scenario '{scenario.Id}' missing parser or matcher configuration."); | ||||
|         } | ||||
|  | ||||
|         return new MetadataWalkScenarioRunner(scenario.Parser, scenario.Matcher); | ||||
|     } | ||||
| } | ||||
|  | ||||
| internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner | ||||
| { | ||||
|     private readonly IReadOnlyList<Func<ILanguageAnalyzer>> _analyzerFactories; | ||||
|  | ||||
|     public LanguageAnalyzerScenarioRunner(IEnumerable<string> analyzerIds) | ||||
|     { | ||||
|         if (analyzerIds is null) | ||||
|         { | ||||
|             throw new ArgumentNullException(nameof(analyzerIds)); | ||||
|         } | ||||
|  | ||||
|         _analyzerFactories = analyzerIds | ||||
|             .Where(static id => !string.IsNullOrWhiteSpace(id)) | ||||
|             .Select(CreateFactory) | ||||
|             .ToArray(); | ||||
|  | ||||
|         if (_analyzerFactories.Count == 0) | ||||
|         { | ||||
|             throw new InvalidOperationException("At least one analyzer id must be provided."); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) | ||||
|     { | ||||
|         if (iterations <= 0) | ||||
|         { | ||||
|             throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); | ||||
|         } | ||||
|  | ||||
|         var analyzers = _analyzerFactories.Select(factory => factory()).ToArray(); | ||||
|         var engine = new LanguageAnalyzerEngine(analyzers); | ||||
|         var durations = new double[iterations]; | ||||
|         var componentCount = -1; | ||||
|  | ||||
|         for (var i = 0; i < iterations; i++) | ||||
|         { | ||||
|             cancellationToken.ThrowIfCancellationRequested(); | ||||
|  | ||||
|             var context = new LanguageAnalyzerContext(rootPath, TimeProvider.System); | ||||
|             var stopwatch = Stopwatch.StartNew(); | ||||
|             var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); | ||||
|             stopwatch.Stop(); | ||||
|  | ||||
|             durations[i] = stopwatch.Elapsed.TotalMilliseconds; | ||||
|  | ||||
|             var currentCount = result.Components.Count; | ||||
|             if (componentCount < 0) | ||||
|             { | ||||
|                 componentCount = currentCount; | ||||
|             } | ||||
|             else if (componentCount != currentCount) | ||||
|             { | ||||
|                 throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount} vs {currentCount})."); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         if (componentCount < 0) | ||||
|         { | ||||
|             componentCount = 0; | ||||
|         } | ||||
|  | ||||
|         return new ScenarioExecutionResult(durations, componentCount); | ||||
|     } | ||||
|  | ||||
|     private static Func<ILanguageAnalyzer> CreateFactory(string analyzerId) | ||||
|     { | ||||
|         var id = analyzerId.Trim().ToLowerInvariant(); | ||||
|         return id switch | ||||
|         { | ||||
|             "java" => static () => new JavaLanguageAnalyzer(), | ||||
|             "node" => static () => new NodeLanguageAnalyzer(), | ||||
|             _ => throw new InvalidOperationException($"Unsupported analyzer '{analyzerId}'."), | ||||
|         }; | ||||
|     } | ||||
| } | ||||
|  | ||||
| internal sealed class MetadataWalkScenarioRunner : IScenarioRunner | ||||
| { | ||||
|     private readonly Regex _matcher; | ||||
|     private readonly string _parserKind; | ||||
|  | ||||
|     public MetadataWalkScenarioRunner(string parserKind, string globPattern) | ||||
|     { | ||||
|         _parserKind = parserKind?.Trim().ToLowerInvariant() ?? throw new ArgumentNullException(nameof(parserKind)); | ||||
|         _matcher = GlobToRegex(globPattern ?? throw new ArgumentNullException(nameof(globPattern))); | ||||
|     } | ||||
|  | ||||
|     public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) | ||||
|     { | ||||
|         if (iterations <= 0) | ||||
|         { | ||||
|             throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); | ||||
|         } | ||||
|  | ||||
|         var durations = new double[iterations]; | ||||
|         var sampleCount = -1; | ||||
|  | ||||
|         for (var i = 0; i < iterations; i++) | ||||
|         { | ||||
|             cancellationToken.ThrowIfCancellationRequested(); | ||||
|  | ||||
|             var stopwatch = Stopwatch.StartNew(); | ||||
|             var files = EnumerateMatchingFiles(rootPath); | ||||
|             if (files.Count == 0) | ||||
|             { | ||||
|                 throw new InvalidOperationException($"Parser '{_parserKind}' matched zero files under '{rootPath}'."); | ||||
|             } | ||||
|  | ||||
|             foreach (var file in files) | ||||
|             { | ||||
|                 cancellationToken.ThrowIfCancellationRequested(); | ||||
|                 await ParseAsync(file).ConfigureAwait(false); | ||||
|             } | ||||
|  | ||||
|             stopwatch.Stop(); | ||||
|             durations[i] = stopwatch.Elapsed.TotalMilliseconds; | ||||
|  | ||||
|             if (sampleCount < 0) | ||||
|             { | ||||
|                 sampleCount = files.Count; | ||||
|             } | ||||
|             else if (sampleCount != files.Count) | ||||
|             { | ||||
|                 throw new InvalidOperationException($"File count changed between iterations ({sampleCount} vs {files.Count})."); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         if (sampleCount < 0) | ||||
|         { | ||||
|             sampleCount = 0; | ||||
|         } | ||||
|  | ||||
|         return new ScenarioExecutionResult(durations, sampleCount); | ||||
|     } | ||||
|  | ||||
|     private async ValueTask ParseAsync(string filePath) | ||||
|     { | ||||
|         switch (_parserKind) | ||||
|         { | ||||
|             case "node": | ||||
|                 { | ||||
|                     using var stream = File.OpenRead(filePath); | ||||
|                     using var document = await JsonDocument.ParseAsync(stream).ConfigureAwait(false); | ||||
|  | ||||
|                     if (!document.RootElement.TryGetProperty("name", out var name) || name.ValueKind != JsonValueKind.String) | ||||
|                     { | ||||
|                         throw new InvalidOperationException($"package.json '{filePath}' missing name."); | ||||
|                     } | ||||
|  | ||||
|                     if (!document.RootElement.TryGetProperty("version", out var version) || version.ValueKind != JsonValueKind.String) | ||||
|                     { | ||||
|                         throw new InvalidOperationException($"package.json '{filePath}' missing version."); | ||||
|                     } | ||||
|                 } | ||||
|                 break; | ||||
|             case "python": | ||||
|                 { | ||||
|                     var (name, version) = await ParsePythonMetadataAsync(filePath).ConfigureAwait(false); | ||||
|                     if (string.IsNullOrEmpty(name) || string.IsNullOrEmpty(version)) | ||||
|                     { | ||||
|                         throw new InvalidOperationException($"METADATA '{filePath}' missing Name/Version."); | ||||
|                     } | ||||
|                 } | ||||
|                 break; | ||||
|             default: | ||||
|                 throw new InvalidOperationException($"Unknown parser '{_parserKind}'."); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private static async Task<(string? Name, string? Version)> ParsePythonMetadataAsync(string filePath) | ||||
|     { | ||||
|         using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete); | ||||
|         using var reader = new StreamReader(stream); | ||||
|  | ||||
|         string? name = null; | ||||
|         string? version = null; | ||||
|  | ||||
|         while (await reader.ReadLineAsync().ConfigureAwait(false) is { } line) | ||||
|         { | ||||
|             if (line.StartsWith("Name:", StringComparison.OrdinalIgnoreCase)) | ||||
|             { | ||||
|                 name ??= line[5..].Trim(); | ||||
|             } | ||||
|             else if (line.StartsWith("Version:", StringComparison.OrdinalIgnoreCase)) | ||||
|             { | ||||
|                 version ??= line[8..].Trim(); | ||||
|             } | ||||
|  | ||||
|             if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(version)) | ||||
|             { | ||||
|                 break; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         return (name, version); | ||||
|     } | ||||
|  | ||||
|     private IReadOnlyList<string> EnumerateMatchingFiles(string rootPath) | ||||
|     { | ||||
|         var files = new List<string>(); | ||||
|         var stack = new Stack<string>(); | ||||
|         stack.Push(rootPath); | ||||
|  | ||||
|         while (stack.Count > 0) | ||||
|         { | ||||
|             var current = stack.Pop(); | ||||
|             foreach (var directory in Directory.EnumerateDirectories(current)) | ||||
|             { | ||||
|                 stack.Push(directory); | ||||
|             } | ||||
|  | ||||
|             foreach (var file in Directory.EnumerateFiles(current)) | ||||
|             { | ||||
|                 var relative = Path.GetRelativePath(rootPath, file).Replace('\\', '/'); | ||||
|                 if (_matcher.IsMatch(relative)) | ||||
|                 { | ||||
|                     files.Add(file); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         return files; | ||||
|     } | ||||
|  | ||||
|     private static Regex GlobToRegex(string pattern) | ||||
|     { | ||||
|         if (string.IsNullOrWhiteSpace(pattern)) | ||||
|         { | ||||
|             throw new ArgumentException("Glob pattern is required.", nameof(pattern)); | ||||
|         } | ||||
|  | ||||
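|         // Swap "**" and "*" for sentinel characters so Regex.Escape leaves them intact, then expand the sentinels into their regex equivalents. | ||||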
|         var normalized = pattern.Replace("\\", "/"); | ||||
|         normalized = normalized.Replace("**", "\u0001"); | ||||
|         normalized = normalized.Replace("*", "\u0002"); | ||||
|  | ||||
|         var escaped = Regex.Escape(normalized); | ||||
|         escaped = escaped.Replace("\u0001/", "(?:.*/)?", StringComparison.Ordinal); | ||||
|         escaped = escaped.Replace("\u0001", ".*", StringComparison.Ordinal); | ||||
|         escaped = escaped.Replace("\u0002", "[^/]*", StringComparison.Ordinal); | ||||
|  | ||||
|         return new Regex("^" + escaped + "$", RegexOptions.Compiled | RegexOptions.CultureInvariant); | ||||
|     } | ||||
| } | ||||
| @@ -0,0 +1,16 @@ | ||||
| <Project Sdk="Microsoft.NET.Sdk"> | ||||
|   <PropertyGroup> | ||||
|     <OutputType>Exe</OutputType> | ||||
|     <TargetFramework>net10.0</TargetFramework> | ||||
|     <Nullable>enable</Nullable> | ||||
|     <ImplicitUsings>enable</ImplicitUsings> | ||||
|     <LangVersion>preview</LangVersion> | ||||
|     <TreatWarningsAsErrors>true</TreatWarningsAsErrors> | ||||
|   </PropertyGroup> | ||||
|  | ||||
|   <ItemGroup> | ||||
|     <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> | ||||
|     <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj" /> | ||||
|     <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj" /> | ||||
|   </ItemGroup> | ||||
| </Project> | ||||
| @@ -1,3 +1,4 @@ | ||||
| scenario,iterations,sample_count,mean_ms,p95_ms,max_ms | ||||
| node_monorepo_walk,5,4,233.9428,319.8564,344.4611 | ||||
| python_site_packages_walk,5,3,72.9166,74.8970,74.9884 | ||||
| node_monorepo_walk,5,4,4.2314,15.3277,18.9984 | ||||
| java_demo_archive,5,1,4.5572,17.3489,21.5472 | ||||
| python_site_packages_walk,5,3,2.0049,6.4230,7.8832 | ||||
|   | ||||
| @@ -2,17 +2,26 @@ | ||||
|   "thresholdMs": 5000, | ||||
|   "iterations": 5, | ||||
|   "scenarios": [ | ||||
|     { | ||||
|       "id": "node_monorepo_walk", | ||||
|       "label": "Node.js monorepo package.json harvest", | ||||
|       "root": "samples/runtime/npm-monorepo/node_modules", | ||||
|       "matcher": "**/package.json", | ||||
|       "parser": "node" | ||||
|     }, | ||||
|     { | ||||
|       "id": "python_site_packages_walk", | ||||
|       "label": "Python site-packages dist-info crawl", | ||||
|       "root": "samples/runtime/python-venv/lib/python3.11/site-packages", | ||||
|     { | ||||
|       "id": "node_monorepo_walk", | ||||
|       "label": "Node.js analyzer on monorepo fixture", | ||||
|       "root": "samples/runtime/npm-monorepo", | ||||
|       "analyzers": [ | ||||
|         "node" | ||||
|       ] | ||||
|     }, | ||||
|     { | ||||
|       "id": "java_demo_archive", | ||||
|       "label": "Java analyzer on demo jar", | ||||
|       "root": "samples/runtime/java-demo", | ||||
|       "analyzers": [ | ||||
|         "java" | ||||
|       ] | ||||
|     }, | ||||
|     { | ||||
|       "id": "python_site_packages_walk", | ||||
|       "label": "Python site-packages dist-info crawl", | ||||
|       "root": "samples/runtime/python-venv/lib/python3.11/site-packages", | ||||
|       "matcher": "**/*.dist-info/METADATA", | ||||
|       "parser": "python" | ||||
|     } | ||||
|   | ||||
| @@ -1,249 +0,0 @@ | ||||
| #!/usr/bin/env node | ||||
| 'use strict'; | ||||
|  | ||||
| const fs = require('fs'); | ||||
| const path = require('path'); | ||||
| const { performance } = require('perf_hooks'); | ||||
|  | ||||
| function globToRegExp(pattern) { | ||||
|   let working = pattern | ||||
|     .replace(/\*\*/g, ':::DOUBLE_WILDCARD:::') | ||||
|     .replace(/\*/g, ':::SINGLE_WILDCARD:::'); | ||||
|   working = working.replace(/([.+^${}()|[\]\\])/g, '\\$1'); | ||||
|   working = working | ||||
|     .replace(/:::DOUBLE_WILDCARD:::\//g, '(?:.*/)?') | ||||
|     .replace(/:::DOUBLE_WILDCARD:::/g, '.*') | ||||
|     .replace(/:::SINGLE_WILDCARD:::/g, '[^/]*'); | ||||
|   return new RegExp(`^${working}$`); | ||||
| } | ||||
|  | ||||
| function walkFiles(root, matcher) { | ||||
|   const out = []; | ||||
|   const stack = [root]; | ||||
|   while (stack.length) { | ||||
|     const current = stack.pop(); | ||||
|     const stat = fs.statSync(current, { throwIfNoEntry: true }); | ||||
|     if (stat.isDirectory()) { | ||||
|       const entries = fs.readdirSync(current); | ||||
|       for (const entry of entries) { | ||||
|         stack.push(path.join(current, entry)); | ||||
|       } | ||||
|     } else if (stat.isFile()) { | ||||
|       const relativePath = path.relative(root, current).replace(/\\/g, '/'); | ||||
|       if (matcher.test(relativePath)) { | ||||
|         out.push(current); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|   return out; | ||||
| } | ||||
|  | ||||
| function parseArgs(argv) { | ||||
|   const args = { | ||||
|     config: path.join(__dirname, 'config.json'), | ||||
|     iterations: undefined, | ||||
|     thresholdMs: undefined, | ||||
|     out: undefined, | ||||
|     repoRoot: path.join(__dirname, '..', '..'), | ||||
|   }; | ||||
|  | ||||
|   for (let i = 2; i < argv.length; i++) { | ||||
|     const current = argv[i]; | ||||
|     switch (current) { | ||||
|       case '--config': | ||||
|         args.config = argv[++i]; | ||||
|         break; | ||||
|       case '--iterations': | ||||
|         args.iterations = Number(argv[++i]); | ||||
|         break; | ||||
|       case '--threshold-ms': | ||||
|         args.thresholdMs = Number(argv[++i]); | ||||
|         break; | ||||
|       case '--out': | ||||
|         args.out = argv[++i]; | ||||
|         break; | ||||
|       case '--repo-root': | ||||
|       case '--samples': | ||||
|         args.repoRoot = argv[++i]; | ||||
|         break; | ||||
|       default: | ||||
|         throw new Error(`Unknown argument: ${current}`); | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   return args; | ||||
| } | ||||
|  | ||||
| function loadConfig(configPath) { | ||||
|   const json = fs.readFileSync(configPath, 'utf8'); | ||||
|   const cfg = JSON.parse(json); | ||||
|   if (!Array.isArray(cfg.scenarios) || cfg.scenarios.length === 0) { | ||||
|     throw new Error('config.scenarios must be a non-empty array'); | ||||
|   } | ||||
|   return cfg; | ||||
| } | ||||
|  | ||||
| function ensureWithinRepo(repoRoot, target) { | ||||
|   const relative = path.relative(repoRoot, target); | ||||
|   if (relative === '' || relative === '.') { | ||||
|     return true; | ||||
|   } | ||||
|   return !relative.startsWith('..') && !path.isAbsolute(relative); | ||||
| } | ||||
|  | ||||
| function parseNodePackage(contents) { | ||||
|   const parsed = JSON.parse(contents); | ||||
|   if (!parsed.name || !parsed.version) { | ||||
|     throw new Error('package.json missing name/version'); | ||||
|   } | ||||
|   return { name: parsed.name, version: parsed.version }; | ||||
| } | ||||
|  | ||||
| function parsePythonMetadata(contents) { | ||||
|   let name; | ||||
|   let version; | ||||
|   for (const line of contents.split(/\r?\n/)) { | ||||
|     if (!name && line.startsWith('Name:')) { | ||||
|       name = line.slice(5).trim(); | ||||
|     } else if (!version && line.startsWith('Version:')) { | ||||
|       version = line.slice(8).trim(); | ||||
|     } | ||||
|     if (name && version) { | ||||
|       break; | ||||
|     } | ||||
|   } | ||||
|   if (!name || !version) { | ||||
|     throw new Error('METADATA missing Name/Version headers'); | ||||
|   } | ||||
|   return { name, version }; | ||||
| } | ||||
|  | ||||
| function formatRow(row) { | ||||
|   const cols = [ | ||||
|     row.id.padEnd(28), | ||||
|     row.sampleCount.toString().padStart(5), | ||||
|     row.meanMs.toFixed(2).padStart(9), | ||||
|     row.p95Ms.toFixed(2).padStart(9), | ||||
|     row.maxMs.toFixed(2).padStart(9), | ||||
|   ]; | ||||
|   return cols.join(' | '); | ||||
| } | ||||
|  | ||||
| function percentile(sortedDurations, percentile) { | ||||
|   if (sortedDurations.length === 0) { | ||||
|     return 0; | ||||
|   } | ||||
|   const rank = (percentile / 100) * (sortedDurations.length - 1); | ||||
|   const lower = Math.floor(rank); | ||||
|   const upper = Math.ceil(rank); | ||||
|   const weight = rank - lower; | ||||
|   if (upper >= sortedDurations.length) { | ||||
|     return sortedDurations[lower]; | ||||
|   } | ||||
|   return sortedDurations[lower] + weight * (sortedDurations[upper] - sortedDurations[lower]); | ||||
| } | ||||
|  | ||||
| function main() { | ||||
|   const args = parseArgs(process.argv); | ||||
|   const cfg = loadConfig(args.config); | ||||
|   const iterations = args.iterations ?? cfg.iterations ?? 5; | ||||
|   const thresholdMs = args.thresholdMs ?? cfg.thresholdMs ?? 5000; | ||||
|  | ||||
|   const results = []; | ||||
|   const failures = []; | ||||
|  | ||||
|   for (const scenario of cfg.scenarios) { | ||||
|     const scenarioRoot = path.resolve(args.repoRoot, scenario.root); | ||||
|     if (!ensureWithinRepo(args.repoRoot, scenarioRoot)) { | ||||
|       throw new Error(`Scenario root ${scenario.root} escapes repo root ${args.repoRoot}`); | ||||
|     } | ||||
|     if (!fs.existsSync(scenarioRoot)) { | ||||
|       throw new Error(`Scenario root ${scenarioRoot} does not exist`); | ||||
|     } | ||||
|  | ||||
|     const matcher = globToRegExp(scenario.matcher.replace(/\\/g, '/')); | ||||
|     const durations = []; | ||||
|     let sampleCount = 0; | ||||
|  | ||||
|     for (let attempt = 0; attempt < iterations; attempt++) { | ||||
|       const start = performance.now(); | ||||
|       const files = walkFiles(scenarioRoot, matcher); | ||||
|       if (files.length === 0) { | ||||
|         throw new Error(`Scenario ${scenario.id} matched no files`); | ||||
|       } | ||||
|  | ||||
|       for (const filePath of files) { | ||||
|         const contents = fs.readFileSync(filePath, 'utf8'); | ||||
|         if (scenario.parser === 'node') { | ||||
|           parseNodePackage(contents); | ||||
|         } else if (scenario.parser === 'python') { | ||||
|           parsePythonMetadata(contents); | ||||
|         } else { | ||||
|           throw new Error(`Unknown parser ${scenario.parser} for scenario ${scenario.id}`); | ||||
|         } | ||||
|       } | ||||
|       const end = performance.now(); | ||||
|       durations.push(end - start); | ||||
|       sampleCount = files.length; | ||||
|     } | ||||
|  | ||||
|     durations.sort((a, b) => a - b); | ||||
|     const mean = durations.reduce((acc, value) => acc + value, 0) / durations.length; | ||||
|     const p95 = percentile(durations, 95); | ||||
|     const max = durations[durations.length - 1]; | ||||
|  | ||||
|     if (max > thresholdMs) { | ||||
|       failures.push(`${scenario.id} exceeded threshold: ${(max).toFixed(2)} ms > ${thresholdMs} ms`); | ||||
|     } | ||||
|  | ||||
|     results.push({ | ||||
|       id: scenario.id, | ||||
|       label: scenario.label, | ||||
|       sampleCount, | ||||
|       meanMs: mean, | ||||
|       p95Ms: p95, | ||||
|       maxMs: max, | ||||
|       iterations, | ||||
|     }); | ||||
|   } | ||||
|  | ||||
|   console.log('Scenario                     | Count |   Mean(ms) |    P95(ms) |     Max(ms)'); | ||||
|   console.log('---------------------------- | ----- | --------- | --------- | ----------'); | ||||
|   for (const row of results) { | ||||
|     console.log(formatRow(row)); | ||||
|   } | ||||
|  | ||||
|   if (args.out) { | ||||
|     const header = 'scenario,iterations,sample_count,mean_ms,p95_ms,max_ms\n'; | ||||
|     const csvRows = results | ||||
|       .map((row) => | ||||
|         [ | ||||
|           row.id, | ||||
|           row.iterations, | ||||
|           row.sampleCount, | ||||
|           row.meanMs.toFixed(4), | ||||
|           row.p95Ms.toFixed(4), | ||||
|           row.maxMs.toFixed(4), | ||||
|         ].join(',') | ||||
|       ) | ||||
|       .join('\n'); | ||||
|     fs.writeFileSync(args.out, header + csvRows + '\n', 'utf8'); | ||||
|   } | ||||
|  | ||||
|   if (failures.length > 0) { | ||||
|     console.error('\nPerformance threshold exceeded:'); | ||||
|     for (const failure of failures) { | ||||
|       console.error(` - ${failure}`); | ||||
|     } | ||||
|     process.exitCode = 1; | ||||
|   } | ||||
| } | ||||
|  | ||||
| if (require.main === module) { | ||||
|   try { | ||||
|     main(); | ||||
|   } catch (err) { | ||||
|     console.error(err instanceof Error ? err.message : err); | ||||
|     process.exit(1); | ||||
|   } | ||||
| } | ||||