feat: Add Go module and workspace test fixtures

- Created expected JSON files for Go modules and workspaces.
- Added go.mod and go.sum files for example projects.
- Implemented private module structure with expected JSON output.
- Introduced vendored dependencies with corresponding expected JSON.
- Developed PostgresGraphJobStore for managing graph jobs.
- Established SQL migration scripts for graph jobs schema.
- Implemented GraphJobRepository for CRUD operations on graph jobs.
- Created IGraphJobRepository interface for repository abstraction.
- Added unit tests for GraphJobRepository to ensure functionality.
This commit is contained in:
StellaOps Bot
2025-12-06 20:04:03 +02:00
parent a6f1406509
commit 05597616d6
178 changed files with 12022 additions and 4545 deletions

View File

@@ -75,7 +75,7 @@ internal static class CommandFactory
root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
root.Add(SystemCommandBuilder.BuildSystemCommand());
root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));
var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
var pluginLoader = new CliCommandModuleLoader(services, options, pluginLogger);

View File

@@ -1,6 +1,10 @@
using System;
using System.CommandLine;
using System.Threading.Tasks;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cli.Services;
using StellaOps.Infrastructure.Postgres.Migrations;
namespace StellaOps.Cli.Commands;
@@ -23,60 +27,118 @@ internal static class SystemCommandBuilder
};
}
internal static Command BuildSystemCommand()
internal static Command BuildSystemCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var moduleOption = new Option<string?>("--module", description: "Module name (Authority, Scheduler, Concelier, Policy, Notify, Excititor, all)");
var categoryOption = new Option<string?>("--category", description: "Migration category (startup, release, seed, data)");
var moduleOption = new Option<string?>(
"--module",
description: "Module name (Authority, Scheduler, Concelier, Policy, Notify, Excititor, all)");
var categoryOption = new Option<string?>(
"--category",
description: "Migration category (startup, release, seed, data)");
var dryRunOption = new Option<bool>("--dry-run", description: "List migrations without executing");
var connectionOption = new Option<string?>(
"--connection",
description: "PostgreSQL connection string override (otherwise uses STELLAOPS_POSTGRES_* env vars)");
var timeoutOption = new Option<int?>(
"--timeout",
description: "Command timeout in seconds for each migration (default 300).");
var forceOption = new Option<bool>(
"--force",
description: "Allow execution of release migrations without --dry-run.");
var run = new Command("migrations-run", "Run migrations for the selected module(s).");
run.AddOption(moduleOption);
run.AddOption(categoryOption);
run.AddOption(dryRunOption);
run.AddOption(connectionOption);
run.AddOption(timeoutOption);
run.AddOption(forceOption);
run.SetAction(async parseResult =>
{
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
if (!modules.Any())
{
throw new CommandLineException("No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
throw new CommandLineException(
"No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
}
var category = ParseCategory(parseResult.GetValue(categoryOption));
if (category == MigrationCategory.Release && parseResult.GetValue(dryRunOption) == false)
var dryRun = parseResult.GetValue(dryRunOption);
var force = parseResult.GetValue(forceOption);
if (category == MigrationCategory.Release && !dryRun && !force)
{
throw new CommandLineException("Release migrations require explicit approval; use --dry-run to preview or run approved release migrations manually.");
throw new CommandLineException(
"Release migrations require explicit approval; use --dry-run to preview or --force to execute.");
}
var connection = parseResult.GetValue(connectionOption);
var timeoutSeconds = parseResult.GetValue(timeoutOption);
var verbose = parseResult.GetValue(verboseOption);
var migrationService = services.GetRequiredService<MigrationCommandService>();
foreach (var module in modules)
{
var result = await migrationService
.RunAsync(module, connection, category, dryRun, timeoutSeconds, cancellationToken)
.ConfigureAwait(false);
WriteRunResult(module, result, verbose);
}
// TODO: wire MigrationRunnerAdapter to execute migrations per module/category.
await Task.CompletedTask;
});
var status = new Command("migrations-status", "Show migration status for the selected module(s).");
status.AddOption(moduleOption);
status.AddOption(categoryOption);
status.AddOption(connectionOption);
status.SetAction(async parseResult =>
{
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
if (!modules.Any())
{
throw new CommandLineException("No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
throw new CommandLineException(
"No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
}
var connection = parseResult.GetValue(connectionOption);
var verbose = parseResult.GetValue(verboseOption);
var migrationService = services.GetRequiredService<MigrationCommandService>();
foreach (var module in modules)
{
var statusResult = await migrationService
.GetStatusAsync(module, connection, cancellationToken)
.ConfigureAwait(false);
WriteStatusResult(module, statusResult, verbose);
}
ParseCategory(parseResult.GetValue(categoryOption));
// TODO: wire MigrationRunnerAdapter to fetch status.
await Task.CompletedTask;
});
var verify = new Command("migrations-verify", "Verify migration checksums for the selected module(s).");
verify.AddOption(moduleOption);
verify.AddOption(categoryOption);
verify.AddOption(connectionOption);
verify.SetAction(async parseResult =>
{
var modules = MigrationModuleRegistry.GetModules(parseResult.GetValue(moduleOption)).ToList();
if (!modules.Any())
{
throw new CommandLineException("No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
throw new CommandLineException(
"No modules matched the filter; available: " + string.Join(", ", MigrationModuleRegistry.ModuleNames));
}
var connection = parseResult.GetValue(connectionOption);
var migrationService = services.GetRequiredService<MigrationCommandService>();
foreach (var module in modules)
{
var errors = await migrationService
.VerifyAsync(module, connection, cancellationToken)
.ConfigureAwait(false);
WriteVerifyResult(module, errors);
}
ParseCategory(parseResult.GetValue(categoryOption));
// TODO: wire MigrationRunnerAdapter to verify checksums.
await Task.CompletedTask;
});
var system = new Command("system", "System operations (migrations).");
@@ -85,4 +147,84 @@ internal static class SystemCommandBuilder
system.Add(verify);
return system;
}
/// <summary>
/// Prints the outcome of a migration run for one module. Failures go to stderr
/// and flag a non-zero process exit code without clobbering an earlier one.
/// </summary>
private static void WriteRunResult(MigrationModuleInfo module, MigrationResult result, bool verbose)
{
    var tag = $"[{module.Name}]";

    if (!result.Success)
    {
        // Failure path: surface the error and any checksum mismatches on stderr.
        Console.Error.WriteLine($"{tag} FAILED: {result.ErrorMessage}");
        foreach (var checksumError in result.ChecksumErrors)
        {
            Console.Error.WriteLine($"{tag} checksum: {checksumError}");
        }

        // Only set the exit code if nothing has failed before us.
        if (Environment.ExitCode == 0)
        {
            Environment.ExitCode = 1;
        }

        return;
    }

    Console.WriteLine(
        $"{tag} applied={result.AppliedCount} skipped={result.SkippedCount} filtered={result.FilteredCount} duration_ms={result.DurationMs}");

    if (!verbose || result.AppliedMigrations.Count == 0)
    {
        return;
    }

    // Verbose mode: list each migration in deterministic (name) order.
    foreach (var migration in result.AppliedMigrations.OrderBy(m => m.Name))
    {
        var mode = migration.WasDryRun ? "DRY-RUN" : "APPLIED";
        Console.WriteLine($"{tag} {mode}: {migration.Name} ({migration.Category}) {migration.DurationMs}ms");
    }
}
/// <summary>
/// Prints a one-line status summary for the module; in verbose mode also lists
/// pending migrations and checksum errors. Blocking issues mark the process
/// exit code as failed (without overwriting an earlier failure code).
/// </summary>
private static void WriteStatusResult(MigrationModuleInfo module, MigrationStatus status, bool verbose)
{
    var tag = $"[{module.Name}]";

    var summary =
        $"{tag} applied={status.AppliedCount} pending_startup={status.PendingStartupCount} pending_release={status.PendingReleaseCount} checksum_errors={status.ChecksumErrors.Count}";
    Console.WriteLine(summary);

    if (verbose)
    {
        // Deterministic ordering so output is stable across runs.
        foreach (var pendingMigration in status.PendingMigrations.OrderBy(p => p.Name))
        {
            Console.WriteLine($"{tag} pending {pendingMigration.Category}: {pendingMigration.Name}");
        }

        foreach (var checksumError in status.ChecksumErrors)
        {
            Console.WriteLine($"{tag} checksum: {checksumError}");
        }
    }

    // Preserve any previously-set non-zero exit code.
    if (status.HasBlockingIssues && Environment.ExitCode == 0)
    {
        Environment.ExitCode = 1;
    }
}
/// <summary>
/// Reports checksum verification results for one module: a success line on
/// stdout when clean, otherwise each error on stderr plus a failed exit code.
/// </summary>
private static void WriteVerifyResult(MigrationModuleInfo module, IReadOnlyList<string> errors)
{
    var tag = $"[{module.Name}]";

    if (errors.Count == 0)
    {
        Console.WriteLine($"{tag} checksum verification passed.");
        return;
    }

    Console.Error.WriteLine($"{tag} checksum verification failed ({errors.Count}).");
    foreach (var checksumError in errors)
    {
        Console.Error.WriteLine($"{tag} {checksumError}");
    }

    // Only set the exit code if nothing has failed before us.
    if (Environment.ExitCode == 0)
    {
        Environment.ExitCode = 1;
    }
}
}

View File

@@ -141,6 +141,7 @@ internal static class Program
services.AddSingleton<IScannerExecutor, ScannerExecutor>();
services.AddSingleton<IScannerInstaller, ScannerInstaller>();
services.AddSingleton<MigrationCommandService>();
// CLI-FORENSICS-53-001: Forensic snapshot client
services.AddHttpClient<IForensicSnapshotClient, ForensicSnapshotClient>(client =>

View File

@@ -0,0 +1,123 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using StellaOps.Infrastructure.Postgres.Migrations;
namespace StellaOps.Cli.Services;
/// <summary>
/// Helper for running, verifying, and querying PostgreSQL migrations from the CLI.
/// Connection strings resolve in priority order: explicit override, then
/// STELLAOPS_POSTGRES_* environment variables, then configuration keys.
/// </summary>
internal sealed class MigrationCommandService
{
    private readonly IConfiguration _configuration;
    private readonly ILoggerFactory _loggerFactory;

    public MigrationCommandService(IConfiguration configuration, ILoggerFactory loggerFactory)
    {
        _configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
        _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory));
    }

    /// <summary>
    /// Runs the module's embedded migrations with checksum validation enabled.
    /// </summary>
    /// <param name="module">Module whose migrations assembly and resource prefix are used.</param>
    /// <param name="connectionOverride">Explicit connection string; wins over env/config when non-blank.</param>
    /// <param name="category">Optional category filter (startup, release, seed, data).</param>
    /// <param name="dryRun">When true, migrations are evaluated but not executed.</param>
    /// <param name="timeoutSeconds">Per-migration timeout; defaults to 300 seconds when null.</param>
    /// <param name="cancellationToken">Token observed by the underlying runner.</param>
    public Task<MigrationResult> RunAsync(
        MigrationModuleInfo module,
        string? connectionOverride,
        MigrationCategory? category,
        bool dryRun,
        int? timeoutSeconds,
        CancellationToken cancellationToken)
    {
        var connectionString = ResolveConnectionString(module, connectionOverride);
        var runner = CreateRunner(module, connectionString);
        var options = new MigrationRunOptions
        {
            CategoryFilter = category,
            DryRun = dryRun,
            TimeoutSeconds = timeoutSeconds.GetValueOrDefault(300),
            ValidateChecksums = true,
            FailOnChecksumMismatch = true
        };
        return runner.RunFromAssemblyAsync(module.MigrationsAssembly, module.ResourcePrefix, options, cancellationToken);
    }

    /// <summary>
    /// Returns applied/pending migration counts and checksum errors for the module.
    /// </summary>
    public async Task<MigrationStatus> GetStatusAsync(
        MigrationModuleInfo module,
        string? connectionOverride,
        CancellationToken cancellationToken)
    {
        var connectionString = ResolveConnectionString(module, connectionOverride);
        var logger = _loggerFactory.CreateLogger($"migrationstatus.{module.Name}");
        var statusService = new MigrationStatusService(
            connectionString,
            module.SchemaName,
            module.Name,
            module.MigrationsAssembly,
            logger);
        return await statusService.GetStatusAsync(cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Validates migration checksums; the returned list is empty when everything matches.
    /// </summary>
    public Task<IReadOnlyList<string>> VerifyAsync(
        MigrationModuleInfo module,
        string? connectionOverride,
        CancellationToken cancellationToken)
    {
        var connectionString = ResolveConnectionString(module, connectionOverride);
        var runner = CreateRunner(module, connectionString);
        return runner.ValidateChecksumsAsync(module.MigrationsAssembly, module.ResourcePrefix, cancellationToken);
    }

    // One runner per module so log output is tagged with the module name.
    private MigrationRunner CreateRunner(MigrationModuleInfo module, string connectionString) =>
        new(connectionString, module.SchemaName, module.Name, _loggerFactory.CreateLogger($"migration.{module.Name}"));

    /// <summary>
    /// Resolves a connection string: explicit override first, then environment
    /// variables (module-specific before generic), then configuration keys.
    /// </summary>
    /// <exception cref="InvalidOperationException">No candidate yielded a value.</exception>
    private string ResolveConnectionString(MigrationModuleInfo module, string? connectionOverride)
    {
        if (!string.IsNullOrWhiteSpace(connectionOverride))
        {
            return connectionOverride;
        }

        var envCandidates = new[]
        {
            $"STELLAOPS_POSTGRES_{module.Name.ToUpperInvariant()}_CONNECTION",
            $"STELLAOPS_POSTGRES_{module.SchemaName.ToUpperInvariant()}_CONNECTION",
            "STELLAOPS_POSTGRES_CONNECTION",
            "STELLAOPS_DB_CONNECTION"
        };
        foreach (var key in envCandidates)
        {
            var value = Environment.GetEnvironmentVariable(key);
            if (!string.IsNullOrWhiteSpace(value))
            {
                return value;
            }
        }

        // Fixed: the constant keys below carried a redundant '$' interpolation
        // prefix with no placeholders; they are now plain literals.
        var configCandidates = new[]
        {
            $"StellaOps:Database:{module.Name}:ConnectionString",
            $"Database:{module.Name}:ConnectionString",
            "StellaOps:Postgres:ConnectionString",
            "Postgres:ConnectionString",
            "Database:ConnectionString"
        };
        foreach (var key in configCandidates)
        {
            var value = _configuration[key];
            if (!string.IsNullOrWhiteSpace(value))
            {
                return value;
            }
        }

        throw new InvalidOperationException(
            $"No PostgreSQL connection string found for module '{module.Name}'. " +
            "Provide --connection or set STELLAOPS_POSTGRES_CONNECTION.");
    }
}

View File

@@ -1,9 +1,13 @@
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Infrastructure.Postgres.Migrations;
namespace StellaOps.Cli.Services;
/// <summary>
/// Thin wrapper kept for DI compatibility; prefer using <see cref="MigrationCommandService"/>.
/// </summary>
internal sealed class MigrationRunnerAdapter
{
private readonly IMigrationRunner _runner;
@@ -13,9 +17,22 @@ internal sealed class MigrationRunnerAdapter
_runner = runner;
}
public Task<int> RunAsync(string migrationsPath, MigrationCategory? category, CancellationToken cancellationToken) =>
_runner.RunAsync(migrationsPath, category, cancellationToken);
/// <summary>
/// Runs migrations discovered at a filesystem path by delegating to the
/// injected <see cref="IMigrationRunner"/>; options may be null (runner defaults apply —
/// assumption, confirm against the runner implementation).
/// </summary>
public Task<MigrationResult> RunAsync(
string migrationsPath,
MigrationRunOptions? options,
CancellationToken cancellationToken) =>
_runner.RunAsync(migrationsPath, options, cancellationToken);
public Task<int> VerifyAsync(string migrationsPath, MigrationCategory? category, CancellationToken cancellationToken) =>
_runner.VerifyAsync(migrationsPath, category, cancellationToken);
/// <summary>
/// Runs migrations embedded as resources in <paramref name="assembly"/>,
/// optionally filtered by <paramref name="resourcePrefix"/>; pure pass-through
/// to the injected <see cref="IMigrationRunner"/>.
/// </summary>
public Task<MigrationResult> RunFromAssemblyAsync(
Assembly assembly,
string? resourcePrefix,
MigrationRunOptions? options,
CancellationToken cancellationToken) =>
_runner.RunFromAssemblyAsync(assembly, resourcePrefix, options, cancellationToken);
/// <summary>
/// Validates checksums for migrations embedded in <paramref name="assembly"/>.
/// NOTE(review): named VerifyAsync but delegates to
/// <c>ValidateChecksumsAsync</c>; returns the list of mismatch descriptions
/// (presumably empty on success — confirm against the runner contract).
/// </summary>
public Task<IReadOnlyList<string>> VerifyAsync(
Assembly assembly,
string? resourcePrefix,
CancellationToken cancellationToken) =>
_runner.ValidateChecksumsAsync(assembly, resourcePrefix, cancellationToken);
}

View File

@@ -1,4 +1,6 @@
using System.CommandLine;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cli.Commands;
using StellaOps.Cli.Services;
using Xunit;
@@ -10,7 +12,7 @@ public class SystemCommandBuilderTests
[Fact]
public void BuildSystemCommand_AddsMigrationsSubcommands()
{
var system = SystemCommandBuilder.BuildSystemCommand();
var system = BuildSystemCommand();
Assert.NotNull(system);
Assert.Equal("system", system.Name);
Assert.Contains(system.Subcommands, c => c.Name == "migrations-run");
@@ -28,4 +30,16 @@ public class SystemCommandBuilderTests
Assert.Contains("Notify", MigrationModuleRegistry.ModuleNames);
Assert.Contains("Excititor", MigrationModuleRegistry.ModuleNames);
}
/// <summary>
/// Builds the "system" command backed by a minimal DI container (logging,
/// empty configuration, and the migration command service) for test use.
/// </summary>
private static Command BuildSystemCommand()
{
    var collection = new ServiceCollection();
    collection.AddLogging();
    collection.AddSingleton<IConfiguration>(new ConfigurationBuilder().Build());
    collection.AddSingleton<MigrationCommandService>();

    var serviceProvider = collection.BuildServiceProvider();
    var verbose = new Option<bool>("--verbose");

    return SystemCommandBuilder.BuildSystemCommand(serviceProvider, verbose, CancellationToken.None);
}
}

View File

@@ -23,7 +23,6 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Storage.Postgres/StellaOps.Concelier.Storage.Postgres.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
@@ -42,4 +41,4 @@
OutputItemType="Analyzer"
ReferenceOutputAssembly="false" />
</ItemGroup>
</Project>
</Project>

View File

@@ -12,7 +12,6 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -14,6 +14,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -8,7 +8,6 @@
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
</ItemGroup>
</Project>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -14,10 +14,9 @@
<PackageReference Include="NuGet.Versioning" Version="6.9.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -13,6 +13,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -10,7 +10,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
</ItemGroup>
</Project>

View File

@@ -13,6 +13,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -13,7 +13,6 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -14,7 +14,6 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
@@ -27,4 +26,4 @@
</AssemblyAttribute>
</ItemGroup>
</Project>
</Project>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -11,6 +11,5 @@
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -12,7 +12,6 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -13,7 +13,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -10,7 +10,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
</ItemGroup>
@@ -22,4 +21,4 @@
<_Parameter1>StellaOps.Concelier.Connector.Osv.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>
</Project>

View File

@@ -13,8 +13,7 @@
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -17,8 +17,7 @@
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -20,6 +20,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -13,6 +13,5 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -21,7 +21,6 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -12,6 +12,5 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -11,7 +11,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
</ItemGroup>

View File

@@ -10,7 +10,6 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />

View File

@@ -10,7 +10,6 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
</ItemGroup>

View File

@@ -13,6 +13,5 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -16,8 +16,7 @@
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -15,6 +15,5 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -9,7 +9,6 @@
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj" />
</ItemGroup>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="CertFr/Fixtures/**" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="CertIn/Fixtures/**" CopyToOutputDirectory="Always" />

View File

@@ -19,7 +19,6 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -9,6 +9,5 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="RedHat/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Distro/RedHat/Fixtures/%(Filename)%(Extension)" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="Source\Distro\Suse\Fixtures\**\*">

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" />
</ItemGroup>
<ItemGroup>
@@ -17,4 +16,4 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>
</Project>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="IcsCisa/Fixtures/**" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Kaspersky/Fixtures/**" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Jvn/Fixtures/**" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj" />
</ItemGroup>

View File

@@ -11,7 +11,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Nvd/Fixtures/*.json" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
@@ -17,4 +16,4 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>
</Project>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Adobe/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Adobe/Fixtures/%(Filename)%(Extension)" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Chromium/Fixtures/*.html" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
</ItemGroup>

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Oracle/Fixtures/**/*.json" CopyToOutputDirectory="Always" />

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="Vmware/Fixtures/*.json">

View File

@@ -9,7 +9,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -9,6 +9,5 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -9,6 +9,5 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -28,7 +28,6 @@
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Concelier.Storage.Postgres\StellaOps.Concelier.Storage.Postgres.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -14,7 +14,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
@@ -24,4 +23,4 @@
</ItemGroup>
<ItemGroup>
</ItemGroup>
</Project>
</Project>

View File

@@ -18,7 +18,94 @@ public sealed class GoLanguageAnalyzer : ILanguageAnalyzer
ArgumentNullException.ThrowIfNull(context);
ArgumentNullException.ThrowIfNull(writer);
var candidatePaths = new List<string>(GoBinaryScanner.EnumerateCandidateFiles(context.RootPath));
// Track emitted modules to avoid duplicates (binary takes precedence over source)
var emittedModules = new HashSet<string>(StringComparer.Ordinal);
// Phase 1: Source scanning (go.mod, go.sum, go.work, vendor)
ScanSourceFiles(context, writer, emittedModules, cancellationToken);
// Phase 2: Binary scanning (existing behavior)
ScanBinaries(context, writer, emittedModules, cancellationToken);
return ValueTask.CompletedTask;
}
private void ScanSourceFiles(
    LanguageAnalyzerContext context,
    LanguageComponentWriter writer,
    HashSet<string> emittedModules,
    CancellationToken cancellationToken)
{
    // Phase 1 of analysis: surface Go modules declared in source metadata
    // (go.mod / go.sum / go.work / vendor) before binaries are scanned.
    // Binary-derived entries later take precedence via emittedModules keys.
    foreach (var project in GoProjectDiscoverer.Discover(context.RootPath, cancellationToken))
    {
        cancellationToken.ThrowIfCancellationRequested();

        // A workspace (go.work) fans out to one inventory per member module;
        // a plain module produces at most a single non-empty inventory.
        IReadOnlyList<GoSourceInventory.SourceInventoryResult> inventories;
        if (project.IsWorkspace)
        {
            inventories = GoSourceInventory.BuildWorkspaceInventory(project, cancellationToken);
        }
        else
        {
            var single = GoSourceInventory.BuildInventory(project);
            inventories = single.IsEmpty
                ? Array.Empty<GoSourceInventory.SourceInventoryResult>()
                : new[] { single };
        }

        foreach (var inventory in inventories)
        {
            if (inventory.IsEmpty)
            {
                continue;
            }

            // The root module itself (go.mod "module" directive), when declared.
            if (!string.IsNullOrEmpty(inventory.ModulePath))
            {
                EmitMainModuleFromSource(inventory, project, context, writer, emittedModules);
            }

            // Dependencies are emitted in deterministic (ordinal path) order.
            foreach (var dependency in inventory.Modules.OrderBy(m => m.Path, StringComparer.Ordinal))
            {
                cancellationToken.ThrowIfCancellationRequested();
                EmitSourceModule(dependency, inventory, project, context, writer, emittedModules);
            }
        }
    }
}
private void ScanBinaries(
LanguageAnalyzerContext context,
LanguageComponentWriter writer,
HashSet<string> emittedModules,
CancellationToken cancellationToken)
{
var candidatePaths = new List<string>();
// Use binary format pre-filtering for efficiency
foreach (var path in GoBinaryScanner.EnumerateCandidateFiles(context.RootPath))
{
cancellationToken.ThrowIfCancellationRequested();
// Quick check for known binary formats
if (GoBinaryFormatDetector.IsPotentialBinary(path))
{
candidatePaths.Add(path);
}
}
candidatePaths.Sort(StringComparer.Ordinal);
var fallbackBinaries = new List<GoStrippedBinaryClassification>();
@@ -37,7 +124,7 @@ public sealed class GoLanguageAnalyzer : ILanguageAnalyzer
continue;
}
EmitComponents(buildInfo, context, writer);
EmitComponents(buildInfo, context, writer, emittedModules);
}
foreach (var fallback in fallbackBinaries)
@@ -45,11 +132,197 @@ public sealed class GoLanguageAnalyzer : ILanguageAnalyzer
cancellationToken.ThrowIfCancellationRequested();
EmitFallbackComponent(fallback, context, writer);
}
return ValueTask.CompletedTask;
}
private void EmitComponents(GoBuildInfo buildInfo, LanguageAnalyzerContext context, LanguageComponentWriter writer)
private void EmitMainModuleFromSource(
    GoSourceInventory.SourceInventoryResult inventory,
    GoProjectDiscoverer.GoProject project,
    LanguageAnalyzerContext context,
    LanguageComponentWriter writer,
    HashSet<string> emittedModules)
{
    // The root module declared in go.mod. Source checkouts carry no concrete
    // version, so it is reported as "(devel)", mirroring Go's own tooling.
    var mainPath = inventory.ModulePath!;
    if (!emittedModules.Add($"{mainPath}@(devel)"))
    {
        // Already recorded (e.g. by another project in the same scan).
        return;
    }

    var projectRelative = context.GetRelativePath(project.RootPath);

    var metadata = new SortedDictionary<string, string?>(StringComparer.Ordinal)
    {
        ["modulePath"] = mainPath,
        ["modulePath.main"] = mainPath,
        ["provenance"] = "source"
    };

    if (!string.IsNullOrEmpty(inventory.GoVersion))
    {
        metadata["go.version"] = inventory.GoVersion;
    }

    if (!string.IsNullOrEmpty(projectRelative))
    {
        metadata["projectPath"] = projectRelative;
    }

    if (project.IsWorkspace)
    {
        metadata["workspace"] = "true";
    }

    // Evidence: the go.mod file that declares the module, when resolvable.
    var evidence = new List<LanguageComponentEvidence>();
    if (project.HasGoMod)
    {
        var goModRelative = context.GetRelativePath(project.GoModPath!);
        if (!string.IsNullOrEmpty(goModRelative))
        {
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.File,
                "go.mod",
                goModRelative,
                mainPath,
                null));
        }
    }

    evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey));

    writer.AddFromExplicitKey(
        analyzerId: Id,
        componentKey: $"golang::source::{mainPath}::(devel)",
        purl: null,
        name: mainPath,
        version: "(devel)",
        type: "golang",
        metadata: metadata,
        evidence: evidence);
}
private void EmitSourceModule(
    GoSourceInventory.GoSourceModule module,
    GoSourceInventory.SourceInventoryResult inventory,
    GoProjectDiscoverer.GoProject project,
    LanguageAnalyzerContext context,
    LanguageComponentWriter writer,
    HashSet<string> emittedModules)
{
    // Binary-derived evidence wins: skip modules already recorded under the
    // same path@version key by a previous scan phase.
    if (!emittedModules.Add($"{module.Path}@{module.Version}"))
    {
        return;
    }

    var metadata = new SortedDictionary<string, string?>(StringComparer.Ordinal)
    {
        ["modulePath"] = module.Path,
        ["moduleVersion"] = module.Version,
        ["provenance"] = "source"
    };

    // Optional attributes are only written when present/true, keeping the
    // metadata map minimal and deterministic (SortedDictionary, ordinal keys).
    if (!string.IsNullOrEmpty(module.Checksum))
    {
        metadata["moduleSum"] = module.Checksum;
    }

    if (module.IsDirect)
    {
        metadata["dependency.direct"] = "true";
    }

    if (module.IsIndirect)
    {
        metadata["dependency.indirect"] = "true";
    }

    if (module.IsVendored)
    {
        metadata["vendored"] = "true";
    }

    if (module.IsPrivate)
    {
        metadata["private"] = "true";
    }

    if (module.ModuleCategory != "public")
    {
        metadata["moduleCategory"] = module.ModuleCategory;
    }

    if (!string.IsNullOrEmpty(module.Registry))
    {
        metadata["registry"] = module.Registry;
    }

    if (module.IsReplaced)
    {
        metadata["replaced"] = "true";
        if (!string.IsNullOrEmpty(module.ReplacementPath))
        {
            metadata["replacedBy.path"] = module.ReplacementPath;
        }

        if (!string.IsNullOrEmpty(module.ReplacementVersion))
        {
            metadata["replacedBy.version"] = module.ReplacementVersion;
        }
    }

    if (module.IsExcluded)
    {
        metadata["excluded"] = "true";
    }

    // Evidence: the go.mod declaration (with go.sum checksum when known).
    var evidence = new List<LanguageComponentEvidence>();
    if (project.HasGoMod)
    {
        var goModRelative = context.GetRelativePath(project.GoModPath!);
        if (!string.IsNullOrEmpty(goModRelative))
        {
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Metadata,
                module.Source,
                goModRelative,
                $"{module.Path}@{module.Version}",
                module.Checksum));
        }
    }

    evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey));

    // Prefer a purl-keyed component; fall back to an explicit key when no
    // purl can be built for this path/version.
    var purl = BuildPurl(module.Path, module.Version);
    if (string.IsNullOrEmpty(purl))
    {
        writer.AddFromExplicitKey(
            analyzerId: Id,
            componentKey: $"golang::source::{module.Path}@{module.Version}",
            purl: null,
            name: module.Path,
            version: module.Version,
            type: "golang",
            metadata: metadata,
            evidence: evidence);
    }
    else
    {
        writer.AddFromPurl(
            analyzerId: Id,
            purl: purl,
            name: module.Path,
            version: module.Version,
            type: "golang",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }
}
private void EmitComponents(GoBuildInfo buildInfo, LanguageAnalyzerContext context, LanguageComponentWriter writer, HashSet<string> emittedModules)
{
var components = new List<GoModule> { buildInfo.MainModule };
components.AddRange(buildInfo.Dependencies
@@ -61,6 +334,10 @@ public sealed class GoLanguageAnalyzer : ILanguageAnalyzer
foreach (var module in components)
{
// Track emitted modules (binary evidence is more accurate than source)
var moduleKey = $"{module.Path}@{module.Version ?? "(devel)"}";
emittedModules.Add(moduleKey);
var metadata = BuildMetadata(buildInfo, module, binaryRelativePath);
var evidence = BuildEvidence(buildInfo, module, binaryRelativePath, context, ref binaryHash);
var usedByEntrypoint = module.IsMain && context.UsageHints.IsPathUsed(buildInfo.AbsoluteBinaryPath);

View File

@@ -0,0 +1,301 @@
using System.Buffers;
namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal;
/// <summary>
/// Detects binary file formats to quickly filter candidates for Go binary scanning.
/// Identifies ELF (Linux), PE (Windows), and Mach-O (macOS) formats.
/// </summary>
internal static class GoBinaryFormatDetector
{
    // Magic bytes for the supported container formats.
    private static readonly byte[] ElfMagic = [0x7F, (byte)'E', (byte)'L', (byte)'F'];
    private static readonly byte[] PeMagic = [(byte)'M', (byte)'Z'];
    private static readonly byte[] MachO32Magic = [0xFE, 0xED, 0xFA, 0xCE];
    private static readonly byte[] MachO64Magic = [0xFE, 0xED, 0xFA, 0xCF];
    private static readonly byte[] MachO32MagicReverse = [0xCE, 0xFA, 0xED, 0xFE];
    private static readonly byte[] MachO64MagicReverse = [0xCF, 0xFA, 0xED, 0xFE];
    private static readonly byte[] FatMagic = [0xCA, 0xFE, 0xBA, 0xBE]; // Universal binary

    /// <summary>
    /// Binary format type.
    /// </summary>
    public enum BinaryFormat
    {
        Unknown,
        Elf,
        Pe,
        MachO,
        Fat // Universal/Fat binary (contains multiple architectures)
    }

    /// <summary>
    /// Result of binary format detection.
    /// </summary>
    /// <param name="Format">Detected container format.</param>
    /// <param name="IsExecutable">Whether the header marks the file as executable (for PE/Mach-O this is assumed once the magic matches).</param>
    /// <param name="Architecture">Go-style architecture name ("amd64", "arm64", …), a generic "32-bit"/"64-bit" fallback, or null when undetermined.</param>
    public readonly record struct DetectionResult(
        BinaryFormat Format,
        bool IsExecutable,
        string? Architecture);

    /// <summary>
    /// Quickly checks whether a file begins with a known executable magic number.
    /// Returns false for missing, unreadable, or too-short files instead of throwing.
    /// </summary>
    public static bool IsPotentialBinary(string filePath)
    {
        if (string.IsNullOrWhiteSpace(filePath))
        {
            return false;
        }

        try
        {
            using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
            if (stream.Length < 4)
            {
                return false;
            }

            Span<byte> header = stackalloc byte[4];
            var read = stream.Read(header);
            if (read < 4)
            {
                return false;
            }

            return IsKnownBinaryFormat(header);
        }
        catch (IOException)
        {
            return false;
        }
        catch (UnauthorizedAccessException)
        {
            return false;
        }
    }

    /// <summary>
    /// Detects the binary format of a file on disk and extracts basic metadata.
    /// I/O or permission failures are reported as <see cref="BinaryFormat.Unknown"/>.
    /// </summary>
    public static DetectionResult Detect(string filePath)
    {
        if (string.IsNullOrWhiteSpace(filePath))
        {
            return new DetectionResult(BinaryFormat.Unknown, false, null);
        }

        try
        {
            using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
            return DetectFromStream(stream);
        }
        catch (IOException)
        {
            return new DetectionResult(BinaryFormat.Unknown, false, null);
        }
        catch (UnauthorizedAccessException)
        {
            return new DetectionResult(BinaryFormat.Unknown, false, null);
        }
    }

    /// <summary>
    /// Detects the format from a seekable stream positioned at the start of the file.
    /// Streams shorter than 64 bytes are treated as unknown (the supported headers
    /// require at least that much context, e.g. the PE e_lfanew field at 0x3C).
    /// </summary>
    public static DetectionResult DetectFromStream(Stream stream)
    {
        if (stream.Length < 64)
        {
            return new DetectionResult(BinaryFormat.Unknown, false, null);
        }

        var buffer = ArrayPool<byte>.Shared.Rent(64);
        try
        {
            var read = stream.Read(buffer, 0, 64);
            if (read < 4)
            {
                return new DetectionResult(BinaryFormat.Unknown, false, null);
            }

            var header = new ReadOnlySpan<byte>(buffer, 0, read);

            // Check ELF
            if (header[..4].SequenceEqual(ElfMagic))
            {
                return DetectElf(header);
            }

            // Check PE (MZ header)
            if (header[..2].SequenceEqual(PeMagic))
            {
                return DetectPe(header, stream);
            }

            // Check Mach-O (both endiannesses, 32- and 64-bit)
            if (header[..4].SequenceEqual(MachO32Magic) ||
                header[..4].SequenceEqual(MachO64Magic) ||
                header[..4].SequenceEqual(MachO32MagicReverse) ||
                header[..4].SequenceEqual(MachO64MagicReverse))
            {
                return DetectMachO(header);
            }

            // Check Fat/Universal binary
            if (header[..4].SequenceEqual(FatMagic))
            {
                return new DetectionResult(BinaryFormat.Fat, true, "universal");
            }

            return new DetectionResult(BinaryFormat.Unknown, false, null);
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    /// <summary>
    /// True when the first bytes match any supported magic (ELF, PE, Mach-O, Fat).
    /// </summary>
    private static bool IsKnownBinaryFormat(ReadOnlySpan<byte> header)
    {
        if (header.Length < 4)
        {
            return false;
        }

        // ELF
        if (header[..4].SequenceEqual(ElfMagic))
        {
            return true;
        }

        // PE
        if (header[..2].SequenceEqual(PeMagic))
        {
            return true;
        }

        // Mach-O (all variants) and Fat
        if (header[..4].SequenceEqual(MachO32Magic) ||
            header[..4].SequenceEqual(MachO64Magic) ||
            header[..4].SequenceEqual(MachO32MagicReverse) ||
            header[..4].SequenceEqual(MachO64MagicReverse) ||
            header[..4].SequenceEqual(FatMagic))
        {
            return true;
        }

        return false;
    }

    /// <summary>
    /// Parses the ELF identification and file header fields.
    /// </summary>
    private static DetectionResult DetectElf(ReadOnlySpan<byte> header)
    {
        if (header.Length < 20)
        {
            return new DetectionResult(BinaryFormat.Elf, true, null);
        }

        // EI_CLASS (offset 4): 1 = 32-bit, 2 = 64-bit.
        var is64Bit = header[4] == 2;

        // EI_DATA (offset 5): 1 = little-endian, 2 = big-endian. e_type (16..17)
        // and e_machine (18..19) are 16-bit fields stored in the FILE's byte
        // order. Bug fix: the previous code read single bytes assuming
        // little-endian, which misclassified big-endian binaries (e.g. s390x,
        // mips BE) as non-executable with a generic architecture.
        var isBigEndian = header[5] == 2;

        var elfType = isBigEndian
            ? (header[16] << 8) | header[17]
            : header[16] | (header[17] << 8);
        var isExecutable = elfType == 2 || elfType == 3; // ET_EXEC or ET_DYN

        var machine = isBigEndian
            ? (header[18] << 8) | header[19]
            : header[18] | (header[19] << 8);
        var arch = machine switch
        {
            0x03 => "386",
            0x3E => "amd64",
            0xB7 => "arm64",
            0x28 => "arm",
            0xF3 => "riscv64",
            0x08 => "mips",
            0x14 => "ppc",
            0x15 => "ppc64",
            0x16 => "s390x",
            _ => is64Bit ? "64-bit" : "32-bit"
        };

        return new DetectionResult(BinaryFormat.Elf, isExecutable, arch);
    }

    /// <summary>
    /// Follows the MZ stub to the PE signature and reads the COFF machine type.
    /// </summary>
    private static DetectionResult DetectPe(ReadOnlySpan<byte> header, Stream stream)
    {
        if (header.Length < 64)
        {
            return new DetectionResult(BinaryFormat.Pe, true, null);
        }

        // e_lfanew (offset 0x3C) is always little-endian in PE files; assemble
        // bytes explicitly instead of BitConverter so the result does not depend
        // on the host's byte order.
        var peOffset = header[0x3C] | (header[0x3D] << 8) | (header[0x3E] << 16) | (header[0x3F] << 24);
        if (peOffset < 0 || peOffset > stream.Length - 6)
        {
            return new DetectionResult(BinaryFormat.Pe, true, null);
        }

        // Read the PE signature plus the 2-byte Machine field.
        stream.Position = peOffset;
        Span<byte> peHeader = stackalloc byte[6];
        if (stream.Read(peHeader) < 6)
        {
            return new DetectionResult(BinaryFormat.Pe, true, null);
        }

        // Verify "PE\0\0" signature.
        if (peHeader[0] != 'P' || peHeader[1] != 'E' || peHeader[2] != 0 || peHeader[3] != 0)
        {
            return new DetectionResult(BinaryFormat.Pe, true, null);
        }

        // IMAGE_FILE_HEADER.Machine (little-endian).
        var machine = peHeader[4] | (peHeader[5] << 8);
        string? arch = machine switch
        {
            0x014C => "386",
            0x8664 => "amd64",
            0xAA64 => "arm64",
            0x01C4 => "arm",
            _ => null
        };

        return new DetectionResult(BinaryFormat.Pe, true, arch);
    }

    /// <summary>
    /// Reads the Mach-O cputype, honoring the byte order implied by the magic.
    /// </summary>
    private static DetectionResult DetectMachO(ReadOnlySpan<byte> header)
    {
        if (header.Length < 8)
        {
            return new DetectionResult(BinaryFormat.MachO, true, null);
        }

        var is64Bit = header[..4].SequenceEqual(MachO64Magic) || header[..4].SequenceEqual(MachO64MagicReverse);
        // The "reverse" magics indicate the file's fields are little-endian.
        var isLittleEndian = header[..4].SequenceEqual(MachO32MagicReverse) || header[..4].SequenceEqual(MachO64MagicReverse);

        // cputype (offset 4) in the file's byte order; explicit byte assembly
        // keeps this independent of the host's endianness (BitConverter would
        // assume host order).
        var cpuType = isLittleEndian
            ? header[4] | (header[5] << 8) | (header[6] << 16) | (header[7] << 24)
            : (header[4] << 24) | (header[5] << 16) | (header[6] << 8) | header[7];

        // Low byte of cputype selects the base CPU family; the 64-bit flag comes
        // from the magic rather than the CPU_ARCH_ABI64 bit.
        var arch = (cpuType & 0xFF) switch
        {
            7 => is64Bit ? "amd64" : "386",
            12 => is64Bit ? "arm64" : "arm",
            18 => is64Bit ? "ppc64" : "ppc",
            _ => is64Bit ? "64-bit" : "32-bit"
        };

        return new DetectionResult(BinaryFormat.MachO, true, arch);
    }
}

View File

@@ -0,0 +1,243 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal;
/// <summary>
/// Aggregates Go module dependencies from source files (go.mod, go.sum, vendor/modules.txt).
/// </summary>
internal static class GoSourceInventory
{
/// <summary>
/// A Go module discovered from source files (go.mod, go.sum, vendor/modules.txt).
/// </summary>
public sealed record GoSourceModule
{
    /// <summary>Module path, e.g. "github.com/org/repo".</summary>
    public required string Path { get; init; }

    /// <summary>Module version string (e.g. "v1.2.3").</summary>
    public required string Version { get; init; }

    /// <summary>go.sum hash for this path@version, when available; otherwise null.</summary>
    public string? Checksum { get; init; }

    /// <summary>True when the module is a direct requirement of the root module.</summary>
    public bool IsDirect { get; init; }

    /// <summary>True when the module is an indirect requirement (presumably the go.mod "// indirect" marker — confirm in the go.mod parser).</summary>
    public bool IsIndirect { get; init; }

    /// <summary>True when the module's code is present under vendor/.</summary>
    public bool IsVendored { get; init; }

    /// <summary>True when a replace directive redirects this module; see ReplacementPath/ReplacementVersion.</summary>
    public bool IsReplaced { get; init; }

    /// <summary>True when an exclude directive covers this path@version.</summary>
    public bool IsExcluded { get; init; }

    /// <summary>True when this version appears in the module's retract list.</summary>
    public bool IsRetracted { get; init; }

    /// <summary>True when the module is classified as private (e.g. via GOPRIVATE-style rules — confirm against the classifier).</summary>
    public bool IsPrivate { get; init; }

    /// <summary>Replacement target path when IsReplaced; otherwise null.</summary>
    public string? ReplacementPath { get; init; }

    /// <summary>Replacement target version when IsReplaced; otherwise null.</summary>
    public string? ReplacementVersion { get; init; }

    /// <summary>Which source file produced this entry; defaults to "go.mod".</summary>
    public string Source { get; init; } = "go.mod";

    /// <summary>Module classification; defaults to "public". Non-public values are surfaced as "moduleCategory" metadata by the analyzer.</summary>
    public string ModuleCategory { get; init; } = "public";

    /// <summary>Registry/proxy host the module resolves through, when known.</summary>
    public string? Registry { get; init; }
}
/// <summary>
/// Inventory results from source scanning: the root module's identity plus the
/// set of dependency modules collected from go.mod/go.sum/vendor metadata.
/// </summary>
public sealed record SourceInventoryResult
{
    /// <summary>Shared empty result: no module path, no Go version, no modules.</summary>
    public static readonly SourceInventoryResult Empty = new(
        null,
        null,
        ImmutableArray<GoSourceModule>.Empty,
        ImmutableArray<string>.Empty);

    /// <summary>
    /// Creates an inventory snapshot.
    /// </summary>
    /// <param name="modulePath">Root module path from the go.mod "module" directive, or null when absent.</param>
    /// <param name="goVersion">Go language version declared in go.mod, or null.</param>
    /// <param name="modules">Dependency modules discovered in source metadata.</param>
    /// <param name="retractedVersions">Versions of this module listed in retract directives.</param>
    public SourceInventoryResult(
        string? modulePath,
        string? goVersion,
        ImmutableArray<GoSourceModule> modules,
        ImmutableArray<string> retractedVersions)
    {
        ModulePath = modulePath;
        GoVersion = goVersion;
        Modules = modules;
        RetractedVersions = retractedVersions;
    }

    /// <summary>Root module path, or null when no go.mod module directive was found.</summary>
    public string? ModulePath { get; }

    /// <summary>Declared Go version, or null when not specified.</summary>
    public string? GoVersion { get; }

    /// <summary>Dependency modules discovered from source files.</summary>
    public ImmutableArray<GoSourceModule> Modules { get; }

    /// <summary>Retracted versions of this module (from go.mod retract directives).</summary>
    public ImmutableArray<string> RetractedVersions { get; }

    /// <summary>True when there are no modules and no root module path — callers skip empty inventories.</summary>
    public bool IsEmpty => Modules.IsEmpty && string.IsNullOrEmpty(ModulePath);
}
/// <summary>
/// Builds inventory from a discovered Go project.
/// </summary>
public static SourceInventoryResult BuildInventory(GoProjectDiscoverer.GoProject project)
{
ArgumentNullException.ThrowIfNull(project);
if (!project.HasGoMod)
{
return SourceInventoryResult.Empty;
}
// Parse go.mod
var goMod = GoModParser.Parse(project.GoModPath!);
if (goMod.IsEmpty)
{
return SourceInventoryResult.Empty;
}
// Parse go.sum for checksums
var goSum = project.HasGoSum
? GoSumParser.Parse(project.GoSumPath!)
: GoSumParser.GoSumData.Empty;
// Parse vendor/modules.txt if present
var vendorData = project.HasVendor
? GoVendorParser.Parse(project.VendorModulesPath!)
: GoVendorParser.GoVendorData.Empty;
// Build replacement map
var replacements = goMod.Replaces
.ToImmutableDictionary(
r => r.OldVersion is not null ? $"{r.OldPath}@{r.OldVersion}" : r.OldPath,
r => r,
StringComparer.Ordinal);
// Build exclude set
var excludes = goMod.Excludes
.Select(e => $"{e.Path}@{e.Version}")
.ToImmutableHashSet(StringComparer.Ordinal);
// Build retracted set (these are versions of this module that are retracted)
var retractedVersions = goMod.Retracts.ToImmutableArray();
// Process requires
var modules = new List<GoSourceModule>();
foreach (var req in goMod.Requires)
{
var checksum = goSum.GetHash(req.Path, req.Version);
var isVendored = vendorData.IsVendored(req.Path);
var isPrivate = GoPrivateModuleDetector.IsLikelyPrivate(req.Path);
var moduleCategory = GoPrivateModuleDetector.GetModuleCategory(req.Path);
var registry = GoPrivateModuleDetector.GetRegistry(req.Path);
// Check for replacement
GoModParser.GoModReplace? replacement = null;
var versionedKey = $"{req.Path}@{req.Version}";
if (replacements.TryGetValue(versionedKey, out replacement) ||
replacements.TryGetValue(req.Path, out replacement))
{
// Module is replaced
}
// Check if excluded
var isExcluded = excludes.Contains(versionedKey);
var module = new GoSourceModule
{
Path = req.Path,
Version = req.Version,
Checksum = checksum,
IsDirect = !req.IsIndirect,
IsIndirect = req.IsIndirect,
IsVendored = isVendored,
IsReplaced = replacement is not null,
IsExcluded = isExcluded,
IsRetracted = false, // Can't know without checking the module's go.mod
IsPrivate = isPrivate,
ReplacementPath = replacement?.NewPath,
ReplacementVersion = replacement?.NewVersion,
Source = isVendored ? "vendor" : "go.mod",
ModuleCategory = moduleCategory,
Registry = registry
};
modules.Add(module);
}
// Add vendored modules not in requires (explicit vendored deps)
if (!vendorData.IsEmpty)
{
var requirePaths = goMod.Requires
.Select(r => r.Path)
.ToImmutableHashSet(StringComparer.Ordinal);
foreach (var vendorMod in vendorData.Modules)
{
if (!requirePaths.Contains(vendorMod.Path))
{
var isPrivate = GoPrivateModuleDetector.IsLikelyPrivate(vendorMod.Path);
var moduleCategory = GoPrivateModuleDetector.GetModuleCategory(vendorMod.Path);
modules.Add(new GoSourceModule
{
Path = vendorMod.Path,
Version = vendorMod.Version,
Checksum = goSum.GetHash(vendorMod.Path, vendorMod.Version),
IsDirect = vendorMod.IsExplicit,
IsIndirect = !vendorMod.IsExplicit,
IsVendored = true,
IsReplaced = false,
IsExcluded = false,
IsRetracted = false,
IsPrivate = isPrivate,
Source = "vendor",
ModuleCategory = moduleCategory
});
}
}
}
return new SourceInventoryResult(
goMod.ModulePath,
goMod.GoVersion,
modules.ToImmutableArray(),
retractedVersions);
}
/// <summary>
/// Builds combined inventory for a workspace (all members).
/// </summary>
public static IReadOnlyList<SourceInventoryResult> BuildWorkspaceInventory(
GoProjectDiscoverer.GoProject workspaceProject,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(workspaceProject);
var results = new List<SourceInventoryResult>();
// Build inventory for workspace root if it has go.mod
if (workspaceProject.HasGoMod)
{
var rootInventory = BuildInventory(workspaceProject);
if (!rootInventory.IsEmpty)
{
results.Add(rootInventory);
}
}
// Build inventory for each workspace member
foreach (var memberPath in workspaceProject.WorkspaceMembers)
{
cancellationToken.ThrowIfCancellationRequested();
var memberFullPath = Path.Combine(workspaceProject.RootPath, memberPath);
var memberGoMod = Path.Combine(memberFullPath, "go.mod");
var memberGoSum = Path.Combine(memberFullPath, "go.sum");
var memberVendor = Path.Combine(memberFullPath, "vendor", "modules.txt");
var memberProject = new GoProjectDiscoverer.GoProject(
memberFullPath,
File.Exists(memberGoMod) ? memberGoMod : null,
File.Exists(memberGoSum) ? memberGoSum : null,
null,
File.Exists(memberVendor) ? memberVendor : null,
ImmutableArray<string>.Empty);
if (memberProject.HasGoMod)
{
var memberInventory = BuildInventory(memberProject);
if (!memberInventory.IsEmpty)
{
results.Add(memberInventory);
}
}
}
return results;
}
}

View File

@@ -3,143 +3,83 @@ using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Discovery;
/// <summary>
/// Interface for collecting surface entries from specific sources.
/// Collectors are language/framework-specific implementations that
/// discover attack surface entry points.
/// Options for surface entry collection.
/// </summary>
public sealed record SurfaceCollectorOptions
{
/// <summary>Maximum call graph depth to analyze.</summary>
public int MaxDepth { get; init; } = 3;
/// <summary>Minimum confidence threshold for reporting.</summary>
public double MinimumConfidence { get; init; } = 0.7;
/// <summary>Surface types to include (empty = all).</summary>
public IReadOnlySet<SurfaceType> IncludeTypes { get; init; } = new HashSet<SurfaceType>();
/// <summary>Surface types to exclude.</summary>
public IReadOnlySet<SurfaceType> ExcludeTypes { get; init; } = new HashSet<SurfaceType>();
/// <summary>Whether to include code snippets in evidence.</summary>
public bool IncludeSnippets { get; init; } = true;
/// <summary>Maximum snippet length.</summary>
public int MaxSnippetLength { get; init; } = 500;
}
/// <summary>
/// Context provided to surface entry collectors.
/// </summary>
public sealed record SurfaceCollectorContext
{
/// <summary>Scan identifier.</summary>
public required string ScanId { get; init; }
/// <summary>Root path being scanned.</summary>
public required string RootPath { get; init; }
/// <summary>Collector options.</summary>
public required SurfaceCollectorOptions Options { get; init; }
/// <summary>Optional tenant identifier.</summary>
public string? TenantId { get; init; }
/// <summary>Additional context metadata.</summary>
public IReadOnlyDictionary<string, object>? Metadata { get; init; }
}
/// <summary>
/// Interface for surface entry collectors that detect specific attack surface patterns.
/// </summary>
public interface ISurfaceEntryCollector
{
/// <summary>
/// Unique identifier for this collector.
/// </summary>
/// <summary>Unique identifier for this collector.</summary>
string CollectorId { get; }
/// <summary>
/// Display name for this collector.
/// </summary>
string Name { get; }
/// <summary>Human-readable name.</summary>
string DisplayName { get; }
/// <summary>
/// Languages supported by this collector.
/// </summary>
IReadOnlyList<string> SupportedLanguages { get; }
/// <summary>Surface types this collector can detect.</summary>
IReadOnlySet<SurfaceType> SupportedTypes { get; }
/// <summary>
/// Surface types this collector can detect.
/// </summary>
IReadOnlyList<SurfaceType> DetectableTypes { get; }
/// <summary>
/// Priority for collector ordering (higher = run first).
/// </summary>
int Priority { get; }
/// <summary>
/// Determines if this collector can analyze the given context.
/// </summary>
bool CanCollect(SurfaceCollectionContext context);
/// <summary>
/// Collects surface entries from the given context.
/// </summary>
/// <summary>Collects surface entries from the given context.</summary>
IAsyncEnumerable<SurfaceEntry> CollectAsync(
SurfaceCollectionContext context,
SurfaceCollectorContext context,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for surface entry collection.
/// Interface for entry point collectors that discover application entry points.
/// </summary>
public sealed record SurfaceCollectionContext
public interface IEntryPointCollector
{
/// <summary>
/// Scan identifier.
/// </summary>
public required string ScanId { get; init; }
/// <summary>Unique identifier for this collector.</summary>
string CollectorId { get; }
/// <summary>
/// Root directory being scanned.
/// </summary>
public required string RootPath { get; init; }
/// <summary>Languages/frameworks this collector supports.</summary>
IReadOnlySet<string> SupportedLanguages { get; }
/// <summary>
/// Files to analyze (relative paths).
/// </summary>
public required IReadOnlyList<string> Files { get; init; }
/// <summary>
/// Detected languages in the codebase.
/// </summary>
public IReadOnlyList<string>? DetectedLanguages { get; init; }
/// <summary>
/// Detected frameworks.
/// </summary>
public IReadOnlyList<string>? DetectedFrameworks { get; init; }
/// <summary>
/// Analysis options.
/// </summary>
public SurfaceAnalysisOptions? Options { get; init; }
/// <summary>
/// Additional context data.
/// </summary>
public IReadOnlyDictionary<string, object>? Data { get; init; }
}
/// <summary>
/// Options for surface analysis.
/// </summary>
public sealed record SurfaceAnalysisOptions
{
/// <summary>
/// Whether surface analysis is enabled.
/// </summary>
public bool Enabled { get; init; } = true;
/// <summary>
/// Call graph depth for analysis.
/// </summary>
public int Depth { get; init; } = 3;
/// <summary>
/// Minimum confidence threshold for reporting.
/// </summary>
public double ConfidenceThreshold { get; init; } = 0.7;
/// <summary>
/// Surface types to include (null = all).
/// </summary>
public IReadOnlyList<SurfaceType>? IncludeTypes { get; init; }
/// <summary>
/// Surface types to exclude.
/// </summary>
public IReadOnlyList<SurfaceType>? ExcludeTypes { get; init; }
/// <summary>
/// Maximum entries to collect.
/// </summary>
public int? MaxEntries { get; init; }
/// <summary>
/// File patterns to include.
/// </summary>
public IReadOnlyList<string>? IncludePatterns { get; init; }
/// <summary>
/// File patterns to exclude.
/// </summary>
public IReadOnlyList<string>? ExcludePatterns { get; init; }
/// <summary>
/// Collectors to use (null = all registered).
/// </summary>
public IReadOnlyList<string>? Collectors { get; init; }
/// <summary>
/// Default analysis options.
/// </summary>
public static SurfaceAnalysisOptions Default => new();
/// <summary>Collects entry points from the given context.</summary>
IAsyncEnumerable<EntryPoint> CollectAsync(
SurfaceCollectorContext context,
CancellationToken cancellationToken = default);
}

View File

@@ -1,36 +1,30 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Discovery;
/// <summary>
/// Registry for surface entry collectors.
/// Manages collector registration and orchestrates collection.
/// Registry for surface entry and entry point collectors.
/// </summary>
public interface ISurfaceEntryRegistry
{
/// <summary>
/// Registers a collector.
/// </summary>
void Register(ISurfaceEntryCollector collector);
/// <summary>Registers a surface entry collector.</summary>
void RegisterCollector(ISurfaceEntryCollector collector);
/// <summary>
/// Gets all registered collectors.
/// </summary>
/// <summary>Registers an entry point collector.</summary>
void RegisterEntryPointCollector(IEntryPointCollector collector);
/// <summary>Gets all registered surface entry collectors.</summary>
IReadOnlyList<ISurfaceEntryCollector> GetCollectors();
/// <summary>
/// Gets collectors that can analyze the given context.
/// </summary>
IReadOnlyList<ISurfaceEntryCollector> GetApplicableCollectors(SurfaceCollectionContext context);
/// <summary>Gets all registered entry point collectors.</summary>
IReadOnlyList<IEntryPointCollector> GetEntryPointCollectors();
/// <summary>
/// Collects entries using all applicable collectors.
/// </summary>
IAsyncEnumerable<SurfaceEntry> CollectAllAsync(
SurfaceCollectionContext context,
CancellationToken cancellationToken = default);
/// <summary>Gets collectors that support the specified surface type.</summary>
IReadOnlyList<ISurfaceEntryCollector> GetCollectorsForType(SurfaceType type);
/// <summary>Gets entry point collectors that support the specified language.</summary>
IReadOnlyList<IEntryPointCollector> GetEntryPointCollectorsForLanguage(string language);
}
/// <summary>
@@ -39,6 +33,7 @@ public interface ISurfaceEntryRegistry
public sealed class SurfaceEntryRegistry : ISurfaceEntryRegistry
{
private readonly List<ISurfaceEntryCollector> _collectors = [];
private readonly List<IEntryPointCollector> _entryPointCollectors = [];
private readonly ILogger<SurfaceEntryRegistry> _logger;
private readonly object _lock = new();
@@ -47,141 +42,61 @@ public sealed class SurfaceEntryRegistry : ISurfaceEntryRegistry
_logger = logger;
}
public void Register(ISurfaceEntryCollector collector)
public void RegisterCollector(ISurfaceEntryCollector collector)
{
ArgumentNullException.ThrowIfNull(collector);
lock (_lock)
{
// Check for duplicate
if (_collectors.Any(c => c.CollectorId == collector.CollectorId))
{
_logger.LogWarning(
"Collector {CollectorId} already registered, skipping duplicate",
collector.CollectorId);
_logger.LogWarning("Collector {CollectorId} already registered, skipping", collector.CollectorId);
return;
}
_collectors.Add(collector);
_logger.LogDebug(
"Registered surface collector {CollectorId} ({Name}) for languages: {Languages}",
collector.CollectorId,
collector.Name,
string.Join(", ", collector.SupportedLanguages));
_logger.LogDebug("Registered surface collector: {CollectorId}", collector.CollectorId);
}
}
public void RegisterEntryPointCollector(IEntryPointCollector collector)
{
ArgumentNullException.ThrowIfNull(collector);
lock (_lock)
{
if (_entryPointCollectors.Any(c => c.CollectorId == collector.CollectorId))
{
_logger.LogWarning("Entry point collector {CollectorId} already registered, skipping", collector.CollectorId);
return;
}
_entryPointCollectors.Add(collector);
_logger.LogDebug("Registered entry point collector: {CollectorId}", collector.CollectorId);
}
}
public IReadOnlyList<ISurfaceEntryCollector> GetCollectors()
{
lock (_lock) return [.. _collectors];
}
public IReadOnlyList<IEntryPointCollector> GetEntryPointCollectors()
{
lock (_lock) return [.. _entryPointCollectors];
}
public IReadOnlyList<ISurfaceEntryCollector> GetCollectorsForType(SurfaceType type)
{
lock (_lock)
{
return _collectors
.OrderByDescending(c => c.Priority)
.ToList();
return [.. _collectors.Where(c => c.SupportedTypes.Contains(type))];
}
}
public IReadOnlyList<ISurfaceEntryCollector> GetApplicableCollectors(SurfaceCollectionContext context)
public IReadOnlyList<IEntryPointCollector> GetEntryPointCollectorsForLanguage(string language)
{
ArgumentNullException.ThrowIfNull(context);
ArgumentException.ThrowIfNullOrWhiteSpace(language);
lock (_lock)
{
var applicable = _collectors
.Where(c => c.CanCollect(context))
.OrderByDescending(c => c.Priority)
.ToList();
// Filter by options if specified
if (context.Options?.Collectors is { Count: > 0 } allowedCollectors)
{
applicable = applicable
.Where(c => allowedCollectors.Contains(c.CollectorId))
.ToList();
}
return applicable;
return [.. _entryPointCollectors.Where(c =>
c.SupportedLanguages.Contains(language, StringComparer.OrdinalIgnoreCase))];
}
}
public async IAsyncEnumerable<SurfaceEntry> CollectAllAsync(
SurfaceCollectionContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var collectors = GetApplicableCollectors(context);
if (collectors.Count == 0)
{
_logger.LogDebug("No applicable collectors for scan {ScanId}", context.ScanId);
yield break;
}
_logger.LogDebug(
"Running {CollectorCount} collectors for scan {ScanId}",
collectors.Count,
context.ScanId);
var seenIds = new HashSet<string>();
var entryCount = 0;
var maxEntries = context.Options?.MaxEntries;
foreach (var collector in collectors)
{
if (cancellationToken.IsCancellationRequested)
break;
if (maxEntries.HasValue && entryCount >= maxEntries.Value)
{
_logger.LogDebug(
"Reached max entries limit ({MaxEntries}) for scan {ScanId}",
maxEntries.Value,
context.ScanId);
break;
}
_logger.LogDebug(
"Running collector {CollectorId} for scan {ScanId}",
collector.CollectorId,
context.ScanId);
await foreach (var entry in collector.CollectAsync(context, cancellationToken))
{
if (cancellationToken.IsCancellationRequested)
break;
// Apply confidence threshold
if (context.Options?.ConfidenceThreshold is double threshold)
{
var confidenceValue = (int)entry.Confidence / 4.0;
if (confidenceValue < threshold)
continue;
}
// Apply type filters
if (context.Options?.ExcludeTypes?.Contains(entry.Type) == true)
continue;
if (context.Options?.IncludeTypes is { Count: > 0 } includeTypes &&
!includeTypes.Contains(entry.Type))
continue;
// Deduplicate by ID
if (!seenIds.Add(entry.Id))
continue;
entryCount++;
yield return entry;
if (maxEntries.HasValue && entryCount >= maxEntries.Value)
break;
}
}
_logger.LogDebug(
"Collected {EntryCount} surface entries for scan {ScanId}",
entryCount,
context.ScanId);
}
}

View File

@@ -1,115 +1,76 @@
namespace StellaOps.Scanner.Surface.Models;
/// <summary>
/// Represents a discovered entry point in application code.
/// Entry points are language/framework-specific handlers that
/// receive external input (HTTP routes, RPC handlers, etc.).
/// An application entry point discovered during surface analysis.
/// </summary>
public sealed record EntryPoint
{
/// <summary>
/// Unique identifier for this entry point.
/// </summary>
/// <summary>Unique identifier.</summary>
public required string Id { get; init; }
/// <summary>
/// Programming language.
/// </summary>
/// <summary>Programming language.</summary>
public required string Language { get; init; }
/// <summary>
/// Web framework or runtime (e.g., "ASP.NET Core", "Express", "FastAPI").
/// </summary>
public required string Framework { get; init; }
/// <summary>Framework or runtime.</summary>
public string? Framework { get; init; }
/// <summary>
/// URL path or route pattern.
/// </summary>
/// <summary>URL path or route pattern.</summary>
public required string Path { get; init; }
/// <summary>
/// HTTP method (GET, POST, etc.) or RPC method type.
/// </summary>
public required string Method { get; init; }
/// <summary>HTTP method or RPC method name.</summary>
public string? Method { get; init; }
/// <summary>
/// Handler function/method name.
/// </summary>
/// <summary>Handler function/method name.</summary>
public required string Handler { get; init; }
/// <summary>
/// Source file containing the handler.
/// </summary>
/// <summary>Source file containing the handler.</summary>
public required string File { get; init; }
/// <summary>
/// Line number of the handler definition.
/// </summary>
public required int Line { get; init; }
/// <summary>Line number of the handler definition.</summary>
public int Line { get; init; }
/// <summary>
/// Handler parameters/arguments.
/// </summary>
/// <summary>Parameter names/types.</summary>
public IReadOnlyList<string> Parameters { get; init; } = [];
/// <summary>
/// Middleware chain applied to this endpoint.
/// </summary>
/// <summary>Applied middleware/interceptors.</summary>
public IReadOnlyList<string> Middlewares { get; init; } = [];
/// <summary>
/// Whether authentication is required.
/// </summary>
public bool? RequiresAuth { get; init; }
/// <summary>
/// Authorization policies applied.
/// </summary>
public IReadOnlyList<string>? AuthorizationPolicies { get; init; }
/// <summary>
/// Content types accepted.
/// </summary>
public IReadOnlyList<string>? AcceptsContentTypes { get; init; }
/// <summary>
/// Content types produced.
/// </summary>
public IReadOnlyList<string>? ProducesContentTypes { get; init; }
}
/// <summary>
/// Result of entry point discovery for a scan.
/// Summary of surface analysis results.
/// </summary>
public sealed record EntryPointDiscoveryResult
public sealed record SurfaceAnalysisSummary
{
/// <summary>
/// Scan identifier.
/// </summary>
/// <summary>Total number of entries detected.</summary>
public int TotalEntries { get; init; }
/// <summary>Entries grouped by type.</summary>
public IReadOnlyDictionary<SurfaceType, int> ByType { get; init; } = new Dictionary<SurfaceType, int>();
/// <summary>Overall risk score (0.0 to 1.0).</summary>
public double RiskScore { get; init; }
}
/// <summary>
/// Complete surface analysis result for a scan.
/// </summary>
public sealed record SurfaceAnalysisResult
{
/// <summary>Key for storing analysis results.</summary>
public const string StoreKey = "scanner.surface.analysis";
/// <summary>Scan identifier.</summary>
public required string ScanId { get; init; }
/// <summary>
/// When discovery was performed.
/// </summary>
public required DateTimeOffset DiscoveredAt { get; init; }
/// <summary>Analysis timestamp (UTC).</summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Discovered entry points.
/// </summary>
public required IReadOnlyList<EntryPoint> EntryPoints { get; init; }
/// <summary>Analysis summary.</summary>
public required SurfaceAnalysisSummary Summary { get; init; }
/// <summary>
/// Frameworks detected.
/// </summary>
public required IReadOnlyList<string> DetectedFrameworks { get; init; }
/// <summary>Detected surface entries.</summary>
public required IReadOnlyList<SurfaceEntry> Entries { get; init; }
/// <summary>
/// Total entry points by method.
/// </summary>
public required IReadOnlyDictionary<string, int> ByMethod { get; init; }
/// <summary>
/// Warnings or issues during discovery.
/// </summary>
public IReadOnlyList<string>? Warnings { get; init; }
/// <summary>Discovered entry points.</summary>
public IReadOnlyList<EntryPoint> EntryPoints { get; init; } = [];
}

View File

@@ -3,124 +3,58 @@ using System.Text;
namespace StellaOps.Scanner.Surface.Models;
/// <summary>
/// Represents a discovered attack surface entry point.
/// </summary>
public sealed record SurfaceEntry
{
/// <summary>
/// Unique identifier: SHA256(type|path|context).
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Type classification of this surface entry.
/// </summary>
public required SurfaceType Type { get; init; }
/// <summary>
/// File path, URL endpoint, or resource identifier.
/// </summary>
public required string Path { get; init; }
/// <summary>
/// Function, method, or handler context.
/// </summary>
public required string Context { get; init; }
/// <summary>
/// Detection confidence level.
/// </summary>
public required ConfidenceLevel Confidence { get; init; }
/// <summary>
/// Tags for categorization and filtering.
/// </summary>
public IReadOnlyList<string> Tags { get; init; } = [];
/// <summary>
/// Evidence supporting this entry detection.
/// </summary>
public required SurfaceEvidence Evidence { get; init; }
/// <summary>
/// Additional metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
/// <summary>
/// Creates a deterministic ID from type, path, and context.
/// </summary>
public static string ComputeId(SurfaceType type, string path, string context)
{
var input = $"{type}|{path}|{context}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
/// <summary>
/// Creates a new SurfaceEntry with computed ID.
/// </summary>
public static SurfaceEntry Create(
SurfaceType type,
string path,
string context,
ConfidenceLevel confidence,
SurfaceEvidence evidence,
IEnumerable<string>? tags = null,
IReadOnlyDictionary<string, string>? metadata = null)
{
return new SurfaceEntry
{
Id = ComputeId(type, path, context),
Type = type,
Path = path,
Context = context,
Confidence = confidence,
Evidence = evidence,
Tags = tags?.ToList() ?? [],
Metadata = metadata
};
}
}
/// <summary>
/// Evidence supporting a surface entry detection.
/// </summary>
public sealed record SurfaceEvidence
{
/// <summary>
/// Source file path.
/// </summary>
/// <summary>Source file path.</summary>
public required string File { get; init; }
/// <summary>
/// Line number in the source file.
/// </summary>
/// <summary>Line number in source file.</summary>
public required int Line { get; init; }
/// <summary>
/// Column number if available.
/// </summary>
public int? Column { get; init; }
/// <summary>Content hash of the evidence.</summary>
public required string Hash { get; init; }
/// <summary>
/// Content hash of the source file.
/// </summary>
public string? FileHash { get; init; }
/// <summary>
/// Code snippet around the detection.
/// </summary>
/// <summary>Optional code snippet.</summary>
public string? Snippet { get; init; }
/// <summary>
/// Detection method used.
/// </summary>
public string? DetectionMethod { get; init; }
/// <summary>
/// Additional evidence details.
/// </summary>
public IReadOnlyDictionary<string, string>? Details { get; init; }
/// <summary>Optional additional metadata.</summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// A detected surface analysis entry representing a potential attack surface.
/// </summary>
public sealed record SurfaceEntry
{
/// <summary>Deterministic ID: SHA256(type|path|context).</summary>
public required string Id { get; init; }
/// <summary>Type of surface entry.</summary>
public required SurfaceType Type { get; init; }
/// <summary>File path or endpoint path.</summary>
public required string Path { get; init; }
/// <summary>Function/method context where detected.</summary>
public required string Context { get; init; }
/// <summary>Detection confidence level.</summary>
public required ConfidenceLevel Confidence { get; init; }
/// <summary>Classification tags.</summary>
public IReadOnlyList<string> Tags { get; init; } = [];
/// <summary>Supporting evidence.</summary>
public required SurfaceEvidence Evidence { get; init; }
/// <summary>Creates a deterministic ID from components.</summary>
public static string ComputeId(SurfaceType type, string path, string context)
{
var input = $"{type}|{path}|{context}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(hash).ToLowerInvariant();
}
}

View File

@@ -1,42 +1,33 @@
namespace StellaOps.Scanner.Surface.Models;
/// <summary>
/// Classification of attack surface entry types.
/// Surface analysis entry type classification.
/// </summary>
public enum SurfaceType
{
/// <summary>Network-exposed endpoints, listeners, ports.</summary>
/// <summary>Exposed network endpoints, ports, listeners.</summary>
NetworkEndpoint,
/// <summary>File system operations, path access.</summary>
/// <summary>File system operations, sensitive file access.</summary>
FileOperation,
/// <summary>Process/command execution, subprocess spawns.</summary>
/// <summary>Process execution, subprocess spawning.</summary>
ProcessExecution,
/// <summary>Cryptographic operations, key handling.</summary>
CryptoOperation,
/// <summary>Authentication entry points, session handling.</summary>
/// <summary>Authentication points, session handling.</summary>
AuthenticationPoint,
/// <summary>User input handling, injection points.</summary>
InputHandling,
/// <summary>Secret/credential access points.</summary>
/// <summary>Secret/credential access patterns.</summary>
SecretAccess,
/// <summary>External service calls, HTTP clients.</summary>
ExternalCall,
/// <summary>Database queries, ORM operations.</summary>
DatabaseOperation,
/// <summary>Deserialization points.</summary>
Deserialization,
/// <summary>Reflection/dynamic code execution.</summary>
DynamicCode
/// <summary>External service calls, outbound connections.</summary>
ExternalCall
}
/// <summary>
@@ -44,15 +35,15 @@ public enum SurfaceType
/// </summary>
public enum ConfidenceLevel
{
/// <summary>Low confidence - heuristic or pattern match.</summary>
Low = 1,
/// <summary>Low confidence, likely false positive.</summary>
Low,
/// <summary>Medium confidence - likely match.</summary>
Medium = 2,
/// <summary>Medium confidence, manual review recommended.</summary>
Medium,
/// <summary>High confidence - definite match.</summary>
High = 3,
/// <summary>High confidence, likely accurate.</summary>
High,
/// <summary>Verified - confirmed through multiple signals.</summary>
Verified = 4
/// <summary>Very high confidence, confirmed pattern.</summary>
VeryHigh
}

View File

@@ -0,0 +1,117 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Output;
/// <summary>
/// Controls how surface analysis results are emitted: target directory,
/// serialization format (JSON vs. NDJSON), indentation, and whether evidence
/// snippets are carried through to the output.
/// </summary>
public sealed record SurfaceOutputOptions
{
    /// <summary>
    /// Directory the analysis file is written into. When null, no file is
    /// produced even if <see cref="WriteToFile"/> is true.
    /// </summary>
    public string? OutputPath { get; init; }

    /// <summary>
    /// Whether results should be persisted to disk. Defaults to true.
    /// </summary>
    public bool WriteToFile { get; init; } = true;

    /// <summary>
    /// Whether output should be newline-delimited JSON instead of a single
    /// JSON document. Defaults to false.
    /// </summary>
    public bool UseNdjson { get; init; }

    /// <summary>
    /// Whether evidence snippets are included in the output. Defaults to true.
    /// </summary>
    public bool IncludeSnippets { get; init; } = true;

    /// <summary>
    /// Whether JSON output is indented for human readability. Defaults to false.
    /// </summary>
    public bool PrettyPrint { get; init; }
}
/// <summary>
/// Interface for writing surface analysis results.
/// </summary>
public interface ISurfaceAnalysisWriter
{
    /// <summary>
    /// Writes the surface analysis result. When <paramref name="options"/> is
    /// null, implementations fall back to default <see cref="SurfaceOutputOptions"/>.
    /// </summary>
    Task WriteAsync(
        SurfaceAnalysisResult result,
        SurfaceOutputOptions? options = null,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Streams the result as NDJSON: each yielded string is one standalone
    /// JSON document (one line of the stream).
    /// </summary>
    IAsyncEnumerable<string> WriteNdjsonAsync(
        SurfaceAnalysisResult result,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Default <see cref="ISurfaceAnalysisWriter"/> implementation. Serializes
/// results as camel-cased JSON (optionally indented) to a per-scan file, and
/// can stream the same data as NDJSON lines.
/// </summary>
public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
{
    private readonly ILogger<SurfaceAnalysisWriter> _logger;

    private static readonly JsonSerializerOptions s_jsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    private static readonly JsonSerializerOptions s_prettyJsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    public SurfaceAnalysisWriter(ILogger<SurfaceAnalysisWriter> logger)
    {
        _logger = logger;
    }

    /// <summary>
    /// Writes <paramref name="result"/> as a JSON file named
    /// <c>surface-{ScanId}.json</c> under <see cref="SurfaceOutputOptions.OutputPath"/>.
    /// No-op when file output is disabled or no output path is configured.
    /// </summary>
    public async Task WriteAsync(
        SurfaceAnalysisResult result,
        SurfaceOutputOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);
        options ??= new SurfaceOutputOptions();
        var jsonOptions = options.PrettyPrint ? s_prettyJsonOptions : s_jsonOptions;

        // BUGFIX: the condition previously contained a stray "\!" escape
        // (shell-heredoc residue) that does not compile; the intent is a
        // plain logical negation.
        if (options.WriteToFile && !string.IsNullOrEmpty(options.OutputPath))
        {
            // Ensure the target directory exists so File.Create does not throw.
            Directory.CreateDirectory(options.OutputPath);
            var filePath = Path.Combine(options.OutputPath, $"surface-{result.ScanId}.json");
            await using var stream = File.Create(filePath);
            await JsonSerializer.SerializeAsync(stream, result, jsonOptions, cancellationToken);
            _logger.LogInformation("Wrote surface analysis to {FilePath}", filePath);
        }
    }

    /// <summary>
    /// Streams <paramref name="result"/> as NDJSON: one summary line, then one
    /// line per surface entry, then one line per entry point.
    /// </summary>
    public async IAsyncEnumerable<string> WriteNdjsonAsync(
        SurfaceAnalysisResult result,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);

        // Emit summary first so consumers can read it without buffering entries.
        yield return JsonSerializer.Serialize(new { type = "summary", data = result.Summary }, s_jsonOptions);

        foreach (var entry in result.Entries)
        {
            cancellationToken.ThrowIfCancellationRequested();
            yield return JsonSerializer.Serialize(new { type = "entry", data = entry }, s_jsonOptions);
        }

        foreach (var ep in result.EntryPoints)
        {
            cancellationToken.ThrowIfCancellationRequested();
            yield return JsonSerializer.Serialize(new { type = "entrypoint", data = ep }, s_jsonOptions);
        }

        // Satisfies the compiler requirement that an async iterator awaits.
        await Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,102 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Signals;
/// <summary>
/// Standard surface signal keys for policy integration. Values are the dotted
/// identifiers under which <see cref="SurfaceSignalEmitter"/> publishes counts
/// and scores derived from a surface analysis result.
/// </summary>
public static class SurfaceSignalKeys
{
    /// <summary>Key for the network-endpoint surface count.</summary>
    public const string NetworkEndpoints = "surface.network.endpoints";
    /// <summary>Key for exposed network ports. NOTE(review): not currently mapped by BuildSignals — confirm intended use.</summary>
    public const string ExposedPorts = "surface.network.ports";
    /// <summary>Key for the file-operation surface count.</summary>
    public const string FileOperations = "surface.file.operations";
    /// <summary>Key for the process-spawn surface count.</summary>
    public const string ProcessSpawns = "surface.process.spawns";
    /// <summary>Key for the crypto-usage surface count.</summary>
    public const string CryptoUsage = "surface.crypto.usage";
    /// <summary>Key for the authentication-point surface count.</summary>
    public const string AuthPoints = "surface.auth.points";
    /// <summary>Key for the input-handler surface count.</summary>
    public const string InputHandlers = "surface.input.handlers";
    /// <summary>Key for the secret-access surface count.</summary>
    public const string SecretAccess = "surface.secrets.access";
    /// <summary>Key for the external-call surface count.</summary>
    public const string ExternalCalls = "surface.external.calls";
    /// <summary>Key for the total number of surface entries.</summary>
    public const string TotalSurfaceArea = "surface.total.area";
    /// <summary>Key for the aggregate risk score of the scan.</summary>
    public const string RiskScore = "surface.risk.score";
    /// <summary>Key for the number of discovered entry points.</summary>
    public const string EntryPointCount = "surface.entrypoints.count";
}
/// <summary>
/// Interface for emitting surface analysis signals to the policy engine.
/// </summary>
public interface ISurfaceSignalEmitter
{
    /// <summary>
    /// Emits the given surface signals for a scan.
    /// </summary>
    /// <param name="scanId">Identifier of the scan the signals belong to.</param>
    /// <param name="signals">Signal key/value pairs (see <see cref="SurfaceSignalKeys"/>).</param>
    /// <param name="cancellationToken">Token to cancel the emission.</param>
    Task EmitAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Default surface signal emitter implementation. Currently logs the signals;
/// a production implementation would forward them to a message bus or the
/// policy engine.
/// </summary>
public sealed class SurfaceSignalEmitter : ISurfaceSignalEmitter
{
    private readonly ILogger<SurfaceSignalEmitter> _logger;

    public SurfaceSignalEmitter(ILogger<SurfaceSignalEmitter> logger)
    {
        _logger = logger;
    }

    /// <summary>Emits surface signals for a scan by logging each key/value pair.</summary>
    public Task EmitAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentNullException.ThrowIfNull(signals);

        _logger.LogInformation(
            "Emitting {SignalCount} surface signals for scan {ScanId}",
            signals.Count, scanId);

        foreach (var pair in signals)
        {
            _logger.LogDebug("Signal {Key}: {Value}", pair.Key, pair.Value);
        }

        // In production, this would emit to message bus or policy engine
        return Task.CompletedTask;
    }

    /// <summary>
    /// Builds the signal dictionary from a surface analysis result: aggregate
    /// totals plus one count per surface type.
    /// </summary>
    public static IDictionary<string, object> BuildSignals(SurfaceAnalysisResult result)
    {
        ArgumentNullException.ThrowIfNull(result);

        var signalMap = new Dictionary<string, object>
        {
            [SurfaceSignalKeys.TotalSurfaceArea] = result.Summary.TotalEntries,
            [SurfaceSignalKeys.RiskScore] = result.Summary.RiskScore,
            [SurfaceSignalKeys.EntryPointCount] = result.EntryPoints.Count
        };

        // Add type-specific counts under their canonical keys.
        foreach (var bucket in result.Summary.ByType)
        {
            signalMap[KeyForType(bucket.Key)] = bucket.Value;
        }

        return signalMap;
    }

    // Maps a surface type to its canonical policy signal key; unknown types fall
    // back to a generated "surface.<type>" key.
    private static string KeyForType(SurfaceType type) => type switch
    {
        SurfaceType.NetworkEndpoint => SurfaceSignalKeys.NetworkEndpoints,
        SurfaceType.FileOperation => SurfaceSignalKeys.FileOperations,
        SurfaceType.ProcessExecution => SurfaceSignalKeys.ProcessSpawns,
        SurfaceType.CryptoOperation => SurfaceSignalKeys.CryptoUsage,
        SurfaceType.AuthenticationPoint => SurfaceSignalKeys.AuthPoints,
        SurfaceType.InputHandling => SurfaceSignalKeys.InputHandlers,
        SurfaceType.SecretAccess => SurfaceSignalKeys.SecretAccess,
        SurfaceType.ExternalCall => SurfaceSignalKeys.ExternalCalls,
        _ => $"surface.{type.ToString().ToLowerInvariant()}"
    };
}

View File

@@ -0,0 +1,97 @@
{
"components": [
{
"analyzerId": "golang",
"componentKey": "golang::source::example.com/myproject::(devel)",
"name": "example.com/myproject",
"type": "golang",
"version": "(devel)",
"metadata": {
"go.version": "1.21",
"modulePath": "example.com/myproject",
"modulePath.main": "example.com/myproject",
"provenance": "source"
},
"evidence": [
{
"kind": "File",
"source": "go.mod",
"locator": "go.mod",
"value": "example.com/myproject"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/gin-gonic/gin@v1.9.1",
"name": "github.com/gin-gonic/gin",
"type": "golang",
"version": "v1.9.1",
"metadata": {
"dependency.direct": "true",
"modulePath": "github.com/gin-gonic/gin",
"moduleSum": "h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=",
"moduleVersion": "v1.9.1",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "github.com/gin-gonic/gin@v1.9.1",
"hash": "h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg="
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/stretchr/testify@v1.8.4",
"name": "github.com/stretchr/testify",
"type": "golang",
"version": "v1.8.4",
"metadata": {
"dependency.indirect": "true",
"modulePath": "github.com/stretchr/testify",
"moduleSum": "h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQzsRs2+AEW5Cjls=",
"moduleVersion": "v1.8.4",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "github.com/stretchr/testify@v1.8.4",
"hash": "h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQzsRs2+AEW5Cjls="
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/golang.org/x/crypto@v0.14.0",
"name": "golang.org/x/crypto",
"type": "golang",
"version": "v0.14.0",
"metadata": {
"dependency.direct": "true",
"modulePath": "golang.org/x/crypto",
"moduleSum": "h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=",
"moduleVersion": "v0.14.0",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "golang.org/x/crypto@v0.14.0",
"hash": "h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc="
}
]
}
]
}

View File

@@ -0,0 +1,15 @@
module example.com/myproject
go 1.21
require (
github.com/gin-gonic/gin v1.9.1
github.com/stretchr/testify v1.8.4 // indirect
golang.org/x/crypto v0.14.0
)
replace github.com/old/package => github.com/new/package v1.0.0
exclude github.com/bad/package v0.0.1
retract v1.0.0

View File

@@ -0,0 +1,6 @@
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL+YQDe/2MxBPCZnqLcr7CQMpkSiQlrsZl1mOjBms=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQzsRs2+AEW5Cjls=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=

View File

@@ -0,0 +1,90 @@
{
"components": [
{
"analyzerId": "golang",
"componentKey": "golang::source::example.com/module-a::(devel)",
"name": "example.com/module-a",
"type": "golang",
"version": "(devel)",
"metadata": {
"go.version": "1.22",
"modulePath": "example.com/module-a",
"modulePath.main": "example.com/module-a",
"provenance": "source"
},
"evidence": [
{
"kind": "File",
"source": "go.mod",
"locator": "module-a/go.mod",
"value": "example.com/module-a"
}
]
},
{
"analyzerId": "golang",
"componentKey": "golang::source::example.com/module-b::(devel)",
"name": "example.com/module-b",
"type": "golang",
"version": "(devel)",
"metadata": {
"go.version": "1.22",
"modulePath": "example.com/module-b",
"modulePath.main": "example.com/module-b",
"provenance": "source"
},
"evidence": [
{
"kind": "File",
"source": "go.mod",
"locator": "module-b/go.mod",
"value": "example.com/module-b"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/google/uuid@v1.4.0",
"name": "github.com/google/uuid",
"type": "golang",
"version": "v1.4.0",
"metadata": {
"dependency.direct": "true",
"modulePath": "github.com/google/uuid",
"moduleVersion": "v1.4.0",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "module-a/go.mod",
"value": "github.com/google/uuid@v1.4.0"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/sirupsen/logrus@v1.9.3",
"name": "github.com/sirupsen/logrus",
"type": "golang",
"version": "v1.9.3",
"metadata": {
"dependency.direct": "true",
"modulePath": "github.com/sirupsen/logrus",
"moduleVersion": "v1.9.3",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "module-b/go.mod",
"value": "github.com/sirupsen/logrus@v1.9.3"
}
]
}
]
}

View File

@@ -0,0 +1,6 @@
go 1.22
use (
./module-a
./module-b
)

View File

@@ -0,0 +1,5 @@
module example.com/module-a
go 1.22
require github.com/google/uuid v1.4.0

View File

@@ -0,0 +1,5 @@
module example.com/module-b
go 1.22
require github.com/sirupsen/logrus v1.9.3

View File

@@ -0,0 +1,95 @@
{
"components": [
{
"analyzerId": "golang",
"componentKey": "golang::source::gitlab.mycompany.com/internal/myservice::(devel)",
"name": "gitlab.mycompany.com/internal/myservice",
"type": "golang",
"version": "(devel)",
"metadata": {
"go.version": "1.21",
"modulePath": "gitlab.mycompany.com/internal/myservice",
"modulePath.main": "gitlab.mycompany.com/internal/myservice",
"provenance": "source"
},
"evidence": [
{
"kind": "File",
"source": "go.mod",
"locator": "go.mod",
"value": "gitlab.mycompany.com/internal/myservice"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/git.internal.corp/lib/utils@v2.0.0",
"name": "git.internal.corp/lib/utils",
"type": "golang",
"version": "v2.0.0",
"metadata": {
"dependency.direct": "true",
"moduleCategory": "private",
"modulePath": "git.internal.corp/lib/utils",
"moduleVersion": "v2.0.0",
"private": "true",
"provenance": "source",
"registry": "git.internal.corp"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "git.internal.corp/lib/utils@v2.0.0"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/gin-gonic/gin@v1.9.1",
"name": "github.com/gin-gonic/gin",
"type": "golang",
"version": "v1.9.1",
"metadata": {
"dependency.direct": "true",
"modulePath": "github.com/gin-gonic/gin",
"moduleVersion": "v1.9.1",
"provenance": "source",
"registry": "proxy.golang.org"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "github.com/gin-gonic/gin@v1.9.1"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/gitlab.mycompany.com/shared/common@v1.0.0",
"name": "gitlab.mycompany.com/shared/common",
"type": "golang",
"version": "v1.0.0",
"metadata": {
"dependency.direct": "true",
"moduleCategory": "private",
"modulePath": "gitlab.mycompany.com/shared/common",
"moduleVersion": "v1.0.0",
"private": "true",
"provenance": "source",
"registry": "gitlab.mycompany.com"
},
"evidence": [
{
"kind": "Metadata",
"source": "go.mod",
"locator": "go.mod",
"value": "gitlab.mycompany.com/shared/common@v1.0.0"
}
]
}
]
}

View File

@@ -0,0 +1,9 @@
module gitlab.mycompany.com/internal/myservice
go 1.21
require (
github.com/gin-gonic/gin v1.9.1
gitlab.mycompany.com/shared/common v1.0.0
git.internal.corp/lib/utils v2.0.0
)

View File

@@ -0,0 +1,71 @@
{
"components": [
{
"analyzerId": "golang",
"componentKey": "golang::source::example.com/vendored-app::(devel)",
"name": "example.com/vendored-app",
"type": "golang",
"version": "(devel)",
"metadata": {
"go.version": "1.21",
"modulePath": "example.com/vendored-app",
"modulePath.main": "example.com/vendored-app",
"provenance": "source"
},
"evidence": [
{
"kind": "File",
"source": "go.mod",
"locator": "go.mod",
"value": "example.com/vendored-app"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/github.com/pkg/errors@v0.9.1",
"name": "github.com/pkg/errors",
"type": "golang",
"version": "v0.9.1",
"metadata": {
"dependency.direct": "true",
"modulePath": "github.com/pkg/errors",
"moduleVersion": "v0.9.1",
"provenance": "source",
"registry": "proxy.golang.org",
"vendored": "true"
},
"evidence": [
{
"kind": "Metadata",
"source": "vendor",
"locator": "go.mod",
"value": "github.com/pkg/errors@v0.9.1"
}
]
},
{
"analyzerId": "golang",
"purl": "pkg:golang/golang.org/x/text@v0.14.0",
"name": "golang.org/x/text",
"type": "golang",
"version": "v0.14.0",
"metadata": {
"dependency.indirect": "true",
"modulePath": "golang.org/x/text",
"moduleVersion": "v0.14.0",
"provenance": "source",
"registry": "proxy.golang.org",
"vendored": "true"
},
"evidence": [
{
"kind": "Metadata",
"source": "vendor",
"locator": "go.mod",
"value": "golang.org/x/text@v0.14.0"
}
]
}
]
}

View File

@@ -0,0 +1,8 @@
module example.com/vendored-app
go 1.21
require (
github.com/pkg/errors v0.9.1
golang.org/x/text v0.14.0 // indirect
)

View File

@@ -0,0 +1,7 @@
# github.com/pkg/errors v0.9.1
## explicit
github.com/pkg/errors
# golang.org/x/text v0.14.0
## explicit; go 1.18
golang.org/x/text/transform
golang.org/x/text/unicode/norm

View File

@@ -0,0 +1,82 @@
using System.Collections.Generic;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.GraphJobs;
/// <summary>
/// Postgres-backed <see cref="IGraphJobStore"/> that delegates persistence to
/// <see cref="IGraphJobRepository"/>. Updates use optimistic concurrency on the
/// job's expected status.
/// </summary>
internal sealed class PostgresGraphJobStore : IGraphJobStore
{
    private readonly IGraphJobRepository _repository;

    public PostgresGraphJobStore(IGraphJobRepository repository)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
    }

    /// <summary>Persists a new graph build job and returns it.</summary>
    public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
    {
        // Fix: ConfigureAwait(false) was applied only in the Update methods;
        // library code should use it consistently on every await.
        await _repository.InsertAsync(job, cancellationToken).ConfigureAwait(false);
        return job;
    }

    /// <summary>Persists a new graph overlay job and returns it.</summary>
    public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
    {
        await _repository.InsertAsync(job, cancellationToken).ConfigureAwait(false);
        return job;
    }

    /// <summary>
    /// Lists jobs for a tenant. The query's type filter decides which kinds are
    /// fetched (both when the filter is null); the limit defaults to 50.
    /// </summary>
    public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
    {
        var normalized = query.Normalize();

        var builds = normalized.Type is null or GraphJobQueryType.Build
            ? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken).ConfigureAwait(false)
            : Array.Empty<GraphBuildJob>();

        var overlays = normalized.Type is null or GraphJobQueryType.Overlay
            ? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken).ConfigureAwait(false)
            : Array.Empty<GraphOverlayJob>();

        return GraphJobCollection.From(builds, overlays);
    }

    /// <summary>Fetches a build job by id, or null when not found.</summary>
    public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
        => await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken).ConfigureAwait(false);

    /// <summary>Fetches an overlay job by id, or null when not found.</summary>
    public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
        => await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken).ConfigureAwait(false);

    /// <summary>
    /// Replaces a build job iff its stored status equals <paramref name="expectedStatus"/>.
    /// On a concurrency miss, returns the current stored job as NotUpdated.
    /// </summary>
    /// <exception cref="KeyNotFoundException">The job does not exist at all.</exception>
    public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        if (await _repository.TryReplaceAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
        {
            return GraphJobUpdateResult<GraphBuildJob>.UpdatedResult(job);
        }

        var existing = await _repository.GetBuildJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
        }

        return GraphJobUpdateResult<GraphBuildJob>.NotUpdated(existing);
    }

    /// <summary>
    /// Replaces an overlay job iff its stored status equals <paramref name="expectedStatus"/>.
    /// On a concurrency miss, returns the current stored job as NotUpdated.
    /// </summary>
    /// <exception cref="KeyNotFoundException">The job does not exist at all.</exception>
    public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        if (await _repository.TryReplaceOverlayAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
        {
            return GraphJobUpdateResult<GraphOverlayJob>.UpdatedResult(job);
        }

        var existing = await _repository.GetOverlayJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
        }

        return GraphJobUpdateResult<GraphOverlayJob>.NotUpdated(existing);
    }

    /// <summary>Lists overlay jobs for a tenant with repository default paging.</summary>
    public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
        => await _repository.ListOverlayJobsAsync(tenantId, cancellationToken).ConfigureAwait(false);
}

View File

@@ -8,9 +8,8 @@ using StellaOps.Plugin.DependencyInjection;
using StellaOps.Plugin.Hosting;
using StellaOps.Scheduler.WebService.Hosting;
using StellaOps.Scheduler.ImpactIndex;
using StellaOps.Scheduler.Storage.Mongo;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService;
using StellaOps.Scheduler.WebService.Auth;
using StellaOps.Scheduler.WebService.EventWebhooks;
@@ -83,8 +82,9 @@ builder.Services.AddOptions<SchedulerCartographerOptions>()
var storageSection = builder.Configuration.GetSection("Scheduler:Storage");
if (storageSection.Exists())
{
builder.Services.AddSchedulerMongoStorage(storageSection);
builder.Services.AddSingleton<IGraphJobStore, MongoGraphJobStore>();
builder.Services.AddSchedulerPostgresStorage(storageSection);
builder.Services.AddScoped<IGraphJobRepository, GraphJobRepository>();
builder.Services.AddSingleton<IGraphJobStore, PostgresGraphJobStore>();
builder.Services.AddSingleton<IPolicyRunService, PolicyRunService>();
builder.Services.AddSingleton<IPolicySimulationMetricsProvider, PolicySimulationMetricsProvider>();
builder.Services.AddSingleton<IPolicySimulationMetricsRecorder>(static sp => (IPolicySimulationMetricsRecorder)sp.GetRequiredService<IPolicySimulationMetricsProvider>());

View File

@@ -9,6 +9,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scheduler.Storage.Postgres/StellaOps.Scheduler.Storage.Postgres.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />

View File

@@ -4,7 +4,7 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Queue;
using StellaOps.Scheduler.Storage.Mongo;
using StellaOps.Scheduler.Storage.Postgres;
using StellaOps.Scheduler.Worker.DependencyInjection;
var builder = Host.CreateApplicationBuilder(args);
@@ -21,7 +21,7 @@ builder.Services.AddSchedulerQueues(builder.Configuration);
var storageSection = builder.Configuration.GetSection("Scheduler:Storage");
if (storageSection.Exists())
{
builder.Services.AddSchedulerMongoStorage(storageSection);
builder.Services.AddSchedulerPostgresStorage(storageSection);
}
builder.Services.AddSchedulerWorker(builder.Configuration.GetSection("Scheduler:Worker"));

View File

@@ -10,7 +10,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scheduler.Queue\StellaOps.Scheduler.Queue.csproj" />
<ProjectReference Include="..\StellaOps.Scheduler.Storage.Mongo\StellaOps.Scheduler.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Scheduler.Storage.Postgres\StellaOps.Scheduler.Storage.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Scheduler.Worker\StellaOps.Scheduler.Worker.csproj" />
</ItemGroup>
</Project>

View File

@@ -9,8 +9,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Models", "__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj", "{382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo", "__Libraries\StellaOps.Scheduler.Storage.Mongo\StellaOps.Scheduler.Storage.Mongo.csproj", "{33770BC5-6802-45AD-A866-10027DD360E2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Postgres", "__Libraries\StellaOps.Scheduler.Storage.Postgres\StellaOps.Scheduler.Storage.Postgres.csproj", "{167198F1-43CF-42F4-BEF2-5ABC87116A37}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex", "__Libraries\StellaOps.Scheduler.ImpactIndex\StellaOps.Scheduler.ImpactIndex.csproj", "{56209C24-3CE7-4F8E-8B8C-F052CB919DE2}"
@@ -61,8 +59,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Models.
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Queue.Tests", "__Tests\StellaOps.Scheduler.Queue.Tests\StellaOps.Scheduler.Queue.Tests.csproj", "{7C22F6B7-095E-459B-BCCF-87098EA9F192}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo.Tests", "__Tests\StellaOps.Scheduler.Storage.Mongo.Tests\StellaOps.Scheduler.Storage.Mongo.Tests.csproj", "{972CEB4D-510B-4701-B4A2-F14A85F11CC7}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "__Tests\StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{7B4C9EAC-316E-4890-A715-7BB9C1577F96}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Backfill.Tests", "__Tests\StellaOps.Scheduler.Backfill.Tests\StellaOps.Scheduler.Backfill.Tests.csproj", "{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}"

View File

@@ -0,0 +1,34 @@
-- Scheduler graph jobs schema (Postgres).
--
-- NOTE(review): GraphJobRepository binds the type/status parameters as
-- smallint ((short) casts of the C# enums). An untyped smallint parameter
-- cannot be implicitly coerced into a Postgres enum column, so the original
-- enum-typed columns would have rejected every insert/update. The columns are
-- therefore declared SMALLINT; the numeric values are owned by the C#
-- GraphJobQueryType / GraphJobStatus enums.

-- The migration must be runnable on a fresh database, so create the target
-- schema first (idempotent).
CREATE SCHEMA IF NOT EXISTS scheduler;

CREATE TABLE IF NOT EXISTS scheduler.graph_jobs (
    id             UUID PRIMARY KEY,
    tenant_id      TEXT NOT NULL,
    type           SMALLINT NOT NULL,   -- GraphJobQueryType (build / overlay)
    status         SMALLINT NOT NULL,   -- GraphJobStatus
    payload        JSONB NOT NULL,      -- canonical serialized job document
    correlation_id TEXT,
    created_at     TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at     TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- List queries filter by tenant (+ optional type/status) and page newest-first.
CREATE INDEX IF NOT EXISTS idx_graph_jobs_tenant_status
    ON scheduler.graph_jobs(tenant_id, status, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_graph_jobs_tenant_type_status
    ON scheduler.graph_jobs(tenant_id, type, status, created_at DESC);

-- Append-only status-transition history per job; rows disappear with the job.
CREATE TABLE IF NOT EXISTS scheduler.graph_job_events (
    id         BIGSERIAL PRIMARY KEY,
    job_id     UUID NOT NULL REFERENCES scheduler.graph_jobs(id) ON DELETE CASCADE,
    tenant_id  TEXT NOT NULL,
    status     SMALLINT NOT NULL,       -- GraphJobStatus at the time of the event
    payload    JSONB NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_graph_job_events_job
    ON scheduler.graph_job_events(job_id, created_at DESC);

View File

@@ -0,0 +1,157 @@
using System.Collections.Generic;
using System.Text.Json;
using Dapper;
using Npgsql;
using StellaOps.Infrastructure.Postgres;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Postgres.Repositories;
/// <summary>
/// Dapper-based Postgres repository for graph build/overlay jobs. Each job is
/// one row in <c>scheduler.graph_jobs</c> with the canonical JSON document in
/// the <c>payload</c> column; <c>type</c> and <c>status</c> are persisted as
/// smallint values of the corresponding C# enums.
/// </summary>
public sealed class GraphJobRepository : IGraphJobRepository
{
    private const string InsertSql = @"INSERT INTO scheduler.graph_jobs
(id, tenant_id, type, status, payload, created_at, updated_at, correlation_id)
VALUES (@Id, @TenantId, @Type, @Status, @Payload, @CreatedAt, @UpdatedAt, @CorrelationId);";

    private const string SelectByIdSql =
        "SELECT payload FROM scheduler.graph_jobs WHERE tenant_id=@TenantId AND id=@Id AND type=@Type LIMIT 1";

    private const string ReplaceSql = @"UPDATE scheduler.graph_jobs
SET status=@NewStatus, payload=@Payload, updated_at=NOW()
WHERE tenant_id=@TenantId AND id=@Id AND status=@ExpectedStatus AND type=@Type";

    private readonly SchedulerDataSource _dataSource;
    private readonly JsonSerializerOptions _json;

    public GraphJobRepository(SchedulerDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _json = CanonicalJsonSerializer.Options;
    }

    /// <summary>Inserts a build job row (type = Build).</summary>
    public async ValueTask InsertAsync(GraphBuildJob job, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(job);
        var args = new
        {
            job.Id,
            job.TenantId,
            Type = (short)GraphJobQueryType.Build,
            Status = (short)job.Status,
            Payload = JsonSerializer.Serialize(job, _json),
            job.CreatedAt,
            UpdatedAt = job.UpdatedAt ?? job.CreatedAt,
            job.CorrelationId
        };

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        // Fix: wrap in CommandDefinition so the CancellationToken reaches the
        // database call — the bare ExecuteAsync(sql, args) overload ignores it.
        await conn.ExecuteAsync(new CommandDefinition(InsertSql, args, cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    /// <summary>Inserts an overlay job row (type = Overlay).</summary>
    public async ValueTask InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(job);
        var args = new
        {
            job.Id,
            job.TenantId,
            Type = (short)GraphJobQueryType.Overlay,
            Status = (short)job.Status,
            Payload = JsonSerializer.Serialize(job, _json),
            job.CreatedAt,
            UpdatedAt = job.UpdatedAt ?? job.CreatedAt,
            job.CorrelationId
        };

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await conn.ExecuteAsync(new CommandDefinition(InsertSql, args, cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    /// <summary>Fetches a build job by id, or null when not found.</summary>
    public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
    {
        var payload = await GetPayloadAsync(tenantId, jobId, GraphJobQueryType.Build, cancellationToken).ConfigureAwait(false);
        return payload is null ? null : JsonSerializer.Deserialize<GraphBuildJob>(payload, _json);
    }

    /// <summary>Fetches an overlay job by id, or null when not found.</summary>
    public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
    {
        var payload = await GetPayloadAsync(tenantId, jobId, GraphJobQueryType.Overlay, cancellationToken).ConfigureAwait(false);
        return payload is null ? null : JsonSerializer.Deserialize<GraphOverlayJob>(payload, _json);
    }

    /// <summary>Lists build jobs for a tenant, newest first, optionally filtered by status.</summary>
    public async ValueTask<IReadOnlyCollection<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken)
    {
        var rows = await QueryPayloadsAsync(tenantId, GraphJobQueryType.Build, status, limit, cancellationToken).ConfigureAwait(false);
        return rows.Select(r => JsonSerializer.Deserialize<GraphBuildJob>(r, _json)!).ToArray();
    }

    /// <summary>Lists overlay jobs for a tenant, newest first, optionally filtered by status.</summary>
    public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken)
    {
        var rows = await QueryPayloadsAsync(tenantId, GraphJobQueryType.Overlay, status, limit, cancellationToken).ConfigureAwait(false);
        return rows.Select(r => JsonSerializer.Deserialize<GraphOverlayJob>(r, _json)!).ToArray();
    }

    /// <summary>Lists overlay jobs with the default page size (50) and no status filter.</summary>
    public ValueTask<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
        => ListOverlayJobsAsync(tenantId, status: null, limit: 50, cancellationToken);

    /// <summary>
    /// Replaces a build job iff its stored status equals <paramref name="expectedStatus"/>
    /// (optimistic concurrency). Returns true when exactly one row was updated.
    /// </summary>
    public async ValueTask<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(job);
        return await ReplaceAsync(
            job.TenantId, job.Id, GraphJobQueryType.Build,
            (short)job.Status, (short)expectedStatus,
            JsonSerializer.Serialize(job, _json),
            cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Replaces an overlay job iff its stored status equals <paramref name="expectedStatus"/>
    /// (optimistic concurrency). Returns true when exactly one row was updated.
    /// </summary>
    public async ValueTask<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(job);
        return await ReplaceAsync(
            job.TenantId, job.Id, GraphJobQueryType.Overlay,
            (short)job.Status, (short)expectedStatus,
            JsonSerializer.Serialize(job, _json),
            cancellationToken).ConfigureAwait(false);
    }

    // Single-row payload lookup shared by both Get methods.
    private async Task<string?> GetPayloadAsync(string tenantId, string jobId, GraphJobQueryType type, CancellationToken cancellationToken)
    {
        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        return await conn.ExecuteScalarAsync<string?>(new CommandDefinition(
            SelectByIdSql,
            new { TenantId = tenantId, Id = jobId, Type = (short)type },
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    // Shared list query: payloads for a tenant/type, optionally filtered by
    // status, newest first, capped at @Limit.
    private async Task<IEnumerable<string>> QueryPayloadsAsync(string tenantId, GraphJobQueryType type, GraphJobStatus? status, int limit, CancellationToken cancellationToken)
    {
        var sql = "SELECT payload FROM scheduler.graph_jobs WHERE tenant_id=@TenantId AND type=@Type";
        if (status is not null)
        {
            sql += " AND status=@Status";
        }
        sql += " ORDER BY created_at DESC LIMIT @Limit";

        var args = new
        {
            TenantId = tenantId,
            Type = (short)type,
            // Fix: the original "status is null ? null : (short)status" has no
            // common type inside an anonymous-type member (CS0173); the lifted
            // nullable cast expresses the same value.
            Status = (short?)status,
            Limit = limit
        };

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        return await conn.QueryAsync<string>(new CommandDefinition(sql, args, cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    // Shared compare-and-swap update used by both TryReplace methods.
    private async Task<bool> ReplaceAsync(string tenantId, string jobId, GraphJobQueryType type, short newStatus, short expectedStatus, string payload, CancellationToken cancellationToken)
    {
        var args = new
        {
            TenantId = tenantId,
            Id = jobId,
            ExpectedStatus = expectedStatus,
            NewStatus = newStatus,
            Type = (short)type,
            Payload = payload
        };

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        var rows = await conn.ExecuteAsync(new CommandDefinition(ReplaceSql, args, cancellationToken: cancellationToken)).ConfigureAwait(false);
        return rows == 1;
    }
}

View File

@@ -0,0 +1,22 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Postgres.Repositories;
/// <summary>
/// Persistence abstraction for graph build/overlay jobs.
/// </summary>
public interface IGraphJobRepository
{
    /// <summary>Inserts a new graph build job.</summary>
    ValueTask InsertAsync(GraphBuildJob job, CancellationToken cancellationToken);

    /// <summary>Inserts a new graph overlay job.</summary>
    ValueTask InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken);

    /// <summary>
    /// Replaces the stored build job only if its current status equals
    /// <paramref name="expectedStatus"/> (optimistic concurrency). Returns
    /// <c>true</c> when a row was updated.
    /// </summary>
    ValueTask<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken);

    /// <summary>
    /// Replaces the stored overlay job only if its current status equals
    /// <paramref name="expectedStatus"/> (optimistic concurrency). Returns
    /// <c>true</c> when a row was updated.
    /// </summary>
    ValueTask<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken);

    /// <summary>Fetches a build job by id for a tenant, or null when not found.</summary>
    ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken);

    /// <summary>Fetches an overlay job by id for a tenant, or null when not found.</summary>
    ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken);

    /// <summary>Lists build jobs for a tenant, optionally filtered by status, capped at <paramref name="limit"/>.</summary>
    ValueTask<IReadOnlyCollection<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken);

    /// <summary>Lists overlay jobs for a tenant, optionally filtered by status, capped at <paramref name="limit"/>.</summary>
    ValueTask<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken);

    /// <summary>Lists overlay jobs for a tenant with implementation-default filtering/paging.</summary>
    ValueTask<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken);
}

View File

@@ -33,6 +33,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<IDistributedLockRepository, DistributedLockRepository>();
services.AddScoped<IJobHistoryRepository, JobHistoryRepository>();
services.AddScoped<IMetricsRepository, MetricsRepository>();
services.AddScoped<IGraphJobRepository, GraphJobRepository>();
return services;
}
@@ -57,6 +58,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<IDistributedLockRepository, DistributedLockRepository>();
services.AddScoped<IJobHistoryRepository, JobHistoryRepository>();
services.AddScoped<IMetricsRepository, MetricsRepository>();
services.AddScoped<IGraphJobRepository, GraphJobRepository>();
return services;
}

View File

@@ -16,6 +16,11 @@
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Dapper" Version="2.1.24" />
</ItemGroup>
</Project>

View File

@@ -6,8 +6,8 @@ using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Queue;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories.Services;
using StellaOps.Scheduler.Worker.Events;
using StellaOps.Scheduler.Worker.Observability;

View File

@@ -1,129 +1,129 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Worker.Options;
namespace StellaOps.Scheduler.Worker.Graph;
/// <summary>
/// Long-running worker that polls for pending graph build jobs and dispatches each
/// one to <see cref="GraphBuildExecutionService"/>. Idles on the configured delay
/// when graph processing is disabled or the queue is empty.
/// </summary>
internal sealed class GraphBuildBackgroundService : BackgroundService
{
    private readonly IGraphJobRepository _repository;
    private readonly GraphBuildExecutionService _executionService;
    private readonly IOptions<SchedulerWorkerOptions> _options;
    private readonly ILogger<GraphBuildBackgroundService> _logger;

    public GraphBuildBackgroundService(
        IGraphJobRepository repository,
        GraphBuildExecutionService executionService,
        IOptions<SchedulerWorkerOptions> options,
        ILogger<GraphBuildBackgroundService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Main poll loop: fetch a batch of pending jobs, run them sequentially, then
    /// sleep for the poll interval. Unexpected errors are logged and followed by a
    /// 5-second back-off so a persistent fault cannot spin the loop.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Graph build worker started.");
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                // Options are re-read each iteration so toggling Graph.Enabled takes
                // effect without restarting the host.
                var graphOptions = _options.Value.Graph;
                if (!graphOptions.Enabled)
                {
                    await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false);
                    continue;
                }
                // NOTE(review): this call passes (status, batchSize, token), while the Postgres
                // IGraphJobRepository in this changeset declares tenant-scoped list overloads —
                // confirm which repository interface the worker is meant to bind against.
                var jobs = await _repository.ListBuildJobsAsync(GraphJobStatus.Pending, graphOptions.BatchSize, stoppingToken).ConfigureAwait(false);
                if (jobs.Count == 0)
                {
                    await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false);
                    continue;
                }
                foreach (var job in jobs)
                {
                    try
                    {
                        var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false);
                        LogResult(result);
                    }
                    catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
                    {
                        // Shutdown requested mid-batch: abandon the remaining jobs.
                        break;
                    }
                    catch (Exception ex)
                    {
                        // One bad job must not stop the rest of the batch.
                        _logger.LogError(ex, "Unhandled exception while processing graph build job {JobId}.", job.Id);
                    }
                }
                await DelayAsync(graphOptions.PollInterval, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Graph build worker encountered an error; backing off.");
                await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
            }
        }
        _logger.LogInformation("Graph build worker stopping.");
    }

    /// <summary>
    /// Sleeps for <paramref name="delay"/>; non-positive delays return immediately.
    /// A cancelled delay is swallowed — the caller's loop condition observes the token.
    /// </summary>
    private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken)
    {
        if (delay <= TimeSpan.Zero)
        {
            return;
        }
        try
        {
            await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
        }
        catch (TaskCanceledException)
        {
        }
    }

    /// <summary>Logs one execution outcome at a severity matching its result type.</summary>
    private void LogResult(GraphBuildExecutionResult result)
    {
        switch (result.Type)
        {
            case GraphBuildExecutionResultType.Completed:
                _logger.LogInformation(
                    "Graph build job {JobId} completed (tenant={TenantId}).",
                    result.Job.Id,
                    result.Job.TenantId);
                break;
            case GraphBuildExecutionResultType.Failed:
                _logger.LogWarning(
                    "Graph build job {JobId} failed (tenant={TenantId}): {Reason}.",
                    result.Job.Id,
                    result.Job.TenantId,
                    result.Reason ?? "unknown error");
                break;
            case GraphBuildExecutionResultType.Skipped:
                _logger.LogDebug(
                    "Graph build job {JobId} skipped: {Reason}.",
                    result.Job.Id,
                    result.Reason ?? "no reason");
                break;
        }
    }
}
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories.Repositories;
using StellaOps.Scheduler.Worker.Options;
namespace StellaOps.Scheduler.Worker.Graph;
/// <summary>
/// Hosted worker that repeatedly drains pending graph build jobs and forwards each
/// one to <see cref="GraphBuildExecutionService"/>, idling while graph processing is
/// disabled or no work is queued.
/// </summary>
internal sealed class GraphBuildBackgroundService : BackgroundService
{
    private readonly IGraphJobRepository _repository;
    private readonly GraphBuildExecutionService _executionService;
    private readonly IOptions<SchedulerWorkerOptions> _options;
    private readonly ILogger<GraphBuildBackgroundService> _logger;

    public GraphBuildBackgroundService(
        IGraphJobRepository repository,
        GraphBuildExecutionService executionService,
        IOptions<SchedulerWorkerOptions> options,
        ILogger<GraphBuildBackgroundService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Poll loop: fetch a batch of pending build jobs, execute them one by one, then
    /// wait out the poll interval. Unexpected faults are logged and followed by a
    /// five-second back-off to keep the loop from spinning.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Graph build worker started.");

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                // Re-read options every pass so configuration toggles apply live.
                var graph = _options.Value.Graph;
                if (!graph.Enabled)
                {
                    await PauseAsync(graph.IdleDelay, stoppingToken).ConfigureAwait(false);
                    continue;
                }

                var pending = await _repository.ListBuildJobsAsync(GraphJobStatus.Pending, graph.BatchSize, stoppingToken).ConfigureAwait(false);
                if (pending.Count == 0)
                {
                    await PauseAsync(graph.IdleDelay, stoppingToken).ConfigureAwait(false);
                    continue;
                }

                foreach (var pendingJob in pending)
                {
                    try
                    {
                        var outcome = await _executionService.ExecuteAsync(pendingJob, stoppingToken).ConfigureAwait(false);
                        LogResult(outcome);
                    }
                    catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
                    {
                        // Shutdown requested mid-batch: abandon the remaining jobs.
                        break;
                    }
                    catch (Exception ex)
                    {
                        // One bad job must not sink the rest of the batch.
                        _logger.LogError(ex, "Unhandled exception while processing graph build job {JobId}.", pendingJob.Id);
                    }
                }

                await PauseAsync(graph.PollInterval, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Graph build worker encountered an error; backing off.");
                await PauseAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
            }
        }

        _logger.LogInformation("Graph build worker stopping.");
    }

    /// <summary>
    /// Sleeps for <paramref name="delay"/>; non-positive delays return immediately and a
    /// cancelled delay is swallowed — the caller's loop condition observes the token.
    /// </summary>
    private static async Task PauseAsync(TimeSpan delay, CancellationToken cancellationToken)
    {
        if (delay <= TimeSpan.Zero)
        {
            return;
        }

        try
        {
            await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
        }
        catch (TaskCanceledException)
        {
        }
    }

    /// <summary>Logs one execution outcome at a severity matching its result type.</summary>
    private void LogResult(GraphBuildExecutionResult result)
    {
        if (result.Type == GraphBuildExecutionResultType.Completed)
        {
            _logger.LogInformation(
                "Graph build job {JobId} completed (tenant={TenantId}).",
                result.Job.Id,
                result.Job.TenantId);
        }
        else if (result.Type == GraphBuildExecutionResultType.Failed)
        {
            _logger.LogWarning(
                "Graph build job {JobId} failed (tenant={TenantId}): {Reason}.",
                result.Job.Id,
                result.Job.TenantId,
                result.Reason ?? "unknown error");
        }
        else if (result.Type == GraphBuildExecutionResultType.Skipped)
        {
            _logger.LogDebug(
                "Graph build job {JobId} skipped: {Reason}.",
                result.Job.Id,
                result.Reason ?? "no reason");
        }
    }
}

View File

@@ -1,76 +1,76 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories.Repositories;
using StellaOps.Scheduler.Worker.Graph.Cartographer;
using StellaOps.Scheduler.Worker.Graph.Scheduler;
using StellaOps.Scheduler.Worker.Options;
using StellaOps.Scheduler.Worker.Observability;
namespace StellaOps.Scheduler.Worker.Graph;
internal sealed class GraphBuildExecutionService
{
private readonly IGraphJobRepository _repository;
private readonly ICartographerBuildClient _cartographerClient;
private readonly IGraphJobCompletionClient _completionClient;
private readonly IOptions<SchedulerWorkerOptions> _options;
private readonly SchedulerWorkerMetrics _metrics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<GraphBuildExecutionService> _logger;
public GraphBuildExecutionService(
IGraphJobRepository repository,
ICartographerBuildClient cartographerClient,
IGraphJobCompletionClient completionClient,
IOptions<SchedulerWorkerOptions> options,
SchedulerWorkerMetrics metrics,
TimeProvider? timeProvider,
ILogger<GraphBuildExecutionService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_cartographerClient = cartographerClient ?? throw new ArgumentNullException(nameof(cartographerClient));
_completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient));
_options = options ?? throw new ArgumentNullException(nameof(options));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<GraphBuildExecutionResult> ExecuteAsync(GraphBuildJob job, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(job);
var graphOptions = _options.Value.Graph;
if (!graphOptions.Enabled)
{
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "graph_processing_disabled");
}
if (job.Status != GraphJobStatus.Pending)
{
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "status_not_pending");
}
var now = _timeProvider.GetUtcNow();
GraphBuildJob running;
try
{
running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to transition graph job {JobId} to running state.", job.Id);
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "transition_invalid");
}
namespace StellaOps.Scheduler.Worker.Graph;
internal sealed class GraphBuildExecutionService
{
private readonly IGraphJobRepository _repository;
private readonly ICartographerBuildClient _cartographerClient;
private readonly IGraphJobCompletionClient _completionClient;
private readonly IOptions<SchedulerWorkerOptions> _options;
private readonly SchedulerWorkerMetrics _metrics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<GraphBuildExecutionService> _logger;
public GraphBuildExecutionService(
IGraphJobRepository repository,
ICartographerBuildClient cartographerClient,
IGraphJobCompletionClient completionClient,
IOptions<SchedulerWorkerOptions> options,
SchedulerWorkerMetrics metrics,
TimeProvider? timeProvider,
ILogger<GraphBuildExecutionService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_cartographerClient = cartographerClient ?? throw new ArgumentNullException(nameof(cartographerClient));
_completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient));
_options = options ?? throw new ArgumentNullException(nameof(options));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<GraphBuildExecutionResult> ExecuteAsync(GraphBuildJob job, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(job);
var graphOptions = _options.Value.Graph;
if (!graphOptions.Enabled)
{
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "graph_processing_disabled");
}
if (job.Status != GraphJobStatus.Pending)
{
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "status_not_pending");
}
var now = _timeProvider.GetUtcNow();
GraphBuildJob running;
try
{
running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to transition graph job {JobId} to running state.", job.Id);
_metrics.RecordGraphJobResult("build", "skipped");
return GraphBuildExecutionResult.Skipped(job, "transition_invalid");
}
if (!await _repository.TryReplaceAsync(running, job.Status, cancellationToken).ConfigureAwait(false))
{
_metrics.RecordGraphJobResult("build", "skipped");
@@ -78,161 +78,161 @@ internal sealed class GraphBuildExecutionService
}
_metrics.RecordGraphJobStart("build", running.TenantId, running.GraphSnapshotId ?? running.SbomId);
var attempt = 0;
CartographerBuildResult? lastResult = null;
Exception? lastException = null;
var backoff = graphOptions.RetryBackoff;
while (attempt < graphOptions.MaxAttempts)
{
cancellationToken.ThrowIfCancellationRequested();
attempt++;
try
{
var response = await _cartographerClient.StartBuildAsync(running, cancellationToken).ConfigureAwait(false);
lastResult = response;
if (!string.IsNullOrWhiteSpace(response.CartographerJobId) && response.CartographerJobId != running.CartographerJobId)
{
var updated = running with { CartographerJobId = response.CartographerJobId };
if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false))
{
running = updated;
}
}
if (!string.IsNullOrWhiteSpace(response.GraphSnapshotId) && response.GraphSnapshotId != running.GraphSnapshotId)
{
var updated = running with { GraphSnapshotId = response.GraphSnapshotId };
if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false))
{
running = updated;
}
}
if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false);
var attempt = 0;
CartographerBuildResult? lastResult = null;
Exception? lastException = null;
var backoff = graphOptions.RetryBackoff;
while (attempt < graphOptions.MaxAttempts)
{
cancellationToken.ThrowIfCancellationRequested();
attempt++;
try
{
var response = await _cartographerClient.StartBuildAsync(running, cancellationToken).ConfigureAwait(false);
lastResult = response;
if (!string.IsNullOrWhiteSpace(response.CartographerJobId) && response.CartographerJobId != running.CartographerJobId)
{
var updated = running with { CartographerJobId = response.CartographerJobId };
if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false))
{
running = updated;
}
}
if (!string.IsNullOrWhiteSpace(response.GraphSnapshotId) && response.GraphSnapshotId != running.GraphSnapshotId)
{
var updated = running with { GraphSnapshotId = response.GraphSnapshotId };
if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false))
{
running = updated;
}
}
if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false);
var duration = completionTime - running.CreatedAt;
_metrics.RecordGraphJobResult("build", "completed", duration);
_metrics.RecordGraphJobCompletion("build", running.TenantId, running.GraphSnapshotId ?? running.SbomId, "completed", duration);
return GraphBuildExecutionResult.Completed(running, response.ResultUri);
}
if (response.Status == GraphJobStatus.Failed)
{
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false);
if (response.Status == GraphJobStatus.Failed)
{
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false);
var duration = completionTime - running.CreatedAt;
_metrics.RecordGraphJobResult("build", "failed", duration);
_metrics.RecordGraphJobCompletion("build", running.TenantId, running.GraphSnapshotId ?? running.SbomId, "failed", duration);
return GraphBuildExecutionResult.Failed(running, response.Error);
}
_logger.LogWarning(
"Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay} (reason: {Reason}).",
attempt,
job.Id,
backoff,
response.Error ?? "unknown");
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
continue;
}
// If Cartographer reports pending/queued we wait and retry.
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error ?? "Cartographer did not complete the build.", cancellationToken).ConfigureAwait(false);
_logger.LogWarning(
"Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay} (reason: {Reason}).",
attempt,
job.Id,
backoff,
response.Error ?? "unknown");
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
continue;
}
// If Cartographer reports pending/queued we wait and retry.
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error ?? "Cartographer did not complete the build.", cancellationToken).ConfigureAwait(false);
var duration = completionTime - running.CreatedAt;
_metrics.RecordGraphJobResult("build", "failed", duration);
_metrics.RecordGraphJobCompletion("build", running.TenantId, running.GraphSnapshotId ?? running.SbomId, "failed", duration);
return GraphBuildExecutionResult.Failed(running, response.Error);
}
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
{
lastException = ex;
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false);
_metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt);
return GraphBuildExecutionResult.Failed(running, ex.Message);
}
_logger.LogWarning(ex, "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff);
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
}
}
var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer build failed";
var finalTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false);
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
{
lastException = ex;
if (attempt >= graphOptions.MaxAttempts)
{
var completionTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false);
_metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt);
return GraphBuildExecutionResult.Failed(running, ex.Message);
}
_logger.LogWarning(ex, "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff);
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
}
}
var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer build failed";
var finalTime = _timeProvider.GetUtcNow();
await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false);
var finalDuration = finalTime - running.CreatedAt;
_metrics.RecordGraphJobResult("build", "failed", finalDuration);
_metrics.RecordGraphJobCompletion("build", running.TenantId, running.GraphSnapshotId ?? running.SbomId, "failed", finalDuration);
return GraphBuildExecutionResult.Failed(running, error);
}
/// <summary>
/// Reports the terminal state of a graph build job back to the Scheduler through the
/// completion client. Failures to notify are logged and swallowed (best effort),
/// unless the token has been cancelled, in which case the exception propagates.
/// </summary>
/// <param name="job">Job whose completion is being reported.</param>
/// <param name="status">Terminal status to report.</param>
/// <param name="occurredAt">Timestamp recorded on the completion payload.</param>
/// <param name="graphSnapshotId">Snapshot id override; falls back to the job's own snapshot id when null.</param>
/// <param name="resultUri">Optional URI of the build output.</param>
/// <param name="error">Error detail; only included when <paramref name="status"/> is Failed.</param>
/// <param name="cancellationToken">Token observed by the notification call.</param>
private async Task NotifyCompletionAsync(
    GraphBuildJob job,
    GraphJobStatus status,
    DateTimeOffset occurredAt,
    string? graphSnapshotId,
    string? resultUri,
    string? error,
    CancellationToken cancellationToken)
{
    var dto = new GraphJobCompletionRequestDto(
        job.Id,
        "Build",
        status,
        occurredAt,
        graphSnapshotId ?? job.GraphSnapshotId,
        resultUri,
        job.CorrelationId,
        // Error text travels only for failures; a default message is supplied when the caller gave none.
        status == GraphJobStatus.Failed ? (error ?? "Cartographer build failed.") : null);
    try
    {
        await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
    {
        // Best effort: a failed notification must not crash the worker.
        _logger.LogError(ex, "Failed notifying Scheduler completion for graph job {JobId}.", job.Id);
    }
}
}
/// <summary>Outcome categories for a single graph build execution.</summary>
internal enum GraphBuildExecutionResultType
{
    /// <summary>The build finished successfully.</summary>
    Completed,
    /// <summary>The build failed; the result carries the error as its reason.</summary>
    Failed,
    /// <summary>The job was not processed (e.g. processing disabled or status not pending).</summary>
    Skipped
}
/// <summary>
/// Immutable outcome of one graph build execution: the job, an optional reason
/// (failure or skip explanation), and an optional result URI for completed builds.
/// </summary>
internal readonly record struct GraphBuildExecutionResult(
    GraphBuildExecutionResultType Type,
    GraphBuildJob Job,
    string? Reason = null,
    string? ResultUri = null)
{
    /// <summary>Result for a completed build; <paramref name="resultUri"/> points at the output.</summary>
    public static GraphBuildExecutionResult Completed(GraphBuildJob job, string? resultUri)
        => new(GraphBuildExecutionResultType.Completed, job, ResultUri: resultUri);

    /// <summary>Result for a failed build; the error text is stored as <see cref="Reason"/>.</summary>
    public static GraphBuildExecutionResult Failed(GraphBuildJob job, string? error)
        => new(GraphBuildExecutionResultType.Failed, job, error);

    /// <summary>Result for a skipped job with a machine-readable reason.</summary>
    public static GraphBuildExecutionResult Skipped(GraphBuildJob job, string reason)
        => new(GraphBuildExecutionResultType.Skipped, job, reason);
}
/// <summary>
/// Sends a graph build job's terminal state to the Scheduler completion endpoint.
/// Notification failures are logged and suppressed so the worker keeps running;
/// cancellation-driven exceptions are allowed to propagate.
/// </summary>
/// <param name="job">Job being finalized.</param>
/// <param name="status">Terminal status to report.</param>
/// <param name="occurredAt">When the terminal state was reached.</param>
/// <param name="graphSnapshotId">Snapshot id to report; defaults to the job's snapshot id when null.</param>
/// <param name="resultUri">Optional location of the produced artifact.</param>
/// <param name="error">Failure detail; forwarded only for a Failed status.</param>
/// <param name="cancellationToken">Token observed by the notification call.</param>
private async Task NotifyCompletionAsync(
    GraphBuildJob job,
    GraphJobStatus status,
    DateTimeOffset occurredAt,
    string? graphSnapshotId,
    string? resultUri,
    string? error,
    CancellationToken cancellationToken)
{
    var dto = new GraphJobCompletionRequestDto(
        job.Id,
        "Build",
        status,
        occurredAt,
        graphSnapshotId ?? job.GraphSnapshotId,
        resultUri,
        job.CorrelationId,
        // Only failures carry an error message; fall back to a default when none was given.
        status == GraphJobStatus.Failed ? (error ?? "Cartographer build failed.") : null);
    try
    {
        await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
    {
        // Best effort: log and continue rather than failing the execution path.
        _logger.LogError(ex, "Failed notifying Scheduler completion for graph job {JobId}.", job.Id);
    }
}
}
/// <summary>Classifies how a graph build execution attempt ended.</summary>
internal enum GraphBuildExecutionResultType
{
    /// <summary>The build completed successfully.</summary>
    Completed,
    /// <summary>The build failed; the accompanying result records the error.</summary>
    Failed,
    /// <summary>The job was skipped without being executed.</summary>
    Skipped
}
/// <summary>
/// Value-semantic outcome of one graph build execution, pairing the job with an
/// outcome type, an optional explanation, and an optional output URI.
/// </summary>
internal readonly record struct GraphBuildExecutionResult(
    GraphBuildExecutionResultType Type,
    GraphBuildJob Job,
    string? Reason = null,
    string? ResultUri = null)
{
    /// <summary>Builds a result for a job that finished successfully.</summary>
    public static GraphBuildExecutionResult Completed(GraphBuildJob job, string? resultUri)
    {
        return new GraphBuildExecutionResult(GraphBuildExecutionResultType.Completed, job, Reason: null, ResultUri: resultUri);
    }

    /// <summary>Builds a result for a failed job; the error becomes the reason.</summary>
    public static GraphBuildExecutionResult Failed(GraphBuildJob job, string? error)
    {
        return new GraphBuildExecutionResult(GraphBuildExecutionResultType.Failed, job, Reason: error);
    }

    /// <summary>Builds a result for a job that was skipped for the given reason.</summary>
    public static GraphBuildExecutionResult Skipped(GraphBuildJob job, string reason)
    {
        return new GraphBuildExecutionResult(GraphBuildExecutionResultType.Skipped, job, Reason: reason);
    }
}

Some files were not shown because too many files have changed in this diff Show More