Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Reachability Corpus Validation / validate-corpus (push) Has been cancelled
Reachability Corpus Validation / validate-ground-truths (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Reachability Corpus Validation / determinism-check (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
@@ -0,0 +1,30 @@
using System.Reflection;
using StellaOps.AirGap.Storage.Postgres;
using StellaOps.Infrastructure.Postgres.Testing;
using Xunit;

namespace StellaOps.AirGap.Storage.Postgres.Tests;

/// <summary>
/// PostgreSQL integration test fixture for the AirGap module.
/// Runs migrations from embedded resources and provides test isolation.
/// </summary>
public sealed class AirGapPostgresFixture : PostgresIntegrationFixture, ICollectionFixture<AirGapPostgresFixture>
{
    protected override Assembly? GetMigrationAssembly()
        => typeof(AirGapDataSource).Assembly;

    protected override string GetModuleName() => "AirGap";

    protected override string? GetResourcePrefix() => "Migrations";
}

/// <summary>
/// Collection definition for AirGap PostgreSQL integration tests.
/// Tests in this collection share a single PostgreSQL container instance.
/// </summary>
[CollectionDefinition(Name)]
public sealed class AirGapPostgresCollection : ICollectionFixture<AirGapPostgresFixture>
{
    public const string Name = "AirGapPostgres";
}
@@ -0,0 +1,167 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Controller.Domain;
using StellaOps.AirGap.Storage.Postgres;
using StellaOps.AirGap.Storage.Postgres.Repositories;
using StellaOps.AirGap.Time.Models;
using StellaOps.Infrastructure.Postgres.Options;
using Xunit;

namespace StellaOps.AirGap.Storage.Postgres.Tests;

[Collection(AirGapPostgresCollection.Name)]
public sealed class PostgresAirGapStateStoreTests : IAsyncLifetime
{
    private readonly AirGapPostgresFixture _fixture;
    private readonly PostgresAirGapStateStore _store;
    private readonly AirGapDataSource _dataSource;
    private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

    public PostgresAirGapStateStoreTests(AirGapPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = Options.Create(new PostgresOptions
        {
            ConnectionString = fixture.ConnectionString,
            SchemaName = AirGapDataSource.DefaultSchemaName,
            AutoMigrate = false
        });

        _dataSource = new AirGapDataSource(options, NullLogger<AirGapDataSource>.Instance);
        _store = new PostgresAirGapStateStore(_dataSource, NullLogger<PostgresAirGapStateStore>.Instance);
    }

    public async Task InitializeAsync()
    {
        await _fixture.TruncateAllTablesAsync();
    }

    public async Task DisposeAsync()
    {
        await _dataSource.DisposeAsync();
    }

    [Fact]
    public async Task GetAsync_ReturnsDefaultStateForNewTenant()
    {
        // Act
        var state = await _store.GetAsync(_tenantId);

        // Assert
        state.Should().NotBeNull();
        state.TenantId.Should().Be(_tenantId);
        state.Sealed.Should().BeFalse();
        state.PolicyHash.Should().BeNull();
    }

    [Fact]
    public async Task SetAndGet_RoundTripsState()
    {
        // Arrange
        var timeAnchor = new TimeAnchor(
            DateTimeOffset.UtcNow,
            "tsa.example.com",
            "RFC3161",
            "sha256:fingerprint123",
            "sha256:tokendigest456");

        var state = new AirGapState
        {
            Id = Guid.NewGuid().ToString("N"),
            TenantId = _tenantId,
            Sealed = true,
            PolicyHash = "sha256:policy789",
            TimeAnchor = timeAnchor,
            LastTransitionAt = DateTimeOffset.UtcNow,
            StalenessBudget = new StalenessBudget(1800, 3600),
            DriftBaselineSeconds = 5,
            ContentBudgets = new Dictionary<string, StalenessBudget>
            {
                ["advisories"] = new StalenessBudget(7200, 14400),
                ["vex"] = new StalenessBudget(3600, 7200)
            }
        };

        // Act
        await _store.SetAsync(state);
        var fetched = await _store.GetAsync(_tenantId);

        // Assert
        fetched.Should().NotBeNull();
        fetched.Sealed.Should().BeTrue();
        fetched.PolicyHash.Should().Be("sha256:policy789");
        fetched.TimeAnchor.Source.Should().Be("tsa.example.com");
        fetched.TimeAnchor.Format.Should().Be("RFC3161");
        fetched.StalenessBudget.WarningSeconds.Should().Be(1800);
        fetched.StalenessBudget.BreachSeconds.Should().Be(3600);
        fetched.DriftBaselineSeconds.Should().Be(5);
        fetched.ContentBudgets.Should().HaveCount(2);
        fetched.ContentBudgets["advisories"].WarningSeconds.Should().Be(7200);
    }

    [Fact]
    public async Task SetAsync_UpdatesExistingState()
    {
        // Arrange
        var state1 = new AirGapState
        {
            Id = Guid.NewGuid().ToString("N"),
            TenantId = _tenantId,
            Sealed = false,
            TimeAnchor = TimeAnchor.Unknown,
            StalenessBudget = StalenessBudget.Default
        };

        var state2 = new AirGapState
        {
            Id = state1.Id,
            TenantId = _tenantId,
            Sealed = true,
            PolicyHash = "sha256:updated",
            TimeAnchor = new TimeAnchor(DateTimeOffset.UtcNow, "updated-source", "rfc3161", "", ""),
            LastTransitionAt = DateTimeOffset.UtcNow,
            StalenessBudget = new StalenessBudget(600, 1200)
        };

        // Act
        await _store.SetAsync(state1);
        await _store.SetAsync(state2);
        var fetched = await _store.GetAsync(_tenantId);

        // Assert
        fetched.Sealed.Should().BeTrue();
        fetched.PolicyHash.Should().Be("sha256:updated");
        fetched.TimeAnchor.Source.Should().Be("updated-source");
        fetched.StalenessBudget.WarningSeconds.Should().Be(600);
    }

    [Fact]
    public async Task SetAsync_PersistsContentBudgets()
    {
        // Arrange
        var state = new AirGapState
        {
            Id = Guid.NewGuid().ToString("N"),
            TenantId = _tenantId,
            TimeAnchor = TimeAnchor.Unknown,
            StalenessBudget = StalenessBudget.Default,
            ContentBudgets = new Dictionary<string, StalenessBudget>
            {
                ["advisories"] = new StalenessBudget(3600, 7200),
                ["vex"] = new StalenessBudget(1800, 3600),
                ["policy"] = new StalenessBudget(900, 1800)
            }
        };

        // Act
        await _store.SetAsync(state);
        var fetched = await _store.GetAsync(_tenantId);

        // Assert
        fetched.ContentBudgets.Should().HaveCount(3);
        fetched.ContentBudgets.Should().ContainKey("advisories");
        fetched.ContentBudgets.Should().ContainKey("vex");
        fetched.ContentBudgets.Should().ContainKey("policy");
    }
}
@@ -0,0 +1,33 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.AirGap.Storage.Postgres\StellaOps.AirGap.Storage.Postgres.csproj" />
    <ProjectReference Include="..\StellaOps.AirGap.Controller\StellaOps.AirGap.Controller.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
  </ItemGroup>

</Project>
src/Aoc/StellaOps.Aoc.Cli/Commands/VerifyCommand.cs (new file, 177 lines)
@@ -0,0 +1,177 @@
using System.CommandLine;
using System.CommandLine.Invocation;
using System.Text.Json;
using StellaOps.Aoc.Cli.Models;
using StellaOps.Aoc.Cli.Services;

namespace StellaOps.Aoc.Cli.Commands;

public static class VerifyCommand
{
    public static Command Create()
    {
        var sinceOption = new Option<string>(
            aliases: ["--since", "-s"],
            description: "Git commit SHA or ISO timestamp to verify from")
        {
            IsRequired = true
        };

        var mongoOption = new Option<string?>(
            aliases: ["--mongo", "-m"],
            description: "MongoDB connection string (legacy support)");

        var postgresOption = new Option<string?>(
            aliases: ["--postgres", "-p"],
            description: "PostgreSQL connection string");

        var outputOption = new Option<string?>(
            aliases: ["--output", "-o"],
            description: "Path for JSON output report");

        var ndjsonOption = new Option<string?>(
            aliases: ["--ndjson", "-n"],
            description: "Path for NDJSON output (one violation per line)");

        var tenantOption = new Option<string?>(
            aliases: ["--tenant", "-t"],
            description: "Filter by tenant ID");

        var dryRunOption = new Option<bool>(
            aliases: ["--dry-run"],
            description: "Validate configuration without querying database",
            getDefaultValue: () => false);

        var verboseOption = new Option<bool>(
            aliases: ["--verbose", "-v"],
            description: "Enable verbose output",
            getDefaultValue: () => false);

        var command = new Command("verify", "Verify AOC compliance for documents since a given point")
        {
            sinceOption,
            mongoOption,
            postgresOption,
            outputOption,
            ndjsonOption,
            tenantOption,
            dryRunOption,
            verboseOption
        };

        command.SetHandler(async (context) =>
        {
            var since = context.ParseResult.GetValueForOption(sinceOption)!;
            var mongo = context.ParseResult.GetValueForOption(mongoOption);
            var postgres = context.ParseResult.GetValueForOption(postgresOption);
            var output = context.ParseResult.GetValueForOption(outputOption);
            var ndjson = context.ParseResult.GetValueForOption(ndjsonOption);
            var tenant = context.ParseResult.GetValueForOption(tenantOption);
            var dryRun = context.ParseResult.GetValueForOption(dryRunOption);
            var verbose = context.ParseResult.GetValueForOption(verboseOption);

            var options = new VerifyOptions
            {
                Since = since,
                MongoConnectionString = mongo,
                PostgresConnectionString = postgres,
                OutputPath = output,
                NdjsonPath = ndjson,
                Tenant = tenant,
                DryRun = dryRun,
                Verbose = verbose
            };

            var exitCode = await ExecuteAsync(options, context.GetCancellationToken());
            context.ExitCode = exitCode;
        });

        return command;
    }

    private static async Task<int> ExecuteAsync(VerifyOptions options, CancellationToken cancellationToken)
    {
        if (options.Verbose)
        {
            Console.WriteLine($"AOC Verify starting...");
            Console.WriteLine($" Since: {options.Since}");
            Console.WriteLine($" Tenant: {options.Tenant ?? "(all)"}");
            Console.WriteLine($" Dry run: {options.DryRun}");
        }

        // Validate connection string is provided
        if (string.IsNullOrEmpty(options.MongoConnectionString) && string.IsNullOrEmpty(options.PostgresConnectionString))
        {
            Console.Error.WriteLine("Error: Either --mongo or --postgres connection string is required");
            return 1;
        }

        if (options.DryRun)
        {
            Console.WriteLine("Dry run mode - configuration validated successfully");
            return 0;
        }

        try
        {
            var service = new AocVerificationService();
            var result = await service.VerifyAsync(options, cancellationToken);

            // Write JSON output if requested
            if (!string.IsNullOrEmpty(options.OutputPath))
            {
                var json = JsonSerializer.Serialize(result, new JsonSerializerOptions
                {
                    WriteIndented = true,
                    PropertyNamingPolicy = JsonNamingPolicy.CamelCase
                });
                await File.WriteAllTextAsync(options.OutputPath, json, cancellationToken);

                if (options.Verbose)
                {
                    Console.WriteLine($"JSON report written to: {options.OutputPath}");
                }
            }

            // Write NDJSON output if requested
            if (!string.IsNullOrEmpty(options.NdjsonPath))
            {
                var ndjsonLines = result.Violations.Select(v =>
                    JsonSerializer.Serialize(v, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }));
                await File.WriteAllLinesAsync(options.NdjsonPath, ndjsonLines, cancellationToken);

                if (options.Verbose)
                {
                    Console.WriteLine($"NDJSON report written to: {options.NdjsonPath}");
                }
            }

            // Output summary
            Console.WriteLine($"AOC Verification Complete");
            Console.WriteLine($" Documents scanned: {result.DocumentsScanned}");
            Console.WriteLine($" Violations found: {result.ViolationCount}");
            Console.WriteLine($" Duration: {result.DurationMs}ms");

            if (result.ViolationCount > 0)
            {
                Console.WriteLine();
                Console.WriteLine("Violations by type:");
                foreach (var group in result.Violations.GroupBy(v => v.Code))
                {
                    Console.WriteLine($" {group.Key}: {group.Count()}");
                }
            }

            return result.ViolationCount > 0 ? 2 : 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"Error during verification: {ex.Message}");
            if (options.Verbose)
            {
                Console.Error.WriteLine(ex.StackTrace);
            }
            return 1;
        }
    }
}
src/Aoc/StellaOps.Aoc.Cli/Models/VerificationResult.cs (new file, 57 lines)
@@ -0,0 +1,57 @@
using System.Text.Json.Serialization;

namespace StellaOps.Aoc.Cli.Models;

public sealed class VerificationResult
{
    [JsonPropertyName("since")]
    public required string Since { get; init; }

    [JsonPropertyName("tenant")]
    public string? Tenant { get; init; }

    [JsonPropertyName("verifiedAt")]
    public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;

    [JsonPropertyName("documentsScanned")]
    public int DocumentsScanned { get; set; }

    [JsonPropertyName("violationCount")]
    public int ViolationCount => Violations.Count;

    [JsonPropertyName("violations")]
    public List<DocumentViolation> Violations { get; init; } = [];

    [JsonPropertyName("durationMs")]
    public long DurationMs { get; set; }

    [JsonPropertyName("status")]
    public string Status => ViolationCount == 0 ? "PASS" : "FAIL";
}

public sealed class DocumentViolation
{
    [JsonPropertyName("documentId")]
    public required string DocumentId { get; init; }

    [JsonPropertyName("collection")]
    public required string Collection { get; init; }

    [JsonPropertyName("code")]
    public required string Code { get; init; }

    [JsonPropertyName("path")]
    public required string Path { get; init; }

    [JsonPropertyName("message")]
    public required string Message { get; init; }

    [JsonPropertyName("tenant")]
    public string? Tenant { get; init; }

    [JsonPropertyName("detectedAt")]
    public DateTimeOffset DetectedAt { get; init; } = DateTimeOffset.UtcNow;

    [JsonPropertyName("documentTimestamp")]
    public DateTimeOffset? DocumentTimestamp { get; init; }
}
src/Aoc/StellaOps.Aoc.Cli/Models/VerifyOptions.cs (new file, 13 lines)
@@ -0,0 +1,13 @@
namespace StellaOps.Aoc.Cli.Models;

public sealed class VerifyOptions
{
    public required string Since { get; init; }
    public string? MongoConnectionString { get; init; }
    public string? PostgresConnectionString { get; init; }
    public string? OutputPath { get; init; }
    public string? NdjsonPath { get; init; }
    public string? Tenant { get; init; }
    public bool DryRun { get; init; }
    public bool Verbose { get; init; }
}
src/Aoc/StellaOps.Aoc.Cli/Program.cs (new file, 18 lines)
@@ -0,0 +1,18 @@
using System.CommandLine;
using System.Text.Json;
using StellaOps.Aoc.Cli.Commands;

namespace StellaOps.Aoc.Cli;

public static class Program
{
    public static async Task<int> Main(string[] args)
    {
        var rootCommand = new RootCommand("StellaOps AOC CLI - Verify append-only contract compliance")
        {
            VerifyCommand.Create()
        };

        return await rootCommand.InvokeAsync(args);
    }
}
src/Aoc/StellaOps.Aoc.Cli/Services/AocVerificationService.cs (new file, 256 lines)
@@ -0,0 +1,256 @@
using System.Diagnostics;
using System.Text.Json;
using Npgsql;
using StellaOps.Aoc.Cli.Models;

namespace StellaOps.Aoc.Cli.Services;

public sealed class AocVerificationService
{
    private readonly AocWriteGuard _guard = new();

    public async Task<VerificationResult> VerifyAsync(VerifyOptions options, CancellationToken cancellationToken = default)
    {
        var stopwatch = Stopwatch.StartNew();

        var result = new VerificationResult
        {
            Since = options.Since,
            Tenant = options.Tenant
        };

        // Parse the since parameter
        var sinceTimestamp = ParseSinceParameter(options.Since);

        // Route to appropriate database verification
        if (!string.IsNullOrEmpty(options.PostgresConnectionString))
        {
            await VerifyPostgresAsync(options.PostgresConnectionString, sinceTimestamp, options.Tenant, result, cancellationToken);
        }
        else if (!string.IsNullOrEmpty(options.MongoConnectionString))
        {
            // MongoDB support - for legacy verification
            // Note: The codebase is transitioning to PostgreSQL
            await VerifyMongoAsync(options.MongoConnectionString, sinceTimestamp, options.Tenant, result, cancellationToken);
        }

        stopwatch.Stop();
        result.DurationMs = stopwatch.ElapsedMilliseconds;

        return result;
    }

    private static DateTimeOffset ParseSinceParameter(string since)
    {
        // Try parsing as ISO timestamp first
        if (DateTimeOffset.TryParse(since, out var timestamp))
        {
            return timestamp;
        }

        // If it looks like a git commit SHA, use current time minus a default window
        // In a real implementation, we'd query git for the commit timestamp
        if (since.Length >= 7 && since.All(c => char.IsLetterOrDigit(c)))
        {
            // Default to 24 hours ago for commit-based queries
            // The actual implementation would resolve the commit timestamp
            return DateTimeOffset.UtcNow.AddHours(-24);
        }

        // Default fallback
        return DateTimeOffset.UtcNow.AddDays(-1);
    }

    private async Task VerifyPostgresAsync(
        string connectionString,
        DateTimeOffset since,
        string? tenant,
        VerificationResult result,
        CancellationToken cancellationToken)
    {
        await using var connection = new NpgsqlConnection(connectionString);
        await connection.OpenAsync(cancellationToken);

        // Query advisory_raw documents from Concelier
        await VerifyConcelierDocumentsAsync(connection, since, tenant, result, cancellationToken);

        // Query VEX documents from Excititor
        await VerifyExcititorDocumentsAsync(connection, since, tenant, result, cancellationToken);
    }

    private async Task VerifyConcelierDocumentsAsync(
        NpgsqlConnection connection,
        DateTimeOffset since,
        string? tenant,
        VerificationResult result,
        CancellationToken cancellationToken)
    {
        var sql = """
            SELECT id, tenant, content, created_at
            FROM concelier.advisory_raw
            WHERE created_at >= @since
            """;

        if (!string.IsNullOrEmpty(tenant))
        {
            sql += " AND tenant = @tenant";
        }

        await using var cmd = new NpgsqlCommand(sql, connection);
        cmd.Parameters.AddWithValue("since", since);

        if (!string.IsNullOrEmpty(tenant))
        {
            cmd.Parameters.AddWithValue("tenant", tenant);
        }

        try
        {
            await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);

            while (await reader.ReadAsync(cancellationToken))
            {
                result.DocumentsScanned++;

                var docId = reader.GetString(0);
                var docTenant = reader.IsDBNull(1) ? null : reader.GetString(1);
                var contentJson = reader.GetString(2);
                var createdAt = reader.GetDateTime(3);

                try
                {
                    using var doc = JsonDocument.Parse(contentJson);
                    var guardResult = _guard.Validate(doc.RootElement);

                    foreach (var violation in guardResult.Violations)
                    {
                        result.Violations.Add(new DocumentViolation
                        {
                            DocumentId = docId,
                            Collection = "concelier.advisory_raw",
                            Code = violation.Code.ToErrorCode(),
                            Path = violation.Path,
                            Message = violation.Message,
                            Tenant = docTenant,
                            DocumentTimestamp = new DateTimeOffset(createdAt, TimeSpan.Zero)
                        });
                    }
                }
                catch (JsonException)
                {
                    result.Violations.Add(new DocumentViolation
                    {
                        DocumentId = docId,
                        Collection = "concelier.advisory_raw",
                        Code = "ERR_AOC_PARSE",
                        Path = "/",
                        Message = "Document content is not valid JSON",
                        Tenant = docTenant,
                        DocumentTimestamp = new DateTimeOffset(createdAt, TimeSpan.Zero)
                    });
                }
            }
        }
        catch (PostgresException ex) when (ex.SqlState == "42P01") // relation does not exist
        {
            // Table doesn't exist - this is okay for fresh installations
            Console.WriteLine("Note: concelier.advisory_raw table not found (may not be initialized)");
        }
    }

    private async Task VerifyExcititorDocumentsAsync(
        NpgsqlConnection connection,
        DateTimeOffset since,
        string? tenant,
        VerificationResult result,
        CancellationToken cancellationToken)
    {
        var sql = """
            SELECT id, tenant, document, created_at
            FROM excititor.vex_documents
            WHERE created_at >= @since
            """;

        if (!string.IsNullOrEmpty(tenant))
        {
            sql += " AND tenant = @tenant";
        }

        await using var cmd = new NpgsqlCommand(sql, connection);
        cmd.Parameters.AddWithValue("since", since);

        if (!string.IsNullOrEmpty(tenant))
        {
            cmd.Parameters.AddWithValue("tenant", tenant);
        }

        try
        {
            await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);

            while (await reader.ReadAsync(cancellationToken))
            {
                result.DocumentsScanned++;

                var docId = reader.GetString(0);
                var docTenant = reader.IsDBNull(1) ? null : reader.GetString(1);
                var contentJson = reader.GetString(2);
                var createdAt = reader.GetDateTime(3);

                try
                {
                    using var doc = JsonDocument.Parse(contentJson);
                    var guardResult = _guard.Validate(doc.RootElement);

                    foreach (var violation in guardResult.Violations)
                    {
                        result.Violations.Add(new DocumentViolation
                        {
                            DocumentId = docId,
                            Collection = "excititor.vex_documents",
                            Code = violation.Code.ToErrorCode(),
                            Path = violation.Path,
                            Message = violation.Message,
                            Tenant = docTenant,
                            DocumentTimestamp = new DateTimeOffset(createdAt, TimeSpan.Zero)
                        });
                    }
                }
                catch (JsonException)
                {
                    result.Violations.Add(new DocumentViolation
                    {
                        DocumentId = docId,
                        Collection = "excititor.vex_documents",
                        Code = "ERR_AOC_PARSE",
                        Path = "/",
                        Message = "Document content is not valid JSON",
                        Tenant = docTenant,
                        DocumentTimestamp = new DateTimeOffset(createdAt, TimeSpan.Zero)
                    });
                }
            }
        }
        catch (PostgresException ex) when (ex.SqlState == "42P01") // relation does not exist
        {
            // Table doesn't exist - this is okay for fresh installations
            Console.WriteLine("Note: excititor.vex_documents table not found (may not be initialized)");
        }
    }

    private Task VerifyMongoAsync(
        string connectionString,
        DateTimeOffset since,
        string? tenant,
        VerificationResult result,
        CancellationToken cancellationToken)
    {
        // MongoDB support is deprecated - log warning and return empty result
        Console.WriteLine("Warning: MongoDB verification is deprecated. The codebase is transitioning to PostgreSQL.");
        Console.WriteLine(" Use --postgres instead of --mongo for production verification.");

        // For backwards compatibility during transition, we don't fail
        // but we also don't perform actual MongoDB queries
        return Task.CompletedTask;
    }
}
src/Aoc/StellaOps.Aoc.Cli/StellaOps.Aoc.Cli.csproj (new file, 25 lines)
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <AssemblyName>stella-aoc</AssemblyName>
    <RootNamespace>StellaOps.Aoc.Cli</RootNamespace>
    <Description>StellaOps AOC CLI - Verify append-only contract compliance in advisory databases</Description>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
    <PackageReference Include="Microsoft.Extensions.Logging" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0" />
    <PackageReference Include="Npgsql" Version="9.0.2" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,12 @@
; Shipped analyzer releases
; https://github.com/dotnet/roslyn-analyzers/blob/main/src/Microsoft.CodeAnalysis.Analyzers/ReleaseTrackingAnalyzers.Help.md

## Release 1.0

### New Rules

Rule ID | Category | Severity | Notes
--------|----------|----------|-------
AOC0001 | AOC | Error | AocForbiddenFieldAnalyzer - Detects writes to forbidden fields
AOC0002 | AOC | Error | AocForbiddenFieldAnalyzer - Detects writes to derived fields
AOC0003 | AOC | Warning | AocForbiddenFieldAnalyzer - Detects unguarded database writes
@@ -0,0 +1,7 @@
; Unshipped analyzer changes
; https://github.com/dotnet/roslyn-analyzers/blob/main/src/Microsoft.CodeAnalysis.Analyzers/ReleaseTrackingAnalyzers.Help.md

### New Rules

Rule ID | Category | Severity | Notes
--------|----------|----------|-------
@@ -0,0 +1,404 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Operations;

namespace StellaOps.Aoc.Analyzers;

/// <summary>
/// Roslyn analyzer that detects writes to AOC-forbidden fields during ingestion.
/// This prevents accidental overwrites of derived/computed fields that should only
/// be set by the merge/decisioning pipeline.
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public sealed class AocForbiddenFieldAnalyzer : DiagnosticAnalyzer
{
    public const string DiagnosticIdForbiddenField = "AOC0001";
    public const string DiagnosticIdDerivedField = "AOC0002";
    public const string DiagnosticIdUnguardedWrite = "AOC0003";

    private static readonly ImmutableHashSet<string> ForbiddenTopLevel = ImmutableHashSet.Create(
        StringComparer.OrdinalIgnoreCase,
        "severity",
        "cvss",
        "cvss_vector",
        "effective_status",
        "effective_range",
        "merged_from",
        "consensus_provider",
        "reachability",
        "asset_criticality",
        "risk_score");

    private static readonly DiagnosticDescriptor ForbiddenFieldRule = new(
        DiagnosticIdForbiddenField,
        title: "AOC forbidden field write detected",
        messageFormat: "Field '{0}' is forbidden in AOC ingestion context; this field is computed by the decisioning pipeline (ERR_AOC_001)",
        category: "AOC",
        defaultSeverity: DiagnosticSeverity.Error,
        isEnabledByDefault: true,
        description: "AOC (Append-Only Contracts) forbid writes to certain fields during ingestion. These fields are computed by downstream merge/decisioning pipelines and must not be set during initial data capture.",
        helpLinkUri: "https://stella-ops.org/docs/aoc/forbidden-fields");

    private static readonly DiagnosticDescriptor DerivedFieldRule = new(
        DiagnosticIdDerivedField,
        title: "AOC derived field write detected",
        messageFormat: "Derived field '{0}' must not be written during ingestion; effective_* fields are computed post-merge (ERR_AOC_006)",
        category: "AOC",
        defaultSeverity: DiagnosticSeverity.Error,
        isEnabledByDefault: true,
        description: "Fields prefixed with 'effective_' are derived values computed after merge. Writing them during ingestion violates append-only contracts.",
        helpLinkUri: "https://stella-ops.org/docs/aoc/derived-fields");

    private static readonly DiagnosticDescriptor UnguardedWriteRule = new(
        DiagnosticIdUnguardedWrite,
        title: "AOC unguarded database write detected",
        messageFormat: "Database write operation '{0}' detected without AOC guard validation; wrap with IAocGuard.Validate() (ERR_AOC_007)",
        category: "AOC",
        defaultSeverity: DiagnosticSeverity.Warning,
        isEnabledByDefault: true,
        description: "All database writes in ingestion pipelines should be validated by the AOC guard to ensure forbidden fields are not written.",
        helpLinkUri: "https://stella-ops.org/docs/aoc/guard-usage");

    public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics =>
        ImmutableArray.Create(ForbiddenFieldRule, DerivedFieldRule, UnguardedWriteRule);

    public override void Initialize(AnalysisContext context)
    {
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
        context.EnableConcurrentExecution();

        context.RegisterOperationAction(AnalyzeAssignment, OperationKind.SimpleAssignment);
        context.RegisterOperationAction(AnalyzePropertyReference, OperationKind.PropertyReference);
        context.RegisterOperationAction(AnalyzeInvocation, OperationKind.Invocation);
        context.RegisterSyntaxNodeAction(AnalyzeObjectInitializer, SyntaxKind.ObjectInitializerExpression);
        context.RegisterSyntaxNodeAction(AnalyzeAnonymousObjectMember, SyntaxKind.AnonymousObjectMemberDeclarator);
    }

    private static void AnalyzeAssignment(OperationAnalysisContext context)
    {
        if (context.Operation is not ISimpleAssignmentOperation assignment)
        {
            return;
        }

        if (!IsIngestionContext(context.ContainingSymbol))
        {
            return;
        }

        var targetName = GetTargetPropertyName(assignment.Target);
        if (string.IsNullOrEmpty(targetName))
        {
            return;
        }

        CheckForbiddenField(context, targetName!, assignment.Syntax.GetLocation());
    }

    private static void AnalyzePropertyReference(OperationAnalysisContext context)
    {
        if (context.Operation is not IPropertyReferenceOperation propertyRef)
        {
            return;
        }

        if (!IsIngestionContext(context.ContainingSymbol))
        {
            return;
        }

        if (!IsWriteContext(propertyRef))
        {
            return;
        }

        var propertyName = propertyRef.Property.Name;
        CheckForbiddenField(context, propertyName, propertyRef.Syntax.GetLocation());
    }

    private static void AnalyzeInvocation(OperationAnalysisContext context)
    {
        if (context.Operation is not IInvocationOperation invocation)
        {
            return;
        }

        if (!IsIngestionContext(context.ContainingSymbol))
        {
            return;
        }

        var method = invocation.TargetMethod;
        var methodName = method.Name;

        // Check for dictionary/document indexer writes with forbidden keys
        if (IsDictionarySetOperation(method))
        {
            CheckDictionaryWriteArguments(context, invocation);
            return;
        }

        // Check for unguarded database write operations
        if (IsDatabaseWriteOperation(method))
        {
            if (!IsWithinAocGuardScope(invocation))
            {
                var diagnostic = Diagnostic.Create(
                    UnguardedWriteRule,
                    invocation.Syntax.GetLocation(),
                    $"{method.ContainingType?.Name}.{methodName}");
                context.ReportDiagnostic(diagnostic);
            }
        }
    }

    private static void AnalyzeObjectInitializer(SyntaxNodeAnalysisContext context)
    {
        var initializer = (InitializerExpressionSyntax)context.Node;

        if (!IsIngestionContext(context.ContainingSymbol))
        {
            return;
        }

        foreach (var expression in initializer.Expressions)
        {
            if (expression is AssignmentExpressionSyntax assignment)
            {
                var left = assignment.Left;
                string? propertyName = left switch
                {
                    IdentifierNameSyntax identifier => identifier.Identifier.Text,
                    _ => null
                };

                if (!string.IsNullOrEmpty(propertyName))
                {
                    CheckForbiddenFieldSyntax(context, propertyName!, left.GetLocation());
                }
            }
        }
    }

    private static void AnalyzeAnonymousObjectMember(SyntaxNodeAnalysisContext context)
    {
        var member = (AnonymousObjectMemberDeclaratorSyntax)context.Node;

        if (!IsIngestionContext(context.ContainingSymbol))
        {
            return;
        }

        var name = member.NameEquals?.Name.Identifier.Text;
        if (!string.IsNullOrEmpty(name))
        {
            CheckForbiddenFieldSyntax(context, name!, member.GetLocation());
        }
    }

    private static void CheckForbiddenField(OperationAnalysisContext context, string fieldName, Location location)
    {
        if (ForbiddenTopLevel.Contains(fieldName))
        {
            var diagnostic = Diagnostic.Create(ForbiddenFieldRule, location, fieldName);
            context.ReportDiagnostic(diagnostic);
            return;
        }

        if (fieldName.StartsWith("effective_", StringComparison.OrdinalIgnoreCase))
        {
            var diagnostic = Diagnostic.Create(DerivedFieldRule, location, fieldName);
            context.ReportDiagnostic(diagnostic);
        }
    }

    private static void CheckForbiddenFieldSyntax(SyntaxNodeAnalysisContext context, string fieldName, Location location)
    {
        if (ForbiddenTopLevel.Contains(fieldName))
        {
            var diagnostic = Diagnostic.Create(ForbiddenFieldRule, location, fieldName);
            context.ReportDiagnostic(diagnostic);
            return;
        }

        if (fieldName.StartsWith("effective_", StringComparison.OrdinalIgnoreCase))
        {
            var diagnostic = Diagnostic.Create(DerivedFieldRule, location, fieldName);
            context.ReportDiagnostic(diagnostic);
        }
    }

    private static void CheckDictionaryWriteArguments(OperationAnalysisContext context, IInvocationOperation invocation)
    {
        foreach (var argument in invocation.Arguments)
        {
            if (argument.Value is ILiteralOperation literal && literal.ConstantValue.HasValue)
            {
                var value = literal.ConstantValue.Value?.ToString();
                if (!string.IsNullOrEmpty(value))
                {
                    CheckForbiddenField(context, value!, argument.Syntax.GetLocation());
                }
            }
        }
    }

    private static string? GetTargetPropertyName(IOperation? target)
    {
        return target switch
        {
            IPropertyReferenceOperation propRef => propRef.Property.Name,
            IFieldReferenceOperation fieldRef => fieldRef.Field.Name,
            ILocalReferenceOperation localRef => localRef.Local.Name,
            _ => null
        };
    }

    private static bool IsWriteContext(IPropertyReferenceOperation propertyRef)
    {
        var parent = propertyRef.Parent;
        return parent is ISimpleAssignmentOperation assignment && assignment.Target == propertyRef;
    }

    private static bool IsIngestionContext(ISymbol? containingSymbol)
    {
        if (containingSymbol is null)
        {
            return false;
        }

        var assemblyName = containingSymbol.ContainingAssembly?.Name;
        if (string.IsNullOrEmpty(assemblyName))
        {
            return false;
        }

        // Allow analyzer assemblies and tests
        if (assemblyName!.EndsWith(".Analyzers", StringComparison.Ordinal) ||
            assemblyName.EndsWith(".Tests", StringComparison.Ordinal))
        {
            return false;
        }

        // Check for ingestion-related assemblies/namespaces
        // (string.Contains(string, StringComparison) is unavailable on netstandard2.0, so IndexOf is used)
        if (assemblyName.IndexOf(".Connector.", StringComparison.Ordinal) >= 0 ||
            assemblyName.IndexOf(".Ingestion", StringComparison.Ordinal) >= 0 ||
            assemblyName.EndsWith(".Connector", StringComparison.Ordinal))
        {
            return true;
        }

        // Check namespace for ingestion context
        var ns = containingSymbol.ContainingNamespace?.ToDisplayString();
        if (!string.IsNullOrEmpty(ns))
        {
            if (ns!.IndexOf(".Connector.", StringComparison.Ordinal) >= 0 ||
                ns.IndexOf(".Ingestion", StringComparison.Ordinal) >= 0)
            {
                return true;
            }
        }

        return false;
    }

    private static bool IsDictionarySetOperation(IMethodSymbol method)
    {
        var name = method.Name;
        if (!string.Equals(name, "set_Item", StringComparison.Ordinal) &&
            !string.Equals(name, "Add", StringComparison.Ordinal) &&
            !string.Equals(name, "TryAdd", StringComparison.Ordinal) &&
            !string.Equals(name, "Set", StringComparison.Ordinal))
        {
            return false;
        }

        var containingType = method.ContainingType;
        if (containingType is null)
        {
            return false;
        }

        var typeName = containingType.ToDisplayString();
        return typeName.IndexOf("Dictionary", StringComparison.Ordinal) >= 0 ||
               typeName.IndexOf("BsonDocument", StringComparison.Ordinal) >= 0 ||
               typeName.IndexOf("JsonObject", StringComparison.Ordinal) >= 0 ||
               typeName.IndexOf("JsonElement", StringComparison.Ordinal) >= 0;
    }

    private static bool IsDatabaseWriteOperation(IMethodSymbol method)
    {
        var name = method.Name;
        var writeOps = new[]
        {
            "InsertOne", "InsertOneAsync",
            "InsertMany", "InsertManyAsync",
            "UpdateOne", "UpdateOneAsync",
            "UpdateMany", "UpdateManyAsync",
            "ReplaceOne", "ReplaceOneAsync",
            "BulkWrite", "BulkWriteAsync",
            "ExecuteNonQuery", "ExecuteNonQueryAsync",
            "SaveChanges", "SaveChangesAsync",
            "Add", "AddAsync",
            "Update", "UpdateAsync"
        };

        foreach (var op in writeOps)
        {
            if (string.Equals(name, op, StringComparison.Ordinal))
            {
                return true;
            }
        }

        return false;
    }

    private static bool IsWithinAocGuardScope(IInvocationOperation invocation)
    {
        // Walk up the operation tree to find if we're within an AOC guard validation scope
        var current = invocation.Parent;
        var depth = 0;
        const int maxDepth = 20;

        while (current is not null && depth < maxDepth)
        {
            if (current is IInvocationOperation parentInvocation)
            {
                var method = parentInvocation.TargetMethod;
                if (method.Name == "Validate" &&
                    method.ContainingType?.Name.IndexOf("AocGuard", StringComparison.Ordinal) >= 0)
                {
                    return true;
                }
            }

            // Check if containing method has IAocGuard parameter or calls Validate
            if (current is IBlockOperation)
            {
                // We've reached a method body; check the containing method signature
                var containingMethod = invocation.SemanticModel?.GetEnclosingSymbol(invocation.Syntax.SpanStart) as IMethodSymbol;
                if (containingMethod is not null)
                {
                    foreach (var param in containingMethod.Parameters)
                    {
                        if (param.Type.Name.IndexOf("AocGuard", StringComparison.Ordinal) >= 0)
                        {
                            return true;
                        }
                    }
                }
            }

            current = current.Parent;
            depth++;
        }

        return false;
    }
}
src/Aoc/__Analyzers/StellaOps.Aoc.Analyzers/README.md (new file, 57 lines)
@@ -0,0 +1,57 @@
# StellaOps.Aoc.Analyzers

Roslyn source analyzers that enforce AOC (Append-Only Contracts) at compile time.

## Rules

| Rule ID | Category | Severity | Description |
|---------|----------|----------|-------------|
| AOC0001 | AOC | Error | Forbidden field write detected - fields like `severity`, `cvss`, etc. |
| AOC0002 | AOC | Error | Derived field write detected - `effective_*` prefixed fields |
| AOC0003 | AOC | Warning | Unguarded database write - writes without `IAocGuard.Validate()` |

## Forbidden Fields

The following fields must not be written during ingestion:

- `severity`
- `cvss`
- `cvss_vector`
- `effective_status`
- `effective_range`
- `merged_from`
- `consensus_provider`
- `reachability`
- `asset_criticality`
- `risk_score`

Additionally, any field prefixed with `effective_` is considered derived and forbidden; a short example of flagged writes is shown below.
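For illustration, the following sketch (modelled on the analyzer's own test fixtures; `AdvisoryModel` and `Ingester` are illustrative types, not a real connector) shows writes that trigger AOC0001 and AOC0002 when compiled into a connector or ingestion assembly, which is how the analyzer scopes its checks:

```csharp
namespace StellaOps.Concelier.Connector.Sample;

public sealed class AdvisoryModel
{
    public string? severity { get; set; }        // forbidden top-level field
    public string? effective_score { get; set; } // derived "effective_"-prefixed field
    public string? description { get; set; }     // ordinary raw field
}

public sealed class Ingester
{
    public void Process(AdvisoryModel advisory)
    {
        advisory.severity = "high";                 // AOC0001: forbidden field write
        advisory.effective_score = "9.8";           // AOC0002: derived field write
        advisory.description = "Raw advisory text"; // not flagged
    }
}
```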
## Usage

Reference this analyzer in your project:

```xml
<ItemGroup>
  <ProjectReference Include="..\..\..\Aoc\__Analyzers\StellaOps.Aoc.Analyzers\StellaOps.Aoc.Analyzers.csproj"
                    OutputItemType="Analyzer"
                    ReferenceOutputAssembly="false" />
</ItemGroup>
```

Alternatively, add it as a NuGet package once one is published.

## Suppression

To suppress a specific diagnostic:

```csharp
#pragma warning disable AOC0001
// Code that intentionally writes a forbidden field
#pragma warning restore AOC0001
```

Or use the `[SuppressMessage]` attribute:

```csharp
[SuppressMessage("AOC", "AOC0001", Justification = "Legitimate use case")]
```
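## Guarded writes

AOC0003 flags ingestion-side database writes that are not covered by AOC guard validation. The analyzer accepts either a `Validate(...)` call on an `*AocGuard*` type enclosing the write, or a containing method that takes an `IAocGuard`-typed parameter. The sketch below shows the second pattern; the guard result shape is assumed from how `AocVerificationService` calls `AocWriteGuard.Validate(JsonElement)`, and `IAdvisoryRepository` is a hypothetical repository interface used only for illustration:

```csharp
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;

public sealed class AdvisoryWriter
{
    // Receiving the guard as a parameter keeps the write inside a recognised guard scope.
    public async Task SaveAsync(
        string rawJson,
        IAocGuard guard,
        IAdvisoryRepository repository,
        CancellationToken cancellationToken)
    {
        using var doc = JsonDocument.Parse(rawJson);
        var guardResult = guard.Validate(doc.RootElement);

        if (guardResult.Violations.Any())
        {
            // Refuse to persist documents that carry forbidden or derived fields.
            throw new InvalidOperationException("AOC violation: document rejected before write.");
        }

        await repository.InsertOneAsync(rawJson, cancellationToken); // validated write
    }
}
```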
@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>netstandard2.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IncludeBuildOutput>false</IncludeBuildOutput>
    <AnalysisLevel>latest</AnalysisLevel>
    <EnforceExtendedAnalyzerRules>true</EnforceExtendedAnalyzerRules>
    <Description>StellaOps AOC Roslyn Analyzers - Compile-time detection of forbidden field writes and unguarded ingestion operations</Description>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.0.1" PrivateAssets="all" />
    <PackageReference Include="Microsoft.CodeAnalysis.Analyzers" Version="3.3.4" PrivateAssets="all" />
  </ItemGroup>

  <ItemGroup>
    <None Include="README.md" Visible="false" />
    <None Include="AnalyzerReleases.Shipped.md" Visible="false" />
    <None Include="AnalyzerReleases.Unshipped.md" Visible="false" />
  </ItemGroup>
</Project>
@@ -0,0 +1,300 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.CodeAnalysis;
|
||||
using Microsoft.CodeAnalysis.CSharp;
|
||||
using Microsoft.CodeAnalysis.Diagnostics;
|
||||
using StellaOps.Aoc.Analyzers;
|
||||
|
||||
namespace StellaOps.Aoc.Analyzers.Tests;
|
||||
|
||||
public sealed class AocForbiddenFieldAnalyzerTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData("severity")]
|
||||
[InlineData("cvss")]
|
||||
[InlineData("cvss_vector")]
|
||||
[InlineData("effective_status")]
|
||||
[InlineData("merged_from")]
|
||||
[InlineData("consensus_provider")]
|
||||
[InlineData("reachability")]
|
||||
[InlineData("asset_criticality")]
|
||||
[InlineData("risk_score")]
|
||||
public async Task ReportsDiagnostic_ForForbiddenFieldAssignment(string fieldName)
|
||||
{
|
||||
string source = $$"""
|
||||
namespace StellaOps.Concelier.Connector.Sample;
|
||||
|
||||
public sealed class AdvisoryModel
|
||||
{
|
||||
public string? {{fieldName}} { get; set; }
|
||||
}
|
||||
|
||||
public sealed class Ingester
|
||||
{
|
||||
public void Process(AdvisoryModel advisory)
|
||||
{
|
||||
advisory.{{fieldName}} = "value";
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
|
||||
Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("effective_date")]
|
||||
[InlineData("effective_version")]
|
||||
[InlineData("effective_score")]
|
||||
public async Task ReportsDiagnostic_ForDerivedFieldAssignment(string fieldName)
|
||||
{
|
||||
string source = $$"""
|
||||
namespace StellaOps.Concelier.Connector.Sample;
|
||||
|
||||
public sealed class AdvisoryModel
|
||||
{
|
||||
public string? {{fieldName}} { get; set; }
|
||||
}
|
||||
|
||||
public sealed class Ingester
|
||||
{
|
||||
public void Process(AdvisoryModel advisory)
|
||||
{
|
||||
advisory.{{fieldName}} = "value";
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
|
||||
Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdDerivedField);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReportsDiagnostic_ForForbiddenFieldInObjectInitializer()
|
||||
{
|
||||
const string source = """
|
||||
namespace StellaOps.Concelier.Connector.Sample;
|
||||
|
||||
public sealed class AdvisoryModel
|
||||
{
|
||||
public string? severity { get; set; }
|
||||
public string? cveId { get; set; }
|
||||
}
|
||||
|
||||
public sealed class Ingester
|
||||
{
|
||||
public AdvisoryModel Create()
|
||||
{
|
||||
return new AdvisoryModel
|
||||
{
|
||||
severity = "high",
|
||||
cveId = "CVE-2024-0001"
|
||||
};
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
|
||||
Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DoesNotReportDiagnostic_ForAllowedFieldAssignment()
|
||||
{
|
||||
const string source = """
|
||||
namespace StellaOps.Concelier.Connector.Sample;
|
||||
|
||||
public sealed class AdvisoryModel
|
||||
{
|
||||
public string? cveId { get; set; }
|
||||
public string? description { get; set; }
|
||||
}
|
||||
|
||||
public sealed class Ingester
|
||||
{
|
||||
public void Process(AdvisoryModel advisory)
|
||||
{
|
||||
advisory.cveId = "CVE-2024-0001";
|
||||
advisory.description = "Test vulnerability";
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
|
||||
Assert.DoesNotContain(diagnostics, d =>
|
||||
d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField ||
|
||||
d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdDerivedField);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DoesNotReportDiagnostic_ForNonIngestionAssembly()
|
||||
{
|
||||
const string source = """
|
||||
namespace StellaOps.Internal.Processing;
|
||||
|
||||
public sealed class AdvisoryModel
|
||||
{
|
||||
public string? severity { get; set; }
|
||||
}
|
||||
|
||||
public sealed class Processor
|
||||
{
|
||||
public void Process(AdvisoryModel advisory)
|
||||
{
|
||||
advisory.severity = "high";
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
        var diagnostics = await AnalyzeAsync(source, "StellaOps.Internal.Processing");
        Assert.DoesNotContain(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    [Fact]
    public async Task DoesNotReportDiagnostic_ForTestAssembly()
    {
        const string source = """
            namespace StellaOps.Concelier.Connector.Sample.Tests;

            public sealed class AdvisoryModel
            {
                public string? severity { get; set; }
            }

            public sealed class IngesterTests
            {
                public void TestProcess()
                {
                    var advisory = new AdvisoryModel { severity = "high" };
                }
            }
            """;

        var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample.Tests");
        Assert.DoesNotContain(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    [Fact]
    public async Task ReportsDiagnostic_ForDictionaryAddWithForbiddenKey()
    {
        const string source = """
            using System.Collections.Generic;

            namespace StellaOps.Concelier.Connector.Sample;

            public sealed class Ingester
            {
                public void Process()
                {
                    var dict = new Dictionary<string, object>();
                    dict.Add("cvss", 9.8);
                }
            }
            """;

        var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
        Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    [Fact]
    public async Task ReportsDiagnostic_CaseInsensitive()
    {
        const string source = """
            namespace StellaOps.Concelier.Connector.Sample;

            public sealed class AdvisoryModel
            {
                public string? Severity { get; set; }
                public string? CVSS { get; set; }
            }

            public sealed class Ingester
            {
                public void Process(AdvisoryModel advisory)
                {
                    advisory.Severity = "high";
                }
            }
            """;

        var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
        Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    [Fact]
    public async Task ReportsDiagnostic_ForAnonymousObjectWithForbiddenField()
    {
        const string source = """
            namespace StellaOps.Concelier.Connector.Sample;

            public sealed class Ingester
            {
                public object Create()
                {
                    return new { severity = "high", cveId = "CVE-2024-0001" };
                }
            }
            """;

        var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Connector.Sample");
        Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    [Fact]
    public async Task DoesNotReportDiagnostic_ForIngestionNamespaceButNotConnector()
    {
        const string source = """
            namespace StellaOps.Concelier.Ingestion;

            public sealed class AdvisoryModel
            {
                public string? severity { get; set; }
            }

            public sealed class Processor
            {
                public void Process(AdvisoryModel advisory)
                {
                    advisory.severity = "high";
                }
            }
            """;

        var diagnostics = await AnalyzeAsync(source, "StellaOps.Concelier.Ingestion");
        Assert.Contains(diagnostics, d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);
    }

    private static async Task<ImmutableArray<Diagnostic>> AnalyzeAsync(string source, string assemblyName)
    {
        var compilation = CSharpCompilation.Create(
            assemblyName,
            new[] { CSharpSyntaxTree.ParseText(source) },
            CreateMetadataReferences(),
            new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary));

        var analyzer = new AocForbiddenFieldAnalyzer();
        var compilationWithAnalyzers = compilation.WithAnalyzers(ImmutableArray.Create<DiagnosticAnalyzer>(analyzer));
        return await compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync();
    }

    private static IEnumerable<MetadataReference> CreateMetadataReferences()
    {
        yield return MetadataReference.CreateFromFile(typeof(object).GetTypeInfo().Assembly.Location);
        yield return MetadataReference.CreateFromFile(typeof(Enumerable).GetTypeInfo().Assembly.Location);

        // Get System.Collections reference for Dictionary<,>
        var systemCollectionsPath = Path.GetDirectoryName(typeof(object).GetTypeInfo().Assembly.Location);
        if (!string.IsNullOrEmpty(systemCollectionsPath))
        {
            var collectionsPath = Path.Combine(systemCollectionsPath!, "System.Collections.dll");
            if (File.Exists(collectionsPath))
            {
                yield return MetadataReference.CreateFromFile(collectionsPath);
            }
        }
    }
}
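Reviewer note: if later tests need to assert where a violation is reported rather than just that one exists, the same harness supports that through Roslyn's public Diagnostic API alone. A minimal sketch follows (hypothetical helper, not part of this diff):

    // Hypothetical helper for this test class: asserts exactly one AOC violation
    // and checks the 1-based source line it is reported on.
    private static void AssertSingleViolationAt(ImmutableArray<Diagnostic> diagnostics, int expectedLine)
    {
        var violation = Assert.Single(
            diagnostics,
            d => d.Id == AocForbiddenFieldAnalyzer.DiagnosticIdForbiddenField);

        var reportedLine = violation.Location.GetLineSpan().StartLinePosition.Line + 1;
        Assert.Equal(expectedLine, reportedLine);
    }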
@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.0.1" PrivateAssets="all" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>

  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\__Analyzers\StellaOps.Aoc.Analyzers\StellaOps.Aoc.Analyzers.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,195 @@
using System.Text.Json;
using StellaOps.Aoc.Cli.Models;
using StellaOps.Aoc.Cli.Services;

namespace StellaOps.Aoc.Cli.Tests;

public sealed class AocVerificationServiceTests
{
    [Fact]
    public void VerifyOptions_RequiredProperties_AreSet()
    {
        var options = new VerifyOptions
        {
            Since = "2025-12-01",
            PostgresConnectionString = "Host=localhost;Database=test",
            Verbose = true
        };

        Assert.Equal("2025-12-01", options.Since);
        Assert.Equal("Host=localhost;Database=test", options.PostgresConnectionString);
        Assert.True(options.Verbose);
        Assert.False(options.DryRun);
    }

    [Fact]
    public void VerificationResult_Status_ReturnsPass_WhenNoViolations()
    {
        var result = new VerificationResult
        {
            Since = "2025-12-01"
        };

        Assert.Equal("PASS", result.Status);
        Assert.Equal(0, result.ViolationCount);
    }

    [Fact]
    public void VerificationResult_Status_ReturnsFail_WhenViolationsExist()
    {
        var result = new VerificationResult
        {
            Since = "2025-12-01",
            Violations =
            {
                new DocumentViolation
                {
                    DocumentId = "doc-1",
                    Collection = "test",
                    Code = "ERR_AOC_001",
                    Path = "/severity",
                    Message = "Forbidden field"
                }
            }
        };

        Assert.Equal("FAIL", result.Status);
        Assert.Equal(1, result.ViolationCount);
    }

    [Fact]
    public void DocumentViolation_Serializes_ToExpectedJson()
    {
        var violation = new DocumentViolation
        {
            DocumentId = "doc-123",
            Collection = "advisory_raw",
            Code = "ERR_AOC_001",
            Path = "/severity",
            Message = "Field 'severity' is forbidden",
            Tenant = "tenant-1"
        };

        var json = JsonSerializer.Serialize(violation, new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        });

        Assert.Contains("\"documentId\":\"doc-123\"", json);
        Assert.Contains("\"collection\":\"advisory_raw\"", json);
        Assert.Contains("\"code\":\"ERR_AOC_001\"", json);
        Assert.Contains("\"path\":\"/severity\"", json);
    }

    [Fact]
    public void VerificationResult_Serializes_WithAllFields()
    {
        var result = new VerificationResult
        {
            Since = "abc123",
            Tenant = "tenant-1",
            DocumentsScanned = 100,
            DurationMs = 500,
            Violations =
            {
                new DocumentViolation
                {
                    DocumentId = "doc-1",
                    Collection = "test",
                    Code = "ERR_AOC_001",
                    Path = "/severity",
                    Message = "Forbidden"
                }
            }
        };

        var json = JsonSerializer.Serialize(result, new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        });

        Assert.Contains("\"since\":\"abc123\"", json);
        Assert.Contains("\"tenant\":\"tenant-1\"", json);
        Assert.Contains("\"documentsScanned\":100", json);
        Assert.Contains("\"violationCount\":1", json);
        Assert.Contains("\"status\":\"FAIL\"", json);
        Assert.Contains("\"durationMs\":500", json);
    }

    [Fact]
    public void VerifyOptions_MongoAndPostgres_AreMutuallyExclusive()
    {
        var optionsMongo = new VerifyOptions
        {
            Since = "HEAD~1",
            MongoConnectionString = "mongodb://localhost:27017"
        };

        var optionsPostgres = new VerifyOptions
        {
            Since = "HEAD~1",
            PostgresConnectionString = "Host=localhost;Database=test"
        };

        Assert.NotNull(optionsMongo.MongoConnectionString);
        Assert.Null(optionsMongo.PostgresConnectionString);

        Assert.Null(optionsPostgres.MongoConnectionString);
        Assert.NotNull(optionsPostgres.PostgresConnectionString);
    }

    [Fact]
    public void VerifyOptions_DryRun_DefaultsToFalse()
    {
        var options = new VerifyOptions
        {
            Since = "2025-01-01"
        };

        Assert.False(options.DryRun);
    }

    [Fact]
    public void VerifyOptions_Verbose_DefaultsToFalse()
    {
        var options = new VerifyOptions
        {
            Since = "2025-01-01"
        };

        Assert.False(options.Verbose);
    }

    [Fact]
    public void VerificationResult_ViolationCount_MatchesListCount()
    {
        var result = new VerificationResult
        {
            Since = "test"
        };

        Assert.Equal(0, result.ViolationCount);

        result.Violations.Add(new DocumentViolation
        {
            DocumentId = "1",
            Collection = "test",
            Code = "ERR",
            Path = "/",
            Message = "msg"
        });

        Assert.Equal(1, result.ViolationCount);

        result.Violations.Add(new DocumentViolation
        {
            DocumentId = "2",
            Collection = "test",
            Code = "ERR",
            Path = "/",
            Message = "msg"
        });

        Assert.Equal(2, result.ViolationCount);
    }
}
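For review context: these tests assume that VerificationResult derives Status and ViolationCount from its violations list. A minimal sketch of the shape they imply (the real models live in StellaOps.Aoc.Cli.Models and may carry more members; the property modifiers here are guesses):

    // Sketch inferred from the assertions above; illustrative only.
    public sealed class VerificationResult
    {
        public required string Since { get; init; }
        public string? Tenant { get; init; }
        public int DocumentsScanned { get; init; }
        public long DurationMs { get; init; }
        public List<DocumentViolation> Violations { get; } = new();

        public int ViolationCount => Violations.Count;
        public string Status => Violations.Count == 0 ? "PASS" : "FAIL";
    }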
@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>

  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\StellaOps.Aoc.Cli\StellaOps.Aoc.Cli.csproj" />
  </ItemGroup>

</Project>
29
src/Aoc/aoc.runsettings
Normal file
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<RunSettings>
  <DataCollectionRunSettings>
    <DataCollectors>
      <DataCollector friendlyName="XPlat code coverage">
        <Configuration>
          <Format>cobertura,opencover</Format>
          <Exclude>[*.Tests]*,[*]*.Migrations.*</Exclude>
          <Include>[StellaOps.Aoc]*,[StellaOps.Aoc.Cli]*,[StellaOps.Aoc.Analyzers]*</Include>
          <ExcludeByFile>**/obj/**,**/bin/**</ExcludeByFile>
          <SingleHit>false</SingleHit>
          <UseSourceLink>true</UseSourceLink>
          <IncludeTestAssembly>false</IncludeTestAssembly>
          <SkipAutoProps>true</SkipAutoProps>
          <DeterministicReport>true</DeterministicReport>
        </Configuration>
      </DataCollector>
    </DataCollectors>
  </DataCollectionRunSettings>

  <!-- Coverage thresholds for CI enforcement -->
  <!-- Minimum line coverage: 70% -->
  <!-- Minimum branch coverage: 60% -->
  <Coverlet>
    <ThresholdType>line,branch</ThresholdType>
    <Threshold>70,60</Threshold>
    <ThresholdStat>total</ThresholdStat>
  </Coverlet>
</RunSettings>
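Reviewer note: nothing in this diff wires the runsettings into CI, so presumably the test jobs opt in with something like `dotnet test src/Aoc --settings src/Aoc/aoc.runsettings --collect "XPlat Code Coverage"` (paths assumed); the collector then honours the Format/Include/Exclude configuration above, while the Coverlet thresholds only bite where the coverage step actually enforces them.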
@@ -56,6 +56,7 @@ internal static class CommandFactory
        root.Add(BuildKmsCommand(services, verboseOption, cancellationToken));
        root.Add(BuildVulnCommand(services, verboseOption, cancellationToken));
        root.Add(BuildVexCommand(services, options, verboseOption, cancellationToken));
        root.Add(BuildDecisionCommand(services, verboseOption, cancellationToken));
        root.Add(BuildCryptoCommand(services, verboseOption, cancellationToken));
        root.Add(BuildExportCommand(services, verboseOption, cancellationToken));
        root.Add(BuildAttestCommand(services, verboseOption, cancellationToken));
@@ -74,11 +75,13 @@ internal static class CommandFactory
        root.Add(BuildCvssCommand(services, verboseOption, cancellationToken));
        root.Add(BuildRiskCommand(services, verboseOption, cancellationToken));
        root.Add(BuildReachabilityCommand(services, verboseOption, cancellationToken));
        root.Add(BuildGraphCommand(services, verboseOption, cancellationToken));
        root.Add(BuildApiCommand(services, verboseOption, cancellationToken));
        root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
        root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
        root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
        root.Add(BuildDevPortalCommand(services, verboseOption, cancellationToken));
        root.Add(BuildSymbolsCommand(services, verboseOption, cancellationToken));
        root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));

        var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
@@ -3868,11 +3871,32 @@ internal static class CommandFactory
        {
            Description = "Emit raw JSON payload instead of formatted output."
        };
        // GAP-VEX-006: Evidence display options
        var showCallPathsOption = new Option<bool>("--call-paths")
        {
            Description = "Include reachability call paths in the output."
        };
        var showGraphHashOption = new Option<bool>("--graph-hash")
        {
            Description = "Include call graph hash and CAS URI in the output."
        };
        var showRuntimeHitsOption = new Option<bool>("--runtime-hits")
        {
            Description = "Include runtime execution hits from probes."
        };
        var showFullEvidenceOption = new Option<bool>("--full-evidence")
        {
            Description = "Include all evidence types (call paths, graph hash, runtime hits, DSSE pointers)."
        };

        show.Add(showVulnIdArg);
        show.Add(showProductKeyArg);
        show.Add(showTenantOption);
        show.Add(showJsonOption);
        show.Add(showCallPathsOption);
        show.Add(showGraphHashOption);
        show.Add(showRuntimeHitsOption);
        show.Add(showFullEvidenceOption);

        show.SetAction((parseResult, _) =>
        {
@@ -3880,14 +3904,29 @@ internal static class CommandFactory
            var productKey = parseResult.GetValue(showProductKeyArg) ?? string.Empty;
            var tenant = parseResult.GetValue(showTenantOption);
            var emitJson = parseResult.GetValue(showJsonOption);
            var includeCallPaths = parseResult.GetValue(showCallPathsOption);
            var includeGraphHash = parseResult.GetValue(showGraphHashOption);
            var includeRuntimeHits = parseResult.GetValue(showRuntimeHitsOption);
            var fullEvidence = parseResult.GetValue(showFullEvidenceOption);
            var verbose = parseResult.GetValue(verboseOption);

            // Full evidence enables all flags
            if (fullEvidence)
            {
                includeCallPaths = true;
                includeGraphHash = true;
                includeRuntimeHits = true;
            }

            return CommandHandlers.HandleVexConsensusShowAsync(
                services,
                vulnId,
                productKey,
                tenant,
                emitJson,
                includeCallPaths,
                includeGraphHash,
                includeRuntimeHits,
                verbose,
                cancellationToken);
        });
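Worth calling out for reviewers: `--full-evidence` is a pure superset switch; it simply forces `--call-paths`, `--graph-hash`, and `--runtime-hits` on before delegating to `HandleVexConsensusShowAsync`, so an invocation such as `stella vex consensus show CVE-2025-1234 pkg:npm/example --full-evidence --json` (binary name and verb path assumed from the handler naming elsewhere in this PR) is equivalent to passing the three evidence flags individually.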
@@ -4269,9 +4308,336 @@ internal static class CommandFactory
|
||||
obs.Add(linkset);
|
||||
vex.Add(obs);
|
||||
|
||||
// UI-VEX-401-032: VEX explain command for comprehensive decision explanation
|
||||
var explain = new Command("explain", "Explain a VEX decision with full reachability evidence and verification status.");
|
||||
|
||||
var explainVulnIdArg = new Argument<string>("vulnerability-id")
|
||||
{
|
||||
Description = "Vulnerability identifier (e.g., CVE-2024-1234)."
|
||||
};
|
||||
var explainProductKeyOption = new Option<string>("--product-key", new[] { "-p" })
|
||||
{
|
||||
Description = "Product key for the decision.",
|
||||
Required = true
|
||||
};
|
||||
var explainTenantOption = new Option<string?>("--tenant", new[] { "-t" })
|
||||
{
|
||||
Description = "Tenant identifier."
|
||||
};
|
||||
var explainCallPathsOption = new Option<bool>("--call-paths")
|
||||
{
|
||||
Description = "Include call path evidence with full frame details."
|
||||
};
|
||||
explainCallPathsOption.SetDefaultValue(true);
|
||||
var explainRuntimeHitsOption = new Option<bool>("--runtime-hits")
|
||||
{
|
||||
Description = "Include runtime execution hit evidence."
|
||||
};
|
||||
explainRuntimeHitsOption.SetDefaultValue(true);
|
||||
var explainGraphOption = new Option<bool>("--graph")
|
||||
{
|
||||
Description = "Include reachability graph metadata."
|
||||
};
|
||||
explainGraphOption.SetDefaultValue(true);
|
||||
var explainDsseOption = new Option<bool>("--dsse")
|
||||
{
|
||||
Description = "Include DSSE envelope details."
|
||||
};
|
||||
var explainRekorOption = new Option<bool>("--rekor")
|
||||
{
|
||||
Description = "Include Rekor transparency log entry details."
|
||||
};
|
||||
var explainVerifyOption = new Option<bool>("--verify")
|
||||
{
|
||||
Description = "Verify attestation signatures and Rekor inclusion proofs."
|
||||
};
|
||||
var explainOfflineOption = new Option<bool>("--offline")
|
||||
{
|
||||
Description = "Perform verification using embedded proofs only (air-gapped mode)."
|
||||
};
|
||||
var explainJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output as JSON for machine processing."
|
||||
};
|
||||
|
||||
explain.Add(explainVulnIdArg);
|
||||
explain.Add(explainProductKeyOption);
|
||||
explain.Add(explainTenantOption);
|
||||
explain.Add(explainCallPathsOption);
|
||||
explain.Add(explainRuntimeHitsOption);
|
||||
explain.Add(explainGraphOption);
|
||||
explain.Add(explainDsseOption);
|
||||
explain.Add(explainRekorOption);
|
||||
explain.Add(explainVerifyOption);
|
||||
explain.Add(explainOfflineOption);
|
||||
explain.Add(explainJsonOption);
|
||||
explain.Add(verboseOption);
|
||||
|
||||
explain.SetAction((parseResult, _) =>
|
||||
{
|
||||
var vulnId = parseResult.GetValue(explainVulnIdArg) ?? string.Empty;
|
||||
var productKey = parseResult.GetValue(explainProductKeyOption) ?? string.Empty;
|
||||
var tenant = parseResult.GetValue(explainTenantOption);
|
||||
var includeCallPaths = parseResult.GetValue(explainCallPathsOption);
|
||||
var includeRuntimeHits = parseResult.GetValue(explainRuntimeHitsOption);
|
||||
var includeGraph = parseResult.GetValue(explainGraphOption);
|
||||
var includeDsse = parseResult.GetValue(explainDsseOption);
|
||||
var includeRekor = parseResult.GetValue(explainRekorOption);
|
||||
var verify = parseResult.GetValue(explainVerifyOption);
|
||||
var offline = parseResult.GetValue(explainOfflineOption);
|
||||
var emitJson = parseResult.GetValue(explainJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleVexExplainAsync(
|
||||
services,
|
||||
vulnId,
|
||||
productKey,
|
||||
tenant,
|
||||
includeCallPaths,
|
||||
includeRuntimeHits,
|
||||
includeGraph,
|
||||
includeDsse,
|
||||
includeRekor,
|
||||
verify,
|
||||
offline,
|
||||
emitJson,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
vex.Add(explain);
|
||||
|
||||
return vex;
|
||||
}
|
||||
|
||||
// CLI-VEX-401-011: VEX decision commands with DSSE/Rekor integration
|
||||
private static Command BuildDecisionCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
var decision = new Command("decision", "Manage VEX decisions with DSSE signing and Rekor transparency.");
|
||||
|
||||
// decision export
|
||||
var export = new Command("export", "Export VEX decisions as OpenVEX documents with optional DSSE signing.");
|
||||
|
||||
var expTenantOption = new Option<string>("--tenant", new[] { "-t" })
|
||||
{
|
||||
Description = "Tenant identifier.",
|
||||
Required = true
|
||||
};
|
||||
var expScanIdOption = new Option<string?>("--scan-id")
|
||||
{
|
||||
Description = "Filter by scan identifier."
|
||||
};
|
||||
var expVulnIdsOption = new Option<string[]>("--vuln-id")
|
||||
{
|
||||
Description = "Filter by vulnerability identifiers (repeatable).",
|
||||
Arity = ArgumentArity.ZeroOrMore
|
||||
};
|
||||
var expPurlsOption = new Option<string[]>("--purl")
|
||||
{
|
||||
Description = "Filter by Package URLs (repeatable).",
|
||||
Arity = ArgumentArity.ZeroOrMore
|
||||
};
|
||||
var expStatusesOption = new Option<string[]>("--status")
|
||||
{
|
||||
Description = "Filter by VEX status (not_affected, affected, fixed, under_investigation). Repeatable.",
|
||||
Arity = ArgumentArity.ZeroOrMore
|
||||
};
|
||||
var expOutputOption = new Option<string>("--output", new[] { "-o" })
|
||||
{
|
||||
Description = "Output file path for the OpenVEX document.",
|
||||
Required = true
|
||||
};
|
||||
var expFormatOption = new Option<string>("--format", new[] { "-f" })
|
||||
{
|
||||
Description = "Output format (openvex, dsse, ndjson). Default: openvex."
|
||||
};
|
||||
expFormatOption.SetDefaultValue("openvex");
|
||||
var expSignOption = new Option<bool>("--sign", new[] { "-s" })
|
||||
{
|
||||
Description = "Sign the output with DSSE envelope."
|
||||
};
|
||||
var expRekorOption = new Option<bool>("--rekor")
|
||||
{
|
||||
Description = "Submit DSSE envelope to Rekor transparency log."
|
||||
};
|
||||
var expIncludeEvidenceOption = new Option<bool>("--include-evidence")
|
||||
{
|
||||
Description = "Include reachability evidence blocks in output."
|
||||
};
|
||||
expIncludeEvidenceOption.SetDefaultValue(true);
|
||||
var expJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output metadata as JSON to stdout."
|
||||
};
|
||||
|
||||
export.Add(expTenantOption);
|
||||
export.Add(expScanIdOption);
|
||||
export.Add(expVulnIdsOption);
|
||||
export.Add(expPurlsOption);
|
||||
export.Add(expStatusesOption);
|
||||
export.Add(expOutputOption);
|
||||
export.Add(expFormatOption);
|
||||
export.Add(expSignOption);
|
||||
export.Add(expRekorOption);
|
||||
export.Add(expIncludeEvidenceOption);
|
||||
export.Add(expJsonOption);
|
||||
|
||||
export.SetAction((parseResult, _) =>
|
||||
{
|
||||
var tenant = parseResult.GetValue(expTenantOption) ?? string.Empty;
|
||||
var scanId = parseResult.GetValue(expScanIdOption);
|
||||
var vulnIds = parseResult.GetValue(expVulnIdsOption) ?? Array.Empty<string>();
|
||||
var purls = parseResult.GetValue(expPurlsOption) ?? Array.Empty<string>();
|
||||
var statuses = parseResult.GetValue(expStatusesOption) ?? Array.Empty<string>();
|
||||
var output = parseResult.GetValue(expOutputOption) ?? string.Empty;
|
||||
var format = parseResult.GetValue(expFormatOption) ?? "openvex";
|
||||
var sign = parseResult.GetValue(expSignOption);
|
||||
var rekor = parseResult.GetValue(expRekorOption);
|
||||
var includeEvidence = parseResult.GetValue(expIncludeEvidenceOption);
|
||||
var emitJson = parseResult.GetValue(expJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleDecisionExportAsync(
|
||||
services,
|
||||
tenant,
|
||||
scanId,
|
||||
vulnIds,
|
||||
purls,
|
||||
statuses,
|
||||
output,
|
||||
format,
|
||||
sign,
|
||||
rekor,
|
||||
includeEvidence,
|
||||
emitJson,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
decision.Add(export);
|
||||
|
||||
// decision verify
|
||||
var verify = new Command("verify", "Verify DSSE signature and optional Rekor inclusion proof of a VEX decision document.");
|
||||
|
||||
var verifyFileArg = new Argument<string>("file")
|
||||
{
|
||||
Description = "Path to the VEX document or DSSE envelope to verify."
|
||||
};
|
||||
var verifyDigestOption = new Option<string?>("--digest")
|
||||
{
|
||||
Description = "Expected payload digest (sha256:...) to verify."
|
||||
};
|
||||
var verifyRekorOption = new Option<bool>("--rekor")
|
||||
{
|
||||
Description = "Verify Rekor inclusion proof."
|
||||
};
|
||||
var verifyRekorUuidOption = new Option<string?>("--rekor-uuid")
|
||||
{
|
||||
Description = "Rekor entry UUID for inclusion verification."
|
||||
};
|
||||
var verifyPublicKeyOption = new Option<string?>("--public-key")
|
||||
{
|
||||
Description = "Path to public key file for offline signature verification."
|
||||
};
|
||||
var verifyJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output verification result as JSON."
|
||||
};
|
||||
|
||||
verify.Add(verifyFileArg);
|
||||
verify.Add(verifyDigestOption);
|
||||
verify.Add(verifyRekorOption);
|
||||
verify.Add(verifyRekorUuidOption);
|
||||
verify.Add(verifyPublicKeyOption);
|
||||
verify.Add(verifyJsonOption);
|
||||
|
||||
verify.SetAction((parseResult, _) =>
|
||||
{
|
||||
var file = parseResult.GetValue(verifyFileArg) ?? string.Empty;
|
||||
var digest = parseResult.GetValue(verifyDigestOption);
|
||||
var verifyRekor = parseResult.GetValue(verifyRekorOption);
|
||||
var rekorUuid = parseResult.GetValue(verifyRekorUuidOption);
|
||||
var publicKey = parseResult.GetValue(verifyPublicKeyOption);
|
||||
var emitJson = parseResult.GetValue(verifyJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleDecisionVerifyAsync(
|
||||
services,
|
||||
file,
|
||||
digest,
|
||||
verifyRekor,
|
||||
rekorUuid,
|
||||
publicKey,
|
||||
emitJson,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
decision.Add(verify);
|
||||
|
||||
// decision compare
|
||||
var compare = new Command("compare", "Compare two VEX decision documents and show differences.");
|
||||
|
||||
var compareBaseArg = new Argument<string>("base")
|
||||
{
|
||||
Description = "Path to the base VEX document."
|
||||
};
|
||||
var compareTargetArg = new Argument<string>("target")
|
||||
{
|
||||
Description = "Path to the target VEX document to compare against base."
|
||||
};
|
||||
var compareOutputOption = new Option<string?>("--output", new[] { "-o" })
|
||||
{
|
||||
Description = "Output file path for the diff report."
|
||||
};
|
||||
var compareFormatOption = new Option<string>("--format", new[] { "-f" })
|
||||
{
|
||||
Description = "Output format (text, json, markdown). Default: text."
|
||||
};
|
||||
compareFormatOption.SetDefaultValue("text");
|
||||
var compareShowUnchangedOption = new Option<bool>("--show-unchanged")
|
||||
{
|
||||
Description = "Include unchanged statements in output."
|
||||
};
|
||||
var compareSummaryOnlyOption = new Option<bool>("--summary-only")
|
||||
{
|
||||
Description = "Show only summary counts, not detailed diffs."
|
||||
};
|
||||
|
||||
compare.Add(compareBaseArg);
|
||||
compare.Add(compareTargetArg);
|
||||
compare.Add(compareOutputOption);
|
||||
compare.Add(compareFormatOption);
|
||||
compare.Add(compareShowUnchangedOption);
|
||||
compare.Add(compareSummaryOnlyOption);
|
||||
|
||||
compare.SetAction((parseResult, _) =>
|
||||
{
|
||||
var basePath = parseResult.GetValue(compareBaseArg) ?? string.Empty;
|
||||
var targetPath = parseResult.GetValue(compareTargetArg) ?? string.Empty;
|
||||
var output = parseResult.GetValue(compareOutputOption);
|
||||
var format = parseResult.GetValue(compareFormatOption) ?? "text";
|
||||
var showUnchanged = parseResult.GetValue(compareShowUnchangedOption);
|
||||
var summaryOnly = parseResult.GetValue(compareSummaryOnlyOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleDecisionCompareAsync(
|
||||
services,
|
||||
basePath,
|
||||
targetPath,
|
||||
output,
|
||||
format,
|
||||
showUnchanged,
|
||||
summaryOnly,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
decision.Add(compare);
|
||||
|
||||
return decision;
|
||||
}
|
||||
|
||||
private static Command BuildConfigCommand(StellaOpsCliOptions options)
|
||||
{
|
||||
var config = new Command("config", "Inspect CLI configuration state.");
|
||||
@@ -10458,6 +10824,120 @@ internal static class CommandFactory
|
||||
return reachability;
|
||||
}
|
||||
|
||||
// UI-CLI-401-007: stella graph command with DSSE pointers, runtime hits, predicates, counterfactuals
|
||||
private static Command BuildGraphCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
var graph = new Command("graph", "Call graph evidence commands.");
|
||||
|
||||
var tenantOption = new Option<string?>("--tenant", "-t")
|
||||
{
|
||||
Description = "Tenant context for the operation."
|
||||
};
|
||||
|
||||
var jsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output in JSON format."
|
||||
};
|
||||
|
||||
// stella graph explain
|
||||
var explain = new Command("explain", "Explain call graph reachability with signed evidence.");
|
||||
var graphIdOption = new Option<string>("--graph-id", "-g")
|
||||
{
|
||||
Description = "Call graph identifier.",
|
||||
Required = true
|
||||
};
|
||||
var vulnerabilityIdOption = new Option<string?>("--vuln-id", "-v")
|
||||
{
|
||||
Description = "Vulnerability identifier to explain."
|
||||
};
|
||||
var packagePurlOption = new Option<string?>("--purl")
|
||||
{
|
||||
Description = "Package URL to explain."
|
||||
};
|
||||
var includeCallPathsOption = new Option<bool>("--call-paths")
|
||||
{
|
||||
Description = "Include detailed signed call paths in the explanation."
|
||||
};
|
||||
var includeRuntimeHitsOption = new Option<bool>("--runtime-hits")
|
||||
{
|
||||
Description = "Include runtime execution hits from instrumentation probes."
|
||||
};
|
||||
var includePredicatesOption = new Option<bool>("--predicates")
|
||||
{
|
||||
Description = "Include semantic predicates attached to evidence."
|
||||
};
|
||||
var includeDsseOption = new Option<bool>("--dsse")
|
||||
{
|
||||
Description = "Include DSSE envelope pointers and Rekor log entries."
|
||||
};
|
||||
var includeCounterfactualsOption = new Option<bool>("--counterfactuals")
|
||||
{
|
||||
Description = "Include counterfactual controls showing what-if scenarios."
|
||||
};
|
||||
var fullEvidenceOption = new Option<bool>("--full-evidence")
|
||||
{
|
||||
Description = "Include all evidence types (call paths, runtime hits, predicates, DSSE, counterfactuals)."
|
||||
};
|
||||
|
||||
explain.Add(tenantOption);
|
||||
explain.Add(graphIdOption);
|
||||
explain.Add(vulnerabilityIdOption);
|
||||
explain.Add(packagePurlOption);
|
||||
explain.Add(includeCallPathsOption);
|
||||
explain.Add(includeRuntimeHitsOption);
|
||||
explain.Add(includePredicatesOption);
|
||||
explain.Add(includeDsseOption);
|
||||
explain.Add(includeCounterfactualsOption);
|
||||
explain.Add(fullEvidenceOption);
|
||||
explain.Add(jsonOption);
|
||||
explain.Add(verboseOption);
|
||||
|
||||
explain.SetAction((parseResult, _) =>
|
||||
{
|
||||
var tenant = parseResult.GetValue(tenantOption);
|
||||
var graphId = parseResult.GetValue(graphIdOption) ?? string.Empty;
|
||||
var vulnerabilityId = parseResult.GetValue(vulnerabilityIdOption);
|
||||
var packagePurl = parseResult.GetValue(packagePurlOption);
|
||||
var includeCallPaths = parseResult.GetValue(includeCallPathsOption);
|
||||
var includeRuntimeHits = parseResult.GetValue(includeRuntimeHitsOption);
|
||||
var includePredicates = parseResult.GetValue(includePredicatesOption);
|
||||
var includeDsse = parseResult.GetValue(includeDsseOption);
|
||||
var includeCounterfactuals = parseResult.GetValue(includeCounterfactualsOption);
|
||||
var fullEvidence = parseResult.GetValue(fullEvidenceOption);
|
||||
var emitJson = parseResult.GetValue(jsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
// Full evidence enables all flags
|
||||
if (fullEvidence)
|
||||
{
|
||||
includeCallPaths = true;
|
||||
includeRuntimeHits = true;
|
||||
includePredicates = true;
|
||||
includeDsse = true;
|
||||
includeCounterfactuals = true;
|
||||
}
|
||||
|
||||
return CommandHandlers.HandleGraphExplainAsync(
|
||||
services,
|
||||
tenant,
|
||||
graphId,
|
||||
vulnerabilityId,
|
||||
packagePurl,
|
||||
includeCallPaths,
|
||||
includeRuntimeHits,
|
||||
includePredicates,
|
||||
includeDsse,
|
||||
includeCounterfactuals,
|
||||
emitJson,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
graph.Add(explain);
|
||||
|
||||
return graph;
|
||||
}
|
||||
|
||||
// CLI-SDK-63-001: stella api command
|
||||
private static Command BuildApiCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -11071,4 +11551,316 @@ internal static class CommandFactory
|
||||
|
||||
return devportal;
|
||||
}
|
||||
|
||||
// SYMS-BUNDLE-401-014: Symbol bundle commands for air-gapped installations
|
||||
private static Command BuildSymbolsCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
|
||||
{
|
||||
var symbols = new Command("symbols", "Manage symbol bundles for air-gapped installations.");
|
||||
|
||||
// symbols bundle build
|
||||
var bundleBuild = new Command("bundle", "Build a deterministic symbol bundle.");
|
||||
|
||||
var bundleNameOption = new Option<string>("--name", new[] { "-n" })
|
||||
{
|
||||
Description = "Bundle name.",
|
||||
Required = true
|
||||
};
|
||||
var bundleVersionOption = new Option<string>("--version")
|
||||
{
|
||||
Description = "Bundle version (SemVer).",
|
||||
Required = true
|
||||
};
|
||||
var bundleSourceOption = new Option<string>("--source", new[] { "-s" })
|
||||
{
|
||||
Description = "Source directory containing symbol manifests.",
|
||||
Required = true
|
||||
};
|
||||
var bundleOutputOption = new Option<string>("--output", new[] { "-o" })
|
||||
{
|
||||
Description = "Output directory for bundle archive.",
|
||||
Required = true
|
||||
};
|
||||
var bundlePlatformOption = new Option<string?>("--platform")
|
||||
{
|
||||
Description = "Filter symbols by platform (e.g., linux-x64, win-x64)."
|
||||
};
|
||||
var bundleTenantOption = new Option<string?>("--tenant")
|
||||
{
|
||||
Description = "Filter symbols by tenant ID."
|
||||
};
|
||||
var bundleSignOption = new Option<bool>("--sign")
|
||||
{
|
||||
Description = "Sign the bundle with DSSE."
|
||||
};
|
||||
var bundleKeyPathOption = new Option<string?>("--key")
|
||||
{
|
||||
Description = "Path to signing key (PEM-encoded private key)."
|
||||
};
|
||||
var bundleKeyIdOption = new Option<string?>("--key-id")
|
||||
{
|
||||
Description = "Key ID for DSSE signature."
|
||||
};
|
||||
var bundleAlgorithmOption = new Option<string>("--algorithm")
|
||||
{
|
||||
Description = "Signing algorithm (ecdsa-p256, ed25519, rsa-pss-sha256)."
|
||||
};
|
||||
bundleAlgorithmOption.SetDefaultValue("ecdsa-p256");
|
||||
var bundleRekorOption = new Option<bool>("--rekor")
|
||||
{
|
||||
Description = "Submit to Rekor transparency log."
|
||||
};
|
||||
var bundleRekorUrlOption = new Option<string>("--rekor-url")
|
||||
{
|
||||
Description = "Rekor server URL."
|
||||
};
|
||||
bundleRekorUrlOption.SetDefaultValue("https://rekor.sigstore.dev");
|
||||
var bundleFormatOption = new Option<string>("--format")
|
||||
{
|
||||
Description = "Bundle format (zip, tar.gz)."
|
||||
};
|
||||
bundleFormatOption.SetDefaultValue("zip");
|
||||
var bundleCompressionOption = new Option<int>("--compression")
|
||||
{
|
||||
Description = "Compression level (0-9)."
|
||||
};
|
||||
bundleCompressionOption.SetDefaultValue(6);
|
||||
var bundleJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output result as JSON."
|
||||
};
|
||||
|
||||
bundleBuild.Add(bundleNameOption);
|
||||
bundleBuild.Add(bundleVersionOption);
|
||||
bundleBuild.Add(bundleSourceOption);
|
||||
bundleBuild.Add(bundleOutputOption);
|
||||
bundleBuild.Add(bundlePlatformOption);
|
||||
bundleBuild.Add(bundleTenantOption);
|
||||
bundleBuild.Add(bundleSignOption);
|
||||
bundleBuild.Add(bundleKeyPathOption);
|
||||
bundleBuild.Add(bundleKeyIdOption);
|
||||
bundleBuild.Add(bundleAlgorithmOption);
|
||||
bundleBuild.Add(bundleRekorOption);
|
||||
bundleBuild.Add(bundleRekorUrlOption);
|
||||
bundleBuild.Add(bundleFormatOption);
|
||||
bundleBuild.Add(bundleCompressionOption);
|
||||
bundleBuild.Add(bundleJsonOption);
|
||||
bundleBuild.Add(verboseOption);
|
||||
|
||||
bundleBuild.SetAction((parseResult, _) =>
|
||||
{
|
||||
var name = parseResult.GetValue(bundleNameOption)!;
|
||||
var version = parseResult.GetValue(bundleVersionOption)!;
|
||||
var source = parseResult.GetValue(bundleSourceOption)!;
|
||||
var output = parseResult.GetValue(bundleOutputOption)!;
|
||||
var platform = parseResult.GetValue(bundlePlatformOption);
|
||||
var tenant = parseResult.GetValue(bundleTenantOption);
|
||||
var sign = parseResult.GetValue(bundleSignOption);
|
||||
var keyPath = parseResult.GetValue(bundleKeyPathOption);
|
||||
var keyId = parseResult.GetValue(bundleKeyIdOption);
|
||||
var algorithm = parseResult.GetValue(bundleAlgorithmOption)!;
|
||||
var rekor = parseResult.GetValue(bundleRekorOption);
|
||||
var rekorUrl = parseResult.GetValue(bundleRekorUrlOption)!;
|
||||
var format = parseResult.GetValue(bundleFormatOption)!;
|
||||
var compression = parseResult.GetValue(bundleCompressionOption);
|
||||
var json = parseResult.GetValue(bundleJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleSymbolBundleBuildAsync(
|
||||
services,
|
||||
name,
|
||||
version,
|
||||
source,
|
||||
output,
|
||||
platform,
|
||||
tenant,
|
||||
sign,
|
||||
keyPath,
|
||||
keyId,
|
||||
algorithm,
|
||||
rekor,
|
||||
rekorUrl,
|
||||
format,
|
||||
compression,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
symbols.Add(bundleBuild);
|
||||
|
||||
// symbols verify
|
||||
var verify = new Command("verify", "Verify a symbol bundle's integrity and signatures.");
|
||||
|
||||
var verifyBundleOption = new Option<string>("--bundle", new[] { "-b" })
|
||||
{
|
||||
Description = "Path to bundle archive.",
|
||||
Required = true
|
||||
};
|
||||
var verifyPublicKeyOption = new Option<string?>("--public-key")
|
||||
{
|
||||
Description = "Path to public key for signature verification."
|
||||
};
|
||||
var verifyRekorOfflineOption = new Option<bool>("--rekor-offline")
|
||||
{
|
||||
Description = "Verify Rekor inclusion proof offline."
|
||||
};
|
||||
verifyRekorOfflineOption.SetDefaultValue(true);
|
||||
var verifyRekorKeyOption = new Option<string?>("--rekor-key")
|
||||
{
|
||||
Description = "Path to Rekor public key for offline verification."
|
||||
};
|
||||
var verifyHashesOption = new Option<bool>("--verify-hashes")
|
||||
{
|
||||
Description = "Verify all blob hashes."
|
||||
};
|
||||
verifyHashesOption.SetDefaultValue(true);
|
||||
var verifyJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output result as JSON."
|
||||
};
|
||||
|
||||
verify.Add(verifyBundleOption);
|
||||
verify.Add(verifyPublicKeyOption);
|
||||
verify.Add(verifyRekorOfflineOption);
|
||||
verify.Add(verifyRekorKeyOption);
|
||||
verify.Add(verifyHashesOption);
|
||||
verify.Add(verifyJsonOption);
|
||||
verify.Add(verboseOption);
|
||||
|
||||
verify.SetAction((parseResult, _) =>
|
||||
{
|
||||
var bundlePath = parseResult.GetValue(verifyBundleOption)!;
|
||||
var publicKeyPath = parseResult.GetValue(verifyPublicKeyOption);
|
||||
var rekorOffline = parseResult.GetValue(verifyRekorOfflineOption);
|
||||
var rekorKeyPath = parseResult.GetValue(verifyRekorKeyOption);
|
||||
var verifyHashes = parseResult.GetValue(verifyHashesOption);
|
||||
var json = parseResult.GetValue(verifyJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleSymbolBundleVerifyAsync(
|
||||
services,
|
||||
bundlePath,
|
||||
publicKeyPath,
|
||||
rekorOffline,
|
||||
rekorKeyPath,
|
||||
verifyHashes,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
symbols.Add(verify);
|
||||
|
||||
// symbols extract
|
||||
var extract = new Command("extract", "Extract symbols from a bundle.");
|
||||
|
||||
var extractBundleOption = new Option<string>("--bundle", new[] { "-b" })
|
||||
{
|
||||
Description = "Path to bundle archive.",
|
||||
Required = true
|
||||
};
|
||||
var extractOutputOption = new Option<string>("--output", new[] { "-o" })
|
||||
{
|
||||
Description = "Output directory.",
|
||||
Required = true
|
||||
};
|
||||
var extractVerifyOption = new Option<bool>("--verify")
|
||||
{
|
||||
Description = "Verify bundle before extraction."
|
||||
};
|
||||
extractVerifyOption.SetDefaultValue(true);
|
||||
var extractPlatformOption = new Option<string?>("--platform")
|
||||
{
|
||||
Description = "Extract only symbols for this platform."
|
||||
};
|
||||
var extractOverwriteOption = new Option<bool>("--overwrite")
|
||||
{
|
||||
Description = "Overwrite existing files."
|
||||
};
|
||||
var extractManifestsOnlyOption = new Option<bool>("--manifests-only")
|
||||
{
|
||||
Description = "Extract only manifest files (not blobs)."
|
||||
};
|
||||
var extractJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output result as JSON."
|
||||
};
|
||||
|
||||
extract.Add(extractBundleOption);
|
||||
extract.Add(extractOutputOption);
|
||||
extract.Add(extractVerifyOption);
|
||||
extract.Add(extractPlatformOption);
|
||||
extract.Add(extractOverwriteOption);
|
||||
extract.Add(extractManifestsOnlyOption);
|
||||
extract.Add(extractJsonOption);
|
||||
extract.Add(verboseOption);
|
||||
|
||||
extract.SetAction((parseResult, _) =>
|
||||
{
|
||||
var bundlePath = parseResult.GetValue(extractBundleOption)!;
|
||||
var outputDir = parseResult.GetValue(extractOutputOption)!;
|
||||
var verifyFirst = parseResult.GetValue(extractVerifyOption);
|
||||
var platform = parseResult.GetValue(extractPlatformOption);
|
||||
var overwrite = parseResult.GetValue(extractOverwriteOption);
|
||||
var manifestsOnly = parseResult.GetValue(extractManifestsOnlyOption);
|
||||
var json = parseResult.GetValue(extractJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleSymbolBundleExtractAsync(
|
||||
services,
|
||||
bundlePath,
|
||||
outputDir,
|
||||
verifyFirst,
|
||||
platform,
|
||||
overwrite,
|
||||
manifestsOnly,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
symbols.Add(extract);
|
||||
|
||||
// symbols inspect
|
||||
var inspect = new Command("inspect", "Inspect bundle contents without extracting.");
|
||||
|
||||
var inspectBundleOption = new Option<string>("--bundle", new[] { "-b" })
|
||||
{
|
||||
Description = "Path to bundle archive.",
|
||||
Required = true
|
||||
};
|
||||
var inspectEntriesOption = new Option<bool>("--entries")
|
||||
{
|
||||
Description = "List all entries in the bundle."
|
||||
};
|
||||
var inspectJsonOption = new Option<bool>("--json")
|
||||
{
|
||||
Description = "Output result as JSON."
|
||||
};
|
||||
|
||||
inspect.Add(inspectBundleOption);
|
||||
inspect.Add(inspectEntriesOption);
|
||||
inspect.Add(inspectJsonOption);
|
||||
inspect.Add(verboseOption);
|
||||
|
||||
inspect.SetAction((parseResult, _) =>
|
||||
{
|
||||
var bundlePath = parseResult.GetValue(inspectBundleOption)!;
|
||||
var showEntries = parseResult.GetValue(inspectEntriesOption);
|
||||
var json = parseResult.GetValue(inspectJsonOption);
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleSymbolBundleInspectAsync(
|
||||
services,
|
||||
bundlePath,
|
||||
showEntries,
|
||||
json,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
symbols.Add(inspect);
|
||||
|
||||
return symbols;
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -4357,6 +4357,61 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
|
||||
?? new ReachabilityExplainResult();
|
||||
}
|
||||
|
||||
// UI-CLI-401-007: Graph explain with DSSE pointers, runtime hits, predicates, counterfactuals
|
||||
public async Task<GraphExplainResult> ExplainGraphAsync(GraphExplainRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
EnsureBackendConfigured();
|
||||
OfflineModeGuard.ThrowIfOffline("graph explain");
|
||||
|
||||
var queryParams = new List<string>();
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.VulnerabilityId))
|
||||
queryParams.Add($"vulnerabilityId={Uri.EscapeDataString(request.VulnerabilityId)}");
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.PackagePurl))
|
||||
queryParams.Add($"packagePurl={Uri.EscapeDataString(request.PackagePurl)}");
|
||||
|
||||
if (request.IncludeCallPaths)
|
||||
queryParams.Add("includeCallPaths=true");
|
||||
|
||||
if (request.IncludeRuntimeHits)
|
||||
queryParams.Add("includeRuntimeHits=true");
|
||||
|
||||
if (request.IncludePredicates)
|
||||
queryParams.Add("includePredicates=true");
|
||||
|
||||
if (request.IncludeDsseEnvelopes)
|
||||
queryParams.Add("includeDsseEnvelopes=true");
|
||||
|
||||
if (request.IncludeCounterfactuals)
|
||||
queryParams.Add("includeCounterfactuals=true");
|
||||
|
||||
var query = queryParams.Count > 0 ? "?" + string.Join("&", queryParams) : "";
|
||||
var relative = $"api/graphs/{Uri.EscapeDataString(request.GraphId)}/explain{query}";
|
||||
|
||||
using var httpRequest = CreateRequest(HttpMethod.Get, relative);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(request.Tenant))
|
||||
{
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", request.Tenant.Trim());
|
||||
}
|
||||
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
throw new HttpRequestException($"Explain graph failed: {message}", null, response.StatusCode);
|
||||
}
|
||||
|
||||
return await response.Content.ReadFromJsonAsync<GraphExplainResult>(JsonOptions, cancellationToken).ConfigureAwait(false)
|
||||
?? new GraphExplainResult();
|
||||
}
|
||||
|
||||
// CLI-SDK-63-001: API spec operations
|
||||
public async Task<ApiSpecListResponse> ListApiSpecsAsync(string? tenant, CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -4660,4 +4715,121 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
|
||||
var result = await response.Content.ReadFromJsonAsync<SdkListResponse>(JsonOptions, cancellationToken).ConfigureAwait(false);
|
||||
return result ?? new SdkListResponse { Success = false, Error = "Empty response" };
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Exports VEX decisions as OpenVEX documents with optional DSSE signing.
|
||||
/// </summary>
|
||||
public async Task<DecisionExportResponse> ExportDecisionsAsync(
|
||||
DecisionExportRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
try
|
||||
{
|
||||
var queryParams = new List<string>();
|
||||
|
||||
if (!string.IsNullOrEmpty(request.ScanId))
|
||||
{
|
||||
queryParams.Add($"scanId={Uri.EscapeDataString(request.ScanId)}");
|
||||
}
|
||||
|
||||
if (request.VulnIds is { Count: > 0 })
|
||||
{
|
||||
foreach (var vulnId in request.VulnIds)
|
||||
{
|
||||
queryParams.Add($"vulnId={Uri.EscapeDataString(vulnId)}");
|
||||
}
|
||||
}
|
||||
|
||||
if (request.Purls is { Count: > 0 })
|
||||
{
|
||||
foreach (var purl in request.Purls)
|
||||
{
|
||||
queryParams.Add($"purl={Uri.EscapeDataString(purl)}");
|
||||
}
|
||||
}
|
||||
|
||||
if (request.Statuses is { Count: > 0 })
|
||||
{
|
||||
foreach (var status in request.Statuses)
|
||||
{
|
||||
queryParams.Add($"status={Uri.EscapeDataString(status)}");
|
||||
}
|
||||
}
|
||||
|
||||
queryParams.Add($"format={Uri.EscapeDataString(request.Format)}");
|
||||
queryParams.Add($"sign={request.Sign.ToString().ToLowerInvariant()}");
|
||||
queryParams.Add($"rekor={request.SubmitToRekor.ToString().ToLowerInvariant()}");
|
||||
queryParams.Add($"includeEvidence={request.IncludeEvidence.ToString().ToLowerInvariant()}");
|
||||
|
||||
var queryString = queryParams.Count > 0 ? "?" + string.Join("&", queryParams) : "";
|
||||
var url = $"{_options.BackendUrl}/api/v1/decisions/export{queryString}";
|
||||
|
||||
using var httpRequest = new HttpRequestMessage(HttpMethod.Get, url);
|
||||
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", request.TenantId);
|
||||
await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false);
|
||||
return new DecisionExportResponse
|
||||
{
|
||||
Success = false,
|
||||
Error = message
|
||||
};
|
||||
}
|
||||
|
||||
var content = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Extract metadata from response headers
|
||||
response.Headers.TryGetValues("X-VEX-Digest", out var digestValues);
|
||||
response.Headers.TryGetValues("X-VEX-Rekor-Index", out var rekorIndexValues);
|
||||
response.Headers.TryGetValues("X-VEX-Rekor-UUID", out var rekorUuidValues);
|
||||
response.Headers.TryGetValues("X-VEX-Statement-Count", out var countValues);
|
||||
response.Headers.TryGetValues("X-VEX-Signed", out var signedValues);
|
||||
|
||||
var digest = digestValues?.FirstOrDefault();
|
||||
var rekorUuid = rekorUuidValues?.FirstOrDefault();
|
||||
long? rekorIndex = null;
|
||||
int statementCount = 0;
|
||||
bool signed = false;
|
||||
|
||||
if (rekorIndexValues?.FirstOrDefault() is { } indexStr && long.TryParse(indexStr, out var idx))
|
||||
{
|
||||
rekorIndex = idx;
|
||||
}
|
||||
|
||||
if (countValues?.FirstOrDefault() is { } countStr && int.TryParse(countStr, out var cnt))
|
||||
{
|
||||
statementCount = cnt;
|
||||
}
|
||||
|
||||
if (signedValues?.FirstOrDefault() is { } signedStr)
|
||||
{
|
||||
signed = signedStr.Equals("true", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
return new DecisionExportResponse
|
||||
{
|
||||
Success = true,
|
||||
Content = content,
|
||||
Digest = digest,
|
||||
RekorLogIndex = rekorIndex,
|
||||
RekorUuid = rekorUuid,
|
||||
StatementCount = statementCount,
|
||||
Signed = signed
|
||||
};
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
return new DecisionExportResponse
|
||||
{
|
||||
Success = false,
|
||||
Error = ex.Message
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -123,6 +123,9 @@ internal interface IBackendOperationsClient
    Task<ReachabilityListResponse> ListReachabilityAnalysesAsync(ReachabilityListRequest request, CancellationToken cancellationToken);
    Task<ReachabilityExplainResult> ExplainReachabilityAsync(ReachabilityExplainRequest request, CancellationToken cancellationToken);

    // UI-CLI-401-007: Graph explain with DSSE pointers, runtime hits, predicates, counterfactuals
    Task<GraphExplainResult> ExplainGraphAsync(GraphExplainRequest request, CancellationToken cancellationToken);

    // CLI-SDK-63-001: API spec download
    Task<ApiSpecListResponse> ListApiSpecsAsync(string? tenant, CancellationToken cancellationToken);
    Task<ApiSpecDownloadResult> DownloadApiSpecAsync(ApiSpecDownloadRequest request, CancellationToken cancellationToken);
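A minimal caller sketch for the new `ExplainGraphAsync` member (illustrative only; it assumes the CLI's usual Microsoft.Extensions.DependencyInjection usings and that `IBackendOperationsClient` is resolvable from the service provider, as the command handlers in this PR do; `FetchGraphEvidenceAsync` is a hypothetical name):

    // Illustrative sketch: request every evidence type for one call graph.
    internal static async Task<GraphExplainResult> FetchGraphEvidenceAsync(
        IServiceProvider services,
        string graphId,
        string? tenant,
        CancellationToken cancellationToken)
    {
        var client = services.GetRequiredService<IBackendOperationsClient>();

        var request = new GraphExplainRequest
        {
            GraphId = graphId,
            Tenant = tenant,
            IncludeCallPaths = true,
            IncludeRuntimeHits = true,
            IncludePredicates = true,
            IncludeDsseEnvelopes = true,
            IncludeCounterfactuals = true
        };

        return await client.ExplainGraphAsync(request, cancellationToken).ConfigureAwait(false);
    }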
100
src/Cli/StellaOps.Cli/Services/Models/DecisionModels.cs
Normal file
@@ -0,0 +1,100 @@
using System.Collections.Generic;

namespace StellaOps.Cli.Services.Models;

/// <summary>
/// Request to export VEX decisions.
/// </summary>
public sealed class DecisionExportRequest
{
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Optional scan identifier to filter decisions.
    /// </summary>
    public string? ScanId { get; init; }

    /// <summary>
    /// Optional vulnerability identifiers to filter.
    /// </summary>
    public List<string>? VulnIds { get; init; }

    /// <summary>
    /// Optional Package URLs to filter.
    /// </summary>
    public List<string>? Purls { get; init; }

    /// <summary>
    /// Optional statuses to filter.
    /// </summary>
    public List<string>? Statuses { get; init; }

    /// <summary>
    /// Output format (openvex, dsse, ndjson).
    /// </summary>
    public string Format { get; init; } = "openvex";

    /// <summary>
    /// Whether to sign the output with DSSE.
    /// </summary>
    public bool Sign { get; init; }

    /// <summary>
    /// Whether to submit DSSE envelope to Rekor.
    /// </summary>
    public bool SubmitToRekor { get; init; }

    /// <summary>
    /// Whether to include reachability evidence blocks.
    /// </summary>
    public bool IncludeEvidence { get; init; } = true;
}

/// <summary>
/// Response from VEX decision export.
/// </summary>
public sealed class DecisionExportResponse
{
    /// <summary>
    /// Whether the export was successful.
    /// </summary>
    public bool Success { get; init; }

    /// <summary>
    /// Error message if export failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// The exported document content.
    /// </summary>
    public string? Content { get; init; }

    /// <summary>
    /// Whether the output was signed.
    /// </summary>
    public bool Signed { get; init; }

    /// <summary>
    /// SHA-256 digest of the payload.
    /// </summary>
    public string? Digest { get; init; }

    /// <summary>
    /// Rekor log index if submitted to transparency log.
    /// </summary>
    public long? RekorLogIndex { get; init; }

    /// <summary>
    /// Rekor entry UUID if submitted to transparency log.
    /// </summary>
    public string? RekorUuid { get; init; }

    /// <summary>
    /// Number of VEX statements in the export.
    /// </summary>
    public int StatementCount { get; init; }
}
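And a usage sketch for these models against the `ExportDecisionsAsync` client method added earlier in this commit (hypothetical wiring; `client` is assumed to be the `BackendOperationsClient` instance and `cancellationToken` to be in scope, while the real caller is `HandleDecisionExportAsync`):

    // Illustrative sketch: export signed OpenVEX for two CVEs and persist the payload.
    var request = new DecisionExportRequest
    {
        TenantId = "tenant-1",
        VulnIds = new List<string> { "CVE-2024-0001", "CVE-2024-0002" },
        Format = "openvex",
        Sign = true,
        SubmitToRekor = true,
        IncludeEvidence = true
    };

    var response = await client.ExportDecisionsAsync(request, cancellationToken);
    if (response.Success && response.Content is not null)
    {
        await File.WriteAllTextAsync("decisions.openvex.json", response.Content, cancellationToken);
        Console.WriteLine($"digest={response.Digest} rekor={response.RekorLogIndex} statements={response.StatementCount}");
    }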
@@ -250,3 +250,272 @@ internal sealed record ReachabilityOverride
    [JsonPropertyName("score")]
    public double? Score { get; init; }
}

// UI-CLI-401-007: Graph explain models with DSSE pointers, runtime hits, predicates, counterfactual controls

/// <summary>
/// Request to explain a call graph with signed evidence.
/// </summary>
internal sealed class GraphExplainRequest
{
    [JsonPropertyName("graphId")]
    public string GraphId { get; init; } = string.Empty;

    [JsonPropertyName("vulnerabilityId")]
    public string? VulnerabilityId { get; init; }

    [JsonPropertyName("packagePurl")]
    public string? PackagePurl { get; init; }

    [JsonPropertyName("includeCallPaths")]
    public bool IncludeCallPaths { get; init; }

    [JsonPropertyName("includeRuntimeHits")]
    public bool IncludeRuntimeHits { get; init; }

    [JsonPropertyName("includePredicates")]
    public bool IncludePredicates { get; init; }

    [JsonPropertyName("includeDsseEnvelopes")]
    public bool IncludeDsseEnvelopes { get; init; }

    [JsonPropertyName("includeCounterfactuals")]
    public bool IncludeCounterfactuals { get; init; }

    [JsonPropertyName("tenant")]
    public string? Tenant { get; init; }
}
|
||||
/// <summary>
|
||||
/// Result of graph explanation with signed evidence.
|
||||
/// </summary>
|
||||
internal sealed class GraphExplainResult
|
||||
{
|
||||
[JsonPropertyName("graphId")]
|
||||
public string GraphId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("graphHash")]
|
||||
public string GraphHash { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("vulnerabilityId")]
|
||||
public string? VulnerabilityId { get; init; }
|
||||
|
||||
[JsonPropertyName("packagePurl")]
|
||||
public string? PackagePurl { get; init; }
|
||||
|
||||
[JsonPropertyName("reachabilityState")]
|
||||
public string ReachabilityState { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("reachabilityScore")]
|
||||
public double? ReachabilityScore { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public string Confidence { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("reasoning")]
|
||||
public string? Reasoning { get; init; }
|
||||
|
||||
[JsonPropertyName("signedCallPaths")]
|
||||
public IReadOnlyList<SignedCallPath> SignedCallPaths { get; init; } = Array.Empty<SignedCallPath>();
|
||||
|
||||
[JsonPropertyName("runtimeHits")]
|
||||
public IReadOnlyList<RuntimeHit> RuntimeHits { get; init; } = Array.Empty<RuntimeHit>();
|
||||
|
||||
[JsonPropertyName("predicates")]
|
||||
public IReadOnlyList<ReachabilityPredicate> Predicates { get; init; } = Array.Empty<ReachabilityPredicate>();
|
||||
|
||||
[JsonPropertyName("dssePointers")]
|
||||
public IReadOnlyList<DssePointer> DssePointers { get; init; } = Array.Empty<DssePointer>();
|
||||
|
||||
[JsonPropertyName("counterfactuals")]
|
||||
public IReadOnlyList<CounterfactualControl> Counterfactuals { get; init; } = Array.Empty<CounterfactualControl>();
|
||||
|
||||
[JsonPropertyName("vexDecision")]
|
||||
public GraphVexDecision? VexDecision { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Call path with cryptographic signature.
|
||||
/// </summary>
|
||||
internal sealed class SignedCallPath
|
||||
{
|
||||
[JsonPropertyName("pathId")]
|
||||
public string PathId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("pathHash")]
|
||||
public string PathHash { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("depth")]
|
||||
public int Depth { get; init; }
|
||||
|
||||
[JsonPropertyName("entryPoint")]
|
||||
public ReachabilityFunction EntryPoint { get; init; } = new();
|
||||
|
||||
[JsonPropertyName("frames")]
|
||||
public IReadOnlyList<ReachabilityFunction> Frames { get; init; } = Array.Empty<ReachabilityFunction>();
|
||||
|
||||
[JsonPropertyName("vulnerableFunction")]
|
||||
public ReachabilityFunction VulnerableFunction { get; init; } = new();
|
||||
|
||||
[JsonPropertyName("dsseEnvelopeId")]
|
||||
public string? DsseEnvelopeId { get; init; }
|
||||
|
||||
[JsonPropertyName("rekorEntryId")]
|
||||
public string? RekorEntryId { get; init; }
|
||||
|
||||
[JsonPropertyName("signedAt")]
|
||||
public DateTimeOffset? SignedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Runtime execution hit from instrumentation probes.
|
||||
/// </summary>
|
||||
internal sealed class RuntimeHit
|
||||
{
|
||||
[JsonPropertyName("hitId")]
|
||||
public string HitId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("functionName")]
|
||||
public string FunctionName { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("className")]
|
||||
public string? ClassName { get; init; }
|
||||
|
||||
[JsonPropertyName("packageName")]
|
||||
public string? PackageName { get; init; }
|
||||
|
||||
[JsonPropertyName("hitCount")]
|
||||
public long HitCount { get; init; }
|
||||
|
||||
[JsonPropertyName("firstObserved")]
|
||||
public DateTimeOffset FirstObserved { get; init; }
|
||||
|
||||
[JsonPropertyName("lastObserved")]
|
||||
public DateTimeOffset LastObserved { get; init; }
|
||||
|
||||
[JsonPropertyName("probeSource")]
|
||||
public string ProbeSource { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("traceId")]
|
||||
public string? TraceId { get; init; }
|
||||
|
||||
[JsonPropertyName("observationWindow")]
|
||||
public string? ObservationWindow { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Semantic predicate attached to reachability evidence.
|
||||
/// </summary>
|
||||
internal sealed class ReachabilityPredicate
|
||||
{
|
||||
[JsonPropertyName("predicateType")]
|
||||
public string PredicateType { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("predicateUri")]
|
||||
public string PredicateUri { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("subject")]
|
||||
public string Subject { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("content")]
|
||||
public string? Content { get; init; }
|
||||
|
||||
[JsonPropertyName("signedBy")]
|
||||
public string? SignedBy { get; init; }
|
||||
|
||||
[JsonPropertyName("timestamp")]
|
||||
public DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope pointer for signed evidence.
|
||||
/// </summary>
|
||||
internal sealed class DssePointer
|
||||
{
|
||||
[JsonPropertyName("envelopeId")]
|
||||
public string EnvelopeId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("payloadType")]
|
||||
public string PayloadType { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("payloadHash")]
|
||||
public string PayloadHash { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("keyId")]
|
||||
public string KeyId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("algorithm")]
|
||||
public string Algorithm { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("rekorLogIndex")]
|
||||
public long? RekorLogIndex { get; init; }
|
||||
|
||||
[JsonPropertyName("rekorLogId")]
|
||||
public string? RekorLogId { get; init; }
|
||||
|
||||
[JsonPropertyName("rekorIntegratedTime")]
|
||||
public DateTimeOffset? RekorIntegratedTime { get; init; }
|
||||
|
||||
[JsonPropertyName("verificationUrl")]
|
||||
public string? VerificationUrl { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Counterfactual control showing what-if scenarios.
|
||||
/// </summary>
|
||||
internal sealed class CounterfactualControl
|
||||
{
|
||||
[JsonPropertyName("controlId")]
|
||||
public string ControlId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("controlType")]
|
||||
public string ControlType { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("description")]
|
||||
public string Description { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("currentState")]
|
||||
public string CurrentState { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("alternativeState")]
|
||||
public string AlternativeState { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("impact")]
|
||||
public string Impact { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("recommendation")]
|
||||
public string? Recommendation { get; init; }
|
||||
|
||||
[JsonPropertyName("affectedPaths")]
|
||||
public IReadOnlyList<string> AffectedPaths { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("riskReduction")]
|
||||
public double? RiskReduction { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VEX decision linked to graph evidence.
|
||||
/// </summary>
|
||||
internal sealed class GraphVexDecision
|
||||
{
|
||||
[JsonPropertyName("vexDocumentId")]
|
||||
public string VexDocumentId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("status")]
|
||||
public string Status { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("justification")]
|
||||
public string? Justification { get; init; }
|
||||
|
||||
[JsonPropertyName("actionStatement")]
|
||||
public string? ActionStatement { get; init; }
|
||||
|
||||
[JsonPropertyName("dsseEnvelopeId")]
|
||||
public string? DsseEnvelopeId { get; init; }
|
||||
|
||||
[JsonPropertyName("rekorEntryId")]
|
||||
public string? RekorEntryId { get; init; }
|
||||
|
||||
[JsonPropertyName("issuedAt")]
|
||||
public DateTimeOffset IssuedAt { get; init; }
|
||||
}
|
||||
|
||||
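For orientation, a minimal consumption sketch for the graph explain models above. The payload handling and the helper class name are illustrative assumptions, not part of this commit.

using System;
using System.Text.Json;

// Illustrative sketch only: deserialize a graph explain response and print a
// short summary. Assumes the GraphExplainResult model above and default
// System.Text.Json options; the helper name is hypothetical.
internal static class GraphExplainSummarySketch
{
    public static void Print(string responseJson)
    {
        var result = JsonSerializer.Deserialize<GraphExplainResult>(responseJson)
            ?? throw new InvalidOperationException("Empty graph explain response.");

        Console.WriteLine($"graph {result.GraphId} ({result.GraphHash})");
        Console.WriteLine($"reachability: {result.ReachabilityState}, confidence: {result.Confidence}");
        Console.WriteLine($"signed call paths: {result.SignedCallPaths.Count}, DSSE pointers: {result.DssePointers.Count}");

        foreach (var control in result.Counterfactuals)
        {
            Console.WriteLine($"  what-if [{control.ControlType}]: {control.Description}");
        }
    }
}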
src/Cli/StellaOps.Cli/Services/Models/SymbolBundleModels.cs (new file, 130 lines)
@@ -0,0 +1,130 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Cli.Services.Models;

// SYMS-BUNDLE-401-014: Symbol bundle CLI models

/// <summary>
/// Request to build a symbol bundle.
/// </summary>
internal sealed record SymbolBundleBuildRequest(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("sourceDir")] string SourceDir,
    [property: JsonPropertyName("outputDir")] string OutputDir,
    [property: JsonPropertyName("platform")] string? Platform = null,
    [property: JsonPropertyName("tenantId")] string? TenantId = null,
    [property: JsonPropertyName("sign")] bool Sign = false,
    [property: JsonPropertyName("signingKeyPath")] string? SigningKeyPath = null,
    [property: JsonPropertyName("keyId")] string? KeyId = null,
    [property: JsonPropertyName("signingAlgorithm")] string SigningAlgorithm = "ecdsa-p256",
    [property: JsonPropertyName("submitRekor")] bool SubmitRekor = false,
    [property: JsonPropertyName("rekorUrl")] string RekorUrl = "https://rekor.sigstore.dev",
    [property: JsonPropertyName("format")] string Format = "zip",
    [property: JsonPropertyName("compressionLevel")] int CompressionLevel = 6);

/// <summary>
/// Result of symbol bundle build operation.
/// </summary>
internal sealed record SymbolBundleBuildResult(
    [property: JsonPropertyName("success")] bool Success,
    [property: JsonPropertyName("bundlePath")] string? BundlePath = null,
    [property: JsonPropertyName("manifestPath")] string? ManifestPath = null,
    [property: JsonPropertyName("bundleId")] string? BundleId = null,
    [property: JsonPropertyName("entryCount")] int EntryCount = 0,
    [property: JsonPropertyName("totalSizeBytes")] long TotalSizeBytes = 0,
    [property: JsonPropertyName("signed")] bool Signed = false,
    [property: JsonPropertyName("rekorLogIndex")] long? RekorLogIndex = null,
    [property: JsonPropertyName("error")] string? Error = null,
    [property: JsonPropertyName("warnings")] IReadOnlyList<string>? Warnings = null,
    [property: JsonPropertyName("durationMs")] long DurationMs = 0);

/// <summary>
/// Request to verify a symbol bundle.
/// </summary>
internal sealed record SymbolBundleVerifyRequest(
    [property: JsonPropertyName("bundlePath")] string BundlePath,
    [property: JsonPropertyName("publicKeyPath")] string? PublicKeyPath = null,
    [property: JsonPropertyName("verifyRekorOffline")] bool VerifyRekorOffline = true,
    [property: JsonPropertyName("rekorPublicKeyPath")] string? RekorPublicKeyPath = null,
    [property: JsonPropertyName("verifyBlobHashes")] bool VerifyBlobHashes = true);

/// <summary>
/// Result of symbol bundle verification.
/// </summary>
internal sealed record SymbolBundleVerifyResult(
    [property: JsonPropertyName("valid")] bool Valid,
    [property: JsonPropertyName("bundleId")] string? BundleId = null,
    [property: JsonPropertyName("name")] string? Name = null,
    [property: JsonPropertyName("version")] string? Version = null,
    [property: JsonPropertyName("signatureStatus")] string SignatureStatus = "unsigned",
    [property: JsonPropertyName("rekorStatus")] string? RekorStatus = null,
    [property: JsonPropertyName("hashStatus")] SymbolBundleHashStatus? HashStatus = null,
    [property: JsonPropertyName("errors")] IReadOnlyList<string>? Errors = null,
    [property: JsonPropertyName("warnings")] IReadOnlyList<string>? Warnings = null);

/// <summary>
/// Hash verification status for a bundle.
/// </summary>
internal sealed record SymbolBundleHashStatus(
    [property: JsonPropertyName("bundleHashValid")] bool BundleHashValid,
    [property: JsonPropertyName("validEntries")] int ValidEntries,
    [property: JsonPropertyName("invalidEntries")] int InvalidEntries,
    [property: JsonPropertyName("totalEntries")] int TotalEntries,
    [property: JsonPropertyName("invalidEntryIds")] IReadOnlyList<string>? InvalidEntryIds = null);

/// <summary>
/// Request to extract a symbol bundle.
/// </summary>
internal sealed record SymbolBundleExtractRequest(
    [property: JsonPropertyName("bundlePath")] string BundlePath,
    [property: JsonPropertyName("outputDir")] string OutputDir,
    [property: JsonPropertyName("verifyFirst")] bool VerifyFirst = true,
    [property: JsonPropertyName("platform")] string? Platform = null,
    [property: JsonPropertyName("overwrite")] bool Overwrite = false,
    [property: JsonPropertyName("manifestsOnly")] bool ManifestsOnly = false);

/// <summary>
/// Result of symbol bundle extraction.
/// </summary>
internal sealed record SymbolBundleExtractResult(
    [property: JsonPropertyName("success")] bool Success,
    [property: JsonPropertyName("extractedCount")] int ExtractedCount = 0,
    [property: JsonPropertyName("skippedCount")] int SkippedCount = 0,
    [property: JsonPropertyName("totalBytesExtracted")] long TotalBytesExtracted = 0,
    [property: JsonPropertyName("verificationPassed")] bool? VerificationPassed = null,
    [property: JsonPropertyName("error")] string? Error = null,
    [property: JsonPropertyName("durationMs")] long DurationMs = 0);

/// <summary>
/// Symbol bundle manifest info for inspection.
/// </summary>
internal sealed record SymbolBundleInfo(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("platform")] string? Platform = null,
    [property: JsonPropertyName("tenantId")] string? TenantId = null,
    [property: JsonPropertyName("entryCount")] int EntryCount = 0,
    [property: JsonPropertyName("totalSizeBytes")] long TotalSizeBytes = 0,
    [property: JsonPropertyName("hashAlgorithm")] string HashAlgorithm = "blake3",
    [property: JsonPropertyName("signed")] bool Signed = false,
    [property: JsonPropertyName("signatureAlgorithm")] string? SignatureAlgorithm = null,
    [property: JsonPropertyName("signatureKeyId")] string? SignatureKeyId = null,
    [property: JsonPropertyName("rekorLogIndex")] long? RekorLogIndex = null,
    [property: JsonPropertyName("entries")] IReadOnlyList<SymbolBundleEntryInfo>? Entries = null);

/// <summary>
/// Individual entry in a symbol bundle.
/// </summary>
internal sealed record SymbolBundleEntryInfo(
    [property: JsonPropertyName("debugId")] string DebugId,
    [property: JsonPropertyName("binaryName")] string BinaryName,
    [property: JsonPropertyName("platform")] string? Platform = null,
    [property: JsonPropertyName("format")] string? Format = null,
    [property: JsonPropertyName("blobHash")] string? BlobHash = null,
    [property: JsonPropertyName("blobSizeBytes")] long BlobSizeBytes = 0,
    [property: JsonPropertyName("symbolCount")] int SymbolCount = 0);
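A minimal usage sketch for the build-request record, assuming default System.Text.Json behavior. The paths, bundle name, and key identifiers are invented placeholders, not values from this commit.

using System;
using System.Text.Json;

// Illustrative sketch only: construct a signed build request and round-trip it
// through JSON. Defaults (zip format, ecdsa-p256, compression level 6) come from
// the record definition above.
internal static class SymbolBundleRequestSketch
{
    public static SymbolBundleBuildRequest CreateSigned()
    {
        var request = new SymbolBundleBuildRequest(
            Name: "sample-service-symbols",
            Version: "1.2.3",
            SourceDir: "/tmp/symbols/in",
            OutputDir: "/tmp/symbols/out",
            Sign: true,
            SigningKeyPath: "/tmp/keys/bundle-signing.pem",
            KeyId: "bundle-key-1");

        // The JsonPropertyName attributes drive the wire casing ("sourceDir", "sign", ...).
        var json = JsonSerializer.Serialize(request);
        return JsonSerializer.Deserialize<SymbolBundleBuildRequest>(json)!;
    }
}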
src/Cli/StellaOps.Cli/Services/Models/VexExplainModels.cs (new file, 264 lines)
@@ -0,0 +1,264 @@
// UI-VEX-401-032: VEX Decision Explanation Models
// Provides comprehensive decision explanation with reachability evidence and attestation details

using System.Text.Json.Serialization;

namespace StellaOps.Cli.Services.Models;

/// <summary>
/// Complete VEX decision explanation with all supporting evidence.
/// </summary>
internal sealed class VexDecisionExplanation
{
    [JsonPropertyName("vulnerabilityId")]
    public required string VulnerabilityId { get; init; }

    [JsonPropertyName("productKey")]
    public required string ProductKey { get; init; }

    [JsonPropertyName("tenant")]
    public required string Tenant { get; init; }

    [JsonPropertyName("timestamp")]
    public DateTimeOffset Timestamp { get; init; }

    [JsonPropertyName("decision")]
    public required VexDecisionSummary Decision { get; init; }

    [JsonPropertyName("callPathEvidence")]
    public CallPathEvidence? CallPathEvidence { get; set; }

    [JsonPropertyName("runtimeHitEvidence")]
    public RuntimeHitEvidence? RuntimeHitEvidence { get; set; }

    [JsonPropertyName("graphMetadata")]
    public ReachabilityGraphMetadata? GraphMetadata { get; set; }

    [JsonPropertyName("dsseAttestation")]
    public DsseAttestationInfo? DsseAttestation { get; set; }

    [JsonPropertyName("rekorEntry")]
    public RekorEntryInfo? RekorEntry { get; set; }
}

/// <summary>
/// VEX decision summary with status and justification.
/// </summary>
internal sealed class VexDecisionSummary
{
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    [JsonPropertyName("justification")]
    public required string Justification { get; init; }

    [JsonPropertyName("impactStatement")]
    public required string ImpactStatement { get; init; }

    [JsonPropertyName("decisionSource")]
    public required string DecisionSource { get; init; }
}

/// <summary>
/// Call path analysis evidence showing reachability status.
/// </summary>
internal sealed class CallPathEvidence
{
    [JsonPropertyName("analysisMethod")]
    public required string AnalysisMethod { get; init; }

    [JsonPropertyName("entryPointsAnalyzed")]
    public int EntryPointsAnalyzed { get; init; }

    [JsonPropertyName("vulnerableFunctionsIdentified")]
    public int VulnerableFunctionsIdentified { get; init; }

    [JsonPropertyName("pathsToVulnerableCode")]
    public int PathsToVulnerableCode { get; init; }

    [JsonPropertyName("vulnerableFunction")]
    public FunctionReference? VulnerableFunction { get; init; }

    [JsonPropertyName("nearestReachableDistance")]
    public int? NearestReachableDistance { get; init; }

    [JsonPropertyName("analysisComplete")]
    public bool AnalysisComplete { get; init; }
}

/// <summary>
/// Reference to a specific function in code.
/// </summary>
internal sealed class FunctionReference
{
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    [JsonPropertyName("module")]
    public required string Module { get; init; }

    [JsonPropertyName("file")]
    public required string File { get; init; }

    [JsonPropertyName("line")]
    public int Line { get; init; }
}

/// <summary>
/// Runtime execution hit evidence from production telemetry.
/// </summary>
internal sealed class RuntimeHitEvidence
{
    [JsonPropertyName("collectionPeriod")]
    public required DateRange CollectionPeriod { get; init; }

    [JsonPropertyName("totalExecutions")]
    public long TotalExecutions { get; init; }

    [JsonPropertyName("vulnerableFunctionHits")]
    public long VulnerableFunctionHits { get; init; }

    [JsonPropertyName("coveragePercentage")]
    public decimal CoveragePercentage { get; init; }

    [JsonPropertyName("profilingMethod")]
    public required string ProfilingMethod { get; init; }

    [JsonPropertyName("confidenceLevel")]
    public required string ConfidenceLevel { get; init; }
}

/// <summary>
/// Date range for evidence collection period.
/// </summary>
internal sealed class DateRange
{
    [JsonPropertyName("start")]
    public DateTimeOffset Start { get; init; }

    [JsonPropertyName("end")]
    public DateTimeOffset End { get; init; }
}

/// <summary>
/// Metadata about the reachability graph used for analysis.
/// </summary>
internal sealed class ReachabilityGraphMetadata
{
    [JsonPropertyName("graphId")]
    public required string GraphId { get; init; }

    [JsonPropertyName("buildTimestamp")]
    public DateTimeOffset BuildTimestamp { get; init; }

    [JsonPropertyName("totalNodes")]
    public int TotalNodes { get; init; }

    [JsonPropertyName("totalEdges")]
    public int TotalEdges { get; init; }

    [JsonPropertyName("entryPoints")]
    public int EntryPoints { get; init; }

    [JsonPropertyName("vulnerableSinks")]
    public int VulnerableSinks { get; init; }

    [JsonPropertyName("algorithm")]
    public required string Algorithm { get; init; }

    [JsonPropertyName("analysisDurationMs")]
    public long AnalysisDurationMs { get; init; }
}

/// <summary>
/// DSSE (Dead Simple Signing Envelope) attestation information.
/// </summary>
internal sealed class DsseAttestationInfo
{
    [JsonPropertyName("payloadType")]
    public required string PayloadType { get; init; }

    [JsonPropertyName("digestAlgorithm")]
    public required string DigestAlgorithm { get; init; }

    [JsonPropertyName("payloadDigest")]
    public required string PayloadDigest { get; init; }

    [JsonPropertyName("signatures")]
    public required List<VexDsseSignatureInfo> Signatures { get; init; }

    [JsonPropertyName("verificationStatus")]
    public string? VerificationStatus { get; set; }

    [JsonPropertyName("verifiedAt")]
    public DateTimeOffset? VerifiedAt { get; set; }
}

/// <summary>
/// Information about a DSSE signature for VEX explanations.
/// </summary>
internal sealed class VexDsseSignatureInfo
{
    [JsonPropertyName("keyId")]
    public required string KeyId { get; init; }

    [JsonPropertyName("algorithm")]
    public required string Algorithm { get; init; }

    [JsonPropertyName("signedAt")]
    public DateTimeOffset SignedAt { get; init; }

    [JsonPropertyName("publicKeyFingerprint")]
    public required string PublicKeyFingerprint { get; init; }
}

/// <summary>
/// Rekor transparency log entry information.
/// </summary>
internal sealed class RekorEntryInfo
{
    [JsonPropertyName("rekorUrl")]
    public required string RekorUrl { get; init; }

    [JsonPropertyName("logIndex")]
    public long LogIndex { get; init; }

    [JsonPropertyName("entryUuid")]
    public required string EntryUuid { get; init; }

    [JsonPropertyName("integratedTime")]
    public DateTimeOffset IntegratedTime { get; init; }

    [JsonPropertyName("treeSize")]
    public long TreeSize { get; init; }

    [JsonPropertyName("rootHash")]
    public required string RootHash { get; init; }

    [JsonPropertyName("inclusionProof")]
    public InclusionProofInfo? InclusionProof { get; set; }

    [JsonPropertyName("inclusionVerified")]
    public bool? InclusionVerified { get; set; }

    [JsonPropertyName("verifiedAt")]
    public DateTimeOffset? VerifiedAt { get; set; }
}

/// <summary>
/// Merkle tree inclusion proof for Rekor verification.
/// </summary>
internal sealed class InclusionProofInfo
{
    [JsonPropertyName("logIndex")]
    public long LogIndex { get; init; }

    [JsonPropertyName("treeSize")]
    public long TreeSize { get; init; }

    [JsonPropertyName("rootHash")]
    public required string RootHash { get; init; }

    [JsonPropertyName("hashes")]
    public required List<string> Hashes { get; init; }
}
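A minimal construction sketch for the explanation model above, showing the object-initializer shape implied by the required members. Every literal value is an invented placeholder.

using System;

// Illustrative sketch only: build a VexDecisionExplanation with its required
// members populated. Statuses, identifiers, and counts are assumptions.
internal static class VexExplanationSketch
{
    public static VexDecisionExplanation Sample() => new()
    {
        VulnerabilityId = "CVE-2025-1234",
        ProductKey = "pkg:npm/lodash@4.17.21",
        Tenant = "tenant-dev",
        Timestamp = DateTimeOffset.UtcNow,
        Decision = new VexDecisionSummary
        {
            Status = "not_affected",
            Justification = "vulnerable_code_not_in_execute_path",
            ImpactStatement = "The vulnerable function is never reached from application entry points.",
            DecisionSource = "policy-engine"
        },
        GraphMetadata = new ReachabilityGraphMetadata
        {
            GraphId = "graph-001",
            Algorithm = "hybrid-static-runtime",
            TotalNodes = 1200,
            TotalEdges = 4800
        }
    };
}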
@@ -101,7 +101,9 @@ internal sealed record VexConsensusDetailResponse(
    [property: JsonPropertyName("quorum")] VexQuorumInfo? Quorum = null,
    [property: JsonPropertyName("rationale")] VexRationaleInfo? Rationale = null,
    [property: JsonPropertyName("signature")] VexSignatureInfo? Signature = null,
-   [property: JsonPropertyName("evidence")] IReadOnlyList<VexEvidenceInfo>? Evidence = null);
+   [property: JsonPropertyName("evidence")] IReadOnlyList<VexEvidenceInfo>? Evidence = null,
+   // GAP-VEX-006: Reachability evidence
+   [property: JsonPropertyName("reachabilityEvidence")] VexReachabilityEvidence? ReachabilityEvidence = null);

/// <summary>
/// VEX quorum information showing how consensus was reached.
@@ -256,3 +258,42 @@ internal sealed record VexExportVerifyResult(
    [property: JsonPropertyName("keyId")] string? KeyId = null,
    [property: JsonPropertyName("signedAt")] DateTimeOffset? SignedAt = null,
    [property: JsonPropertyName("errors")] IReadOnlyList<string>? Errors = null);

// GAP-VEX-006: Reachability evidence models for VEX decisions

/// <summary>
/// Reachability evidence linked to VEX decision.
/// </summary>
internal sealed record VexReachabilityEvidence(
    [property: JsonPropertyName("graphHash")] string? GraphHash = null,
    [property: JsonPropertyName("graphCasUri")] string? GraphCasUri = null,
    [property: JsonPropertyName("graphAlgorithm")] string? GraphAlgorithm = null,
    [property: JsonPropertyName("graphGeneratedAt")] DateTimeOffset? GraphGeneratedAt = null,
    [property: JsonPropertyName("reachabilityState")] string? ReachabilityState = null,
    [property: JsonPropertyName("confidence")] double? Confidence = null,
    [property: JsonPropertyName("callPaths")] IReadOnlyList<VexCallPath>? CallPaths = null,
    [property: JsonPropertyName("runtimeHits")] IReadOnlyList<VexRuntimeHit>? RuntimeHits = null,
    [property: JsonPropertyName("dsseEnvelopeId")] string? DsseEnvelopeId = null,
    [property: JsonPropertyName("rekorEntryId")] string? RekorEntryId = null);

/// <summary>
/// Call path evidence for VEX decision.
/// </summary>
internal sealed record VexCallPath(
    [property: JsonPropertyName("pathId")] string PathId,
    [property: JsonPropertyName("pathHash")] string? PathHash = null,
    [property: JsonPropertyName("depth")] int Depth = 0,
    [property: JsonPropertyName("entryPoint")] string EntryPoint = "",
    [property: JsonPropertyName("frames")] IReadOnlyList<string> Frames = null!,
    [property: JsonPropertyName("vulnerableFunction")] string VulnerableFunction = "",
    [property: JsonPropertyName("dsseEnvelopeId")] string? DsseEnvelopeId = null,
    [property: JsonPropertyName("rekorEntryId")] string? RekorEntryId = null);

/// <summary>
/// Runtime execution hit evidence for VEX decision.
/// </summary>
internal sealed record VexRuntimeHit(
    [property: JsonPropertyName("functionName")] string FunctionName,
    [property: JsonPropertyName("hitCount")] long HitCount = 0,
    [property: JsonPropertyName("probeSource")] string? ProbeSource = null,
    [property: JsonPropertyName("lastObserved")] DateTimeOffset? LastObserved = null);
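A short population sketch for the reachability evidence records added above. The hash, function names, confidence score, and probe source are invented placeholders.

// Illustrative sketch only: build the GAP-VEX-006 reachability evidence shape.
internal static class VexReachabilityEvidenceSketch
{
    public static VexReachabilityEvidence Sample()
    {
        var path = new VexCallPath(
            PathId: "path-1",
            Depth: 3,
            EntryPoint: "Program.Main",
            Frames: new[] { "Program.Main", "OrderService.Submit", "JsonParser.Parse" },
            VulnerableFunction: "JsonParser.Parse");

        return new VexReachabilityEvidence(
            GraphHash: "sha256:deadbeef",
            GraphAlgorithm: "hybrid-static-runtime",
            ReachabilityState: "reachable",
            Confidence: 0.87,
            CallPaths: new[] { path },
            RuntimeHits: new[] { new VexRuntimeHit("JsonParser.Parse", HitCount: 42, ProbeSource: "ebpf") });
    }
}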
@@ -66,6 +66,9 @@ internal static class CliMetrics
    private static readonly Counter<long> BunResolveCounter = Meter.CreateCounter<long>("stellaops.cli.bun.resolve.count");
    private static readonly Counter<long> AttestSignCounter = Meter.CreateCounter<long>("stellaops.cli.attest.sign.count");
    private static readonly Counter<long> AttestVerifyCounter = Meter.CreateCounter<long>("stellaops.cli.attest.verify.count");
    private static readonly Counter<long> DecisionExportCounter = Meter.CreateCounter<long>("stellaops.cli.decision.export.count");
    private static readonly Counter<long> DecisionVerifyCounter = Meter.CreateCounter<long>("stellaops.cli.decision.verify.count");
    private static readonly Counter<long> DecisionCompareCounter = Meter.CreateCounter<long>("stellaops.cli.decision.compare.count");
    private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");

    public static void RecordScannerDownload(string channel, bool fromCache)
@@ -183,6 +186,30 @@ internal static class CliMetrics
        => AttestVerifyCounter.Add(1, WithSealedModeTag(
            Tag("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)));

    /// <summary>
    /// Records a VEX decision export operation.
    /// </summary>
    /// <param name="outcome">The export outcome (success, error).</param>
    public static void RecordDecisionExport(string outcome)
        => DecisionExportCounter.Add(1, WithSealedModeTag(
            Tag("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)));

    /// <summary>
    /// Records a VEX decision verification operation.
    /// </summary>
    /// <param name="outcome">The verification outcome (success, failed, error).</param>
    public static void RecordDecisionVerify(string outcome)
        => DecisionVerifyCounter.Add(1, WithSealedModeTag(
            Tag("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)));

    /// <summary>
    /// Records a VEX decision comparison operation.
    /// </summary>
    /// <param name="outcome">The comparison outcome (success, error).</param>
    public static void RecordDecisionCompare(string outcome)
        => DecisionCompareCounter.Add(1, WithSealedModeTag(
            Tag("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)));

    public static IDisposable MeasureCommandDuration(string command)
    {
        var start = DateTime.UtcNow;
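A hypothetical call site for the new decision metrics, combining them with the existing MeasureCommandDuration helper. The command name, outcome strings, and wrapper method are assumptions for illustration, not code from this commit.

using System;
using System.Threading.Tasks;

// Illustrative sketch only: record a duration plus a success/error outcome
// around a decision export, as a command handler might.
internal static class DecisionMetricsUsageSketch
{
    public static async Task<int> RunDecisionExportAsync(Func<Task> export)
    {
        using var _ = CliMetrics.MeasureCommandDuration("vex decision export");
        try
        {
            await export();
            CliMetrics.RecordDecisionExport("success");
            return 0;
        }
        catch
        {
            CliMetrics.RecordDecisionExport("error");
            return 1;
        }
    }
}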
@@ -0,0 +1,197 @@
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Evidence;
using StellaOps.Excititor.Storage.Postgres;
using StellaOps.Excititor.Storage.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres.Options;
using Xunit;

namespace StellaOps.Excititor.Storage.Postgres.Tests;

[Collection(ExcititorPostgresCollection.Name)]
public sealed class PostgresVexAttestationStoreTests : IAsyncLifetime
{
    private readonly ExcititorPostgresFixture _fixture;
    private readonly PostgresVexAttestationStore _store;
    private readonly ExcititorDataSource _dataSource;
    private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

    public PostgresVexAttestationStoreTests(ExcititorPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = Options.Create(new PostgresOptions
        {
            ConnectionString = fixture.ConnectionString,
            SchemaName = fixture.SchemaName,
            AutoMigrate = false
        });

        _dataSource = new ExcititorDataSource(options, NullLogger<ExcititorDataSource>.Instance);
        _store = new PostgresVexAttestationStore(_dataSource, NullLogger<PostgresVexAttestationStore>.Instance);
    }

    public async Task InitializeAsync()
    {
        await _fixture.Fixture.RunMigrationsFromAssemblyAsync(
            typeof(ExcititorDataSource).Assembly,
            moduleName: "Excititor",
            resourcePrefix: "Migrations",
            cancellationToken: CancellationToken.None);

        await _fixture.TruncateAllTablesAsync();
    }

    public async Task DisposeAsync()
    {
        await _dataSource.DisposeAsync();
    }

    [Fact]
    public async Task SaveAndFindById_RoundTripsAttestation()
    {
        // Arrange
        var attestation = CreateAttestation("attest-1", "manifest-1");

        // Act
        await _store.SaveAsync(attestation, CancellationToken.None);
        var fetched = await _store.FindByIdAsync(_tenantId, "attest-1", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.AttestationId.Should().Be("attest-1");
        fetched.ManifestId.Should().Be("manifest-1");
        fetched.MerkleRoot.Should().Be("sha256:merkle123");
        fetched.DsseEnvelopeHash.Should().Be("sha256:envelope456");
        fetched.ItemCount.Should().Be(10);
        fetched.Metadata.Should().ContainKey("source");
    }

    [Fact]
    public async Task FindByIdAsync_ReturnsNullForUnknownId()
    {
        // Act
        var result = await _store.FindByIdAsync(_tenantId, "nonexistent", CancellationToken.None);

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task FindByManifestIdAsync_ReturnsMatchingAttestation()
    {
        // Arrange
        var attestation = CreateAttestation("attest-2", "manifest-target");
        await _store.SaveAsync(attestation, CancellationToken.None);

        // Act
        var fetched = await _store.FindByManifestIdAsync(_tenantId, "manifest-target", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.AttestationId.Should().Be("attest-2");
        fetched.ManifestId.Should().Be("manifest-target");
    }

    [Fact]
    public async Task SaveAsync_UpdatesExistingAttestation()
    {
        // Arrange
        var original = CreateAttestation("attest-update", "manifest-old");
        var updated = new VexStoredAttestation(
            "attest-update",
            _tenantId,
            "manifest-new",
            "sha256:newmerkle",
            "{\"updated\":true}",
            "sha256:newhash",
            20,
            DateTimeOffset.UtcNow,
            ImmutableDictionary<string, string>.Empty.Add("version", "2"));

        // Act
        await _store.SaveAsync(original, CancellationToken.None);
        await _store.SaveAsync(updated, CancellationToken.None);
        var fetched = await _store.FindByIdAsync(_tenantId, "attest-update", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.ManifestId.Should().Be("manifest-new");
        fetched.ItemCount.Should().Be(20);
        fetched.Metadata.Should().ContainKey("version");
    }

    [Fact]
    public async Task CountAsync_ReturnsCorrectCount()
    {
        // Arrange
        await _store.SaveAsync(CreateAttestation("attest-a", "manifest-a"), CancellationToken.None);
        await _store.SaveAsync(CreateAttestation("attest-b", "manifest-b"), CancellationToken.None);
        await _store.SaveAsync(CreateAttestation("attest-c", "manifest-c"), CancellationToken.None);

        // Act
        var count = await _store.CountAsync(_tenantId, CancellationToken.None);

        // Assert
        count.Should().Be(3);
    }

    [Fact]
    public async Task ListAsync_ReturnsPaginatedResults()
    {
        // Arrange
        for (int i = 0; i < 5; i++)
        {
            var attestation = CreateAttestation($"attest-{i:D2}", $"manifest-{i:D2}");
            await _store.SaveAsync(attestation, CancellationToken.None);
        }

        // Act
        var query = new VexAttestationQuery(_tenantId, limit: 2, offset: 0);
        var result = await _store.ListAsync(query, CancellationToken.None);

        // Assert
        result.Items.Should().HaveCount(2);
        result.TotalCount.Should().Be(5);
        result.HasMore.Should().BeTrue();
    }

    [Fact]
    public async Task ListAsync_FiltersBySinceAndUntil()
    {
        // Arrange
        var now = DateTimeOffset.UtcNow;
        var attestations = new[]
        {
            CreateAttestation("old-attest", "manifest-old", now.AddDays(-10)),
            CreateAttestation("recent-attest", "manifest-recent", now.AddDays(-1)),
            CreateAttestation("new-attest", "manifest-new", now)
        };

        foreach (var a in attestations)
        {
            await _store.SaveAsync(a, CancellationToken.None);
        }

        // Act
        var query = new VexAttestationQuery(_tenantId, since: now.AddDays(-2), until: now.AddDays(1));
        var result = await _store.ListAsync(query, CancellationToken.None);

        // Assert
        result.Items.Should().HaveCount(2);
        result.Items.Select(a => a.AttestationId).Should().Contain(new[] { "recent-attest", "new-attest" });
    }

    private VexStoredAttestation CreateAttestation(string attestationId, string manifestId, DateTimeOffset? attestedAt = null) =>
        new VexStoredAttestation(
            attestationId,
            _tenantId,
            manifestId,
            "sha256:merkle123",
            "{\"payloadType\":\"application/vnd.in-toto+json\"}",
            "sha256:envelope456",
            10,
            attestedAt ?? DateTimeOffset.UtcNow,
            ImmutableDictionary<string, string>.Empty.Add("source", "test"));
}
@@ -0,0 +1,226 @@
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Postgres;
using StellaOps.Excititor.Storage.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres.Options;
using Xunit;

namespace StellaOps.Excititor.Storage.Postgres.Tests;

[Collection(ExcititorPostgresCollection.Name)]
public sealed class PostgresVexObservationStoreTests : IAsyncLifetime
{
    private readonly ExcititorPostgresFixture _fixture;
    private readonly PostgresVexObservationStore _store;
    private readonly ExcititorDataSource _dataSource;
    private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

    public PostgresVexObservationStoreTests(ExcititorPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = Options.Create(new PostgresOptions
        {
            ConnectionString = fixture.ConnectionString,
            SchemaName = fixture.SchemaName,
            AutoMigrate = false
        });

        _dataSource = new ExcititorDataSource(options, NullLogger<ExcititorDataSource>.Instance);
        _store = new PostgresVexObservationStore(_dataSource, NullLogger<PostgresVexObservationStore>.Instance);
    }

    public async Task InitializeAsync()
    {
        await _fixture.Fixture.RunMigrationsFromAssemblyAsync(
            typeof(ExcititorDataSource).Assembly,
            moduleName: "Excititor",
            resourcePrefix: "Migrations",
            cancellationToken: CancellationToken.None);

        await _fixture.TruncateAllTablesAsync();
    }

    public async Task DisposeAsync()
    {
        await _dataSource.DisposeAsync();
    }

    [Fact]
    public async Task InsertAndGetById_RoundTripsObservation()
    {
        // Arrange
        var observation = CreateObservation("obs-1", "provider-a", "CVE-2025-1234", "pkg:npm/lodash@4.17.21");

        // Act
        var inserted = await _store.InsertAsync(observation, CancellationToken.None);
        var fetched = await _store.GetByIdAsync(_tenantId, "obs-1", CancellationToken.None);

        // Assert
        inserted.Should().BeTrue();
        fetched.Should().NotBeNull();
        fetched!.ObservationId.Should().Be("obs-1");
        fetched.ProviderId.Should().Be("provider-a");
        fetched.Statements.Should().HaveCount(1);
        fetched.Statements[0].VulnerabilityId.Should().Be("CVE-2025-1234");
        fetched.Statements[0].ProductKey.Should().Be("pkg:npm/lodash@4.17.21");
    }

    [Fact]
    public async Task GetByIdAsync_ReturnsNullForUnknownId()
    {
        // Act
        var result = await _store.GetByIdAsync(_tenantId, "nonexistent", CancellationToken.None);

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task InsertAsync_ReturnsFalseForDuplicateId()
    {
        // Arrange
        var observation = CreateObservation("obs-dup", "provider-a", "CVE-2025-9999", "pkg:npm/test@1.0.0");

        // Act
        var first = await _store.InsertAsync(observation, CancellationToken.None);
        var second = await _store.InsertAsync(observation, CancellationToken.None);

        // Assert
        first.Should().BeTrue();
        second.Should().BeFalse();
    }

    [Fact]
    public async Task UpsertAsync_UpdatesExistingObservation()
    {
        // Arrange
        var original = CreateObservation("obs-upsert", "provider-a", "CVE-2025-0001", "pkg:npm/old@1.0.0");
        var updated = CreateObservation("obs-upsert", "provider-b", "CVE-2025-0001", "pkg:npm/new@2.0.0");

        // Act
        await _store.InsertAsync(original, CancellationToken.None);
        await _store.UpsertAsync(updated, CancellationToken.None);
        var fetched = await _store.GetByIdAsync(_tenantId, "obs-upsert", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.ProviderId.Should().Be("provider-b");
        fetched.Statements[0].ProductKey.Should().Be("pkg:npm/new@2.0.0");
    }

    [Fact]
    public async Task FindByProviderAsync_ReturnsMatchingObservations()
    {
        // Arrange
        await _store.InsertAsync(CreateObservation("obs-p1", "redhat-csaf", "CVE-2025-1111", "pkg:rpm/test@1.0"), CancellationToken.None);
        await _store.InsertAsync(CreateObservation("obs-p2", "redhat-csaf", "CVE-2025-2222", "pkg:rpm/test@2.0"), CancellationToken.None);
        await _store.InsertAsync(CreateObservation("obs-p3", "ubuntu-csaf", "CVE-2025-3333", "pkg:deb/test@1.0"), CancellationToken.None);

        // Act
        var found = await _store.FindByProviderAsync(_tenantId, "redhat-csaf", limit: 10, CancellationToken.None);

        // Assert
        found.Should().HaveCount(2);
        found.Select(o => o.ObservationId).Should().Contain(new[] { "obs-p1", "obs-p2" });
    }

    [Fact]
    public async Task CountAsync_ReturnsCorrectCount()
    {
        // Arrange
        await _store.InsertAsync(CreateObservation("obs-c1", "provider-a", "CVE-1", "pkg:1"), CancellationToken.None);
        await _store.InsertAsync(CreateObservation("obs-c2", "provider-a", "CVE-2", "pkg:2"), CancellationToken.None);

        // Act
        var count = await _store.CountAsync(_tenantId, CancellationToken.None);

        // Assert
        count.Should().Be(2);
    }

    [Fact]
    public async Task DeleteAsync_RemovesObservation()
    {
        // Arrange
        await _store.InsertAsync(CreateObservation("obs-del", "provider-a", "CVE-DEL", "pkg:del"), CancellationToken.None);

        // Act
        var deleted = await _store.DeleteAsync(_tenantId, "obs-del", CancellationToken.None);
        var fetched = await _store.GetByIdAsync(_tenantId, "obs-del", CancellationToken.None);

        // Assert
        deleted.Should().BeTrue();
        fetched.Should().BeNull();
    }

    [Fact]
    public async Task InsertManyAsync_InsertsMultipleObservations()
    {
        // Arrange
        var observations = new[]
        {
            CreateObservation("batch-1", "provider-a", "CVE-B1", "pkg:b1"),
            CreateObservation("batch-2", "provider-a", "CVE-B2", "pkg:b2"),
            CreateObservation("batch-3", "provider-a", "CVE-B3", "pkg:b3")
        };

        // Act
        var inserted = await _store.InsertManyAsync(_tenantId, observations, CancellationToken.None);

        // Assert
        inserted.Should().Be(3);
        var count = await _store.CountAsync(_tenantId, CancellationToken.None);
        count.Should().Be(3);
    }

    private VexObservation CreateObservation(string observationId, string providerId, string vulnId, string productKey)
    {
        var now = DateTimeOffset.UtcNow;

        var statement = new VexObservationStatement(
            vulnId,
            productKey,
            VexClaimStatus.NotAffected,
            lastObserved: now,
            purl: productKey,
            cpe: null,
            evidence: ImmutableArray<JsonNode>.Empty);

        var upstream = new VexObservationUpstream(
            upstreamId: observationId,
            documentVersion: "1.0",
            fetchedAt: now,
            receivedAt: now,
            contentHash: $"sha256:{Guid.NewGuid():N}",
            signature: new VexObservationSignature(present: false, null, null, null));

        var linkset = new VexObservationLinkset(
            aliases: [vulnId],
            purls: [productKey],
            cpes: [],
            references: [new VexObservationReference("source", $"https://example.test/{observationId}")]);

        var content = new VexObservationContent(
            format: "csaf",
            specVersion: "2.0",
            raw: JsonNode.Parse("""{"document":"test"}""")!);

        return new VexObservation(
            observationId,
            _tenantId,
            providerId,
            streamId: "stream-default",
            upstream,
            [statement],
            content,
            linkset,
            now,
            supersedes: null,
            attributes: ImmutableDictionary<string, string>.Empty);
    }
}
@@ -0,0 +1,156 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Postgres;
using StellaOps.Excititor.Storage.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres.Options;
using Xunit;

namespace StellaOps.Excititor.Storage.Postgres.Tests;

[Collection(ExcititorPostgresCollection.Name)]
public sealed class PostgresVexProviderStoreTests : IAsyncLifetime
{
    private readonly ExcititorPostgresFixture _fixture;
    private readonly PostgresVexProviderStore _store;
    private readonly ExcititorDataSource _dataSource;

    public PostgresVexProviderStoreTests(ExcititorPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = Options.Create(new PostgresOptions
        {
            ConnectionString = fixture.ConnectionString,
            SchemaName = fixture.SchemaName,
            AutoMigrate = false
        });

        _dataSource = new ExcititorDataSource(options, NullLogger<ExcititorDataSource>.Instance);
        _store = new PostgresVexProviderStore(_dataSource, NullLogger<PostgresVexProviderStore>.Instance);
    }

    public async Task InitializeAsync()
    {
        await _fixture.Fixture.RunMigrationsFromAssemblyAsync(
            typeof(ExcititorDataSource).Assembly,
            moduleName: "Excititor",
            resourcePrefix: "Migrations",
            cancellationToken: CancellationToken.None);

        await _fixture.TruncateAllTablesAsync();
    }

    public async Task DisposeAsync()
    {
        await _dataSource.DisposeAsync();
    }

    [Fact]
    public async Task SaveAndFind_RoundTripsProvider()
    {
        // Arrange
        var provider = new VexProvider(
            id: "redhat-csaf",
            displayName: "Red Hat CSAF",
            kind: VexProviderKind.Vendor,
            baseUris: [new Uri("https://access.redhat.com/security/data/csaf/")],
            discovery: new VexProviderDiscovery(
                new Uri("https://access.redhat.com/security/data/csaf/.well-known/csaf/provider-metadata.json"),
                null),
            trust: VexProviderTrust.Default,
            enabled: true);

        // Act
        await _store.SaveAsync(provider, CancellationToken.None);
        var fetched = await _store.FindAsync("redhat-csaf", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be("redhat-csaf");
        fetched.DisplayName.Should().Be("Red Hat CSAF");
        fetched.Kind.Should().Be(VexProviderKind.Vendor);
        fetched.Enabled.Should().BeTrue();
        fetched.BaseUris.Should().HaveCount(1);
    }

    [Fact]
    public async Task FindAsync_ReturnsNullForUnknownId()
    {
        // Act
        var result = await _store.FindAsync("nonexistent-provider", CancellationToken.None);

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task SaveAsync_UpdatesExistingProvider()
    {
        // Arrange
        var original = new VexProvider(
            "ubuntu-csaf", "Ubuntu CSAF", VexProviderKind.Distro,
            [], VexProviderDiscovery.Empty, VexProviderTrust.Default, true);

        var updated = new VexProvider(
            "ubuntu-csaf", "Canonical Ubuntu CSAF", VexProviderKind.Distro,
            [new Uri("https://ubuntu.com/security/")],
            VexProviderDiscovery.Empty, VexProviderTrust.Default, false);

        // Act
        await _store.SaveAsync(original, CancellationToken.None);
        await _store.SaveAsync(updated, CancellationToken.None);
        var fetched = await _store.FindAsync("ubuntu-csaf", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.DisplayName.Should().Be("Canonical Ubuntu CSAF");
        fetched.Enabled.Should().BeFalse();
        fetched.BaseUris.Should().HaveCount(1);
    }

    [Fact]
    public async Task ListAsync_ReturnsAllProviders()
    {
        // Arrange
        var provider1 = new VexProvider(
            "aaa-provider", "AAA Provider", VexProviderKind.Vendor,
            [], VexProviderDiscovery.Empty, VexProviderTrust.Default, true);
        var provider2 = new VexProvider(
            "zzz-provider", "ZZZ Provider", VexProviderKind.Hub,
            [], VexProviderDiscovery.Empty, VexProviderTrust.Default, true);

        await _store.SaveAsync(provider1, CancellationToken.None);
        await _store.SaveAsync(provider2, CancellationToken.None);

        // Act
        var providers = await _store.ListAsync(CancellationToken.None);

        // Assert
        providers.Should().HaveCount(2);
        providers.Select(p => p.Id).Should().ContainInOrder("aaa-provider", "zzz-provider");
    }

    [Fact]
    public async Task SaveAsync_PersistsTrustSettings()
    {
        // Arrange
        var cosign = new VexCosignTrust("https://accounts.google.com", "@redhat.com$");
        var trust = new VexProviderTrust(0.9, cosign, ["ABCD1234", "EFGH5678"]);
        var provider = new VexProvider(
            "trusted-provider", "Trusted Provider", VexProviderKind.Attestation,
            [], VexProviderDiscovery.Empty, trust, true);

        // Act
        await _store.SaveAsync(provider, CancellationToken.None);
        var fetched = await _store.FindAsync("trusted-provider", CancellationToken.None);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Trust.Weight.Should().Be(0.9);
        fetched.Trust.Cosign.Should().NotBeNull();
        fetched.Trust.Cosign!.Issuer.Should().Be("https://accounts.google.com");
        fetched.Trust.Cosign.IdentityPattern.Should().Be("@redhat.com$");
        fetched.Trust.PgpFingerprints.Should().HaveCount(2);
    }
}
@@ -0,0 +1,187 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Excititor.Core.Observations;
|
||||
using StellaOps.Excititor.Storage.Postgres;
|
||||
using StellaOps.Excititor.Storage.Postgres.Repositories;
|
||||
using StellaOps.Infrastructure.Postgres.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Excititor.Storage.Postgres.Tests;
|
||||
|
||||
[Collection(ExcititorPostgresCollection.Name)]
|
||||
public sealed class PostgresVexTimelineEventStoreTests : IAsyncLifetime
|
||||
{
|
||||
private readonly ExcititorPostgresFixture _fixture;
|
||||
private readonly PostgresVexTimelineEventStore _store;
|
||||
private readonly ExcititorDataSource _dataSource;
|
||||
private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];
|
||||
|
||||
public PostgresVexTimelineEventStoreTests(ExcititorPostgresFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
var options = Options.Create(new PostgresOptions
|
||||
{
|
||||
ConnectionString = fixture.ConnectionString,
|
||||
SchemaName = fixture.SchemaName,
|
||||
AutoMigrate = false
|
||||
});
|
||||
|
||||
_dataSource = new ExcititorDataSource(options, NullLogger<ExcititorDataSource>.Instance);
|
||||
_store = new PostgresVexTimelineEventStore(_dataSource, NullLogger<PostgresVexTimelineEventStore>.Instance);
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.Fixture.RunMigrationsFromAssemblyAsync(
|
||||
typeof(ExcititorDataSource).Assembly,
|
||||
moduleName: "Excititor",
|
||||
resourcePrefix: "Migrations",
|
||||
cancellationToken: CancellationToken.None);
|
||||
|
||||
await _fixture.TruncateAllTablesAsync();
|
||||
}
|
||||
|
||||
public async Task DisposeAsync()
|
||||
{
|
||||
await _dataSource.DisposeAsync();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task InsertAndGetById_RoundTripsEvent()
|
||||
{
|
||||
// Arrange
|
||||
var evt = new TimelineEvent(
|
||||
eventId: "evt-" + Guid.NewGuid().ToString("N"),
|
||||
tenant: _tenantId,
|
||||
providerId: "redhat-csaf",
|
||||
streamId: "stream-1",
|
||||
eventType: "observation_created",
|
||||
traceId: "trace-123",
|
||||
justificationSummary: "Component not affected",
|
||||
createdAt: DateTimeOffset.UtcNow,
|
||||
evidenceHash: "sha256:abc123",
|
||||
payloadHash: "sha256:def456",
|
||||
attributes: ImmutableDictionary<string, string>.Empty.Add("cve", "CVE-2025-1234"));
|
||||
|
||||
// Act
|
||||
var id = await _store.InsertAsync(evt, CancellationToken.None);
|
||||
var fetched = await _store.GetByIdAsync(_tenantId, id, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.EventId.Should().Be(evt.EventId);
|
||||
fetched.ProviderId.Should().Be("redhat-csaf");
|
||||
fetched.EventType.Should().Be("observation_created");
|
||||
fetched.JustificationSummary.Should().Be("Component not affected");
|
||||
        fetched.EvidenceHash.Should().Be("sha256:abc123");
        fetched.Attributes.Should().ContainKey("cve");
    }

    [Fact]
    public async Task GetByIdAsync_ReturnsNullForUnknownEvent()
    {
        // Act
        var result = await _store.GetByIdAsync(_tenantId, "nonexistent-event", CancellationToken.None);

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task GetRecentAsync_ReturnsEventsInDescendingOrder()
    {
        // Arrange
        var now = DateTimeOffset.UtcNow;
        var events = new[]
        {
            CreateEvent("evt-1", now.AddMinutes(-10)),
            CreateEvent("evt-2", now.AddMinutes(-5)),
            CreateEvent("evt-3", now)
        };

        foreach (var evt in events)
        {
            await _store.InsertAsync(evt, CancellationToken.None);
        }

        // Act
        var recent = await _store.GetRecentAsync(_tenantId, limit: 10, CancellationToken.None);

        // Assert
        recent.Should().HaveCount(3);
        recent[0].EventId.Should().Be("evt-3"); // Most recent first
        recent[1].EventId.Should().Be("evt-2");
        recent[2].EventId.Should().Be("evt-1");
    }

    [Fact]
    public async Task FindByTraceIdAsync_ReturnsMatchingEvents()
    {
        // Arrange
        var traceId = "trace-" + Guid.NewGuid().ToString("N")[..8];
        var evt1 = CreateEvent("evt-a", DateTimeOffset.UtcNow, traceId: traceId);
        var evt2 = CreateEvent("evt-b", DateTimeOffset.UtcNow, traceId: traceId);
        var evt3 = CreateEvent("evt-c", DateTimeOffset.UtcNow, traceId: "other-trace");

        await _store.InsertAsync(evt1, CancellationToken.None);
        await _store.InsertAsync(evt2, CancellationToken.None);
        await _store.InsertAsync(evt3, CancellationToken.None);

        // Act
        var found = await _store.FindByTraceIdAsync(_tenantId, traceId, CancellationToken.None);

        // Assert
        found.Should().HaveCount(2);
        found.Select(e => e.EventId).Should().Contain(new[] { "evt-a", "evt-b" });
    }

    [Fact]
    public async Task CountAsync_ReturnsCorrectCount()
    {
        // Arrange
        await _store.InsertAsync(CreateEvent("evt-1", DateTimeOffset.UtcNow), CancellationToken.None);
        await _store.InsertAsync(CreateEvent("evt-2", DateTimeOffset.UtcNow), CancellationToken.None);

        // Act
        var count = await _store.CountAsync(_tenantId, CancellationToken.None);

        // Assert
        count.Should().Be(2);
    }

    [Fact]
    public async Task InsertManyAsync_InsertsMultipleEvents()
    {
        // Arrange
        var events = new[]
        {
            CreateEvent("batch-1", DateTimeOffset.UtcNow),
            CreateEvent("batch-2", DateTimeOffset.UtcNow),
            CreateEvent("batch-3", DateTimeOffset.UtcNow)
        };

        // Act
        var inserted = await _store.InsertManyAsync(_tenantId, events, CancellationToken.None);

        // Assert
        inserted.Should().Be(3);
        var count = await _store.CountAsync(_tenantId, CancellationToken.None);
        count.Should().Be(3);
    }

    private TimelineEvent CreateEvent(string eventId, DateTimeOffset createdAt, string? traceId = null) =>
        new TimelineEvent(
            eventId: eventId,
            tenant: _tenantId,
            providerId: "test-provider",
            streamId: "stream-1",
            eventType: "test_event",
            traceId: traceId ?? "trace-default",
            justificationSummary: "Test event",
            createdAt: createdAt,
            evidenceHash: null,
            payloadHash: null,
            attributes: ImmutableDictionary<string, string>.Empty);
}
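The tests above exercise the timeline event store through its insert, lookup, recency, trace, count, and batch paths. For orientation only, the surface they rely on can be summarised as the sketch below; the interface name and exact return types are assumptions inferred from how the tests call the store, not the actual contract in this repository.

// Hypothetical summary of the store surface exercised by the tests above.
// Interface name and return types are assumptions inferred from usage.
public interface ITimelineEventStoreSketch
{
    Task InsertAsync(TimelineEvent evt, CancellationToken cancellationToken);
    Task<int> InsertManyAsync(string tenantId, IReadOnlyCollection<TimelineEvent> events, CancellationToken cancellationToken);
    Task<TimelineEvent?> GetByIdAsync(string tenantId, string eventId, CancellationToken cancellationToken);
    Task<IReadOnlyList<TimelineEvent>> GetRecentAsync(string tenantId, int limit, CancellationToken cancellationToken);
    Task<IReadOnlyList<TimelineEvent>> FindByTraceIdAsync(string tenantId, string traceId, CancellationToken cancellationToken);
    Task<int> CountAsync(string tenantId, CancellationToken cancellationToken);
}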
@@ -21,7 +21,7 @@
     <PackageReference Include="FluentAssertions" Version="6.12.0" />
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
     <PackageReference Include="Moq" Version="4.20.70" />
-    <PackageReference Include="xunit" Version="2.9.2" />
+    <PackageReference Include="xunit" Version="2.9.3" />
     <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
       <PrivateAssets>all</PrivateAssets>
@@ -0,0 +1,26 @@
using System.Reflection;
using StellaOps.Infrastructure.Postgres.Testing;
using Xunit;

namespace StellaOps.Graph.Indexer.Storage.Postgres.Tests;

/// <summary>
/// PostgreSQL integration test fixture for the Graph.Indexer module.
/// </summary>
public sealed class GraphIndexerPostgresFixture : PostgresIntegrationFixture, ICollectionFixture<GraphIndexerPostgresFixture>
{
    protected override Assembly? GetMigrationAssembly()
        => typeof(GraphIndexerDataSource).Assembly;

    protected override string GetModuleName() => "GraphIndexer";
}

/// <summary>
/// Collection definition for Graph.Indexer PostgreSQL integration tests.
/// Tests in this collection share a single PostgreSQL container instance.
/// </summary>
[CollectionDefinition(Name)]
public sealed class GraphIndexerPostgresCollection : ICollectionFixture<GraphIndexerPostgresFixture>
{
    public const string Name = "GraphIndexerPostgres";
}
@@ -0,0 +1,91 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using MicrosoftOptions = Microsoft.Extensions.Options;
using StellaOps.Graph.Indexer.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.Graph.Indexer.Storage.Postgres.Tests;

[Collection(GraphIndexerPostgresCollection.Name)]
public sealed class PostgresIdempotencyStoreTests : IAsyncLifetime
{
    private readonly GraphIndexerPostgresFixture _fixture;
    private readonly PostgresIdempotencyStore _store;

    public PostgresIdempotencyStoreTests(GraphIndexerPostgresFixture fixture)
    {
        _fixture = fixture;

        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new GraphIndexerDataSource(MicrosoftOptions.Options.Create(options), NullLogger<GraphIndexerDataSource>.Instance);
        _store = new PostgresIdempotencyStore(dataSource, NullLogger<PostgresIdempotencyStore>.Instance);
    }

    public async Task InitializeAsync()
    {
        await _fixture.TruncateAllTablesAsync();
    }

    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task HasSeenAsync_ReturnsFalseForNewToken()
    {
        // Arrange
        var sequenceToken = "seq-" + Guid.NewGuid().ToString("N");

        // Act
        var result = await _store.HasSeenAsync(sequenceToken, CancellationToken.None);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public async Task MarkSeenAsync_ThenHasSeenAsync_ReturnsTrue()
    {
        // Arrange
        var sequenceToken = "seq-" + Guid.NewGuid().ToString("N");

        // Act
        await _store.MarkSeenAsync(sequenceToken, CancellationToken.None);
        var result = await _store.HasSeenAsync(sequenceToken, CancellationToken.None);

        // Assert
        result.Should().BeTrue();
    }

    [Fact]
    public async Task MarkSeenAsync_AllowsDifferentTokens()
    {
        // Arrange
        var token1 = "seq-" + Guid.NewGuid().ToString("N");
        var token2 = "seq-" + Guid.NewGuid().ToString("N");

        // Act
        await _store.MarkSeenAsync(token1, CancellationToken.None);
        await _store.MarkSeenAsync(token2, CancellationToken.None);
        var seen1 = await _store.HasSeenAsync(token1, CancellationToken.None);
        var seen2 = await _store.HasSeenAsync(token2, CancellationToken.None);

        // Assert
        seen1.Should().BeTrue();
        seen2.Should().BeTrue();
    }

    [Fact]
    public async Task MarkSeenAsync_IsIdempotent()
    {
        // Arrange
        var sequenceToken = "seq-" + Guid.NewGuid().ToString("N");

        // Act - marking same token twice should not throw
        await _store.MarkSeenAsync(sequenceToken, CancellationToken.None);
        await _store.MarkSeenAsync(sequenceToken, CancellationToken.None);
        var result = await _store.HasSeenAsync(sequenceToken, CancellationToken.None);

        // Assert
        result.Should().BeTrue();
    }
}
@@ -0,0 +1,34 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Graph.Indexer.Storage.Postgres\StellaOps.Graph.Indexer.Storage.Postgres.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,44 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Options;

namespace StellaOps.Graph.Indexer.Storage.Postgres;

/// <summary>
/// PostgreSQL data source for the Graph.Indexer module.
/// </summary>
public sealed class GraphIndexerDataSource : DataSourceBase
{
    /// <summary>
    /// Default schema name for Graph.Indexer tables.
    /// </summary>
    public const string DefaultSchemaName = "graph";

    /// <summary>
    /// Creates a new Graph.Indexer data source.
    /// </summary>
    public GraphIndexerDataSource(IOptions<PostgresOptions> options, ILogger<GraphIndexerDataSource> logger)
        : base(CreateOptions(options.Value), logger)
    {
    }

    /// <inheritdoc />
    protected override string ModuleName => "Graph.Indexer";

    /// <inheritdoc />
    protected override void ConfigureDataSourceBuilder(NpgsqlDataSourceBuilder builder)
    {
        base.ConfigureDataSourceBuilder(builder);
    }

    private static PostgresOptions CreateOptions(PostgresOptions baseOptions)
    {
        if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
        {
            baseOptions.SchemaName = DefaultSchemaName;
        }
        return baseOptions;
    }
}
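For orientation, a data source can be constructed directly the same way the integration tests in this commit do it; the schema falls back to DefaultSchemaName ("graph") when PostgresOptions.SchemaName is left empty. In the sketch below the ConnectionString property name and its value are assumptions; only SchemaName and the fallback behaviour come from the code above.

// Minimal construction sketch, mirroring the test fixture wiring.
// ConnectionString is an assumed property/value; SchemaName fallback is from CreateOptions() above.
var options = new PostgresOptions
{
    ConnectionString = "Host=localhost;Database=stellaops;Username=dev;Password=dev", // assumption
    SchemaName = ""   // empty -> CreateOptions() substitutes DefaultSchemaName ("graph")
};

var dataSource = new GraphIndexerDataSource(
    Microsoft.Extensions.Options.Options.Create(options),
    Microsoft.Extensions.Logging.Abstractions.NullLogger<GraphIndexerDataSource>.Instance);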
@@ -0,0 +1,181 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Graph.Indexer.Analytics;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IGraphAnalyticsWriter"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresGraphAnalyticsWriter : RepositoryBase<GraphIndexerDataSource>, IGraphAnalyticsWriter
|
||||
{
|
||||
private bool _tableInitialized;
|
||||
|
||||
public PostgresGraphAnalyticsWriter(GraphIndexerDataSource dataSource, ILogger<PostgresGraphAnalyticsWriter> logger)
|
||||
: base(dataSource, logger)
|
||||
{
|
||||
}
|
||||
|
||||
public async Task PersistClusterAssignmentsAsync(
|
||||
GraphAnalyticsSnapshot snapshot,
|
||||
ImmutableArray<ClusterAssignment> assignments,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
try
|
||||
{
|
||||
// Delete existing assignments for this snapshot
|
||||
const string deleteSql = @"
|
||||
DELETE FROM graph.cluster_assignments
|
||||
WHERE tenant = @tenant AND snapshot_id = @snapshot_id";
|
||||
|
||||
await using (var deleteCommand = CreateCommand(deleteSql, connection, transaction))
|
||||
{
|
||||
AddParameter(deleteCommand, "@tenant", snapshot.Tenant ?? string.Empty);
|
||||
AddParameter(deleteCommand, "@snapshot_id", snapshot.SnapshotId ?? string.Empty);
|
||||
await deleteCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Insert new assignments
|
||||
const string insertSql = @"
|
||||
INSERT INTO graph.cluster_assignments (tenant, snapshot_id, node_id, cluster_id, kind, computed_at)
|
||||
VALUES (@tenant, @snapshot_id, @node_id, @cluster_id, @kind, @computed_at)";
|
||||
|
||||
var computedAt = snapshot.GeneratedAt;
|
||||
|
||||
foreach (var assignment in assignments)
|
||||
{
|
||||
await using var insertCommand = CreateCommand(insertSql, connection, transaction);
|
||||
AddParameter(insertCommand, "@tenant", snapshot.Tenant ?? string.Empty);
|
||||
AddParameter(insertCommand, "@snapshot_id", snapshot.SnapshotId ?? string.Empty);
|
||||
AddParameter(insertCommand, "@node_id", assignment.NodeId ?? string.Empty);
|
||||
AddParameter(insertCommand, "@cluster_id", assignment.ClusterId ?? string.Empty);
|
||||
AddParameter(insertCommand, "@kind", assignment.Kind ?? string.Empty);
|
||||
AddParameter(insertCommand, "@computed_at", computedAt);
|
||||
|
||||
await insertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch
|
||||
{
|
||||
await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task PersistCentralityAsync(
|
||||
GraphAnalyticsSnapshot snapshot,
|
||||
ImmutableArray<CentralityScore> scores,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
try
|
||||
{
|
||||
// Delete existing scores for this snapshot
|
||||
const string deleteSql = @"
|
||||
DELETE FROM graph.centrality_scores
|
||||
WHERE tenant = @tenant AND snapshot_id = @snapshot_id";
|
||||
|
||||
await using (var deleteCommand = CreateCommand(deleteSql, connection, transaction))
|
||||
{
|
||||
AddParameter(deleteCommand, "@tenant", snapshot.Tenant ?? string.Empty);
|
||||
AddParameter(deleteCommand, "@snapshot_id", snapshot.SnapshotId ?? string.Empty);
|
||||
await deleteCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Insert new scores
|
||||
const string insertSql = @"
|
||||
INSERT INTO graph.centrality_scores (tenant, snapshot_id, node_id, degree, betweenness, kind, computed_at)
|
||||
VALUES (@tenant, @snapshot_id, @node_id, @degree, @betweenness, @kind, @computed_at)";
|
||||
|
||||
var computedAt = snapshot.GeneratedAt;
|
||||
|
||||
foreach (var score in scores)
|
||||
{
|
||||
await using var insertCommand = CreateCommand(insertSql, connection, transaction);
|
||||
AddParameter(insertCommand, "@tenant", snapshot.Tenant ?? string.Empty);
|
||||
AddParameter(insertCommand, "@snapshot_id", snapshot.SnapshotId ?? string.Empty);
|
||||
AddParameter(insertCommand, "@node_id", score.NodeId ?? string.Empty);
|
||||
AddParameter(insertCommand, "@degree", score.Degree);
|
||||
AddParameter(insertCommand, "@betweenness", score.Betweenness);
|
||||
AddParameter(insertCommand, "@kind", score.Kind ?? string.Empty);
|
||||
AddParameter(insertCommand, "@computed_at", computedAt);
|
||||
|
||||
await insertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch
|
||||
{
|
||||
await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
private static NpgsqlCommand CreateCommand(string sql, NpgsqlConnection connection, NpgsqlTransaction transaction)
|
||||
{
|
||||
return new NpgsqlCommand(sql, connection, transaction);
|
||||
}
|
||||
|
||||
private async Task EnsureTableAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_tableInitialized)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
const string ddl = @"
|
||||
CREATE SCHEMA IF NOT EXISTS graph;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS graph.cluster_assignments (
|
||||
tenant TEXT NOT NULL,
|
||||
snapshot_id TEXT NOT NULL,
|
||||
node_id TEXT NOT NULL,
|
||||
cluster_id TEXT NOT NULL,
|
||||
kind TEXT NOT NULL,
|
||||
computed_at TIMESTAMPTZ NOT NULL,
|
||||
PRIMARY KEY (tenant, snapshot_id, node_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_cluster_assignments_cluster ON graph.cluster_assignments (tenant, cluster_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_cluster_assignments_computed_at ON graph.cluster_assignments (computed_at);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS graph.centrality_scores (
|
||||
tenant TEXT NOT NULL,
|
||||
snapshot_id TEXT NOT NULL,
|
||||
node_id TEXT NOT NULL,
|
||||
degree DOUBLE PRECISION NOT NULL,
|
||||
betweenness DOUBLE PRECISION NOT NULL,
|
||||
kind TEXT NOT NULL,
|
||||
computed_at TIMESTAMPTZ NOT NULL,
|
||||
PRIMARY KEY (tenant, snapshot_id, node_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_centrality_scores_degree ON graph.centrality_scores (tenant, degree DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_centrality_scores_betweenness ON graph.centrality_scores (tenant, betweenness DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_centrality_scores_computed_at ON graph.centrality_scores (computed_at);";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(ddl, connection);
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_tableInitialized = true;
|
||||
}
|
||||
}
|
||||
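The analytics writer above replaces a snapshot's rows by deleting and then issuing one INSERT command per assignment inside a single transaction. If per-row round trips ever become a bottleneck, the same shape can be expressed with NpgsqlBatch so all inserts travel in one round trip; the sketch below assumes plain Npgsql objects and positional parameters and is not part of this change.

// Optional batching sketch (assumption: plain Npgsql usage, not part of this commit).
// Sends all cluster-assignment inserts for one snapshot in a single round trip.
await using var batch = new NpgsqlBatch(connection, transaction);
foreach (var assignment in assignments)
{
    var cmd = new NpgsqlBatchCommand(
        "INSERT INTO graph.cluster_assignments (tenant, snapshot_id, node_id, cluster_id, kind, computed_at) " +
        "VALUES ($1, $2, $3, $4, $5, $6)");
    cmd.Parameters.Add(new NpgsqlParameter { Value = snapshot.Tenant ?? string.Empty });
    cmd.Parameters.Add(new NpgsqlParameter { Value = snapshot.SnapshotId ?? string.Empty });
    cmd.Parameters.Add(new NpgsqlParameter { Value = assignment.NodeId ?? string.Empty });
    cmd.Parameters.Add(new NpgsqlParameter { Value = assignment.ClusterId ?? string.Empty });
    cmd.Parameters.Add(new NpgsqlParameter { Value = assignment.Kind ?? string.Empty });
    cmd.Parameters.Add(new NpgsqlParameter { Value = computedAt });
    batch.BatchCommands.Add(cmd);
}
await batch.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);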
@@ -0,0 +1,174 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Graph.Indexer.Ingestion.Sbom;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IGraphDocumentWriter"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresGraphDocumentWriter : RepositoryBase<GraphIndexerDataSource>, IGraphDocumentWriter
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
private bool _tableInitialized;
|
||||
|
||||
public PostgresGraphDocumentWriter(GraphIndexerDataSource dataSource, ILogger<PostgresGraphDocumentWriter> logger)
|
||||
: base(dataSource, logger)
|
||||
{
|
||||
}
|
||||
|
||||
public async Task WriteAsync(GraphBuildBatch batch, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(batch);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
try
|
||||
{
|
||||
var batchId = Guid.NewGuid().ToString("N");
|
||||
var writtenAt = DateTimeOffset.UtcNow;
|
||||
|
||||
// Insert nodes
|
||||
foreach (var node in batch.Nodes)
|
||||
{
|
||||
var nodeId = ExtractId(node);
|
||||
var nodeJson = node.ToJsonString();
|
||||
|
||||
const string nodeSql = @"
|
||||
INSERT INTO graph.graph_nodes (id, batch_id, document_json, written_at)
|
||||
VALUES (@id, @batch_id, @document_json, @written_at)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
batch_id = EXCLUDED.batch_id,
|
||||
document_json = EXCLUDED.document_json,
|
||||
written_at = EXCLUDED.written_at";
|
||||
|
||||
await using var nodeCommand = CreateCommand(nodeSql, connection, transaction);
|
||||
AddParameter(nodeCommand, "@id", nodeId);
|
||||
AddParameter(nodeCommand, "@batch_id", batchId);
|
||||
AddJsonbParameter(nodeCommand, "@document_json", nodeJson);
|
||||
AddParameter(nodeCommand, "@written_at", writtenAt);
|
||||
|
||||
await nodeCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Insert edges
|
||||
foreach (var edge in batch.Edges)
|
||||
{
|
||||
var edgeId = ExtractEdgeId(edge);
|
||||
var edgeJson = edge.ToJsonString();
|
||||
|
||||
const string edgeSql = @"
|
||||
INSERT INTO graph.graph_edges (id, batch_id, source_id, target_id, document_json, written_at)
|
||||
VALUES (@id, @batch_id, @source_id, @target_id, @document_json, @written_at)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
batch_id = EXCLUDED.batch_id,
|
||||
source_id = EXCLUDED.source_id,
|
||||
target_id = EXCLUDED.target_id,
|
||||
document_json = EXCLUDED.document_json,
|
||||
written_at = EXCLUDED.written_at";
|
||||
|
||||
await using var edgeCommand = CreateCommand(edgeSql, connection, transaction);
|
||||
AddParameter(edgeCommand, "@id", edgeId);
|
||||
AddParameter(edgeCommand, "@batch_id", batchId);
|
||||
AddParameter(edgeCommand, "@source_id", ExtractString(edge, "source") ?? string.Empty);
|
||||
AddParameter(edgeCommand, "@target_id", ExtractString(edge, "target") ?? string.Empty);
|
||||
AddJsonbParameter(edgeCommand, "@document_json", edgeJson);
|
||||
AddParameter(edgeCommand, "@written_at", writtenAt);
|
||||
|
||||
await edgeCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch
|
||||
{
|
||||
await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
private static string ExtractId(JsonObject node)
|
||||
{
|
||||
return ExtractString(node, "id") ?? ExtractString(node, "@id") ?? Guid.NewGuid().ToString("N");
|
||||
}
|
||||
|
||||
private static string ExtractEdgeId(JsonObject edge)
|
||||
{
|
||||
var id = ExtractString(edge, "id") ?? ExtractString(edge, "@id");
|
||||
if (!string.IsNullOrWhiteSpace(id))
|
||||
{
|
||||
return id;
|
||||
}
|
||||
|
||||
var source = ExtractString(edge, "source") ?? string.Empty;
|
||||
var target = ExtractString(edge, "target") ?? string.Empty;
|
||||
var type = ExtractString(edge, "type") ?? ExtractString(edge, "relationship") ?? "relates_to";
|
||||
return $"{source}|{target}|{type}";
|
||||
}
|
||||
|
||||
private static string? ExtractString(JsonObject obj, string key)
|
||||
{
|
||||
if (obj.TryGetPropertyValue(key, out var value) && value is JsonValue jv)
|
||||
{
|
||||
return jv.GetValue<string>();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static NpgsqlCommand CreateCommand(string sql, NpgsqlConnection connection, NpgsqlTransaction transaction)
|
||||
{
|
||||
return new NpgsqlCommand(sql, connection, transaction);
|
||||
}
|
||||
|
||||
private async Task EnsureTableAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_tableInitialized)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
const string ddl = @"
|
||||
CREATE SCHEMA IF NOT EXISTS graph;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS graph.graph_nodes (
|
||||
id TEXT PRIMARY KEY,
|
||||
batch_id TEXT NOT NULL,
|
||||
document_json JSONB NOT NULL,
|
||||
written_at TIMESTAMPTZ NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_nodes_batch_id ON graph.graph_nodes (batch_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_nodes_written_at ON graph.graph_nodes (written_at);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS graph.graph_edges (
|
||||
id TEXT PRIMARY KEY,
|
||||
batch_id TEXT NOT NULL,
|
||||
source_id TEXT NOT NULL,
|
||||
target_id TEXT NOT NULL,
|
||||
document_json JSONB NOT NULL,
|
||||
written_at TIMESTAMPTZ NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_edges_batch_id ON graph.graph_edges (batch_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_edges_source_id ON graph.graph_edges (source_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_edges_target_id ON graph.graph_edges (target_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_edges_written_at ON graph.graph_edges (written_at);";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(ddl, connection);
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_tableInitialized = true;
|
||||
}
|
||||
}
|
||||
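One detail worth calling out in the document writer above: when an edge document carries no "id"/"@id", its identity is derived as source|target|type, so re-ingesting the same logical edge hits the ON CONFLICT upsert path instead of creating a duplicate row. A standalone illustration of that fallback, with made-up values, duplicating the private helper logic for clarity:

// Standalone illustration of the edge-id fallback used by PostgresGraphDocumentWriter (values are illustrative).
using System.Text.Json.Nodes;

var edge = new JsonObject
{
    ["source"] = "pkg:npm/left-pad@1.3.0",
    ["target"] = "sbom:component/abc",
    ["type"] = "depends_on"
};

// No "id"/"@id" present, so the derived identity is "source|target|type".
var derivedId = $"{edge["source"]!.GetValue<string>()}|{edge["target"]!.GetValue<string>()}|{edge["type"]!.GetValue<string>()}";
Console.WriteLine(derivedId); // pkg:npm/left-pad@1.3.0|sbom:component/abc|depends_on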
@@ -0,0 +1,157 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Graph.Indexer.Analytics;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
|
||||
namespace StellaOps.Graph.Indexer.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IGraphSnapshotProvider"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresGraphSnapshotProvider : RepositoryBase<GraphIndexerDataSource>, IGraphSnapshotProvider
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
private bool _tableInitialized;
|
||||
|
||||
public PostgresGraphSnapshotProvider(GraphIndexerDataSource dataSource, ILogger<PostgresGraphSnapshotProvider> logger)
|
||||
: base(dataSource, logger)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Enqueues a snapshot for processing.
|
||||
/// </summary>
|
||||
public async Task EnqueueAsync(GraphAnalyticsSnapshot snapshot, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO graph.pending_snapshots (tenant, snapshot_id, generated_at, nodes_json, edges_json, queued_at)
|
||||
VALUES (@tenant, @snapshot_id, @generated_at, @nodes_json, @edges_json, @queued_at)
|
||||
ON CONFLICT (tenant, snapshot_id) DO UPDATE SET
|
||||
generated_at = EXCLUDED.generated_at,
|
||||
nodes_json = EXCLUDED.nodes_json,
|
||||
edges_json = EXCLUDED.edges_json,
|
||||
queued_at = EXCLUDED.queued_at";
|
||||
|
||||
var nodesJson = JsonSerializer.Serialize(snapshot.Nodes.Select(n => n.ToJsonString()), JsonOptions);
|
||||
var edgesJson = JsonSerializer.Serialize(snapshot.Edges.Select(e => e.ToJsonString()), JsonOptions);
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@tenant", snapshot.Tenant ?? string.Empty);
|
||||
AddParameter(command, "@snapshot_id", snapshot.SnapshotId ?? string.Empty);
|
||||
AddParameter(command, "@generated_at", snapshot.GeneratedAt);
|
||||
AddJsonbParameter(command, "@nodes_json", nodesJson);
|
||||
AddJsonbParameter(command, "@edges_json", edgesJson);
|
||||
AddParameter(command, "@queued_at", DateTimeOffset.UtcNow);
|
||||
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<GraphAnalyticsSnapshot>> GetPendingSnapshotsAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = @"
|
||||
SELECT tenant, snapshot_id, generated_at, nodes_json, edges_json
|
||||
FROM graph.pending_snapshots
|
||||
ORDER BY queued_at ASC
|
||||
LIMIT 100";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var results = new List<GraphAnalyticsSnapshot>();
|
||||
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
results.Add(MapSnapshot(reader));
|
||||
}
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
public async Task MarkProcessedAsync(string tenant, string snapshotId, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(snapshotId);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = @"
|
||||
DELETE FROM graph.pending_snapshots
|
||||
WHERE tenant = @tenant AND snapshot_id = @snapshot_id";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@tenant", tenant ?? string.Empty);
|
||||
AddParameter(command, "@snapshot_id", snapshotId.Trim());
|
||||
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private static GraphAnalyticsSnapshot MapSnapshot(NpgsqlDataReader reader)
|
||||
{
|
||||
var tenant = reader.GetString(0);
|
||||
var snapshotId = reader.GetString(1);
|
||||
var generatedAt = reader.GetFieldValue<DateTimeOffset>(2);
|
||||
var nodesJson = reader.GetString(3);
|
||||
var edgesJson = reader.GetString(4);
|
||||
|
||||
var nodeStrings = JsonSerializer.Deserialize<List<string>>(nodesJson, JsonOptions) ?? new List<string>();
|
||||
var edgeStrings = JsonSerializer.Deserialize<List<string>>(edgesJson, JsonOptions) ?? new List<string>();
|
||||
|
||||
var nodes = nodeStrings
|
||||
.Select(s => JsonNode.Parse(s) as JsonObject)
|
||||
.Where(n => n is not null)
|
||||
.Cast<JsonObject>()
|
||||
.ToImmutableArray();
|
||||
|
||||
var edges = edgeStrings
|
||||
.Select(s => JsonNode.Parse(s) as JsonObject)
|
||||
.Where(e => e is not null)
|
||||
.Cast<JsonObject>()
|
||||
.ToImmutableArray();
|
||||
|
||||
return new GraphAnalyticsSnapshot(tenant, snapshotId, generatedAt, nodes, edges);
|
||||
}
|
||||
|
||||
private async Task EnsureTableAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_tableInitialized)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
const string ddl = @"
|
||||
CREATE SCHEMA IF NOT EXISTS graph;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS graph.pending_snapshots (
|
||||
tenant TEXT NOT NULL,
|
||||
snapshot_id TEXT NOT NULL,
|
||||
generated_at TIMESTAMPTZ NOT NULL,
|
||||
nodes_json JSONB NOT NULL,
|
||||
edges_json JSONB NOT NULL,
|
||||
queued_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
PRIMARY KEY (tenant, snapshot_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_pending_snapshots_queued_at ON graph.pending_snapshots (queued_at);";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(ddl, connection);
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_tableInitialized = true;
|
||||
}
|
||||
}
|
||||
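The snapshot provider above persists a snapshot's nodes and edges as JSONB arrays of serialized strings: each JsonObject is first flattened with ToJsonString(), then the list of strings is serialized as a whole, and MapSnapshot reverses both steps with JsonNode.Parse. A minimal round-trip sketch of that encoding, with illustrative values only:

// Round-trip sketch matching EnqueueAsync/MapSnapshot above (illustrative values).
using System.Text.Json;
using System.Text.Json.Nodes;

var nodes = new[] { new JsonObject { ["id"] = "n1" }, new JsonObject { ["id"] = "n2" } };

// Write path: a JSON array whose elements are JSON strings.
var nodesJson = JsonSerializer.Serialize(nodes.Select(n => n.ToJsonString()));

// Read path: deserialize the string list, then re-parse each element.
var restored = (JsonSerializer.Deserialize<List<string>>(nodesJson) ?? new List<string>())
    .Select(s => JsonNode.Parse(s) as JsonObject)
    .Where(n => n is not null)
    .Cast<JsonObject>()
    .ToList();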
@@ -0,0 +1,78 @@
using Microsoft.Extensions.Logging;
using StellaOps.Graph.Indexer.Incremental;
using StellaOps.Infrastructure.Postgres.Repositories;

namespace StellaOps.Graph.Indexer.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IIdempotencyStore"/>.
/// </summary>
public sealed class PostgresIdempotencyStore : RepositoryBase<GraphIndexerDataSource>, IIdempotencyStore
{
    private bool _tableInitialized;

    public PostgresIdempotencyStore(GraphIndexerDataSource dataSource, ILogger<PostgresIdempotencyStore> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<bool> HasSeenAsync(string sequenceToken, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sequenceToken);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT EXISTS(SELECT 1 FROM graph.idempotency_tokens WHERE sequence_token = @sequence_token)";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@sequence_token", sequenceToken.Trim());

        var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
        return result is bool seen && seen;
    }

    public async Task MarkSeenAsync(string sequenceToken, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sequenceToken);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            INSERT INTO graph.idempotency_tokens (sequence_token, seen_at)
            VALUES (@sequence_token, @seen_at)
            ON CONFLICT (sequence_token) DO NOTHING";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@sequence_token", sequenceToken.Trim());
        AddParameter(command, "@seen_at", DateTimeOffset.UtcNow);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS graph;

            CREATE TABLE IF NOT EXISTS graph.idempotency_tokens (
                sequence_token TEXT PRIMARY KEY,
                seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
            );

            CREATE INDEX IF NOT EXISTS idx_idempotency_tokens_seen_at ON graph.idempotency_tokens (seen_at);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
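Taken together with its tests earlier in this diff, the store above is meant to guard incremental ingestion against replays: check the sequence token before doing work, mark it afterwards, and rely on ON CONFLICT DO NOTHING to make re-marking harmless. A minimal consumer sketch; the handler shape and the "apply the change" step are assumptions, only the two store calls come from the code above.

// Minimal consumer sketch; the handler shape is an assumption.
public async Task HandleAsync(IIdempotencyStore store, string sequenceToken, CancellationToken ct)
{
    if (await store.HasSeenAsync(sequenceToken, ct))
    {
        return; // replayed delivery, already applied
    }

    // ... apply the incremental change here ...

    await store.MarkSeenAsync(sequenceToken, ct); // safe to repeat: ON CONFLICT DO NOTHING
}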
@@ -0,0 +1,61 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Graph.Indexer.Analytics;
using StellaOps.Graph.Indexer.Incremental;
using StellaOps.Graph.Indexer.Ingestion.Sbom;
using StellaOps.Graph.Indexer.Storage.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres.Options;

namespace StellaOps.Graph.Indexer.Storage.Postgres;

/// <summary>
/// Extension methods for configuring Graph.Indexer PostgreSQL storage services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds Graph.Indexer PostgreSQL storage services.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root.</param>
    /// <param name="sectionName">Configuration section name for PostgreSQL options.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddGraphIndexerPostgresStorage(
        this IServiceCollection services,
        IConfiguration configuration,
        string sectionName = "Postgres:Graph")
    {
        services.Configure<PostgresOptions>(configuration.GetSection(sectionName));
        services.AddSingleton<GraphIndexerDataSource>();

        // Register repositories
        services.AddSingleton<IIdempotencyStore, PostgresIdempotencyStore>();
        services.AddSingleton<IGraphSnapshotProvider, PostgresGraphSnapshotProvider>();
        services.AddSingleton<IGraphAnalyticsWriter, PostgresGraphAnalyticsWriter>();
        services.AddSingleton<IGraphDocumentWriter, PostgresGraphDocumentWriter>();

        return services;
    }

    /// <summary>
    /// Adds Graph.Indexer PostgreSQL storage services with explicit options.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configureOptions">Options configuration action.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddGraphIndexerPostgresStorage(
        this IServiceCollection services,
        Action<PostgresOptions> configureOptions)
    {
        services.Configure(configureOptions);
        services.AddSingleton<GraphIndexerDataSource>();

        // Register repositories
        services.AddSingleton<IIdempotencyStore, PostgresIdempotencyStore>();
        services.AddSingleton<IGraphSnapshotProvider, PostgresGraphSnapshotProvider>();
        services.AddSingleton<IGraphAnalyticsWriter, PostgresGraphAnalyticsWriter>();
        services.AddSingleton<IGraphDocumentWriter, PostgresGraphDocumentWriter>();

        return services;
    }
}
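A hedged wiring example for the two overloads above: the configuration-bound overload reads the "Postgres:Graph" section by default, while the second overload sets options in code. The host-builder scaffolding shown is standard .NET generic hosting and is not part of this commit.

// Registration sketch using the overloads defined above (requires Microsoft.Extensions.Hosting).
var builder = Host.CreateApplicationBuilder(args);

// Option 1: bind from configuration (section "Postgres:Graph" by default).
builder.Services.AddGraphIndexerPostgresStorage(builder.Configuration);

// Option 2: configure explicitly in code.
builder.Services.AddGraphIndexerPostgresStorage(options =>
{
    options.SchemaName = "graph"; // same default the data source would apply
});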
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <RootNamespace>StellaOps.Graph.Indexer.Storage.Postgres</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Graph.Indexer/StellaOps.Graph.Indexer.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
  </ItemGroup>
</Project>
@@ -289,8 +289,31 @@ public sealed class GraphSnapshotBuilder
         out string sourceNodeId,
         out string targetNodeId)
     {
-        var kind = edge["kind"]!.GetValue<string>();
-        var canonicalKey = edge["canonical_key"]!.AsObject();
+        // Handle simple edge format with direct source/target properties
+        if (!edge.TryGetPropertyValue("kind", out var kindNode) || kindNode is null)
+        {
+            if (edge.TryGetPropertyValue("source", out var simpleSource) && simpleSource is not null &&
+                edge.TryGetPropertyValue("target", out var simpleTarget) && simpleTarget is not null)
+            {
+                sourceNodeId = simpleSource.GetValue<string>();
+                targetNodeId = simpleTarget.GetValue<string>();
+                return nodesById.ContainsKey(sourceNodeId) && nodesById.ContainsKey(targetNodeId);
+            }
+
+            sourceNodeId = string.Empty;
+            targetNodeId = string.Empty;
+            return false;
+        }
+
+        var kind = kindNode.GetValue<string>();
+        if (!edge.TryGetPropertyValue("canonical_key", out var canonicalKeyNode) || canonicalKeyNode is null)
+        {
+            sourceNodeId = string.Empty;
+            targetNodeId = string.Empty;
+            return false;
+        }
+
+        var canonicalKey = canonicalKeyNode.AsObject();
 
         string? source = null;
         string? target = null;
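The change above makes edge resolution tolerant of two input shapes instead of assuming kind/canonical_key is always present: a simple edge with direct source/target node ids, and the richer canonical form. Illustrative construction of both shapes (all field values are made up):

// Two edge shapes the updated resolver accepts (illustrative values).
using System.Text.Json.Nodes;

// Simple format: resolved directly against nodesById.
var simpleEdge = new JsonObject
{
    ["source"] = "node-a",
    ["target"] = "node-b"
};

// Canonical format: "kind" plus a "canonical_key" object, resolved by the existing path.
var canonicalEdge = new JsonObject
{
    ["kind"] = "depends_on",
    ["canonical_key"] = new JsonObject
    {
        // ...keys consumed by the pre-existing canonical resolution logic...
    }
};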
@@ -14,8 +14,8 @@ public sealed class GraphAnalyticsEngineTests
         var first = engine.Compute(snapshot);
         var second = engine.Compute(snapshot);
 
-        Assert.Equal(first.Clusters, second.Clusters);
-        Assert.Equal(first.CentralityScores, second.CentralityScores);
+        Assert.Equal(first.Clusters.ToArray(), second.Clusters.ToArray());
+        Assert.Equal(first.CentralityScores.ToArray(), second.CentralityScores.ToArray());
 
         var mainCluster = first.Clusters.First(c => c.NodeId == snapshot.Nodes[0]["id"]!.GetValue<string>()).ClusterId;
         Assert.All(first.Clusters.Where(c => c.NodeId != snapshot.Nodes[^1]["id"]!.GetValue<string>()), c => Assert.Equal(mainCluster, c.ClusterId));
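The switch to ToArray() here is about equality semantics: ImmutableArray<T>'s own Equals compares the backing array references, which can make a direct Assert.Equal on two independently computed results brittle even when the elements match, whereas plain arrays are compared element by element. A tiny illustration of the difference, outside this test suite:

// Equality nuance motivating the ToArray() change (illustrative).
using System.Collections.Immutable;

var a = ImmutableArray.Create(1, 2, 3);
var b = ImmutableArray.Create(1, 2, 3);

Console.WriteLine(a.Equals(b));                  // False: compares backing array references
Console.WriteLine(a.ToArray().SequenceEqual(b)); // True: element-wise comparison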
@@ -0,0 +1,49 @@
using StellaOps.Notify.Storage.Postgres.Models;

namespace StellaOps.Notify.Storage.Postgres.Repositories;

/// <summary>
/// Repository interface for localization bundles.
/// </summary>
public interface ILocalizationBundleRepository
{
    /// <summary>
    /// Gets a localization bundle by ID.
    /// </summary>
    Task<LocalizationBundleEntity?> GetByIdAsync(string tenantId, string bundleId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets all localization bundles for a tenant.
    /// </summary>
    Task<IReadOnlyList<LocalizationBundleEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets localization bundles by bundle key.
    /// </summary>
    Task<IReadOnlyList<LocalizationBundleEntity>> GetByBundleKeyAsync(string tenantId, string bundleKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a specific localization bundle by key and locale.
    /// </summary>
    Task<LocalizationBundleEntity?> GetByKeyAndLocaleAsync(string tenantId, string bundleKey, string locale, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the default bundle for a key.
    /// </summary>
    Task<LocalizationBundleEntity?> GetDefaultByKeyAsync(string tenantId, string bundleKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Creates a new localization bundle.
    /// </summary>
    Task<LocalizationBundleEntity> CreateAsync(LocalizationBundleEntity bundle, CancellationToken cancellationToken = default);

    /// <summary>
    /// Updates an existing localization bundle.
    /// </summary>
    Task<bool> UpdateAsync(LocalizationBundleEntity bundle, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes a localization bundle.
    /// </summary>
    Task<bool> DeleteAsync(string tenantId, string bundleId, CancellationToken cancellationToken = default);
}
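The interface above gives enough surface for a conventional locale-fallback lookup: exact locale first, then a language-only fallback, then the default bundle for the key. The resolver below is a sketch of that policy built only on these repository methods; it is not an API defined in this commit, and the language-only step is an assumption about the intended fallback order.

// Locale-fallback sketch built only on ILocalizationBundleRepository (not part of this commit).
public static async Task<LocalizationBundleEntity?> ResolveAsync(
    ILocalizationBundleRepository repository,
    string tenantId,
    string bundleKey,
    string locale,
    CancellationToken ct = default)
{
    // 1. Exact locale match (the repository lookup is case-insensitive).
    var exact = await repository.GetByKeyAndLocaleAsync(tenantId, bundleKey, locale, ct);
    if (exact is not null)
    {
        return exact;
    }

    // 2. Language-only fallback, e.g. "de" for "de-AT" (assumed policy).
    var dashIndex = locale.IndexOf('-');
    if (dashIndex > 0)
    {
        var language = locale[..dashIndex];
        var byLanguage = await repository.GetByKeyAndLocaleAsync(tenantId, bundleKey, language, ct);
        if (byLanguage is not null)
        {
            return byLanguage;
        }
    }

    // 3. Tenant default for the bundle key.
    return await repository.GetDefaultByKeyAsync(tenantId, bundleKey, ct);
}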
@@ -0,0 +1,44 @@
using StellaOps.Notify.Storage.Postgres.Models;

namespace StellaOps.Notify.Storage.Postgres.Repositories;

/// <summary>
/// Repository interface for operator overrides.
/// </summary>
public interface IOperatorOverrideRepository
{
    /// <summary>
    /// Gets an operator override by ID.
    /// </summary>
    Task<OperatorOverrideEntity?> GetByIdAsync(string tenantId, string overrideId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets all operator overrides for a tenant.
    /// </summary>
    Task<IReadOnlyList<OperatorOverrideEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets active (non-expired) operator overrides for a tenant.
    /// </summary>
    Task<IReadOnlyList<OperatorOverrideEntity>> GetActiveAsync(string tenantId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets active overrides by type.
    /// </summary>
    Task<IReadOnlyList<OperatorOverrideEntity>> GetActiveByTypeAsync(string tenantId, string overrideType, CancellationToken cancellationToken = default);

    /// <summary>
    /// Creates a new operator override.
    /// </summary>
    Task<OperatorOverrideEntity> CreateAsync(OperatorOverrideEntity override_, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes an operator override.
    /// </summary>
    Task<bool> DeleteAsync(string tenantId, string overrideId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes all expired overrides for a tenant.
    /// </summary>
    Task<int> DeleteExpiredAsync(string tenantId, CancellationToken cancellationToken = default);
}
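As a usage illustration for the contract above: overrides are time-boxed via ExpiresAt, the GetActive* methods only return rows whose expiry has not passed, and DeleteExpiredAsync is the cleanup hook. The snippet below is a sketch with made-up values; the "mute-channel" override type and the id format are assumptions, while the entity property names match the repository implementation later in this diff.

// Usage sketch for IOperatorOverrideRepository (values and "mute-channel" type are assumptions).
public static async Task MuteChannelForOneHourAsync(
    IOperatorOverrideRepository overrides,
    string tenantId,
    string channelId,
    string operatorName,
    CancellationToken ct = default)
{
    await overrides.CreateAsync(new OperatorOverrideEntity
    {
        OverrideId = Guid.NewGuid().ToString("N"),
        TenantId = tenantId,
        OverrideType = "mute-channel",
        ChannelId = channelId,
        ExpiresAt = DateTimeOffset.UtcNow.AddHours(1),
        Reason = "Maintenance window",
        CreatedBy = operatorName
    }, ct);

    // Periodic housekeeping: drop anything whose expiry has passed.
    await overrides.DeleteExpiredAsync(tenantId, ct);
}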
@@ -0,0 +1,216 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
using StellaOps.Notify.Storage.Postgres.Models;
|
||||
|
||||
namespace StellaOps.Notify.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="ILocalizationBundleRepository"/>.
|
||||
/// </summary>
|
||||
public sealed class LocalizationBundleRepository : RepositoryBase<NotifyDataSource>, ILocalizationBundleRepository
|
||||
{
|
||||
private bool _tableInitialized;
|
||||
|
||||
public LocalizationBundleRepository(NotifyDataSource dataSource, ILogger<LocalizationBundleRepository> logger)
|
||||
: base(dataSource, logger) { }
|
||||
|
||||
public async Task<LocalizationBundleEntity?> GetByIdAsync(string tenantId, string bundleId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
SELECT bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
FROM notify.localization_bundles WHERE tenant_id = @tenant_id AND bundle_id = @bundle_id
|
||||
""";
|
||||
return await QuerySingleOrDefaultAsync(tenantId, sql,
|
||||
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "bundle_id", bundleId); },
|
||||
MapLocalizationBundle, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<LocalizationBundleEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
SELECT bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
FROM notify.localization_bundles WHERE tenant_id = @tenant_id ORDER BY bundle_key, locale
|
||||
""";
|
||||
return await QueryAsync(tenantId, sql,
|
||||
cmd => AddParameter(cmd, "tenant_id", tenantId),
|
||||
MapLocalizationBundle, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<LocalizationBundleEntity>> GetByBundleKeyAsync(string tenantId, string bundleKey, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
SELECT bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
FROM notify.localization_bundles WHERE tenant_id = @tenant_id AND bundle_key = @bundle_key ORDER BY locale
|
||||
""";
|
||||
return await QueryAsync(tenantId, sql,
|
||||
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "bundle_key", bundleKey); },
|
||||
MapLocalizationBundle, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<LocalizationBundleEntity?> GetByKeyAndLocaleAsync(string tenantId, string bundleKey, string locale, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
SELECT bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
FROM notify.localization_bundles
|
||||
WHERE tenant_id = @tenant_id AND bundle_key = @bundle_key AND LOWER(locale) = LOWER(@locale)
|
||||
LIMIT 1
|
||||
""";
|
||||
return await QuerySingleOrDefaultAsync(tenantId, sql,
|
||||
cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", tenantId);
|
||||
AddParameter(cmd, "bundle_key", bundleKey);
|
||||
AddParameter(cmd, "locale", locale);
|
||||
},
|
||||
MapLocalizationBundle, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<LocalizationBundleEntity?> GetDefaultByKeyAsync(string tenantId, string bundleKey, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
SELECT bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
FROM notify.localization_bundles
|
||||
WHERE tenant_id = @tenant_id AND bundle_key = @bundle_key AND is_default = TRUE
|
||||
LIMIT 1
|
||||
""";
|
||||
return await QuerySingleOrDefaultAsync(tenantId, sql,
|
||||
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "bundle_key", bundleKey); },
|
||||
MapLocalizationBundle, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<LocalizationBundleEntity> CreateAsync(LocalizationBundleEntity bundle, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(bundle.TenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
INSERT INTO notify.localization_bundles (bundle_id, tenant_id, locale, bundle_key, strings, is_default,
|
||||
parent_locale, description, metadata, created_by, updated_by)
|
||||
VALUES (@bundle_id, @tenant_id, @locale, @bundle_key, @strings, @is_default,
|
||||
@parent_locale, @description, @metadata, @created_by, @updated_by)
|
||||
RETURNING bundle_id, tenant_id, locale, bundle_key, strings, is_default, parent_locale,
|
||||
description, metadata, created_by, created_at, updated_by, updated_at
|
||||
""";
|
||||
|
||||
await using var connection = await DataSource.OpenConnectionAsync(bundle.TenantId, "writer", cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "bundle_id", bundle.BundleId);
|
||||
AddParameter(command, "tenant_id", bundle.TenantId);
|
||||
AddParameter(command, "locale", bundle.Locale);
|
||||
AddParameter(command, "bundle_key", bundle.BundleKey);
|
||||
AddJsonbParameter(command, "strings", bundle.Strings);
|
||||
AddParameter(command, "is_default", bundle.IsDefault);
|
||||
AddParameter(command, "parent_locale", (object?)bundle.ParentLocale ?? DBNull.Value);
|
||||
AddParameter(command, "description", (object?)bundle.Description ?? DBNull.Value);
|
||||
AddJsonbParameter(command, "metadata", bundle.Metadata);
|
||||
AddParameter(command, "created_by", (object?)bundle.CreatedBy ?? DBNull.Value);
|
||||
AddParameter(command, "updated_by", (object?)bundle.UpdatedBy ?? DBNull.Value);
|
||||
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
|
||||
return MapLocalizationBundle(reader);
|
||||
}
|
||||
|
||||
public async Task<bool> UpdateAsync(LocalizationBundleEntity bundle, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(bundle.TenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = """
|
||||
UPDATE notify.localization_bundles
|
||||
SET locale = @locale, bundle_key = @bundle_key, strings = @strings, is_default = @is_default,
|
||||
parent_locale = @parent_locale, description = @description, metadata = @metadata, updated_by = @updated_by
|
||||
WHERE tenant_id = @tenant_id AND bundle_id = @bundle_id
|
||||
""";
|
||||
var rows = await ExecuteAsync(bundle.TenantId, sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", bundle.TenantId);
|
||||
AddParameter(cmd, "bundle_id", bundle.BundleId);
|
||||
AddParameter(cmd, "locale", bundle.Locale);
|
||||
AddParameter(cmd, "bundle_key", bundle.BundleKey);
|
||||
AddJsonbParameter(cmd, "strings", bundle.Strings);
|
||||
AddParameter(cmd, "is_default", bundle.IsDefault);
|
||||
AddParameter(cmd, "parent_locale", (object?)bundle.ParentLocale ?? DBNull.Value);
|
||||
AddParameter(cmd, "description", (object?)bundle.Description ?? DBNull.Value);
|
||||
AddJsonbParameter(cmd, "metadata", bundle.Metadata);
|
||||
AddParameter(cmd, "updated_by", (object?)bundle.UpdatedBy ?? DBNull.Value);
|
||||
}, cancellationToken).ConfigureAwait(false);
|
||||
return rows > 0;
|
||||
}
|
||||
|
||||
public async Task<bool> DeleteAsync(string tenantId, string bundleId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = "DELETE FROM notify.localization_bundles WHERE tenant_id = @tenant_id AND bundle_id = @bundle_id";
|
||||
var rows = await ExecuteAsync(tenantId, sql,
|
||||
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "bundle_id", bundleId); },
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
return rows > 0;
|
||||
}
|
||||
|
||||
private static LocalizationBundleEntity MapLocalizationBundle(NpgsqlDataReader reader) => new()
|
||||
{
|
||||
BundleId = reader.GetString(0),
|
||||
TenantId = reader.GetString(1),
|
||||
Locale = reader.GetString(2),
|
||||
BundleKey = reader.GetString(3),
|
||||
Strings = reader.GetString(4),
|
||||
IsDefault = reader.GetBoolean(5),
|
||||
ParentLocale = GetNullableString(reader, 6),
|
||||
Description = GetNullableString(reader, 7),
|
||||
Metadata = GetNullableString(reader, 8),
|
||||
CreatedBy = GetNullableString(reader, 9),
|
||||
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
|
||||
UpdatedBy = GetNullableString(reader, 11),
|
||||
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(12)
|
||||
};
|
||||
|
||||
private async Task EnsureTableAsync(string tenantId, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_tableInitialized)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
const string ddl = """
|
||||
CREATE TABLE IF NOT EXISTS notify.localization_bundles (
|
||||
bundle_id TEXT NOT NULL,
|
||||
tenant_id TEXT NOT NULL,
|
||||
locale TEXT NOT NULL,
|
||||
bundle_key TEXT NOT NULL,
|
||||
strings JSONB NOT NULL,
|
||||
is_default BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
parent_locale TEXT,
|
||||
description TEXT,
|
||||
metadata JSONB,
|
||||
created_by TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_by TEXT,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
PRIMARY KEY (tenant_id, bundle_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_localization_bundles_key ON notify.localization_bundles (tenant_id, bundle_key);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_localization_bundles_key_locale ON notify.localization_bundles (tenant_id, bundle_key, locale);
|
||||
CREATE INDEX IF NOT EXISTS idx_localization_bundles_default ON notify.localization_bundles (tenant_id, bundle_key, is_default) WHERE is_default = TRUE;
|
||||
""";
|
||||
|
||||
await ExecuteAsync(tenantId, ddl, _ => { }, cancellationToken).ConfigureAwait(false);
|
||||
_tableInitialized = true;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,160 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Notify.Storage.Postgres.Models;

namespace StellaOps.Notify.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IOperatorOverrideRepository"/>.
/// </summary>
public sealed class OperatorOverrideRepository : RepositoryBase<NotifyDataSource>, IOperatorOverrideRepository
{
private bool _tableInitialized;

public OperatorOverrideRepository(NotifyDataSource dataSource, ILogger<OperatorOverrideRepository> logger)
: base(dataSource, logger) { }

public async Task<OperatorOverrideEntity?> GetByIdAsync(string tenantId, string overrideId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by, created_at
FROM notify.operator_overrides WHERE tenant_id = @tenant_id AND override_id = @override_id
""";
return await QuerySingleOrDefaultAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "override_id", overrideId); },
MapOperatorOverride, cancellationToken).ConfigureAwait(false);
}

public async Task<IReadOnlyList<OperatorOverrideEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by, created_at
FROM notify.operator_overrides WHERE tenant_id = @tenant_id ORDER BY created_at DESC
""";
return await QueryAsync(tenantId, sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
MapOperatorOverride, cancellationToken).ConfigureAwait(false);
}

public async Task<IReadOnlyList<OperatorOverrideEntity>> GetActiveAsync(string tenantId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by, created_at
FROM notify.operator_overrides WHERE tenant_id = @tenant_id AND expires_at > NOW() ORDER BY created_at DESC
""";
return await QueryAsync(tenantId, sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
MapOperatorOverride, cancellationToken).ConfigureAwait(false);
}

public async Task<IReadOnlyList<OperatorOverrideEntity>> GetActiveByTypeAsync(string tenantId, string overrideType, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by, created_at
FROM notify.operator_overrides WHERE tenant_id = @tenant_id AND override_type = @override_type AND expires_at > NOW()
ORDER BY created_at DESC
""";
return await QueryAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "override_type", overrideType); },
MapOperatorOverride, cancellationToken).ConfigureAwait(false);
}

public async Task<OperatorOverrideEntity> CreateAsync(OperatorOverrideEntity override_, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(override_.TenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
INSERT INTO notify.operator_overrides (override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by)
VALUES (@override_id, @tenant_id, @override_type, @expires_at, @channel_id, @rule_id, @reason, @created_by)
RETURNING override_id, tenant_id, override_type, expires_at, channel_id, rule_id, reason, created_by, created_at
""";

await using var connection = await DataSource.OpenConnectionAsync(override_.TenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "override_id", override_.OverrideId);
AddParameter(command, "tenant_id", override_.TenantId);
AddParameter(command, "override_type", override_.OverrideType);
AddParameter(command, "expires_at", override_.ExpiresAt);
AddParameter(command, "channel_id", (object?)override_.ChannelId ?? DBNull.Value);
AddParameter(command, "rule_id", (object?)override_.RuleId ?? DBNull.Value);
AddParameter(command, "reason", (object?)override_.Reason ?? DBNull.Value);
AddParameter(command, "created_by", (object?)override_.CreatedBy ?? DBNull.Value);

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
return MapOperatorOverride(reader);
}

public async Task<bool> DeleteAsync(string tenantId, string overrideId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = "DELETE FROM notify.operator_overrides WHERE tenant_id = @tenant_id AND override_id = @override_id";
var rows = await ExecuteAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "override_id", overrideId); },
cancellationToken).ConfigureAwait(false);
return rows > 0;
}

public async Task<int> DeleteExpiredAsync(string tenantId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = "DELETE FROM notify.operator_overrides WHERE tenant_id = @tenant_id AND expires_at <= NOW()";
return await ExecuteAsync(tenantId, sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
cancellationToken).ConfigureAwait(false);
}

private static OperatorOverrideEntity MapOperatorOverride(NpgsqlDataReader reader) => new()
{
OverrideId = reader.GetString(0),
TenantId = reader.GetString(1),
OverrideType = reader.GetString(2),
ExpiresAt = reader.GetFieldValue<DateTimeOffset>(3),
ChannelId = GetNullableString(reader, 4),
RuleId = GetNullableString(reader, 5),
Reason = GetNullableString(reader, 6),
CreatedBy = GetNullableString(reader, 7),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(8)
};

private async Task EnsureTableAsync(string tenantId, CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = """
CREATE TABLE IF NOT EXISTS notify.operator_overrides (
override_id TEXT NOT NULL,
tenant_id TEXT NOT NULL,
override_type TEXT NOT NULL,
expires_at TIMESTAMPTZ NOT NULL,
channel_id TEXT,
rule_id TEXT,
reason TEXT,
created_by TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
PRIMARY KEY (tenant_id, override_id)
);

CREATE INDEX IF NOT EXISTS idx_operator_overrides_type ON notify.operator_overrides (tenant_id, override_type);
CREATE INDEX IF NOT EXISTS idx_operator_overrides_expires ON notify.operator_overrides (tenant_id, expires_at);
-- NOW() is not IMMUTABLE, so this index cannot be partial; a plain composite index covers the active-lookup queries.
CREATE INDEX IF NOT EXISTS idx_operator_overrides_active ON notify.operator_overrides (tenant_id, override_type, expires_at);
""";

await ExecuteAsync(tenantId, ddl, _ => { }, cancellationToken).ConfigureAwait(false);
_tableInitialized = true;
}
}
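Illustrative only, not part of the commit: a caller-side sketch for the repository above, recording a short-lived channel mute and then sweeping expired rows. The object-initializer shape of OperatorOverrideEntity and the "channel-mute" override_type value are assumptions, not taken from this diff.

// Sketch only: assumes OperatorOverrideEntity exposes settable properties
// and that "channel-mute" is a valid override_type in this deployment.
public static async Task MuteChannelAsync(
    IOperatorOverrideRepository overrides,
    string tenantId,
    string channelId,
    string actor,
    CancellationToken cancellationToken)
{
    var entity = new OperatorOverrideEntity
    {
        OverrideId = Guid.NewGuid().ToString("N"),
        TenantId = tenantId,
        OverrideType = "channel-mute",          // assumed type value
        ChannelId = channelId,
        ExpiresAt = DateTimeOffset.UtcNow.AddHours(1),
        Reason = "maintenance window",
        CreatedBy = actor
    };

    await overrides.CreateAsync(entity, cancellationToken);

    // Rows past expires_at drop out of GetActiveAsync automatically;
    // DeleteExpiredAsync removes them from the table entirely.
    await overrides.DeleteExpiredAsync(tenantId, cancellationToken);
}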
@@ -0,0 +1,198 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Notify.Storage.Postgres.Models;

namespace StellaOps.Notify.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IThrottleConfigRepository"/>.
/// </summary>
public sealed class ThrottleConfigRepository : RepositoryBase<NotifyDataSource>, IThrottleConfigRepository
{
private bool _tableInitialized;

public ThrottleConfigRepository(NotifyDataSource dataSource, ILogger<ThrottleConfigRepository> logger)
: base(dataSource, logger) { }

public async Task<ThrottleConfigEntity?> GetByIdAsync(string tenantId, string configId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT config_id, tenant_id, name, default_window_seconds, max_notifications_per_window, channel_id,
is_default, enabled, description, metadata, created_by, created_at, updated_by, updated_at
FROM notify.throttle_configs WHERE tenant_id = @tenant_id AND config_id = @config_id
""";
return await QuerySingleOrDefaultAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "config_id", configId); },
MapThrottleConfig, cancellationToken).ConfigureAwait(false);
}

public async Task<IReadOnlyList<ThrottleConfigEntity>> ListAsync(string tenantId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT config_id, tenant_id, name, default_window_seconds, max_notifications_per_window, channel_id,
is_default, enabled, description, metadata, created_by, created_at, updated_by, updated_at
FROM notify.throttle_configs WHERE tenant_id = @tenant_id ORDER BY name
""";
return await QueryAsync(tenantId, sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
MapThrottleConfig, cancellationToken).ConfigureAwait(false);
}

public async Task<ThrottleConfigEntity?> GetDefaultAsync(string tenantId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT config_id, tenant_id, name, default_window_seconds, max_notifications_per_window, channel_id,
is_default, enabled, description, metadata, created_by, created_at, updated_by, updated_at
FROM notify.throttle_configs WHERE tenant_id = @tenant_id AND is_default = TRUE LIMIT 1
""";
return await QuerySingleOrDefaultAsync(tenantId, sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
MapThrottleConfig, cancellationToken).ConfigureAwait(false);
}

public async Task<ThrottleConfigEntity?> GetByChannelAsync(string tenantId, string channelId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
SELECT config_id, tenant_id, name, default_window_seconds, max_notifications_per_window, channel_id,
is_default, enabled, description, metadata, created_by, created_at, updated_by, updated_at
FROM notify.throttle_configs WHERE tenant_id = @tenant_id AND channel_id = @channel_id AND enabled = TRUE LIMIT 1
""";
return await QuerySingleOrDefaultAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "channel_id", channelId); },
MapThrottleConfig, cancellationToken).ConfigureAwait(false);
}

public async Task<ThrottleConfigEntity> CreateAsync(ThrottleConfigEntity config, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(config.TenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
INSERT INTO notify.throttle_configs (config_id, tenant_id, name, default_window_seconds, max_notifications_per_window,
channel_id, is_default, enabled, description, metadata, created_by, updated_by)
VALUES (@config_id, @tenant_id, @name, @default_window_seconds, @max_notifications_per_window,
@channel_id, @is_default, @enabled, @description, @metadata, @created_by, @updated_by)
RETURNING config_id, tenant_id, name, default_window_seconds, max_notifications_per_window, channel_id,
is_default, enabled, description, metadata, created_by, created_at, updated_by, updated_at
""";

await using var connection = await DataSource.OpenConnectionAsync(config.TenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "config_id", config.ConfigId);
AddParameter(command, "tenant_id", config.TenantId);
AddParameter(command, "name", config.Name);
AddParameter(command, "default_window_seconds", (long)config.DefaultWindow.TotalSeconds);
AddParameter(command, "max_notifications_per_window", (object?)config.MaxNotificationsPerWindow ?? DBNull.Value);
AddParameter(command, "channel_id", (object?)config.ChannelId ?? DBNull.Value);
AddParameter(command, "is_default", config.IsDefault);
AddParameter(command, "enabled", config.Enabled);
AddParameter(command, "description", (object?)config.Description ?? DBNull.Value);
AddJsonbParameter(command, "metadata", config.Metadata);
AddParameter(command, "created_by", (object?)config.CreatedBy ?? DBNull.Value);
AddParameter(command, "updated_by", (object?)config.UpdatedBy ?? DBNull.Value);

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
return MapThrottleConfig(reader);
}

public async Task<bool> UpdateAsync(ThrottleConfigEntity config, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(config.TenantId, cancellationToken).ConfigureAwait(false);

const string sql = """
UPDATE notify.throttle_configs
SET name = @name, default_window_seconds = @default_window_seconds,
max_notifications_per_window = @max_notifications_per_window, channel_id = @channel_id,
is_default = @is_default, enabled = @enabled, description = @description,
metadata = @metadata, updated_by = @updated_by
WHERE tenant_id = @tenant_id AND config_id = @config_id
""";
var rows = await ExecuteAsync(config.TenantId, sql, cmd =>
{
AddParameter(cmd, "tenant_id", config.TenantId);
AddParameter(cmd, "config_id", config.ConfigId);
AddParameter(cmd, "name", config.Name);
AddParameter(cmd, "default_window_seconds", (long)config.DefaultWindow.TotalSeconds);
AddParameter(cmd, "max_notifications_per_window", (object?)config.MaxNotificationsPerWindow ?? DBNull.Value);
AddParameter(cmd, "channel_id", (object?)config.ChannelId ?? DBNull.Value);
AddParameter(cmd, "is_default", config.IsDefault);
AddParameter(cmd, "enabled", config.Enabled);
AddParameter(cmd, "description", (object?)config.Description ?? DBNull.Value);
AddJsonbParameter(cmd, "metadata", config.Metadata);
AddParameter(cmd, "updated_by", (object?)config.UpdatedBy ?? DBNull.Value);
}, cancellationToken).ConfigureAwait(false);
return rows > 0;
}

public async Task<bool> DeleteAsync(string tenantId, string configId, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(tenantId, cancellationToken).ConfigureAwait(false);

const string sql = "DELETE FROM notify.throttle_configs WHERE tenant_id = @tenant_id AND config_id = @config_id";
var rows = await ExecuteAsync(tenantId, sql,
cmd => { AddParameter(cmd, "tenant_id", tenantId); AddParameter(cmd, "config_id", configId); },
cancellationToken).ConfigureAwait(false);
return rows > 0;
}

private static ThrottleConfigEntity MapThrottleConfig(NpgsqlDataReader reader) => new()
{
ConfigId = reader.GetString(0),
TenantId = reader.GetString(1),
Name = reader.GetString(2),
DefaultWindow = TimeSpan.FromSeconds(reader.GetInt64(3)),
MaxNotificationsPerWindow = GetNullableInt32(reader, 4),
ChannelId = GetNullableString(reader, 5),
IsDefault = reader.GetBoolean(6),
Enabled = reader.GetBoolean(7),
Description = GetNullableString(reader, 8),
Metadata = GetNullableString(reader, 9),
CreatedBy = GetNullableString(reader, 10),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(11),
UpdatedBy = GetNullableString(reader, 12),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(13)
};

private async Task EnsureTableAsync(string tenantId, CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = """
CREATE TABLE IF NOT EXISTS notify.throttle_configs (
config_id TEXT NOT NULL,
tenant_id TEXT NOT NULL,
name TEXT NOT NULL,
default_window_seconds BIGINT NOT NULL DEFAULT 300,
max_notifications_per_window INTEGER,
channel_id TEXT,
is_default BOOLEAN NOT NULL DEFAULT FALSE,
enabled BOOLEAN NOT NULL DEFAULT TRUE,
description TEXT,
metadata JSONB,
created_by TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_by TEXT,
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
PRIMARY KEY (tenant_id, config_id)
);

CREATE INDEX IF NOT EXISTS idx_throttle_configs_channel ON notify.throttle_configs (tenant_id, channel_id) WHERE channel_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_throttle_configs_default ON notify.throttle_configs (tenant_id, is_default) WHERE is_default = TRUE;
""";

await ExecuteAsync(tenantId, ddl, _ => { }, cancellationToken).ConfigureAwait(false);
_tableInitialized = true;
}
}
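Illustrative only, not part of the commit: one way a dispatcher might resolve an effective throttle window from the repository above, preferring a channel-specific config over the tenant default. The five-minute hard fallback is an assumption, not a value taken from this diff.

// Sketch only: channel-specific config wins, then the tenant default,
// then an assumed five-minute fallback window.
public static async Task<TimeSpan> ResolveThrottleWindowAsync(
    IThrottleConfigRepository configs,
    string tenantId,
    string channelId,
    CancellationToken cancellationToken)
{
    var config = await configs.GetByChannelAsync(tenantId, channelId, cancellationToken)
        ?? await configs.GetDefaultAsync(tenantId, cancellationToken);

    return config?.DefaultWindow ?? TimeSpan.FromMinutes(5);
}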
@@ -42,6 +42,11 @@ public static class ServiceCollectionExtensions
services.AddScoped<INotifyAuditRepository, NotifyAuditRepository>();
services.AddScoped<ILockRepository, LockRepository>();

// Register new repositories (SPRINT-3412: PostgreSQL durability)
services.AddScoped<IThrottleConfigRepository, ThrottleConfigRepository>();
services.AddScoped<IOperatorOverrideRepository, OperatorOverrideRepository>();
services.AddScoped<ILocalizationBundleRepository, LocalizationBundleRepository>();

return services;
}

@@ -73,6 +78,11 @@ public static class ServiceCollectionExtensions
services.AddScoped<IIncidentRepository, IncidentRepository>();
services.AddScoped<INotifyAuditRepository, NotifyAuditRepository>();

// Register new repositories (SPRINT-3412: PostgreSQL durability)
services.AddScoped<IThrottleConfigRepository, ThrottleConfigRepository>();
services.AddScoped<IOperatorOverrideRepository, OperatorOverrideRepository>();
services.AddScoped<ILocalizationBundleRepository, LocalizationBundleRepository>();

return services;
}
}

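Illustrative only, not part of the commit: a minimal composition-root sketch showing how a host might consume the scoped registrations above. AddNotifyPostgresStorage is a hypothetical stand-in for whichever extension method in this class performs those registrations; options wiring is omitted.

using Microsoft.Extensions.DependencyInjection;
using StellaOps.Notify.Storage.Postgres.Repositories;

var services = new ServiceCollection();
services.AddLogging();
// Hypothetical entry point standing in for the extension method shown above.
services.AddNotifyPostgresStorage();

await using var provider = services.BuildServiceProvider();
using var scope = provider.CreateScope();

// Scoped resolution mirrors the AddScoped registrations above.
var throttles = scope.ServiceProvider.GetRequiredService<IThrottleConfigRepository>();
var overrides = scope.ServiceProvider.GetRequiredService<IOperatorOverrideRepository>();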
@@ -0,0 +1,26 @@
using System.Reflection;
using StellaOps.Infrastructure.Postgres.Testing;
using Xunit;

namespace StellaOps.PacksRegistry.Storage.Postgres.Tests;

/// <summary>
/// PostgreSQL integration test fixture for the PacksRegistry module.
/// </summary>
public sealed class PacksRegistryPostgresFixture : PostgresIntegrationFixture, ICollectionFixture<PacksRegistryPostgresFixture>
{
protected override Assembly? GetMigrationAssembly()
=> typeof(PacksRegistryDataSource).Assembly;

protected override string GetModuleName() => "PacksRegistry";
}

/// <summary>
/// Collection definition for PacksRegistry PostgreSQL integration tests.
/// Tests in this collection share a single PostgreSQL container instance.
/// </summary>
[CollectionDefinition(Name)]
public sealed class PacksRegistryPostgresCollection : ICollectionFixture<PacksRegistryPostgresFixture>
{
public const string Name = "PacksRegistryPostgres";
}
@@ -0,0 +1,154 @@
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using MicrosoftOptions = Microsoft.Extensions.Options;
using StellaOps.PacksRegistry.Core.Models;
using StellaOps.PacksRegistry.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.PacksRegistry.Storage.Postgres.Tests;

[Collection(PacksRegistryPostgresCollection.Name)]
public sealed class PostgresPackRepositoryTests : IAsyncLifetime
{
private readonly PacksRegistryPostgresFixture _fixture;
private readonly PostgresPackRepository _repository;
private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

public PostgresPackRepositoryTests(PacksRegistryPostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new PacksRegistryDataSource(MicrosoftOptions.Options.Create(options), NullLogger<PacksRegistryDataSource>.Instance);
_repository = new PostgresPackRepository(dataSource, NullLogger<PostgresPackRepository>.Instance);
}

public async Task InitializeAsync()
{
await _fixture.TruncateAllTablesAsync();
}

public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task UpsertAndGet_RoundTripsPackRecord()
{
// Arrange
var packId = "pack-" + Guid.NewGuid().ToString("N");
var record = new PackRecord(
PackId: packId,
Name: "test-pack",
Version: "1.0.0",
TenantId: _tenantId,
Digest: "sha256:abc123",
Signature: "sig123",
ProvenanceUri: "https://example.com/provenance",
ProvenanceDigest: "sha256:prov456",
CreatedAtUtc: DateTimeOffset.UtcNow,
Metadata: new Dictionary<string, string> { ["author"] = "test" });
var content = Encoding.UTF8.GetBytes("pack content here");
var provenance = Encoding.UTF8.GetBytes("provenance data");

// Act
await _repository.UpsertAsync(record, content, provenance);
var fetched = await _repository.GetAsync(packId);

// Assert
fetched.Should().NotBeNull();
fetched!.PackId.Should().Be(packId);
fetched.Name.Should().Be("test-pack");
fetched.Version.Should().Be("1.0.0");
fetched.TenantId.Should().Be(_tenantId);
fetched.Metadata.Should().ContainKey("author");
}

[Fact]
public async Task GetContentAsync_ReturnsPackContent()
{
// Arrange
var packId = "pack-" + Guid.NewGuid().ToString("N");
var record = CreatePackRecord(packId, "content-test", "1.0.0");
var expectedContent = Encoding.UTF8.GetBytes("this is the pack content");

await _repository.UpsertAsync(record, expectedContent, null);

// Act
var content = await _repository.GetContentAsync(packId);

// Assert
content.Should().NotBeNull();
Encoding.UTF8.GetString(content!).Should().Be("this is the pack content");
}

[Fact]
public async Task GetProvenanceAsync_ReturnsProvenanceData()
{
// Arrange
var packId = "pack-" + Guid.NewGuid().ToString("N");
var record = CreatePackRecord(packId, "provenance-test", "1.0.0");
var content = Encoding.UTF8.GetBytes("content");
var expectedProvenance = Encoding.UTF8.GetBytes("provenance statement");

await _repository.UpsertAsync(record, content, expectedProvenance);

// Act
var provenance = await _repository.GetProvenanceAsync(packId);

// Assert
provenance.Should().NotBeNull();
Encoding.UTF8.GetString(provenance!).Should().Be("provenance statement");
}

[Fact]
public async Task ListAsync_ReturnsPacksForTenant()
{
// Arrange
var pack1 = CreatePackRecord("pack-1-" + Guid.NewGuid().ToString("N")[..8], "pack-a", "1.0.0");
var pack2 = CreatePackRecord("pack-2-" + Guid.NewGuid().ToString("N")[..8], "pack-b", "2.0.0");
var content = Encoding.UTF8.GetBytes("content");

await _repository.UpsertAsync(pack1, content, null);
await _repository.UpsertAsync(pack2, content, null);

// Act
var packs = await _repository.ListAsync(_tenantId);

// Assert
packs.Should().HaveCount(2);
}

[Fact]
public async Task UpsertAsync_UpdatesExistingPack()
{
// Arrange
var packId = "pack-" + Guid.NewGuid().ToString("N");
var record1 = CreatePackRecord(packId, "original", "1.0.0");
var record2 = CreatePackRecord(packId, "updated", "2.0.0");
var content = Encoding.UTF8.GetBytes("content");

// Act
await _repository.UpsertAsync(record1, content, null);
await _repository.UpsertAsync(record2, content, null);
var fetched = await _repository.GetAsync(packId);

// Assert
fetched.Should().NotBeNull();
fetched!.Name.Should().Be("updated");
fetched.Version.Should().Be("2.0.0");
}

private PackRecord CreatePackRecord(string packId, string name, string version) =>
new(
PackId: packId,
Name: name,
Version: version,
TenantId: _tenantId,
Digest: "sha256:" + Guid.NewGuid().ToString("N"),
Signature: null,
ProvenanceUri: null,
ProvenanceDigest: null,
CreatedAtUtc: DateTimeOffset.UtcNow,
Metadata: null);
}
@@ -0,0 +1,34 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\StellaOps.PacksRegistry.Storage.Postgres\StellaOps.PacksRegistry.Storage.Postgres.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
</ItemGroup>

</Project>
@@ -0,0 +1,44 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Options;

namespace StellaOps.PacksRegistry.Storage.Postgres;

/// <summary>
/// PostgreSQL data source for PacksRegistry module.
/// </summary>
public sealed class PacksRegistryDataSource : DataSourceBase
{
/// <summary>
/// Default schema name for PacksRegistry tables.
/// </summary>
public const string DefaultSchemaName = "packs";

/// <summary>
/// Creates a new PacksRegistry data source.
/// </summary>
public PacksRegistryDataSource(IOptions<PostgresOptions> options, ILogger<PacksRegistryDataSource> logger)
: base(CreateOptions(options.Value), logger)
{
}

/// <inheritdoc />
protected override string ModuleName => "PacksRegistry";

/// <inheritdoc />
protected override void ConfigureDataSourceBuilder(NpgsqlDataSourceBuilder builder)
{
base.ConfigureDataSourceBuilder(builder);
}

private static PostgresOptions CreateOptions(PostgresOptions baseOptions)
{
if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
{
baseOptions.SchemaName = DefaultSchemaName;
}
return baseOptions;
}
}
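Illustrative only, not part of the commit: constructing the data source directly, much as the test fixture earlier in this commit does. The ConnectionString property name on PostgresOptions is an assumption; only SchemaName is referenced by this diff.

using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Infrastructure.Postgres.Options;

// Sketch only: ConnectionString is an assumed PostgresOptions property.
var options = new PostgresOptions
{
    ConnectionString = "Host=localhost;Database=stellaops;Username=packs;Password=secret"
};

// SchemaName is left unset, so CreateOptions falls back to "packs".
var dataSource = new PacksRegistryDataSource(
    Options.Create(options),
    NullLogger<PacksRegistryDataSource>.Instance);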
@@ -0,0 +1,163 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Core.Models;

namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IAttestationRepository"/>.
/// </summary>
public sealed class PostgresAttestationRepository : RepositoryBase<PacksRegistryDataSource>, IAttestationRepository
{
private bool _tableInitialized;

public PostgresAttestationRepository(PacksRegistryDataSource dataSource, ILogger<PostgresAttestationRepository> logger)
: base(dataSource, logger)
{
}

public async Task UpsertAsync(AttestationRecord record, byte[] content, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(record);
ArgumentNullException.ThrowIfNull(content);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
INSERT INTO packs.attestations (pack_id, tenant_id, type, digest, content, notes, created_at)
VALUES (@pack_id, @tenant_id, @type, @digest, @content, @notes, @created_at)
ON CONFLICT (pack_id, type) DO UPDATE SET
tenant_id = EXCLUDED.tenant_id,
digest = EXCLUDED.digest,
content = EXCLUDED.content,
notes = EXCLUDED.notes,
created_at = EXCLUDED.created_at";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", record.PackId);
AddParameter(command, "@tenant_id", record.TenantId);
AddParameter(command, "@type", record.Type);
AddParameter(command, "@digest", record.Digest);
AddParameter(command, "@content", content);
AddParameter(command, "@notes", (object?)record.Notes ?? DBNull.Value);
AddParameter(command, "@created_at", record.CreatedAtUtc);

await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}

public async Task<AttestationRecord?> GetAsync(string packId, string type, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(packId);
ArgumentException.ThrowIfNullOrWhiteSpace(type);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
SELECT pack_id, tenant_id, type, digest, notes, created_at
FROM packs.attestations
WHERE pack_id = @pack_id AND type = @type";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", packId.Trim());
AddParameter(command, "@type", type.Trim());

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}

return MapAttestationRecord(reader);
}

public async Task<IReadOnlyList<AttestationRecord>> ListAsync(string packId, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(packId);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
SELECT pack_id, tenant_id, type, digest, notes, created_at
FROM packs.attestations
WHERE pack_id = @pack_id
ORDER BY created_at DESC";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", packId.Trim());

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

var results = new List<AttestationRecord>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapAttestationRecord(reader));
}

return results;
}

public async Task<byte[]?> GetContentAsync(string packId, string type, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(packId);
ArgumentException.ThrowIfNullOrWhiteSpace(type);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = "SELECT content FROM packs.attestations WHERE pack_id = @pack_id AND type = @type";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", packId.Trim());
AddParameter(command, "@type", type.Trim());

var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
return result is byte[] bytes ? bytes : null;
}

private static AttestationRecord MapAttestationRecord(NpgsqlDataReader reader)
{
return new AttestationRecord(
PackId: reader.GetString(0),
TenantId: reader.GetString(1),
Type: reader.GetString(2),
Digest: reader.GetString(3),
CreatedAtUtc: reader.GetFieldValue<DateTimeOffset>(5),
Notes: reader.IsDBNull(4) ? null : reader.GetString(4));
}

private async Task EnsureTableAsync(CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = @"
CREATE SCHEMA IF NOT EXISTS packs;

CREATE TABLE IF NOT EXISTS packs.attestations (
pack_id TEXT NOT NULL,
tenant_id TEXT NOT NULL,
type TEXT NOT NULL,
digest TEXT NOT NULL,
content BYTEA NOT NULL,
notes TEXT,
created_at TIMESTAMPTZ NOT NULL,
PRIMARY KEY (pack_id, type)
);

CREATE INDEX IF NOT EXISTS idx_attestations_tenant_id ON packs.attestations (tenant_id);
CREATE INDEX IF NOT EXISTS idx_attestations_created_at ON packs.attestations (created_at DESC);";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(ddl, connection);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

_tableInitialized = true;
}
}
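Illustrative only, not part of the commit: storing and re-reading an attestation envelope through the repository above. The "dsse" type label, the placeholder digest, and the envelope bytes are assumptions.

// Sketch only: "dsse" is an assumed attestation type label.
public static async Task<byte[]?> StoreAndReloadAsync(
    IAttestationRepository attestations,
    string packId,
    string tenantId,
    byte[] envelopeBytes,
    CancellationToken cancellationToken)
{
    var record = new AttestationRecord(
        PackId: packId,
        TenantId: tenantId,
        Type: "dsse",
        Digest: "sha256:0123456789abcdef",   // placeholder digest for the sketch
        CreatedAtUtc: DateTimeOffset.UtcNow,
        Notes: null);

    await attestations.UpsertAsync(record, envelopeBytes, cancellationToken);
    return await attestations.GetContentAsync(packId, "dsse", cancellationToken);
}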
@@ -0,0 +1,120 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Core.Models;

namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IAuditRepository"/>.
/// Append-only audit log for registry actions.
/// </summary>
public sealed class PostgresAuditRepository : RepositoryBase<PacksRegistryDataSource>, IAuditRepository
{
private bool _tableInitialized;

public PostgresAuditRepository(PacksRegistryDataSource dataSource, ILogger<PostgresAuditRepository> logger)
: base(dataSource, logger)
{
}

public async Task AppendAsync(AuditRecord record, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(record);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
INSERT INTO packs.audit_log (id, pack_id, tenant_id, event, actor, notes, occurred_at)
VALUES (@id, @pack_id, @tenant_id, @event, @actor, @notes, @occurred_at)";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@id", Guid.NewGuid().ToString("N"));
AddParameter(command, "@pack_id", (object?)record.PackId ?? DBNull.Value);
AddParameter(command, "@tenant_id", record.TenantId);
AddParameter(command, "@event", record.Event);
AddParameter(command, "@actor", (object?)record.Actor ?? DBNull.Value);
AddParameter(command, "@notes", (object?)record.Notes ?? DBNull.Value);
AddParameter(command, "@occurred_at", record.OccurredAtUtc);

await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}

public async Task<IReadOnlyList<AuditRecord>> ListAsync(string? tenantId = null, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

var sql = @"
SELECT pack_id, tenant_id, event, occurred_at, actor, notes
FROM packs.audit_log";

if (!string.IsNullOrWhiteSpace(tenantId))
{
sql += " WHERE tenant_id = @tenant_id";
}

sql += " ORDER BY occurred_at DESC";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);

if (!string.IsNullOrWhiteSpace(tenantId))
{
AddParameter(command, "@tenant_id", tenantId.Trim());
}

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

var results = new List<AuditRecord>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapAuditRecord(reader));
}

return results;
}

private static AuditRecord MapAuditRecord(NpgsqlDataReader reader)
{
return new AuditRecord(
PackId: reader.IsDBNull(0) ? null : reader.GetString(0),
TenantId: reader.GetString(1),
Event: reader.GetString(2),
OccurredAtUtc: reader.GetFieldValue<DateTimeOffset>(3),
Actor: reader.IsDBNull(4) ? null : reader.GetString(4),
Notes: reader.IsDBNull(5) ? null : reader.GetString(5));
}

private async Task EnsureTableAsync(CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = @"
CREATE SCHEMA IF NOT EXISTS packs;

CREATE TABLE IF NOT EXISTS packs.audit_log (
id TEXT PRIMARY KEY,
pack_id TEXT,
tenant_id TEXT NOT NULL,
event TEXT NOT NULL,
actor TEXT,
notes TEXT,
occurred_at TIMESTAMPTZ NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_audit_log_tenant_id ON packs.audit_log (tenant_id);
CREATE INDEX IF NOT EXISTS idx_audit_log_pack_id ON packs.audit_log (pack_id);
CREATE INDEX IF NOT EXISTS idx_audit_log_occurred_at ON packs.audit_log (occurred_at DESC);";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(ddl, connection);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

_tableInitialized = true;
}
}
@@ -0,0 +1,143 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Core.Models;

namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="ILifecycleRepository"/>.
/// </summary>
public sealed class PostgresLifecycleRepository : RepositoryBase<PacksRegistryDataSource>, ILifecycleRepository
{
private bool _tableInitialized;

public PostgresLifecycleRepository(PacksRegistryDataSource dataSource, ILogger<PostgresLifecycleRepository> logger)
: base(dataSource, logger)
{
}

public async Task UpsertAsync(LifecycleRecord record, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(record);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
INSERT INTO packs.lifecycles (pack_id, tenant_id, state, notes, updated_at)
VALUES (@pack_id, @tenant_id, @state, @notes, @updated_at)
ON CONFLICT (pack_id) DO UPDATE SET
tenant_id = EXCLUDED.tenant_id,
state = EXCLUDED.state,
notes = EXCLUDED.notes,
updated_at = EXCLUDED.updated_at";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", record.PackId);
AddParameter(command, "@tenant_id", record.TenantId);
AddParameter(command, "@state", record.State);
AddParameter(command, "@notes", (object?)record.Notes ?? DBNull.Value);
AddParameter(command, "@updated_at", record.UpdatedAtUtc);

await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}

public async Task<LifecycleRecord?> GetAsync(string packId, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(packId);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
SELECT pack_id, tenant_id, state, notes, updated_at
FROM packs.lifecycles
WHERE pack_id = @pack_id";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@pack_id", packId.Trim());

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}

return MapLifecycleRecord(reader);
}

public async Task<IReadOnlyList<LifecycleRecord>> ListAsync(string? tenantId = null, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

var sql = @"
SELECT pack_id, tenant_id, state, notes, updated_at
FROM packs.lifecycles";

if (!string.IsNullOrWhiteSpace(tenantId))
{
sql += " WHERE tenant_id = @tenant_id";
}

sql += " ORDER BY updated_at DESC";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);

if (!string.IsNullOrWhiteSpace(tenantId))
{
AddParameter(command, "@tenant_id", tenantId.Trim());
}

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

var results = new List<LifecycleRecord>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapLifecycleRecord(reader));
}

return results;
}

private static LifecycleRecord MapLifecycleRecord(NpgsqlDataReader reader)
{
return new LifecycleRecord(
PackId: reader.GetString(0),
TenantId: reader.GetString(1),
State: reader.GetString(2),
Notes: reader.IsDBNull(3) ? null : reader.GetString(3),
UpdatedAtUtc: reader.GetFieldValue<DateTimeOffset>(4));
}

private async Task EnsureTableAsync(CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = @"
CREATE SCHEMA IF NOT EXISTS packs;

CREATE TABLE IF NOT EXISTS packs.lifecycles (
pack_id TEXT PRIMARY KEY,
tenant_id TEXT NOT NULL,
state TEXT NOT NULL,
notes TEXT,
updated_at TIMESTAMPTZ NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_lifecycles_tenant_id ON packs.lifecycles (tenant_id);
CREATE INDEX IF NOT EXISTS idx_lifecycles_state ON packs.lifecycles (state);
CREATE INDEX IF NOT EXISTS idx_lifecycles_updated_at ON packs.lifecycles (updated_at DESC);";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(ddl, connection);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

_tableInitialized = true;
}
}
@@ -0,0 +1,155 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Core.Models;

namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IMirrorRepository"/>.
/// </summary>
public sealed class PostgresMirrorRepository : RepositoryBase<PacksRegistryDataSource>, IMirrorRepository
{
private bool _tableInitialized;

public PostgresMirrorRepository(PacksRegistryDataSource dataSource, ILogger<PostgresMirrorRepository> logger)
: base(dataSource, logger)
{
}

public async Task UpsertAsync(MirrorSourceRecord record, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(record);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
INSERT INTO packs.mirror_sources (id, tenant_id, upstream_uri, enabled, status, notes, updated_at, last_successful_sync_at)
VALUES (@id, @tenant_id, @upstream_uri, @enabled, @status, @notes, @updated_at, @last_successful_sync_at)
ON CONFLICT (id) DO UPDATE SET
tenant_id = EXCLUDED.tenant_id,
upstream_uri = EXCLUDED.upstream_uri,
enabled = EXCLUDED.enabled,
status = EXCLUDED.status,
notes = EXCLUDED.notes,
updated_at = EXCLUDED.updated_at,
last_successful_sync_at = EXCLUDED.last_successful_sync_at";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@id", record.Id);
AddParameter(command, "@tenant_id", record.TenantId);
AddParameter(command, "@upstream_uri", record.UpstreamUri.ToString());
AddParameter(command, "@enabled", record.Enabled);
AddParameter(command, "@status", record.Status);
AddParameter(command, "@notes", (object?)record.Notes ?? DBNull.Value);
AddParameter(command, "@updated_at", record.UpdatedAtUtc);
AddParameter(command, "@last_successful_sync_at", (object?)record.LastSuccessfulSyncUtc ?? DBNull.Value);

await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}

public async Task<MirrorSourceRecord?> GetAsync(string id, CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(id);

await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

const string sql = @"
SELECT id, tenant_id, upstream_uri, enabled, status, updated_at, notes, last_successful_sync_at
FROM packs.mirror_sources
WHERE id = @id";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "@id", id.Trim());

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}

return MapMirrorSourceRecord(reader);
}

public async Task<IReadOnlyList<MirrorSourceRecord>> ListAsync(string? tenantId = null, CancellationToken cancellationToken = default)
{
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

var sql = @"
SELECT id, tenant_id, upstream_uri, enabled, status, updated_at, notes, last_successful_sync_at
FROM packs.mirror_sources";

if (!string.IsNullOrWhiteSpace(tenantId))
{
sql += " WHERE tenant_id = @tenant_id";
}

sql += " ORDER BY updated_at DESC";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);

if (!string.IsNullOrWhiteSpace(tenantId))
{
AddParameter(command, "@tenant_id", tenantId.Trim());
}

await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

var results = new List<MirrorSourceRecord>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapMirrorSourceRecord(reader));
}

return results;
}

private static MirrorSourceRecord MapMirrorSourceRecord(NpgsqlDataReader reader)
{
return new MirrorSourceRecord(
Id: reader.GetString(0),
TenantId: reader.GetString(1),
UpstreamUri: new Uri(reader.GetString(2)),
Enabled: reader.GetBoolean(3),
Status: reader.GetString(4),
UpdatedAtUtc: reader.GetFieldValue<DateTimeOffset>(5),
Notes: reader.IsDBNull(6) ? null : reader.GetString(6),
LastSuccessfulSyncUtc: reader.IsDBNull(7) ? null : reader.GetFieldValue<DateTimeOffset>(7));
}

private async Task EnsureTableAsync(CancellationToken cancellationToken)
{
if (_tableInitialized)
{
return;
}

const string ddl = @"
CREATE SCHEMA IF NOT EXISTS packs;

CREATE TABLE IF NOT EXISTS packs.mirror_sources (
id TEXT PRIMARY KEY,
tenant_id TEXT NOT NULL,
upstream_uri TEXT NOT NULL,
enabled BOOLEAN NOT NULL DEFAULT true,
status TEXT NOT NULL,
notes TEXT,
updated_at TIMESTAMPTZ NOT NULL,
last_successful_sync_at TIMESTAMPTZ
);

CREATE INDEX IF NOT EXISTS idx_mirror_sources_tenant_id ON packs.mirror_sources (tenant_id);
CREATE INDEX IF NOT EXISTS idx_mirror_sources_enabled ON packs.mirror_sources (enabled);
CREATE INDEX IF NOT EXISTS idx_mirror_sources_updated_at ON packs.mirror_sources (updated_at DESC);";

await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var command = CreateCommand(ddl, connection);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

_tableInitialized = true;
}
}
@@ -0,0 +1,215 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
using StellaOps.PacksRegistry.Core.Contracts;
|
||||
using StellaOps.PacksRegistry.Core.Models;
|
||||
|
||||
namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IPackRepository"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresPackRepository : RepositoryBase<PacksRegistryDataSource>, IPackRepository
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
private bool _tableInitialized;
|
||||
|
||||
public PostgresPackRepository(PacksRegistryDataSource dataSource, ILogger<PostgresPackRepository> logger)
|
||||
: base(dataSource, logger)
|
||||
{
|
||||
}
|
||||
|
||||
public async Task UpsertAsync(PackRecord record, byte[] content, byte[]? provenance, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(record);
|
||||
ArgumentNullException.ThrowIfNull(content);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO packs.packs (pack_id, name, version, tenant_id, digest, signature, provenance_uri, provenance_digest, metadata, content, provenance, created_at)
|
||||
VALUES (@pack_id, @name, @version, @tenant_id, @digest, @signature, @provenance_uri, @provenance_digest, @metadata, @content, @provenance, @created_at)
|
||||
ON CONFLICT (pack_id) DO UPDATE SET
|
||||
name = EXCLUDED.name,
|
||||
version = EXCLUDED.version,
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
digest = EXCLUDED.digest,
|
||||
signature = EXCLUDED.signature,
|
||||
provenance_uri = EXCLUDED.provenance_uri,
|
||||
provenance_digest = EXCLUDED.provenance_digest,
|
||||
metadata = EXCLUDED.metadata,
|
||||
content = EXCLUDED.content,
|
||||
provenance = EXCLUDED.provenance";
|
||||
|
||||
var metadataJson = record.Metadata is null ? null : JsonSerializer.Serialize(record.Metadata, JsonOptions);
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@pack_id", record.PackId);
|
||||
AddParameter(command, "@name", record.Name);
|
||||
AddParameter(command, "@version", record.Version);
|
||||
AddParameter(command, "@tenant_id", record.TenantId);
|
||||
AddParameter(command, "@digest", record.Digest);
|
||||
AddParameter(command, "@signature", (object?)record.Signature ?? DBNull.Value);
|
||||
AddParameter(command, "@provenance_uri", (object?)record.ProvenanceUri ?? DBNull.Value);
|
||||
AddParameter(command, "@provenance_digest", (object?)record.ProvenanceDigest ?? DBNull.Value);
|
||||
AddJsonbParameter(command, "@metadata", metadataJson);
|
||||
AddParameter(command, "@content", content);
|
||||
AddParameter(command, "@provenance", (object?)provenance ?? DBNull.Value);
|
||||
AddParameter(command, "@created_at", record.CreatedAtUtc);
|
||||
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<PackRecord?> GetAsync(string packId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(packId);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = @"
|
||||
SELECT pack_id, name, version, tenant_id, digest, signature, provenance_uri, provenance_digest, metadata, created_at
|
||||
FROM packs.packs
|
||||
WHERE pack_id = @pack_id";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@pack_id", packId.Trim());
|
||||
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return MapPackRecord(reader);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<PackRecord>> ListAsync(string? tenantId = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var sql = @"
|
||||
SELECT pack_id, name, version, tenant_id, digest, signature, provenance_uri, provenance_digest, metadata, created_at
|
||||
FROM packs.packs";
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(tenantId))
|
||||
{
|
||||
sql += " WHERE tenant_id = @tenant_id";
|
||||
}
|
||||
|
||||
sql += " ORDER BY created_at DESC";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(tenantId))
|
||||
{
|
||||
AddParameter(command, "@tenant_id", tenantId.Trim());
|
||||
}
|
||||
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var results = new List<PackRecord>();
|
||||
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
results.Add(MapPackRecord(reader));
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
public async Task<byte[]?> GetContentAsync(string packId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(packId);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = "SELECT content FROM packs.packs WHERE pack_id = @pack_id";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@pack_id", packId.Trim());
|
||||
|
||||
var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
|
||||
return result is byte[] bytes ? bytes : null;
|
||||
}
|
||||
|
||||
public async Task<byte[]?> GetProvenanceAsync(string packId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(packId);
|
||||
|
||||
await EnsureTableAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
const string sql = "SELECT provenance FROM packs.packs WHERE pack_id = @pack_id";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
AddParameter(command, "@pack_id", packId.Trim());
|
||||
|
||||
var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
|
||||
return result is byte[] bytes ? bytes : null;
|
||||
}
|
||||
|
||||
private static PackRecord MapPackRecord(NpgsqlDataReader reader)
|
||||
{
|
||||
var metadataJson = reader.IsDBNull(8) ? null : reader.GetString(8);
|
||||
var metadata = string.IsNullOrWhiteSpace(metadataJson)
|
||||
? null
|
||||
: JsonSerializer.Deserialize<Dictionary<string, string>>(metadataJson, JsonOptions);
|
||||
|
||||
return new PackRecord(
|
||||
PackId: reader.GetString(0),
|
||||
Name: reader.GetString(1),
|
||||
Version: reader.GetString(2),
|
||||
TenantId: reader.GetString(3),
|
||||
Digest: reader.GetString(4),
|
||||
Signature: reader.IsDBNull(5) ? null : reader.GetString(5),
|
||||
ProvenanceUri: reader.IsDBNull(6) ? null : reader.GetString(6),
|
||||
ProvenanceDigest: reader.IsDBNull(7) ? null : reader.GetString(7),
|
||||
CreatedAtUtc: reader.GetFieldValue<DateTimeOffset>(9),
|
||||
Metadata: metadata);
|
||||
}
|
||||
|
||||
private async Task EnsureTableAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_tableInitialized)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
const string ddl = @"
|
||||
CREATE SCHEMA IF NOT EXISTS packs;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS packs.packs (
|
||||
pack_id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
version TEXT NOT NULL,
|
||||
tenant_id TEXT NOT NULL,
|
||||
digest TEXT NOT NULL,
|
||||
signature TEXT,
|
||||
provenance_uri TEXT,
|
||||
provenance_digest TEXT,
|
||||
metadata JSONB,
|
||||
content BYTEA NOT NULL,
|
||||
provenance BYTEA,
|
||||
created_at TIMESTAMPTZ NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_packs_tenant_id ON packs.packs (tenant_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_packs_name_version ON packs.packs (name, version);
|
||||
CREATE INDEX IF NOT EXISTS idx_packs_created_at ON packs.packs (created_at DESC);";
|
||||
|
||||
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(ddl, connection);
|
||||
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_tableInitialized = true;
|
||||
}
|
||||
}
|
||||
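For orientation, a minimal consumer of the pack repository above might look like the following sketch. It is illustrative only and assumes IPackRepository exposes the GetAsync/GetContentAsync members implemented here; the method name and the pack identifier are invented for the example.

// Illustrative only: read a pack's metadata row and its BYTEA payload.
public static class PackLookupExample
{
    public static async Task<byte[]?> LoadContentAsync(IPackRepository packs, string packId, CancellationToken ct)
    {
        var record = await packs.GetAsync(packId, ct);   // metadata row from packs.packs, or null
        if (record is null)
        {
            return null;
        }

        // record.Digest carries the expected digest; verification is assumed to happen elsewhere.
        return await packs.GetContentAsync(packId, ct);
    }
}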
@@ -0,0 +1,143 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Core.Models;

namespace StellaOps.PacksRegistry.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IParityRepository"/>.
/// </summary>
public sealed class PostgresParityRepository : RepositoryBase<PacksRegistryDataSource>, IParityRepository
{
    private bool _tableInitialized;

    public PostgresParityRepository(PacksRegistryDataSource dataSource, ILogger<PostgresParityRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task UpsertAsync(ParityRecord record, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(record);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            INSERT INTO packs.parities (pack_id, tenant_id, status, notes, updated_at)
            VALUES (@pack_id, @tenant_id, @status, @notes, @updated_at)
            ON CONFLICT (pack_id) DO UPDATE SET
                tenant_id = EXCLUDED.tenant_id,
                status = EXCLUDED.status,
                notes = EXCLUDED.notes,
                updated_at = EXCLUDED.updated_at";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@pack_id", record.PackId);
        AddParameter(command, "@tenant_id", record.TenantId);
        AddParameter(command, "@status", record.Status);
        AddParameter(command, "@notes", (object?)record.Notes ?? DBNull.Value);
        AddParameter(command, "@updated_at", record.UpdatedAtUtc);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    public async Task<ParityRecord?> GetAsync(string packId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(packId);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT pack_id, tenant_id, status, notes, updated_at
            FROM packs.parities
            WHERE pack_id = @pack_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@pack_id", packId.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return null;
        }

        return MapParityRecord(reader);
    }

    public async Task<IReadOnlyList<ParityRecord>> ListAsync(string? tenantId = null, CancellationToken cancellationToken = default)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        var sql = @"
            SELECT pack_id, tenant_id, status, notes, updated_at
            FROM packs.parities";

        if (!string.IsNullOrWhiteSpace(tenantId))
        {
            sql += " WHERE tenant_id = @tenant_id";
        }

        sql += " ORDER BY updated_at DESC";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        if (!string.IsNullOrWhiteSpace(tenantId))
        {
            AddParameter(command, "@tenant_id", tenantId.Trim());
        }

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<ParityRecord>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapParityRecord(reader));
        }

        return results;
    }

    private static ParityRecord MapParityRecord(NpgsqlDataReader reader)
    {
        return new ParityRecord(
            PackId: reader.GetString(0),
            TenantId: reader.GetString(1),
            Status: reader.GetString(2),
            Notes: reader.IsDBNull(3) ? null : reader.GetString(3),
            UpdatedAtUtc: reader.GetFieldValue<DateTimeOffset>(4));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS packs;

            CREATE TABLE IF NOT EXISTS packs.parities (
                pack_id TEXT PRIMARY KEY,
                tenant_id TEXT NOT NULL,
                status TEXT NOT NULL,
                notes TEXT,
                updated_at TIMESTAMPTZ NOT NULL
            );

            CREATE INDEX IF NOT EXISTS idx_parities_tenant_id ON packs.parities (tenant_id);
            CREATE INDEX IF NOT EXISTS idx_parities_status ON packs.parities (status);
            CREATE INDEX IF NOT EXISTS idx_parities_updated_at ON packs.parities (updated_at DESC);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
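A round trip through the repository above, sketched for illustration. It assumes IParityRepository exposes the UpsertAsync/GetAsync members implemented here and that ParityRecord has the positional shape used by MapParityRecord; the field values are invented.

// Illustrative only: upsert one parity row and read it back.
public static async Task<ParityRecord?> RecordParityAsync(IParityRepository parities, CancellationToken ct)
{
    var record = new ParityRecord(
        PackId: "pack-001",
        TenantId: "tenant-a",
        Status: "verified",
        Notes: null,
        UpdatedAtUtc: DateTimeOffset.UtcNow);

    await parities.UpsertAsync(record, ct);          // INSERT ... ON CONFLICT (pack_id) DO UPDATE
    return await parities.GetAsync("pack-001", ct);  // reads the row just written
}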
@@ -0,0 +1,63 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.PacksRegistry.Core.Contracts;
using StellaOps.PacksRegistry.Storage.Postgres.Repositories;

namespace StellaOps.PacksRegistry.Storage.Postgres;

/// <summary>
/// Extension methods for configuring PacksRegistry PostgreSQL storage services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds PacksRegistry PostgreSQL storage services.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root.</param>
    /// <param name="sectionName">Configuration section name for PostgreSQL options.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddPacksRegistryPostgresStorage(
        this IServiceCollection services,
        IConfiguration configuration,
        string sectionName = "Postgres:PacksRegistry")
    {
        services.Configure<PostgresOptions>(configuration.GetSection(sectionName));
        services.AddSingleton<PacksRegistryDataSource>();

        // Register repositories
        services.AddSingleton<IPackRepository, PostgresPackRepository>();
        services.AddSingleton<IAttestationRepository, PostgresAttestationRepository>();
        services.AddSingleton<IAuditRepository, PostgresAuditRepository>();
        services.AddSingleton<ILifecycleRepository, PostgresLifecycleRepository>();
        services.AddSingleton<IMirrorRepository, PostgresMirrorRepository>();
        services.AddSingleton<IParityRepository, PostgresParityRepository>();

        return services;
    }

    /// <summary>
    /// Adds PacksRegistry PostgreSQL storage services with explicit options.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configureOptions">Options configuration action.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddPacksRegistryPostgresStorage(
        this IServiceCollection services,
        Action<PostgresOptions> configureOptions)
    {
        services.Configure(configureOptions);
        services.AddSingleton<PacksRegistryDataSource>();

        // Register repositories
        services.AddSingleton<IPackRepository, PostgresPackRepository>();
        services.AddSingleton<IAttestationRepository, PostgresAttestationRepository>();
        services.AddSingleton<IAuditRepository, PostgresAuditRepository>();
        services.AddSingleton<ILifecycleRepository, PostgresLifecycleRepository>();
        services.AddSingleton<IMirrorRepository, PostgresMirrorRepository>();
        services.AddSingleton<IParityRepository, PostgresParityRepository>();

        return services;
    }
}
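As a usage sketch (hypothetical host code, not part of this commit), the extension above would typically be called once at startup; the PostgresOptions member names in the commented alternative are an assumption.

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using StellaOps.PacksRegistry.Storage.Postgres;

var builder = Host.CreateApplicationBuilder(args);

// Binds PostgresOptions from the "Postgres:PacksRegistry" section and registers the repositories.
builder.Services.AddPacksRegistryPostgresStorage(builder.Configuration);

// Alternatively, configure options explicitly (property name is an assumption):
// builder.Services.AddPacksRegistryPostgresStorage(o => o.ConnectionString = "...");

using var host = builder.Build();
await host.RunAsync();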
@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <RootNamespace>StellaOps.PacksRegistry.Storage.Postgres</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
  </ItemGroup>
</Project>
@@ -1,11 +1,15 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Http;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.EffectiveDecisionMap;
using StellaOps.Policy.Engine.Events;
using StellaOps.Policy.Engine.ExceptionCache;
using StellaOps.Policy.Engine.Gates;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Vex;
using StellaOps.Policy.Engine.WhatIfSimulation;
using StellaOps.Policy.Engine.Workers;
using StackExchange.Redis;
@@ -115,6 +119,65 @@ public static class PolicyEngineServiceCollectionExtensions
        return services;
    }

    /// <summary>
    /// Adds the VEX decision emitter and gate evaluator services.
    /// Supports OpenVEX document generation from reachability evidence.
    /// </summary>
    public static IServiceCollection AddVexDecisionEmitter(this IServiceCollection services)
    {
        // Gate evaluator for VEX status transitions
        services.TryAddSingleton<IPolicyGateEvaluator, PolicyGateEvaluator>();

        // VEX decision emitter
        services.TryAddSingleton<IVexDecisionEmitter, VexDecisionEmitter>();

        return services;
    }

    /// <summary>
    /// Adds the VEX decision emitter with options configuration.
    /// </summary>
    public static IServiceCollection AddVexDecisionEmitter(
        this IServiceCollection services,
        Action<VexDecisionEmitterOptions> configure)
    {
        services.Configure(configure);
        return services.AddVexDecisionEmitter();
    }

    /// <summary>
    /// Adds policy gate evaluator with options configuration.
    /// </summary>
    public static IServiceCollection AddPolicyGates(
        this IServiceCollection services,
        Action<PolicyGateOptions> configure)
    {
        services.Configure(configure);
        services.TryAddSingleton<IPolicyGateEvaluator, PolicyGateEvaluator>();
        return services;
    }

    /// <summary>
    /// Adds the VEX decision signing service for DSSE envelope creation and Rekor submission.
    /// Optional dependencies: IVexSignerClient, IVexRekorClient.
    /// </summary>
    public static IServiceCollection AddVexDecisionSigning(this IServiceCollection services)
    {
        services.TryAddSingleton<IVexDecisionSigningService, VexDecisionSigningService>();
        return services;
    }

    /// <summary>
    /// Adds the VEX decision signing service with options configuration.
    /// </summary>
    public static IServiceCollection AddVexDecisionSigning(
        this IServiceCollection services,
        Action<VexSigningOptions> configure)
    {
        services.Configure(configure);
        return services.AddVexDecisionSigning();
    }

    /// <summary>
    /// Adds Redis connection for effective decision map and evaluation cache.
    /// </summary>
@@ -128,6 +191,59 @@ public static class PolicyEngineServiceCollectionExtensions
        return services;
    }

    /// <summary>
    /// Adds the Signals-backed reachability facts client.
    /// </summary>
    public static IServiceCollection AddReachabilityFactsSignalsClient(
        this IServiceCollection services,
        Action<ReachabilityFactsSignalsClientOptions>? configure = null)
    {
        if (configure is not null)
        {
            services.Configure(configure);
        }

        services.AddHttpClient<IReachabilityFactsSignalsClient, ReachabilityFactsSignalsClient>()
            .ConfigureHttpClient((sp, client) =>
            {
                var options = sp.GetService<Microsoft.Extensions.Options.IOptions<ReachabilityFactsSignalsClientOptions>>()?.Value;
                if (options?.BaseUri is not null)
                {
                    client.BaseAddress = options.BaseUri;
                }

                if (options?.Timeout > TimeSpan.Zero)
                {
                    client.Timeout = options.Timeout;
                }
            });

        return services;
    }

    /// <summary>
    /// Adds the Signals-backed reachability facts store.
    /// Requires AddReachabilityFactsSignalsClient to be called first.
    /// </summary>
    public static IServiceCollection AddSignalsBackedReachabilityFactsStore(this IServiceCollection services)
    {
        services.TryAddSingleton<IReachabilityFactsStore, SignalsBackedReachabilityFactsStore>();
        return services;
    }

    /// <summary>
    /// Adds reachability facts integration with Signals service.
    /// Combines client and store registration.
    /// </summary>
    public static IServiceCollection AddReachabilityFactsSignalsIntegration(
        this IServiceCollection services,
        Action<ReachabilityFactsSignalsClientOptions>? configure = null)
    {
        services.AddReachabilityFactsSignalsClient(configure);
        services.AddSignalsBackedReachabilityFactsStore();
        return services;
    }

    /// <summary>
    /// Adds all Policy Engine services with default configuration.
    /// </summary>

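A minimal wiring sketch for the new extensions above; the option values are placeholders, and only the BaseUri, Timeout, and MaxConcurrentRequests members shown in this commit are assumed.

// Illustrative startup wiring, assuming `services` is the application's IServiceCollection.
services.AddReachabilityFactsSignalsIntegration(o =>
{
    o.BaseUri = new Uri("https://signals.internal.example");
    o.Timeout = TimeSpan.FromSeconds(10);
    o.MaxConcurrentRequests = 8;
});

services.AddVexDecisionEmitter();
services.AddVexDecisionSigning();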
@@ -222,6 +222,7 @@ builder.Services.AddSingleton<IReachabilityFactsStore, InMemoryReachabilityFacts
builder.Services.AddSingleton<IReachabilityFactsOverlayCache, InMemoryReachabilityFactsOverlayCache>();
builder.Services.AddSingleton<ReachabilityFactsJoiningService>();
builder.Services.AddSingleton<IRuntimeEvaluationExecutor, RuntimeEvaluationExecutor>();
builder.Services.AddVexDecisionEmitter(); // POLICY-VEX-401-006

builder.Services.AddHttpContextAccessor();
builder.Services.AddRouting(options => options.LowercaseUrls = true);

@@ -0,0 +1,234 @@
namespace StellaOps.Policy.Engine.ReachabilityFacts;

/// <summary>
/// HTTP client interface for fetching reachability facts from Signals service.
/// </summary>
public interface IReachabilityFactsSignalsClient
{
    /// <summary>
    /// Gets a reachability fact by subject key.
    /// </summary>
    /// <param name="subjectKey">Subject key (scan ID or component key).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The reachability fact document, or null if not found.</returns>
    Task<SignalsReachabilityFactResponse?> GetBySubjectAsync(
        string subjectKey,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets multiple reachability facts by subject keys.
    /// </summary>
    /// <param name="subjectKeys">Subject keys to lookup.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Dictionary of subject key to fact.</returns>
    Task<IReadOnlyDictionary<string, SignalsReachabilityFactResponse>> GetBatchBySubjectsAsync(
        IReadOnlyList<string> subjectKeys,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Triggers recomputation of reachability for a subject.
    /// </summary>
    /// <param name="request">Recompute request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if recompute was triggered.</returns>
    Task<bool> TriggerRecomputeAsync(
        SignalsRecomputeRequest request,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Response from Signals /facts/{subjectKey} endpoint.
/// Maps to ReachabilityFactDocument in Signals module.
/// </summary>
public sealed record SignalsReachabilityFactResponse
{
    /// <summary>
    /// Document ID.
    /// </summary>
    public string Id { get; init; } = string.Empty;

    /// <summary>
    /// Callgraph ID.
    /// </summary>
    public string CallgraphId { get; init; } = string.Empty;

    /// <summary>
    /// Subject information.
    /// </summary>
    public SignalsSubject? Subject { get; init; }

    /// <summary>
    /// Entry points.
    /// </summary>
    public List<string>? EntryPoints { get; init; }

    /// <summary>
    /// Reachability states.
    /// </summary>
    public List<SignalsReachabilityState>? States { get; init; }

    /// <summary>
    /// Runtime facts.
    /// </summary>
    public List<SignalsRuntimeFact>? RuntimeFacts { get; init; }

    /// <summary>
    /// CAS URI for runtime-facts batch artifact.
    /// </summary>
    public string? RuntimeFactsBatchUri { get; init; }

    /// <summary>
    /// BLAKE3 hash of runtime-facts batch.
    /// </summary>
    public string? RuntimeFactsBatchHash { get; init; }

    /// <summary>
    /// Additional metadata.
    /// </summary>
    public Dictionary<string, string?>? Metadata { get; init; }

    /// <summary>
    /// Context facts for provenance.
    /// </summary>
    public SignalsContextFacts? ContextFacts { get; init; }

    /// <summary>
    /// Uncertainty information.
    /// </summary>
    public SignalsUncertainty? Uncertainty { get; init; }

    /// <summary>
    /// Edge bundle references.
    /// </summary>
    public List<SignalsEdgeBundleReference>? EdgeBundles { get; init; }

    /// <summary>
    /// Whether quarantined edges exist.
    /// </summary>
    public bool HasQuarantinedEdges { get; init; }

    /// <summary>
    /// Reachability score.
    /// </summary>
    public double Score { get; init; }

    /// <summary>
    /// Risk score.
    /// </summary>
    public double RiskScore { get; init; }

    /// <summary>
    /// Count of unknowns.
    /// </summary>
    public int UnknownsCount { get; init; }

    /// <summary>
    /// Unknowns pressure.
    /// </summary>
    public double UnknownsPressure { get; init; }

    /// <summary>
    /// Computation timestamp.
    /// </summary>
    public DateTimeOffset ComputedAt { get; init; }

    /// <summary>
    /// Subject key.
    /// </summary>
    public string SubjectKey { get; init; } = string.Empty;
}

/// <summary>
/// Subject information from Signals.
/// </summary>
public sealed record SignalsSubject
{
    public string? ImageDigest { get; init; }
    public string? Component { get; init; }
    public string? Version { get; init; }
    public string? ScanId { get; init; }
}

/// <summary>
/// Reachability state from Signals.
/// </summary>
public sealed record SignalsReachabilityState
{
    public string Target { get; init; } = string.Empty;
    public bool Reachable { get; init; }
    public double Confidence { get; init; }
    public string Bucket { get; init; } = "unknown";
    public string? LatticeState { get; init; }
    public string? PreviousLatticeState { get; init; }
    public double Weight { get; init; }
    public double Score { get; init; }
    public List<string>? Path { get; init; }
    public SignalsEvidence? Evidence { get; init; }
    public DateTimeOffset? LatticeTransitionAt { get; init; }
}

/// <summary>
/// Evidence from Signals.
/// </summary>
public sealed record SignalsEvidence
{
    public List<string>? RuntimeHits { get; init; }
    public List<string>? BlockedEdges { get; init; }
}

/// <summary>
/// Runtime fact from Signals.
/// </summary>
public sealed record SignalsRuntimeFact
{
    public string SymbolId { get; init; } = string.Empty;
    public string? CodeId { get; init; }
    public string? SymbolDigest { get; init; }
    public string? Purl { get; init; }
    public string? BuildId { get; init; }
    public int HitCount { get; init; }
    public DateTimeOffset? ObservedAt { get; init; }
}

/// <summary>
/// Context facts from Signals.
/// </summary>
public sealed record SignalsContextFacts;

/// <summary>
/// Uncertainty information from Signals.
/// </summary>
public sealed record SignalsUncertainty
{
    public string? AggregateTier { get; init; }
    public double? RiskScore { get; init; }
}

/// <summary>
/// Edge bundle reference from Signals.
/// </summary>
public sealed record SignalsEdgeBundleReference
{
    public string BundleId { get; init; } = string.Empty;
    public string Reason { get; init; } = string.Empty;
    public int EdgeCount { get; init; }
    public string? CasUri { get; init; }
    public string? DsseDigest { get; init; }
    public bool HasRevokedEdges { get; init; }
}

/// <summary>
/// Request to trigger reachability recomputation.
/// </summary>
public sealed record SignalsRecomputeRequest
{
    /// <summary>
    /// Subject key to recompute.
    /// </summary>
    public required string SubjectKey { get; init; }

    /// <summary>
    /// Tenant ID.
    /// </summary>
    public required string TenantId { get; init; }
}
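Because the contract above is a plain interface, a test double is straightforward. The sketch below is a hypothetical in-memory fake (not part of this commit) that unit tests for the store or emitter could use; it relies on the project's implicit usings for System.Linq and the collection types.

// Illustrative test fake: facts are seeded by subject key and returned without any HTTP calls.
public sealed class FakeReachabilityFactsSignalsClient : IReachabilityFactsSignalsClient
{
    private readonly Dictionary<string, SignalsReachabilityFactResponse> _facts = new(StringComparer.Ordinal);

    public void Seed(string subjectKey, SignalsReachabilityFactResponse fact) => _facts[subjectKey] = fact;

    public Task<SignalsReachabilityFactResponse?> GetBySubjectAsync(string subjectKey, CancellationToken cancellationToken = default)
        => Task.FromResult(_facts.TryGetValue(subjectKey, out var fact) ? fact : null);

    public Task<IReadOnlyDictionary<string, SignalsReachabilityFactResponse>> GetBatchBySubjectsAsync(
        IReadOnlyList<string> subjectKeys, CancellationToken cancellationToken = default)
    {
        var found = subjectKeys
            .Where(_facts.ContainsKey)
            .ToDictionary(k => k, k => _facts[k], StringComparer.Ordinal);
        return Task.FromResult<IReadOnlyDictionary<string, SignalsReachabilityFactResponse>>(found);
    }

    public Task<bool> TriggerRecomputeAsync(SignalsRecomputeRequest request, CancellationToken cancellationToken = default)
        => Task.FromResult(true);
}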
@@ -0,0 +1,227 @@
using System.Diagnostics;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.ReachabilityFacts;

/// <summary>
/// HTTP client for fetching reachability facts from Signals service.
/// </summary>
public sealed class ReachabilityFactsSignalsClient : IReachabilityFactsSignalsClient
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    };

    private readonly HttpClient _httpClient;
    private readonly ReachabilityFactsSignalsClientOptions _options;
    private readonly ILogger<ReachabilityFactsSignalsClient> _logger;

    public ReachabilityFactsSignalsClient(
        HttpClient httpClient,
        IOptions<ReachabilityFactsSignalsClientOptions> options,
        ILogger<ReachabilityFactsSignalsClient> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        ArgumentNullException.ThrowIfNull(options);
        _options = options.Value;

        if (_httpClient.BaseAddress is null && _options.BaseUri is not null)
        {
            _httpClient.BaseAddress = _options.BaseUri;
        }

        _httpClient.DefaultRequestHeaders.Accept.Clear();
        _httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/json");
    }

    /// <inheritdoc />
    public async Task<SignalsReachabilityFactResponse?> GetBySubjectAsync(
        string subjectKey,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectKey);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "signals_client.get_fact",
            ActivityKind.Client);
        activity?.SetTag("signals.subject_key", subjectKey);

        var path = $"signals/facts/{Uri.EscapeDataString(subjectKey)}";

        try
        {
            var response = await _httpClient.GetAsync(path, cancellationToken).ConfigureAwait(false);

            if (response.StatusCode == HttpStatusCode.NotFound)
            {
                _logger.LogDebug("Reachability fact not found for subject {SubjectKey}", subjectKey);
                return null;
            }

            response.EnsureSuccessStatusCode();

            var fact = await response.Content
                .ReadFromJsonAsync<SignalsReachabilityFactResponse>(SerializerOptions, cancellationToken)
                .ConfigureAwait(false);

            _logger.LogDebug(
                "Retrieved reachability fact for subject {SubjectKey}: score={Score}, states={StateCount}",
                subjectKey,
                fact?.Score,
                fact?.States?.Count ?? 0);

            return fact;
        }
        catch (HttpRequestException ex) when (ex.StatusCode == HttpStatusCode.NotFound)
        {
            return null;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to get reachability fact for subject {SubjectKey}", subjectKey);
            throw;
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyDictionary<string, SignalsReachabilityFactResponse>> GetBatchBySubjectsAsync(
        IReadOnlyList<string> subjectKeys,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(subjectKeys);

        if (subjectKeys.Count == 0)
        {
            return new Dictionary<string, SignalsReachabilityFactResponse>();
        }

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "signals_client.get_facts_batch",
            ActivityKind.Client);
        activity?.SetTag("signals.batch_size", subjectKeys.Count);

        var result = new Dictionary<string, SignalsReachabilityFactResponse>(StringComparer.Ordinal);

        // Signals doesn't expose a batch endpoint, so we fetch in parallel with concurrency limit
        var semaphore = new SemaphoreSlim(_options.MaxConcurrentRequests);
        var tasks = subjectKeys.Select(async key =>
        {
            await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
            try
            {
                var fact = await GetBySubjectAsync(key, cancellationToken).ConfigureAwait(false);
                return (Key: key, Fact: fact);
            }
            finally
            {
                semaphore.Release();
            }
        });

        var results = await Task.WhenAll(tasks).ConfigureAwait(false);

        foreach (var (key, fact) in results)
        {
            if (fact is not null)
            {
                result[key] = fact;
            }
        }

        activity?.SetTag("signals.found_count", result.Count);
        _logger.LogDebug(
            "Batch retrieved {FoundCount}/{TotalCount} reachability facts",
            result.Count,
            subjectKeys.Count);

        return result;
    }

    /// <inheritdoc />
    public async Task<bool> TriggerRecomputeAsync(
        SignalsRecomputeRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "signals_client.trigger_recompute",
            ActivityKind.Client);
        activity?.SetTag("signals.subject_key", request.SubjectKey);
        activity?.SetTag("signals.tenant_id", request.TenantId);

        try
        {
            var response = await _httpClient.PostAsJsonAsync(
                "signals/reachability/recompute",
                new { subjectKey = request.SubjectKey, tenantId = request.TenantId },
                SerializerOptions,
                cancellationToken).ConfigureAwait(false);

            if (response.IsSuccessStatusCode)
            {
                _logger.LogInformation(
                    "Triggered reachability recompute for subject {SubjectKey}",
                    request.SubjectKey);
                return true;
            }

            _logger.LogWarning(
                "Failed to trigger reachability recompute for subject {SubjectKey}: {StatusCode}",
                request.SubjectKey,
                response.StatusCode);
            return false;
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "Error triggering reachability recompute for subject {SubjectKey}",
                request.SubjectKey);
            return false;
        }
    }
}

/// <summary>
/// Configuration options for the Signals reachability client.
/// </summary>
public sealed class ReachabilityFactsSignalsClientOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "ReachabilitySignals";

    /// <summary>
    /// Base URI for the Signals service.
    /// </summary>
    public Uri? BaseUri { get; set; }

    /// <summary>
    /// Maximum concurrent requests for batch operations.
    /// Default: 10.
    /// </summary>
    public int MaxConcurrentRequests { get; set; } = 10;

    /// <summary>
    /// Request timeout.
    /// Default: 30 seconds.
    /// </summary>
    public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Retry count for transient failures.
    /// Default: 3.
    /// </summary>
    public int RetryCount { get; set; } = 3;
}
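Binding these options from configuration follows the usual options pattern; a short sketch, assuming `services` and `configuration` come from the host builder and that an appsettings section named after SectionName exists.

// Illustrative: bind the "ReachabilitySignals" section, then register the typed client.
services.Configure<ReachabilityFactsSignalsClientOptions>(
    configuration.GetSection(ReachabilityFactsSignalsClientOptions.SectionName));
services.AddReachabilityFactsSignalsClient();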
@@ -0,0 +1,377 @@
using Microsoft.Extensions.Logging;

namespace StellaOps.Policy.Engine.ReachabilityFacts;

/// <summary>
/// Implementation of <see cref="IReachabilityFactsStore"/> that delegates to the Signals service.
/// Maps between Signals' ReachabilityFactDocument and Policy's ReachabilityFact.
/// </summary>
public sealed class SignalsBackedReachabilityFactsStore : IReachabilityFactsStore
{
    private readonly IReachabilityFactsSignalsClient _signalsClient;
    private readonly ILogger<SignalsBackedReachabilityFactsStore> _logger;
    private readonly TimeProvider _timeProvider;

    public SignalsBackedReachabilityFactsStore(
        IReachabilityFactsSignalsClient signalsClient,
        ILogger<SignalsBackedReachabilityFactsStore> logger,
        TimeProvider? timeProvider = null)
    {
        _signalsClient = signalsClient ?? throw new ArgumentNullException(nameof(signalsClient));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async Task<ReachabilityFact?> GetAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        // Signals uses subjectKey which is typically a scan ID or component key
        // For Policy lookups, we construct a composite key
        var subjectKey = BuildSubjectKey(componentPurl, advisoryId);

        var response = await _signalsClient.GetBySubjectAsync(subjectKey, cancellationToken)
            .ConfigureAwait(false);

        if (response is null)
        {
            _logger.LogDebug(
                "No reachability fact found for {TenantId}/{ComponentPurl}/{AdvisoryId}",
                tenantId, componentPurl, advisoryId);
            return null;
        }

        return MapToReachabilityFact(tenantId, componentPurl, advisoryId, response);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        if (keys.Count == 0)
        {
            return new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        }

        // Build subject keys for batch lookup
        var subjectKeyMap = keys.ToDictionary(
            k => BuildSubjectKey(k.ComponentPurl, k.AdvisoryId),
            k => k,
            StringComparer.Ordinal);

        var responses = await _signalsClient.GetBatchBySubjectsAsync(
            subjectKeyMap.Keys.ToList(),
            cancellationToken).ConfigureAwait(false);

        var result = new Dictionary<ReachabilityFactKey, ReachabilityFact>();

        foreach (var (subjectKey, response) in responses)
        {
            if (subjectKeyMap.TryGetValue(subjectKey, out var key))
            {
                var fact = MapToReachabilityFact(key.TenantId, key.ComponentPurl, key.AdvisoryId, response);
                result[key] = fact;
            }
        }

        return result;
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ReachabilityFact>> QueryAsync(
        ReachabilityFactsQuery query,
        CancellationToken cancellationToken = default)
    {
        // Signals service doesn't expose a direct query API
        // For now, return empty - callers should use batch lookups instead
        _logger.LogDebug(
            "Query not supported by Signals backend; use batch lookups instead. Tenant={TenantId}",
            query.TenantId);

        return Task.FromResult<IReadOnlyList<ReachabilityFact>>(Array.Empty<ReachabilityFact>());
    }

    /// <inheritdoc />
    public Task SaveAsync(ReachabilityFact fact, CancellationToken cancellationToken = default)
    {
        // Read-only store - facts are computed by Signals service
        _logger.LogWarning(
            "Save not supported by Signals backend. Facts are computed by Signals service.");
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task SaveBatchAsync(IReadOnlyList<ReachabilityFact> facts, CancellationToken cancellationToken = default)
    {
        // Read-only store - facts are computed by Signals service
        _logger.LogWarning(
            "SaveBatch not supported by Signals backend. Facts are computed by Signals service.");
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task DeleteAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        // Read-only store - facts are managed by Signals service
        _logger.LogWarning(
            "Delete not supported by Signals backend. Facts are managed by Signals service.");
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<long> CountAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        // Not available from Signals API
        return Task.FromResult(0L);
    }

    /// <summary>
    /// Triggers recomputation of reachability for a subject.
    /// </summary>
    public Task<bool> TriggerRecomputeAsync(
        string tenantId,
        string subjectKey,
        CancellationToken cancellationToken = default)
    {
        return _signalsClient.TriggerRecomputeAsync(
            new SignalsRecomputeRequest { SubjectKey = subjectKey, TenantId = tenantId },
            cancellationToken);
    }

    private static string BuildSubjectKey(string componentPurl, string advisoryId)
    {
        // Build a deterministic subject key from component and advisory
        // This should match how Signals indexes facts
        return $"{componentPurl}|{advisoryId}";
    }

    private ReachabilityFact MapToReachabilityFact(
        string tenantId,
        string componentPurl,
        string advisoryId,
        SignalsReachabilityFactResponse response)
    {
        // Determine overall state from lattice states
        var (state, confidence, hasRuntimeEvidence) = DetermineOverallState(response);

        // Determine analysis method
        var method = DetermineAnalysisMethod(response);

        // Build evidence reference
        var evidenceRef = response.RuntimeFactsBatchUri ?? response.CallgraphId;
        var evidenceHash = response.RuntimeFactsBatchHash;

        // Build metadata
        var metadata = BuildMetadata(response);

        return new ReachabilityFact
        {
            Id = response.Id,
            TenantId = tenantId,
            ComponentPurl = componentPurl,
            AdvisoryId = advisoryId,
            State = state,
            Confidence = (decimal)confidence,
            Score = (decimal)response.Score,
            HasRuntimeEvidence = hasRuntimeEvidence,
            Source = "signals",
            Method = method,
            EvidenceRef = evidenceRef,
            EvidenceHash = evidenceHash,
            ComputedAt = response.ComputedAt,
            ExpiresAt = null, // Signals doesn't expose expiry; rely on cache TTL
            Metadata = metadata,
        };
    }

    private static (ReachabilityState State, double Confidence, bool HasRuntimeEvidence) DetermineOverallState(
        SignalsReachabilityFactResponse response)
    {
        if (response.States is null || response.States.Count == 0)
        {
            return (ReachabilityState.Unknown, 0, false);
        }

        // Aggregate states - worst case wins for reachability
        var hasReachable = false;
        var hasUnreachable = false;
        var hasRuntimeEvidence = false;
        var maxConfidence = 0.0;
        var totalConfidence = 0.0;

        foreach (var state in response.States)
        {
            if (state.Reachable)
            {
                hasReachable = true;
            }
            else
            {
                hasUnreachable = true;
            }

            if (state.Evidence?.RuntimeHits?.Count > 0)
            {
                hasRuntimeEvidence = true;
            }

            maxConfidence = Math.Max(maxConfidence, state.Confidence);
            totalConfidence += state.Confidence;
        }

        // Also check runtime facts
        if (response.RuntimeFacts?.Count > 0)
        {
            hasRuntimeEvidence = true;
        }

        var avgConfidence = totalConfidence / response.States.Count;

        // Determine overall state
        ReachabilityState overallState;
        if (hasReachable && hasRuntimeEvidence)
        {
            overallState = ReachabilityState.Reachable; // Confirmed reachable
        }
        else if (hasReachable)
        {
            overallState = ReachabilityState.Reachable; // Statically reachable
        }
        else if (hasUnreachable && avgConfidence >= 0.7)
        {
            overallState = ReachabilityState.Unreachable;
        }
        else if (hasUnreachable)
        {
            overallState = ReachabilityState.UnderInvestigation; // Low confidence
        }
        else
        {
            overallState = ReachabilityState.Unknown;
        }

        return (overallState, avgConfidence, hasRuntimeEvidence);
    }

    private static AnalysisMethod DetermineAnalysisMethod(SignalsReachabilityFactResponse response)
    {
        var hasStaticAnalysis = response.States?.Count > 0;
        var hasRuntimeAnalysis = response.RuntimeFacts?.Count > 0 ||
            response.States?.Any(s => s.Evidence?.RuntimeHits?.Count > 0) == true;

        if (hasStaticAnalysis && hasRuntimeAnalysis)
        {
            return AnalysisMethod.Hybrid;
        }

        if (hasRuntimeAnalysis)
        {
            return AnalysisMethod.Dynamic;
        }

        if (hasStaticAnalysis)
        {
            return AnalysisMethod.Static;
        }

        return AnalysisMethod.Manual;
    }

    private static Dictionary<string, object?>? BuildMetadata(SignalsReachabilityFactResponse response)
    {
        var metadata = new Dictionary<string, object?>(StringComparer.Ordinal);

        if (!string.IsNullOrEmpty(response.CallgraphId))
        {
            metadata["callgraph_id"] = response.CallgraphId;
        }

        if (response.Subject is not null)
        {
            if (!string.IsNullOrEmpty(response.Subject.ScanId))
            {
                metadata["scan_id"] = response.Subject.ScanId;
            }

            if (!string.IsNullOrEmpty(response.Subject.ImageDigest))
            {
                metadata["image_digest"] = response.Subject.ImageDigest;
            }
        }

        if (response.EntryPoints?.Count > 0)
        {
            metadata["entry_points"] = response.EntryPoints;
        }

        if (response.Uncertainty is not null)
        {
            metadata["uncertainty_tier"] = response.Uncertainty.AggregateTier;
            metadata["uncertainty_risk_score"] = response.Uncertainty.RiskScore;
        }

        if (response.EdgeBundles?.Count > 0)
        {
            metadata["edge_bundle_count"] = response.EdgeBundles.Count;
            metadata["has_revoked_edges"] = response.EdgeBundles.Any(b => b.HasRevokedEdges);
        }

        if (response.HasQuarantinedEdges)
        {
            metadata["has_quarantined_edges"] = true;
        }

        metadata["unknowns_count"] = response.UnknownsCount;
        metadata["unknowns_pressure"] = response.UnknownsPressure;
        metadata["risk_score"] = response.RiskScore;

        if (!string.IsNullOrEmpty(response.RuntimeFactsBatchUri))
        {
            metadata["runtime_facts_cas_uri"] = response.RuntimeFactsBatchUri;
        }

        // Extract call paths from states for evidence
        var callPaths = response.States?
            .Where(s => s.Path?.Count > 0)
            .Select(s => s.Path!)
            .ToList();

        if (callPaths?.Count > 0)
        {
            metadata["call_paths"] = callPaths;
        }

        // Extract runtime hits from states
        var runtimeHits = response.States?
            .Where(s => s.Evidence?.RuntimeHits?.Count > 0)
            .SelectMany(s => s.Evidence!.RuntimeHits!)
            .Distinct()
            .ToList();

        if (runtimeHits?.Count > 0)
        {
            metadata["runtime_hits"] = runtimeHits;
        }

        // Extract lattice states
        var latticeStates = response.States?
            .Where(s => !string.IsNullOrEmpty(s.LatticeState))
            .Select(s => new { s.Target, s.LatticeState, s.Confidence })
            .ToList();

        if (latticeStates?.Count > 0)
        {
            metadata["lattice_states"] = latticeStates;
        }

        return metadata.Count > 0 ? metadata : null;
    }
}
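To make the aggregation above concrete, here is a hypothetical test sketch (inside an async xUnit test body) that seeds the fake client shown earlier after the client interface and walks one lookup through the store. The purl, advisory ID, and state values are illustrative; NullLogger comes from Microsoft.Extensions.Logging.Abstractions.

// Two Signals states for one subject: one reachable target with a runtime hit, one unreachable target.
var client = new FakeReachabilityFactsSignalsClient();
client.Seed("pkg:npm/lodash@4.17.21|CVE-2021-23337", new SignalsReachabilityFactResponse
{
    Id = "fact-1",
    SubjectKey = "pkg:npm/lodash@4.17.21|CVE-2021-23337",
    Score = 0.8,
    ComputedAt = DateTimeOffset.UtcNow,
    States = new List<SignalsReachabilityState>
    {
        new() { Target = "sinkA", Reachable = true, Confidence = 0.9,
                Evidence = new SignalsEvidence { RuntimeHits = new List<string> { "trace-1" } } },
        new() { Target = "sinkB", Reachable = false, Confidence = 0.5 },
    },
});

var store = new SignalsBackedReachabilityFactsStore(
    client,
    NullLogger<SignalsBackedReachabilityFactsStore>.Instance);

var fact = await store.GetAsync("tenant-a", "pkg:npm/lodash@4.17.21", "CVE-2021-23337", CancellationToken.None);

// Expected per DetermineOverallState/DetermineAnalysisMethod above: State = Reachable (one reachable target),
// HasRuntimeEvidence = true (runtime hit on sinkA), Confidence = (0.9 + 0.5) / 2 = 0.7,
// Method = Hybrid (static states plus runtime hits), Source = "signals".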
@@ -476,6 +476,56 @@ public static class PolicyEngineTelemetry

    #endregion

    #region VEX Decision Metrics

    // Counter: policy_vex_decisions_total{status,lattice_state}
    private static readonly Counter<long> VexDecisionsCounter =
        Meter.CreateCounter<long>(
            "policy_vex_decisions_total",
            unit: "decisions",
            description: "Total VEX decisions emitted by status and lattice state.");

    // Counter: policy_vex_signing_total{success,rekor_submitted}
    private static readonly Counter<long> VexSigningCounter =
        Meter.CreateCounter<long>(
            "policy_vex_signing_total",
            unit: "signings",
            description: "Total VEX decision signing operations.");

    /// <summary>
    /// Records a VEX decision emission.
    /// </summary>
    /// <param name="status">VEX status (not_affected, affected, under_investigation, fixed).</param>
    /// <param name="latticeState">Lattice state code (U, SR, SU, RO, RU, CR, CU, X).</param>
    public static void RecordVexDecision(string status, string latticeState)
    {
        var tags = new TagList
        {
            { "status", NormalizeTag(status) },
            { "lattice_state", NormalizeTag(latticeState) },
        };

        VexDecisionsCounter.Add(1, tags);
    }

    /// <summary>
    /// Records a VEX signing operation.
    /// </summary>
    /// <param name="success">Whether the signing operation succeeded.</param>
    /// <param name="rekorSubmitted">Whether the envelope was submitted to Rekor.</param>
    public static void RecordVexSigning(bool success, bool rekorSubmitted)
    {
        var tags = new TagList
        {
            { "success", success ? "true" : "false" },
            { "rekor_submitted", rekorSubmitted ? "true" : "false" },
        };

        VexSigningCounter.Add(1, tags);
    }

    #endregion

    #region Reachability Metrics

    // Counter: policy_reachability_applied_total{state}

src/Policy/StellaOps.Policy.Engine/Vex/VexDecisionEmitter.cs (new file, 432 lines)
@@ -0,0 +1,432 @@
using System.Collections.Immutable;
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Gates;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.Vex;

/// <summary>
/// Service for emitting OpenVEX decisions based on reachability facts.
/// </summary>
public interface IVexDecisionEmitter
{
    /// <summary>
    /// Emits VEX decisions for a set of findings.
    /// </summary>
    Task<VexDecisionEmitResult> EmitAsync(VexDecisionEmitRequest request, CancellationToken cancellationToken = default);

    /// <summary>
    /// Determines the VEX status for a single finding based on reachability.
    /// </summary>
    Task<VexStatusDetermination> DetermineStatusAsync(
        string tenantId,
        string vulnId,
        string purl,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of determining VEX status from reachability.
/// </summary>
public sealed record VexStatusDetermination
{
    public required string Status { get; init; }
    public string? Justification { get; init; }
    public string? Bucket { get; init; }
    public double Confidence { get; init; }
    public string? LatticeState { get; init; }
    public ReachabilityFact? Fact { get; init; }
}

/// <summary>
/// Default implementation of <see cref="IVexDecisionEmitter"/>.
/// </summary>
public sealed class VexDecisionEmitter : IVexDecisionEmitter
{
    private readonly ReachabilityFactsJoiningService _factsService;
    private readonly IPolicyGateEvaluator _gateEvaluator;
    private readonly IOptionsMonitor<VexDecisionEmitterOptions> _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<VexDecisionEmitter> _logger;

    // Status constants
    private const string StatusNotAffected = "not_affected";
    private const string StatusAffected = "affected";
    private const string StatusUnderInvestigation = "under_investigation";
    private const string StatusFixed = "fixed";

    // Lattice state constants
    private const string LatticeUnknown = "U";
    private const string LatticeStaticallyReachable = "SR";
    private const string LatticeStaticallyUnreachable = "SU";
    private const string LatticeRuntimeObserved = "RO";
    private const string LatticeRuntimeUnobserved = "RU";
    private const string LatticeConfirmedReachable = "CR";
    private const string LatticeConfirmedUnreachable = "CU";
    private const string LatticeContested = "X";

    public VexDecisionEmitter(
        ReachabilityFactsJoiningService factsService,
        IPolicyGateEvaluator gateEvaluator,
        IOptionsMonitor<VexDecisionEmitterOptions> options,
        TimeProvider timeProvider,
        ILogger<VexDecisionEmitter> logger)
    {
        _factsService = factsService ?? throw new ArgumentNullException(nameof(factsService));
        _gateEvaluator = gateEvaluator ?? throw new ArgumentNullException(nameof(gateEvaluator));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public async Task<VexDecisionEmitResult> EmitAsync(VexDecisionEmitRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "vex_decision.emit",
            ActivityKind.Internal);
        activity?.SetTag("tenant", request.TenantId);
        activity?.SetTag("findings_count", request.Findings.Count);

        var now = _timeProvider.GetUtcNow();
        var options = _options.CurrentValue;

        // Fetch reachability facts for all findings
        var factRequests = request.Findings
            .Select(f => new ReachabilityFactsRequest(f.Purl, f.VulnId))
            .ToList();

        var factsBatch = await _factsService.GetFactsBatchAsync(request.TenantId, factRequests, cancellationToken)
            .ConfigureAwait(false);

        // Process each finding
        var statements = new List<VexStatement>();
        var gateDecisions = new Dictionary<string, PolicyGateDecision>();
        var blocked = new List<VexBlockedFinding>();

        foreach (var finding in request.Findings)
        {
            var factKey = new ReachabilityFactKey(request.TenantId, finding.Purl, finding.VulnId);
            factsBatch.Found.TryGetValue(factKey, out var fact);

            // Determine status from reachability
            var (status, justification, latticeState, confidence) = DetermineStatusFromFact(fact, finding);

            // If override specified, use it
            if (!string.IsNullOrWhiteSpace(finding.OverrideStatus))
            {
                status = finding.OverrideStatus;
                justification = null; // Override may need different justification
            }

            // Evaluate gates
            var gateRequest = new PolicyGateRequest
            {
                TenantId = request.TenantId,
                VulnId = finding.VulnId,
                Purl = finding.Purl,
                SymbolId = finding.SymbolId,
                ScanId = finding.ScanId,
                RequestedStatus = status,
                Justification = justification,
                LatticeState = latticeState,
                UncertaintyTier = fact?.Metadata?.TryGetValue("uncertainty_tier", out var tier) == true ? tier?.ToString() : null,
                GraphHash = fact?.EvidenceHash,
                Confidence = confidence,
                HasRuntimeEvidence = fact?.HasRuntimeEvidence ?? false,
                PathLength = fact?.Metadata?.TryGetValue("path_length", out var pl) == true && pl is int pathLen ? pathLen : null,
                AllowOverride = !string.IsNullOrWhiteSpace(finding.OverrideJustification),
                OverrideJustification = finding.OverrideJustification
            };

            var gateDecision = await _gateEvaluator.EvaluateAsync(gateRequest, cancellationToken).ConfigureAwait(false);
            gateDecisions[$"{finding.VulnId}:{finding.Purl}"] = gateDecision;

            // Handle blocked findings
            if (gateDecision.Decision == PolicyGateDecisionType.Block)
            {
                blocked.Add(new VexBlockedFinding
                {
                    VulnId = finding.VulnId,
                    Purl = finding.Purl,
                    RequestedStatus = status,
                    BlockedBy = gateDecision.BlockedBy ?? "Unknown",
                    Reason = gateDecision.BlockReason ?? "Gate evaluation blocked this status",
                    Suggestion = gateDecision.Suggestion
                });

                // Fall back to under_investigation for blocked findings
                if (options.FallbackToUnderInvestigation)
                {
                    status = StatusUnderInvestigation;
                    justification = null;
                }
                else
                {
                    continue; // Skip this finding entirely
                }
            }

            // Build statement
            var statement = BuildStatement(finding, status, justification, fact, request.IncludeEvidence, now);
            statements.Add(statement);

            PolicyEngineTelemetry.RecordVexDecision(status, latticeState ?? LatticeUnknown);
        }

        // Build document
        var documentId = $"urn:uuid:{Guid.NewGuid()}";
        var document = new VexDecisionDocument
        {
            Id = documentId,
            Author = request.Author,
            Timestamp = now,
            Statements = statements.ToImmutableArray()
        };

        _logger.LogInformation(
            "Emitted VEX document {DocumentId} with {StatementCount} statements ({BlockedCount} blocked)",
            documentId,
            statements.Count,
            blocked.Count);

        return new VexDecisionEmitResult
        {
            Document = document,
            GateDecisions = gateDecisions,
            Blocked = blocked
        };
    }

    /// <inheritdoc/>
    public async Task<VexStatusDetermination> DetermineStatusAsync(
        string tenantId,
        string vulnId,
        string purl,
        CancellationToken cancellationToken = default)
    {
        var fact = await _factsService.GetFactAsync(tenantId, purl, vulnId, cancellationToken).ConfigureAwait(false);

        var (status, justification, latticeState, confidence) = DetermineStatusFromFact(fact, null);

        var bucket = BucketFromLatticeState(latticeState);

        return new VexStatusDetermination
        {
            Status = status,
            Justification = justification,
            Bucket = bucket,
            Confidence = confidence,
            LatticeState = latticeState,
            Fact = fact
        };
    }

    private (string status, string? justification, string? latticeState, double confidence) DetermineStatusFromFact(
        ReachabilityFact? fact,
        VexFindingInput? finding)
    {
        if (fact is null)
        {
            // No reachability data - default to under_investigation
            return (StatusUnderInvestigation, null, LatticeUnknown, 0.0);
        }

        var latticeState = MapReachabilityStateToLattice(fact.State, fact.HasRuntimeeEvidence);
        var confidence = (double)fact.Confidence;

        return fact.State switch
        {
            // Confirmed unreachable - not_affected with strong justification
            ReachabilityState.Unreachable when fact.HasRuntimeEvidence =>
                (StatusNotAffected, VexJustification.VulnerableCodeNotInExecutePath, LatticeConfirmedUnreachable, confidence),

            // Static unreachable - not_affected with weaker justification
            ReachabilityState.Unreachable =>
                (StatusNotAffected, VexJustification.VulnerableCodeNotInExecutePath, LatticeStaticallyUnreachable, confidence),

            // Confirmed reachable - affected
            ReachabilityState.Reachable when fact.HasRuntimeEvidence =>
                (StatusAffected, null, LatticeConfirmedReachable, confidence),

            // Static reachable - affected
            ReachabilityState.Reachable =>
                (StatusAffected, null, LatticeStaticallyReachable, confidence),

            // Under investigation
            ReachabilityState.UnderInvestigation =>
                (StatusUnderInvestigation, null, latticeState, confidence),

            // Unknown - default to under_investigation
            ReachabilityState.Unknown =>
                (StatusUnderInvestigation, null, LatticeUnknown, confidence),

            _ => (StatusUnderInvestigation, null, LatticeUnknown, 0.0)
        };
    }

    private static string MapReachabilityStateToLattice(ReachabilityState state, bool hasRuntimeEvidence)
    {
        return state switch
        {
            ReachabilityState.Reachable when hasRuntimeEvidence => LatticeConfirmedReachable,
            ReachabilityState.Reachable => LatticeStaticallyReachable,
            ReachabilityState.Unreachable when hasRuntimeEvidence => LatticeConfirmedUnreachable,
            ReachabilityState.Unreachable => LatticeStaticallyUnreachable,
            ReachabilityState.UnderInvestigation => LatticeContested,
            _ => LatticeUnknown
        };
    }

    private static string BucketFromLatticeState(string? latticeState)
    {
        return latticeState switch
        {
            LatticeConfirmedReachable or LatticeRuntimeObserved => "runtime",
            LatticeStaticallyReachable => "static",
            LatticeConfirmedUnreachable or LatticeRuntimeUnobserved => "runtime_unreachable",
|
||||
LatticeStaticallyUnreachable => "static_unreachable",
|
||||
LatticeContested => "contested",
|
||||
_ => "unknown"
|
||||
};
|
||||
}
|
||||
|
||||
private VexStatement BuildStatement(
|
||||
VexFindingInput finding,
|
||||
string status,
|
||||
string? justification,
|
||||
ReachabilityFact? fact,
|
||||
bool includeEvidence,
|
||||
DateTimeOffset timestamp)
|
||||
{
|
||||
var vulnerability = new VexVulnerability
|
||||
{
|
||||
Id = finding.VulnId,
|
||||
Name = finding.VulnName,
|
||||
Description = finding.VulnDescription
|
||||
};
|
||||
|
||||
var productBuilder = ImmutableArray.CreateBuilder<VexProduct>();
|
||||
var product = new VexProduct
|
||||
{
|
||||
Id = finding.Purl,
|
||||
Subcomponents = !string.IsNullOrWhiteSpace(finding.SymbolId)
|
||||
? ImmutableArray.Create(new VexSubcomponent { Id = finding.SymbolId })
|
||||
: null
|
||||
};
|
||||
productBuilder.Add(product);
|
||||
|
||||
VexEvidenceBlock? evidence = null;
|
||||
if (includeEvidence && fact is not null)
|
||||
{
|
||||
var latticeState = MapReachabilityStateToLattice(fact.State, fact.HasRuntimeEvidence);
|
||||
|
||||
// Extract evidence details from metadata
|
||||
ImmutableArray<string>? callPath = null;
|
||||
ImmutableArray<string>? entryPoints = null;
|
||||
ImmutableArray<string>? runtimeHits = null;
|
||||
string? graphCasUri = null;
|
||||
string? graphDsseDigest = null;
|
||||
|
||||
if (fact.Metadata is not null)
|
||||
{
|
||||
if (fact.Metadata.TryGetValue("call_path", out var cpObj) && cpObj is IEnumerable<object> cpList)
|
||||
{
|
||||
callPath = cpList.Select(x => x?.ToString() ?? string.Empty).ToImmutableArray();
|
||||
}
|
||||
|
||||
if (fact.Metadata.TryGetValue("entry_points", out var epObj) && epObj is IEnumerable<object> epList)
|
||||
{
|
||||
entryPoints = epList.Select(x => x?.ToString() ?? string.Empty).ToImmutableArray();
|
||||
}
|
||||
|
||||
if (fact.Metadata.TryGetValue("runtime_hits", out var rhObj) && rhObj is IEnumerable<object> rhList)
|
||||
{
|
||||
runtimeHits = rhList.Select(x => x?.ToString() ?? string.Empty).ToImmutableArray();
|
||||
}
|
||||
|
||||
if (fact.Metadata.TryGetValue("graph_cas_uri", out var casUri))
|
||||
{
|
||||
graphCasUri = casUri?.ToString();
|
||||
}
|
||||
|
||||
if (fact.Metadata.TryGetValue("graph_dsse_digest", out var dsseDigest))
|
||||
{
|
||||
graphDsseDigest = dsseDigest?.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
evidence = new VexEvidenceBlock
|
||||
{
|
||||
LatticeState = latticeState,
|
||||
UncertaintyTier = fact.Metadata?.TryGetValue("uncertainty_tier", out var tier) == true ? tier?.ToString() : null,
|
||||
Confidence = (double)fact.Confidence,
|
||||
RiskScore = fact.Metadata?.TryGetValue("risk_score", out var rs) == true && rs is double riskScore ? riskScore : null,
|
||||
CallPath = callPath,
|
||||
EntryPoints = entryPoints,
|
||||
RuntimeHits = runtimeHits,
|
||||
GraphHash = fact.EvidenceHash,
|
||||
GraphCasUri = graphCasUri,
|
||||
GraphDsseDigest = graphDsseDigest,
|
||||
Method = fact.Method.ToString().ToLowerInvariant(),
|
||||
ComputedAt = fact.ComputedAt
|
||||
};
|
||||
}
|
||||
|
||||
// Build impact/action statements
|
||||
string? impactStatement = null;
|
||||
string? actionStatement = null;
|
||||
|
||||
if (status == StatusNotAffected && justification == VexJustification.VulnerableCodeNotInExecutePath)
|
||||
{
|
||||
impactStatement = "Reachability analysis confirms the vulnerable code path is not executed.";
|
||||
}
|
||||
else if (status == StatusAffected)
|
||||
{
|
||||
actionStatement = "Vulnerable code path is reachable. Remediation recommended.";
|
||||
}
|
||||
|
||||
return new VexStatement
|
||||
{
|
||||
Vulnerability = vulnerability,
|
||||
Products = productBuilder.ToImmutable(),
|
||||
Status = status,
|
||||
Justification = justification,
|
||||
ImpactStatement = impactStatement,
|
||||
ActionStatement = actionStatement,
|
||||
Timestamp = timestamp,
|
||||
Evidence = evidence
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for VEX decision emitter.
|
||||
/// </summary>
|
||||
public sealed class VexDecisionEmitterOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether to fall back to under_investigation when gates block.
|
||||
/// </summary>
|
||||
public bool FallbackToUnderInvestigation { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum confidence required for not_affected auto-determination.
|
||||
/// </summary>
|
||||
public double MinConfidenceForNotAffected { get; set; } = 0.7;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to require runtime evidence for not_affected.
|
||||
/// </summary>
|
||||
public bool RequireRuntimeForNotAffected { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Default author for VEX documents.
|
||||
/// </summary>
|
||||
public string DefaultAuthor { get; set; } = "stellaops/policy-engine";
|
||||
}
|
||||
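Illustration (not part of the commit): a minimal sketch of how VexDecisionEmitterOptions might be bound from configuration. The extension method and the "VexDecisionEmitter" section name are assumptions; the class itself declares no SectionName constant.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

public static class VexEmitterOptionsSetup
{
    // Hypothetical helper; the "VexDecisionEmitter" section name is assumed.
    public static IServiceCollection AddVexDecisionEmitterOptions(
        this IServiceCollection services, IConfiguration configuration)
        => services.Configure<VexDecisionEmitterOptions>(
            configuration.GetSection("VexDecisionEmitter"));
}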
src/Policy/StellaOps.Policy.Engine/Vex/VexDecisionModels.cs (new file, 467 lines)
@@ -0,0 +1,467 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Policy.Engine.Gates;
using StellaOps.Policy.Engine.ReachabilityFacts;

namespace StellaOps.Policy.Engine.Vex;

/// <summary>
/// OpenVEX decision document emitted by the policy engine.
/// </summary>
public sealed record VexDecisionDocument
{
    /// <summary>
    /// Document identifier (GUID).
    /// </summary>
    [JsonPropertyName("@id")]
    public required string Id { get; init; }

    /// <summary>
    /// OpenVEX context (always "https://openvex.dev/ns/v0.2.0").
    /// </summary>
    [JsonPropertyName("@context")]
    public string Context { get; init; } = "https://openvex.dev/ns/v0.2.0";

    /// <summary>
    /// Author identifier.
    /// </summary>
    [JsonPropertyName("author")]
    public required string Author { get; init; }

    /// <summary>
    /// Role of the author.
    /// </summary>
    [JsonPropertyName("role")]
    public string Role { get; init; } = "policy_engine";

    /// <summary>
    /// Timestamp when the document was created.
    /// </summary>
    [JsonPropertyName("timestamp")]
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Document version (integer, incremented when the document is revised).
    /// </summary>
    [JsonPropertyName("version")]
    public int Version { get; init; } = 1;

    /// <summary>
    /// Tooling identifier.
    /// </summary>
    [JsonPropertyName("tooling")]
    public string Tooling { get; init; } = "stellaops/policy-engine";

    /// <summary>
    /// VEX statements in this document.
    /// </summary>
    [JsonPropertyName("statements")]
    public required ImmutableArray<VexStatement> Statements { get; init; }
}

/// <summary>
/// A single VEX statement with reachability evidence.
/// </summary>
public sealed record VexStatement
{
    /// <summary>
    /// Vulnerability identifier (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("vulnerability")]
    public required VexVulnerability Vulnerability { get; init; }

    /// <summary>
    /// Products affected by this statement.
    /// </summary>
    [JsonPropertyName("products")]
    public required ImmutableArray<VexProduct> Products { get; init; }

    /// <summary>
    /// VEX status (not_affected, affected, under_investigation, fixed).
    /// </summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>
    /// Justification for not_affected status.
    /// </summary>
    [JsonPropertyName("justification")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Justification { get; init; }

    /// <summary>
    /// Impact statement for not_affected.
    /// </summary>
    [JsonPropertyName("impact_statement")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ImpactStatement { get; init; }

    /// <summary>
    /// Action statement for affected/fixed.
    /// </summary>
    [JsonPropertyName("action_statement")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ActionStatement { get; init; }

    /// <summary>
    /// Timestamp of the statement.
    /// </summary>
    [JsonPropertyName("timestamp")]
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Status notes.
    /// </summary>
    [JsonPropertyName("status_notes")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? StatusNotes { get; init; }

    /// <summary>
    /// Reachability evidence block (StellaOps extension).
    /// </summary>
    [JsonPropertyName("x-stellaops-evidence")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public VexEvidenceBlock? Evidence { get; init; }
}

/// <summary>
/// VEX vulnerability reference.
/// </summary>
public sealed record VexVulnerability
{
    /// <summary>
    /// Vulnerability identifier (CVE-2021-44228, GHSA-..., etc.).
    /// </summary>
    [JsonPropertyName("@id")]
    public required string Id { get; init; }

    /// <summary>
    /// Vulnerability name/title.
    /// </summary>
    [JsonPropertyName("name")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Name { get; init; }

    /// <summary>
    /// Description of the vulnerability.
    /// </summary>
    [JsonPropertyName("description")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Description { get; init; }
}

/// <summary>
/// VEX product reference.
/// </summary>
public sealed record VexProduct
{
    /// <summary>
    /// Product identifier (purl).
    /// </summary>
    [JsonPropertyName("@id")]
    public required string Id { get; init; }

    /// <summary>
    /// Subcomponents (function-level specificity).
    /// </summary>
    [JsonPropertyName("subcomponents")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<VexSubcomponent>? Subcomponents { get; init; }
}

/// <summary>
/// VEX subcomponent for function-level precision.
/// </summary>
public sealed record VexSubcomponent
{
    /// <summary>
    /// Subcomponent identifier (symbol ID).
    /// </summary>
    [JsonPropertyName("@id")]
    public required string Id { get; init; }
}

/// <summary>
/// StellaOps reachability evidence block (extension).
/// </summary>
public sealed record VexEvidenceBlock
{
    /// <summary>
    /// v1 lattice state code (U, SR, SU, RO, RU, CR, CU, X).
    /// </summary>
    [JsonPropertyName("lattice_state")]
    public required string LatticeState { get; init; }

    /// <summary>
    /// Uncertainty tier (T1, T2, T3, T4).
    /// </summary>
    [JsonPropertyName("uncertainty_tier")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? UncertaintyTier { get; init; }

    /// <summary>
    /// Confidence score (0.0-1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; }

    /// <summary>
    /// Risk score incorporating uncertainty.
    /// </summary>
    [JsonPropertyName("risk_score")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public double? RiskScore { get; init; }

    /// <summary>
    /// Call path from entry point to target.
    /// </summary>
    [JsonPropertyName("call_path")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<string>? CallPath { get; init; }

    /// <summary>
    /// Entry points considered.
    /// </summary>
    [JsonPropertyName("entry_points")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<string>? EntryPoints { get; init; }

    /// <summary>
    /// Runtime hits (symbols observed at runtime).
    /// </summary>
    [JsonPropertyName("runtime_hits")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<string>? RuntimeHits { get; init; }

    /// <summary>
    /// BLAKE3 hash of the call graph.
    /// </summary>
    [JsonPropertyName("graph_hash")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? GraphHash { get; init; }

    /// <summary>
    /// CAS URI for the call graph.
    /// </summary>
    [JsonPropertyName("graph_cas_uri")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? GraphCasUri { get; init; }

    /// <summary>
    /// DSSE envelope digest for the graph.
    /// </summary>
    [JsonPropertyName("graph_dsse_digest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? GraphDsseDigest { get; init; }

    /// <summary>
    /// Edge bundles attached to this evidence.
    /// </summary>
    [JsonPropertyName("edge_bundles")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<VexEdgeBundleRef>? EdgeBundles { get; init; }

    /// <summary>
    /// Analysis method (static, dynamic, hybrid).
    /// </summary>
    [JsonPropertyName("method")]
    public string Method { get; init; } = "hybrid";

    /// <summary>
    /// Timestamp when evidence was computed.
    /// </summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }
}

/// <summary>
/// Reference to an edge bundle with DSSE attestation.
/// </summary>
public sealed record VexEdgeBundleRef
{
    /// <summary>
    /// Bundle identifier.
    /// </summary>
    [JsonPropertyName("bundle_id")]
    public required string BundleId { get; init; }

    /// <summary>
    /// Bundle reason (RuntimeHits, InitArray, etc.).
    /// </summary>
    [JsonPropertyName("reason")]
    public required string Reason { get; init; }

    /// <summary>
    /// CAS URI for the bundle.
    /// </summary>
    [JsonPropertyName("cas_uri")]
    public required string CasUri { get; init; }

    /// <summary>
    /// DSSE CAS URI (if signed).
    /// </summary>
    [JsonPropertyName("dsse_cas_uri")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? DsseCasUri { get; init; }
}

/// <summary>
/// Request to emit VEX decisions for a set of findings.
/// </summary>
public sealed record VexDecisionEmitRequest
{
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Author identifier for the VEX document.
    /// </summary>
    public required string Author { get; init; }

    /// <summary>
    /// Findings to emit decisions for.
    /// </summary>
    public required IReadOnlyList<VexFindingInput> Findings { get; init; }

    /// <summary>
    /// Whether to include full evidence blocks.
    /// </summary>
    public bool IncludeEvidence { get; init; } = true;

    /// <summary>
    /// Whether to request DSSE signatures.
    /// </summary>
    public bool RequestDsse { get; init; }

    /// <summary>
    /// Whether to submit to Rekor transparency log.
    /// </summary>
    public bool SubmitToRekor { get; init; }
}

/// <summary>
/// Input for a single finding to emit a VEX decision.
/// </summary>
public sealed record VexFindingInput
{
    /// <summary>
    /// Vulnerability identifier.
    /// </summary>
    public required string VulnId { get; init; }

    /// <summary>
    /// Package URL.
    /// </summary>
    public required string Purl { get; init; }

    /// <summary>
    /// Target symbol identifier (function-level).
    /// </summary>
    public string? SymbolId { get; init; }

    /// <summary>
    /// Scan identifier.
    /// </summary>
    public string? ScanId { get; init; }

    /// <summary>
    /// Vulnerability name/title.
    /// </summary>
    public string? VulnName { get; init; }

    /// <summary>
    /// Vulnerability description.
    /// </summary>
    public string? VulnDescription { get; init; }

    /// <summary>
    /// Override VEX status (if specified, bypasses auto-determination).
    /// </summary>
    public string? OverrideStatus { get; init; }

    /// <summary>
    /// Justification for override.
    /// </summary>
    public string? OverrideJustification { get; init; }
}

/// <summary>
/// Result of emitting VEX decisions.
/// </summary>
public sealed record VexDecisionEmitResult
{
    /// <summary>
    /// The emitted VEX document.
    /// </summary>
    public required VexDecisionDocument Document { get; init; }

    /// <summary>
    /// Gate decisions for each finding.
    /// </summary>
    public required IReadOnlyDictionary<string, PolicyGateDecision> GateDecisions { get; init; }

    /// <summary>
    /// Findings that were blocked by gates.
    /// </summary>
    public required IReadOnlyList<VexBlockedFinding> Blocked { get; init; }

    /// <summary>
    /// DSSE envelope digest (if signed).
    /// </summary>
    public string? DsseDigest { get; init; }

    /// <summary>
    /// Rekor log index (if submitted).
    /// </summary>
    public long? RekorLogIndex { get; init; }
}

/// <summary>
/// A finding that was blocked by policy gates.
/// </summary>
public sealed record VexBlockedFinding
{
    /// <summary>
    /// Vulnerability identifier.
    /// </summary>
    public required string VulnId { get; init; }

    /// <summary>
    /// Package URL.
    /// </summary>
    public required string Purl { get; init; }

    /// <summary>
    /// The status that was requested.
    /// </summary>
    public required string RequestedStatus { get; init; }

    /// <summary>
    /// The gate that blocked.
    /// </summary>
    public required string BlockedBy { get; init; }

    /// <summary>
    /// Reason for blocking.
    /// </summary>
    public required string Reason { get; init; }

    /// <summary>
    /// Suggestion for resolving.
    /// </summary>
    public string? Suggestion { get; init; }
}

/// <summary>
/// OpenVEX justification values for not_affected status.
/// </summary>
public static class VexJustification
{
    public const string ComponentNotPresent = "component_not_present";
    public const string VulnerableCodeNotPresent = "vulnerable_code_not_present";
    public const string VulnerableCodeNotInExecutePath = "vulnerable_code_not_in_execute_path";
    public const string VulnerableCodeCannotBeControlledByAdversary = "vulnerable_code_cannot_be_controlled_by_adversary";
    public const string InlineMitigationsAlreadyExist = "inline_mitigations_already_exist";
}
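Illustration (not part of the commit): a sketch of how these records compose into an OpenVEX-shaped JSON document. The CVE, purl, and values below are made-up; only the types come from this file.

using System.Collections.Immutable;
using System.Text.Json;

// Hypothetical statement: one product, not_affected with a reachability justification.
var statement = new VexStatement
{
    Vulnerability = new VexVulnerability { Id = "CVE-2021-44228" },
    Products = ImmutableArray.Create(new VexProduct { Id = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1" }),
    Status = "not_affected",
    Justification = VexJustification.VulnerableCodeNotInExecutePath,
    Timestamp = DateTimeOffset.UtcNow
};

var document = new VexDecisionDocument
{
    Id = $"urn:uuid:{Guid.NewGuid()}",
    Author = "stellaops/policy-engine",
    Timestamp = DateTimeOffset.UtcNow,
    Statements = ImmutableArray.Create(statement)
};

// The [JsonPropertyName] attributes produce OpenVEX-style keys such as "@id" and "@context".
Console.WriteLine(JsonSerializer.Serialize(document, new JsonSerializerOptions { WriteIndented = true }));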
@@ -0,0 +1,696 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Telemetry;

namespace StellaOps.Policy.Engine.Vex;

/// <summary>
/// Service for signing VEX decision documents with DSSE envelopes and optionally submitting to Rekor.
/// </summary>
public interface IVexDecisionSigningService
{
    /// <summary>
    /// Signs a VEX decision document, creating a DSSE envelope.
    /// </summary>
    Task<VexSigningResult> SignAsync(VexSigningRequest request, CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a signed VEX decision envelope.
    /// </summary>
    Task<VexVerificationResult> VerifyAsync(VexVerificationRequest request, CancellationToken cancellationToken = default);
}

/// <summary>
/// Request to sign a VEX decision document.
/// </summary>
public sealed record VexSigningRequest
{
    /// <summary>
    /// The VEX decision document to sign.
    /// </summary>
    public required VexDecisionDocument Document { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Key identifier for signing (null for default/keyless).
    /// </summary>
    public string? KeyId { get; init; }

    /// <summary>
    /// Whether to submit to Rekor transparency log.
    /// </summary>
    public bool SubmitToRekor { get; init; } = true;

    /// <summary>
    /// Subject URIs for the attestation (e.g., SBOM digest, scan ID).
    /// </summary>
    public IReadOnlyList<string>? SubjectUris { get; init; }

    /// <summary>
    /// Evidence artifact digests to reference.
    /// </summary>
    public IReadOnlyList<VexEvidenceReference>? EvidenceRefs { get; init; }
}

/// <summary>
/// Reference to supporting evidence artifact.
/// </summary>
public sealed record VexEvidenceReference
{
    /// <summary>
    /// Type of evidence (e.g., "sbom", "callgraph", "scan-report").
    /// </summary>
    public required string Type { get; init; }

    /// <summary>
    /// SHA256 digest of the evidence artifact.
    /// </summary>
    public required string Digest { get; init; }

    /// <summary>
    /// CAS URI for the artifact.
    /// </summary>
    public string? CasUri { get; init; }
}

/// <summary>
/// Result of signing a VEX decision.
/// </summary>
public sealed record VexSigningResult
{
    /// <summary>
    /// Whether signing was successful.
    /// </summary>
    public bool Success { get; init; }

    /// <summary>
    /// The DSSE envelope containing the signed VEX decision.
    /// </summary>
    public VexDsseEnvelope? Envelope { get; init; }

    /// <summary>
    /// SHA256 digest of the canonical envelope.
    /// </summary>
    public string? EnvelopeDigest { get; init; }

    /// <summary>
    /// Rekor transparency log metadata (if submitted).
    /// </summary>
    public VexRekorMetadata? RekorMetadata { get; init; }

    /// <summary>
    /// Error message if signing failed.
    /// </summary>
    public string? Error { get; init; }
}

/// <summary>
/// DSSE envelope for VEX decisions.
/// </summary>
public sealed record VexDsseEnvelope
{
    /// <summary>
    /// Payload type (always "stella.ops/vexDecision@v1").
    /// </summary>
    public string PayloadType { get; init; } = VexPredicateTypes.VexDecision;

    /// <summary>
    /// Base64-encoded payload (canonical JSON of VEX document).
    /// </summary>
    public required string Payload { get; init; }

    /// <summary>
    /// Signatures on the envelope.
    /// </summary>
    public required IReadOnlyList<VexDsseSignature> Signatures { get; init; }
}

/// <summary>
/// Signature in a VEX DSSE envelope.
/// </summary>
public sealed record VexDsseSignature
{
    /// <summary>
    /// Key identifier used for signing.
    /// </summary>
    public string? KeyId { get; init; }

    /// <summary>
    /// Base64-encoded signature.
    /// </summary>
    public required string Sig { get; init; }
}

/// <summary>
/// Rekor transparency log metadata.
/// </summary>
public sealed record VexRekorMetadata
{
    /// <summary>
    /// Rekor entry UUID.
    /// </summary>
    public required string Uuid { get; init; }

    /// <summary>
    /// Rekor log index.
    /// </summary>
    public long Index { get; init; }

    /// <summary>
    /// Rekor log URL.
    /// </summary>
    public required string LogUrl { get; init; }

    /// <summary>
    /// Timestamp of entry creation.
    /// </summary>
    public DateTimeOffset IntegratedAt { get; init; }

    /// <summary>
    /// Merkle tree root hash at integration time.
    /// </summary>
    public string? TreeRoot { get; init; }

    /// <summary>
    /// Inclusion proof (if available).
    /// </summary>
    public VexRekorInclusionProof? InclusionProof { get; init; }
}

/// <summary>
/// Rekor inclusion proof.
/// </summary>
public sealed record VexRekorInclusionProof
{
    /// <summary>
    /// Checkpoint text.
    /// </summary>
    public required string Checkpoint { get; init; }

    /// <summary>
    /// Hashes in the inclusion proof.
    /// </summary>
    public required IReadOnlyList<string> Hashes { get; init; }

    /// <summary>
    /// Leaf index in the tree.
    /// </summary>
    public long LeafIndex { get; init; }

    /// <summary>
    /// Tree size at proof time.
    /// </summary>
    public long TreeSize { get; init; }
}

/// <summary>
/// Request to verify a signed VEX decision.
/// </summary>
public sealed record VexVerificationRequest
{
    /// <summary>
    /// The DSSE envelope to verify.
    /// </summary>
    public required VexDsseEnvelope Envelope { get; init; }

    /// <summary>
    /// Expected Rekor metadata (optional).
    /// </summary>
    public VexRekorMetadata? ExpectedRekorMetadata { get; init; }

    /// <summary>
    /// Whether to verify Rekor inclusion.
    /// </summary>
    public bool VerifyRekorInclusion { get; init; }
}

/// <summary>
/// Result of verifying a signed VEX decision.
/// </summary>
public sealed record VexVerificationResult
{
    /// <summary>
    /// Whether verification passed.
    /// </summary>
    public bool Valid { get; init; }

    /// <summary>
    /// The decoded VEX decision document.
    /// </summary>
    public VexDecisionDocument? Document { get; init; }

    /// <summary>
    /// Verification errors (if any).
    /// </summary>
    public IReadOnlyList<string>? Errors { get; init; }

    /// <summary>
    /// Verified Rekor metadata.
    /// </summary>
    public VexRekorMetadata? RekorMetadata { get; init; }
}

/// <summary>
/// VEX predicate type constants.
/// </summary>
public static class VexPredicateTypes
{
    /// <summary>
    /// Predicate type for VEX decisions: stella.ops/vexDecision@v1.
    /// </summary>
    public const string VexDecision = "stella.ops/vexDecision@v1";

    /// <summary>
    /// Predicate type for full VEX documents: stella.ops/vex@v1.
    /// </summary>
    public const string VexDocument = "stella.ops/vex@v1";

    /// <summary>
    /// Standard OpenVEX predicate type.
    /// </summary>
    public const string OpenVex = "https://openvex.dev/ns";
}

/// <summary>
/// Default implementation of <see cref="IVexDecisionSigningService"/>.
/// </summary>
public sealed class VexDecisionSigningService : IVexDecisionSigningService
{
    private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    private readonly IVexSignerClient? _signerClient;
    private readonly IVexRekorClient? _rekorClient;
    private readonly IOptionsMonitor<VexSigningOptions> _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<VexDecisionSigningService> _logger;

    public VexDecisionSigningService(
        IVexSignerClient? signerClient,
        IVexRekorClient? rekorClient,
        IOptionsMonitor<VexSigningOptions> options,
        TimeProvider timeProvider,
        ILogger<VexDecisionSigningService> logger)
    {
        _signerClient = signerClient;
        _rekorClient = rekorClient;
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public async Task<VexSigningResult> SignAsync(VexSigningRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "vex_decision.sign",
            ActivityKind.Internal);
        activity?.SetTag("tenant", request.TenantId);
        activity?.SetTag("document_id", request.Document.Id);

        try
        {
            var options = _options.CurrentValue;

            // Serialize document to canonical JSON
            var documentJson = SerializeCanonical(request.Document);
            var payloadBase64 = Convert.ToBase64String(documentJson);

            // Sign the payload
            VexDsseSignature signature;
            if (_signerClient is not null && options.UseSignerService)
            {
                var signResult = await _signerClient.SignAsync(
                    new VexSignerRequest
                    {
                        PayloadType = VexPredicateTypes.VexDecision,
                        PayloadBase64 = payloadBase64,
                        KeyId = request.KeyId,
                        TenantId = request.TenantId
                    },
                    cancellationToken).ConfigureAwait(false);

                if (!signResult.Success)
                {
                    return new VexSigningResult
                    {
                        Success = false,
                        Error = signResult.Error ?? "Signer service returned failure"
                    };
                }

                signature = new VexDsseSignature
                {
                    KeyId = signResult.KeyId,
                    Sig = signResult.Signature!
                };
            }
            else
            {
                // Local signing fallback (for testing/development)
                signature = SignLocally(VexPredicateTypes.VexDecision, documentJson, request.KeyId);
            }

            // Build envelope
            var envelope = new VexDsseEnvelope
            {
                PayloadType = VexPredicateTypes.VexDecision,
                Payload = payloadBase64,
                Signatures = [signature]
            };

            // Compute envelope digest
            var envelopeJson = SerializeCanonical(envelope);
            var envelopeDigest = ComputeSha256(envelopeJson);

            // Submit to Rekor if requested
            VexRekorMetadata? rekorMetadata = null;
            if (request.SubmitToRekor && _rekorClient is not null && options.RekorEnabled)
            {
                rekorMetadata = await SubmitToRekorAsync(envelope, envelopeDigest, request, cancellationToken)
                    .ConfigureAwait(false);
            }

            _logger.LogInformation(
                "Signed VEX decision {DocumentId} for tenant {TenantId}. Rekor: {RekorSubmitted}",
                request.Document.Id,
                request.TenantId,
                rekorMetadata is not null);

            PolicyEngineTelemetry.RecordVexSigning(success: true, rekorSubmitted: rekorMetadata is not null);

            return new VexSigningResult
            {
                Success = true,
                Envelope = envelope,
                EnvelopeDigest = $"sha256:{envelopeDigest}",
                RekorMetadata = rekorMetadata
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to sign VEX decision {DocumentId}", request.Document.Id);
            PolicyEngineTelemetry.RecordVexSigning(success: false, rekorSubmitted: false);

            return new VexSigningResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <inheritdoc/>
    public async Task<VexVerificationResult> VerifyAsync(VexVerificationRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var errors = new List<string>();

        try
        {
            // Decode payload
            var payloadBytes = Convert.FromBase64String(request.Envelope.Payload);
            var document = JsonSerializer.Deserialize<VexDecisionDocument>(payloadBytes, CanonicalJsonOptions);

            if (document is null)
            {
                errors.Add("Failed to decode VEX document from payload");
                return new VexVerificationResult { Valid = false, Errors = errors };
            }

            // Verify payload type
            if (request.Envelope.PayloadType != VexPredicateTypes.VexDecision &&
                request.Envelope.PayloadType != VexPredicateTypes.VexDocument &&
                request.Envelope.PayloadType != VexPredicateTypes.OpenVex)
            {
                errors.Add($"Invalid payload type: {request.Envelope.PayloadType}");
            }

            // Verify signatures
            if (request.Envelope.Signatures.Count == 0)
            {
                errors.Add("Envelope has no signatures");
            }

            foreach (var sig in request.Envelope.Signatures)
            {
                if (string.IsNullOrWhiteSpace(sig.Sig))
                {
                    errors.Add("Signature is empty");
                    continue;
                }

                // TODO: Verify actual signature if signer client provides public key resolution
                // For now, we just verify the signature is well-formed base64
                try
                {
                    _ = Convert.FromBase64String(sig.Sig);
                }
                catch (FormatException)
                {
                    errors.Add($"Invalid base64 signature for keyId: {sig.KeyId ?? "(none)"}");
                }
            }

            // Verify Rekor inclusion if requested
            VexRekorMetadata? verifiedRekor = null;
            if (request.VerifyRekorInclusion && request.ExpectedRekorMetadata is not null && _rekorClient is not null)
            {
                var proofResult = await _rekorClient.GetProofAsync(
                    request.ExpectedRekorMetadata.Uuid,
                    cancellationToken).ConfigureAwait(false);

                if (proofResult is null)
                {
                    errors.Add($"Could not retrieve Rekor proof for UUID: {request.ExpectedRekorMetadata.Uuid}");
                }
                else
                {
                    verifiedRekor = proofResult;
                }
            }

            return new VexVerificationResult
            {
                Valid = errors.Count == 0,
                Document = document,
                Errors = errors.Count > 0 ? errors : null,
                RekorMetadata = verifiedRekor ?? request.ExpectedRekorMetadata
            };
        }
        catch (Exception ex)
        {
            errors.Add($"Verification failed: {ex.Message}");
            return new VexVerificationResult { Valid = false, Errors = errors };
        }
    }

    private async Task<VexRekorMetadata?> SubmitToRekorAsync(
        VexDsseEnvelope envelope,
        string envelopeDigest,
        VexSigningRequest request,
        CancellationToken cancellationToken)
    {
        if (_rekorClient is null)
        {
            return null;
        }

        try
        {
            var result = await _rekorClient.SubmitAsync(
                new VexRekorSubmitRequest
                {
                    Envelope = envelope,
                    EnvelopeDigest = envelopeDigest,
                    ArtifactKind = "vex-decision",
                    SubjectUris = request.SubjectUris
                },
                cancellationToken).ConfigureAwait(false);

            if (!result.Success)
            {
                _logger.LogWarning(
                    "Failed to submit VEX decision {DocumentId} to Rekor: {Error}",
                    request.Document.Id,
                    result.Error);
                return null;
            }

            return result.Metadata;
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Error submitting VEX decision {DocumentId} to Rekor",
                request.Document.Id);
            return null;
        }
    }
    private static VexDsseSignature SignLocally(string payloadType, byte[] payload, string? keyId)
    {
        // Compute DSSE PAE: "DSSEv1 " + len(payloadType) + " " + payloadType + " " + len(payload) + " " + payload
        // Lengths are decimal ASCII and byte sequences are concatenated directly, per the DSSE spec.
        var typeBytes = System.Text.Encoding.UTF8.GetBytes(payloadType);
        using var ms = new MemoryStream();

        void WriteAscii(string text) => ms.Write(System.Text.Encoding.ASCII.GetBytes(text));

        WriteAscii("DSSEv1 ");
        WriteAscii(typeBytes.Length.ToString());
        WriteAscii(" ");
        ms.Write(typeBytes);
        WriteAscii(" ");
        WriteAscii(payload.Length.ToString());
        WriteAscii(" ");
        ms.Write(payload);

        var pae = ms.ToArray();

        // For local signing, use a SHA256 hash of the PAE as a placeholder signature
        // In production, this would use actual key material
        var signatureBytes = SHA256.HashData(pae);

        return new VexDsseSignature
        {
            KeyId = keyId ?? "local:sha256",
            Sig = Convert.ToBase64String(signatureBytes)
        };
    }

    private static byte[] SerializeCanonical<T>(T value)
    {
        return JsonSerializer.SerializeToUtf8Bytes(value, CanonicalJsonOptions);
    }

    private static string ComputeSha256(byte[] data)
    {
        using var sha256 = SHA256.Create();
        var hash = sha256.ComputeHash(data);
        return Convert.ToHexStringLower(hash);
    }
}

/// <summary>
/// Client interface for VEX signing operations (delegates to Signer service).
/// </summary>
public interface IVexSignerClient
{
    /// <summary>
    /// Signs a VEX payload.
    /// </summary>
    Task<VexSignerResult> SignAsync(VexSignerRequest request, CancellationToken cancellationToken = default);
}

/// <summary>
/// Request to sign a VEX payload.
/// </summary>
public sealed record VexSignerRequest
{
    public required string PayloadType { get; init; }
    public required string PayloadBase64 { get; init; }
    public string? KeyId { get; init; }
    public required string TenantId { get; init; }
}

/// <summary>
/// Result from VEX signing.
/// </summary>
public sealed record VexSignerResult
{
    public bool Success { get; init; }
    public string? Signature { get; init; }
    public string? KeyId { get; init; }
    public string? Error { get; init; }
}

/// <summary>
/// Client interface for Rekor operations.
/// </summary>
public interface IVexRekorClient
{
    /// <summary>
    /// Submits a VEX envelope to Rekor.
    /// </summary>
    Task<VexRekorSubmitResult> SubmitAsync(VexRekorSubmitRequest request, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a Rekor proof by UUID.
    /// </summary>
    Task<VexRekorMetadata?> GetProofAsync(string uuid, CancellationToken cancellationToken = default);
}

/// <summary>
/// Request to submit to Rekor.
/// </summary>
public sealed record VexRekorSubmitRequest
{
    public required VexDsseEnvelope Envelope { get; init; }
    public required string EnvelopeDigest { get; init; }
    public string? ArtifactKind { get; init; }
    public IReadOnlyList<string>? SubjectUris { get; init; }
}

/// <summary>
/// Result of Rekor submission.
/// </summary>
public sealed record VexRekorSubmitResult
{
    public bool Success { get; init; }
    public VexRekorMetadata? Metadata { get; init; }
    public string? Error { get; init; }
}

/// <summary>
/// Options for VEX signing service.
/// </summary>
public sealed class VexSigningOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "VexSigning";

    /// <summary>
    /// Whether to use the Signer service (true) or local signing (false).
    /// </summary>
    public bool UseSignerService { get; set; } = true;

    /// <summary>
    /// Whether Rekor submission is enabled.
    /// </summary>
    public bool RekorEnabled { get; set; } = true;

    /// <summary>
    /// Default key ID for signing (null for keyless).
    /// </summary>
    public string? DefaultKeyId { get; set; }

    /// <summary>
    /// Rekor log URL.
    /// </summary>
    public Uri? RekorUrl { get; set; }

    /// <summary>
    /// Timeout for Rekor operations.
    /// </summary>
    public TimeSpan RekorTimeout { get; set; } = TimeSpan.FromSeconds(30);
}
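Illustration (not part of the commit): a sign-then-verify round trip sketch against the interfaces above. The signer instance, document, and tenant id are assumed to come from DI and the emitter; Rekor submission is disabled to keep the sketch offline.

public static async Task<bool> SignAndVerifyAsync(
    IVexDecisionSigningService signer, VexDecisionDocument document, CancellationToken ct)
{
    // Sign the document; "tenant-1" is a hypothetical tenant identifier.
    var signed = await signer.SignAsync(new VexSigningRequest
    {
        Document = document,
        TenantId = "tenant-1",
        SubmitToRekor = false
    }, ct);

    if (!signed.Success || signed.Envelope is null)
    {
        return false;
    }

    // Verify the envelope that was just produced.
    var verified = await signer.VerifyAsync(new VexVerificationRequest
    {
        Envelope = signed.Envelope
    }, ct);

    return verified.Valid;
}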
@@ -0,0 +1,339 @@
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using MsOptions = Microsoft.Extensions.Options.Options;
using Moq;
using Moq.Protected;
using StellaOps.Policy.Engine.ReachabilityFacts;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.ReachabilityFacts;

public sealed class ReachabilityFactsSignalsClientTests
{
    private readonly Mock<HttpMessageHandler> _mockHandler;
    private readonly ReachabilityFactsSignalsClientOptions _options;
    private readonly ReachabilityFactsSignalsClient _client;

    public ReachabilityFactsSignalsClientTests()
    {
        _mockHandler = new Mock<HttpMessageHandler>();
        _options = new ReachabilityFactsSignalsClientOptions
        {
            BaseUri = new Uri("https://signals.example.com/"),
            MaxConcurrentRequests = 5,
            Timeout = TimeSpan.FromSeconds(30)
        };

        var httpClient = new HttpClient(_mockHandler.Object)
        {
            BaseAddress = _options.BaseUri
        };

        _client = new ReachabilityFactsSignalsClient(
            httpClient,
            MsOptions.Create(_options),
            NullLogger<ReachabilityFactsSignalsClient>.Instance);
    }

    [Fact]
    public async Task GetBySubjectAsync_ReturnsNull_WhenNotFound()
    {
        SetupMockResponse(HttpStatusCode.NotFound);

        var result = await _client.GetBySubjectAsync("pkg:maven/foo@1.0|CVE-2025-001");

        Assert.Null(result);
    }

    [Fact]
    public async Task GetBySubjectAsync_ReturnsFact_WhenFound()
    {
        var response = CreateSignalsResponse("fact-1", 0.85);
        SetupMockResponse(HttpStatusCode.OK, response);

        var result = await _client.GetBySubjectAsync("pkg:maven/foo@1.0|CVE-2025-001");

        Assert.NotNull(result);
        Assert.Equal("fact-1", result.Id);
        Assert.Equal(0.85, result.Score);
    }

    [Fact]
    public async Task GetBySubjectAsync_CallsCorrectEndpoint()
    {
        var response = CreateSignalsResponse("fact-1", 0.85);
        string? capturedUri = null;

        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .Callback<HttpRequestMessage, CancellationToken>((req, _) =>
            {
                capturedUri = req.RequestUri?.ToString();
            })
            .ReturnsAsync(new HttpResponseMessage(HttpStatusCode.OK)
            {
                Content = JsonContent.Create(response)
            });

        await _client.GetBySubjectAsync("pkg:maven/foo@1.0|CVE-2025-001");

        Assert.NotNull(capturedUri);
        Assert.Contains("signals/facts/", capturedUri);
    }

    [Fact]
    public async Task GetBySubjectAsync_ThrowsOnServerError()
    {
        SetupMockResponse(HttpStatusCode.InternalServerError);

        await Assert.ThrowsAsync<HttpRequestException>(
            () => _client.GetBySubjectAsync("pkg:maven/foo@1.0|CVE-2025-001"));
    }

    [Fact]
    public async Task GetBatchBySubjectsAsync_ReturnsEmptyDict_WhenNoKeys()
    {
        var result = await _client.GetBatchBySubjectsAsync([]);

        Assert.Empty(result);
    }

    [Fact]
    public async Task GetBatchBySubjectsAsync_FetchesInParallel()
    {
        var responses = new Dictionary<string, SignalsReachabilityFactResponse>
        {
            ["pkg:maven/foo@1.0|CVE-2025-001"] = CreateSignalsResponse("fact-1", 0.9),
            ["pkg:maven/bar@2.0|CVE-2025-002"] = CreateSignalsResponse("fact-2", 0.8)
        };

        int callCount = 0;
        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ReturnsAsync((HttpRequestMessage req, CancellationToken _) =>
            {
                Interlocked.Increment(ref callCount);
                var path = req.RequestUri?.AbsolutePath ?? "";

                // Decode the path to find the key
                foreach (var kvp in responses)
                {
                    var encodedKey = Uri.EscapeDataString(kvp.Key);
                    if (path.Contains(encodedKey))
                    {
                        return new HttpResponseMessage(HttpStatusCode.OK)
                        {
                            Content = JsonContent.Create(kvp.Value)
                        };
                    }
                }

                return new HttpResponseMessage(HttpStatusCode.NotFound);
            });

        var keys = responses.Keys.ToList();
        var result = await _client.GetBatchBySubjectsAsync(keys);

        Assert.Equal(2, result.Count);
        Assert.Equal(2, callCount);
    }

    [Fact]
    public async Task GetBatchBySubjectsAsync_ReturnsOnlyFound()
    {
        var response = CreateSignalsResponse("fact-1", 0.9);

        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ReturnsAsync((HttpRequestMessage req, CancellationToken _) =>
            {
                var path = req.RequestUri?.AbsolutePath ?? "";
                if (path.Contains(Uri.EscapeDataString("pkg:maven/foo@1.0|CVE-2025-001")))
                {
                    return new HttpResponseMessage(HttpStatusCode.OK)
                    {
                        Content = JsonContent.Create(response)
                    };
                }

                return new HttpResponseMessage(HttpStatusCode.NotFound);
            });

        var keys = new List<string>
        {
            "pkg:maven/foo@1.0|CVE-2025-001",
            "pkg:maven/bar@2.0|CVE-2025-002"
        };

        var result = await _client.GetBatchBySubjectsAsync(keys);

        Assert.Single(result);
        Assert.True(result.ContainsKey("pkg:maven/foo@1.0|CVE-2025-001"));
    }

    [Fact]
    public async Task TriggerRecomputeAsync_ReturnsTrue_OnSuccess()
    {
        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ReturnsAsync(new HttpResponseMessage(HttpStatusCode.OK));

        var request = new SignalsRecomputeRequest
        {
            SubjectKey = "pkg:maven/foo@1.0|CVE-2025-001",
            TenantId = "tenant-1"
        };

        var result = await _client.TriggerRecomputeAsync(request);

        Assert.True(result);
    }

    [Fact]
    public async Task TriggerRecomputeAsync_ReturnsFalse_OnFailure()
    {
        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ReturnsAsync(new HttpResponseMessage(HttpStatusCode.BadRequest));

        var request = new SignalsRecomputeRequest
        {
            SubjectKey = "pkg:maven/foo@1.0|CVE-2025-001",
            TenantId = "tenant-1"
        };

        var result = await _client.TriggerRecomputeAsync(request);

        Assert.False(result);
    }

    [Fact]
    public async Task TriggerRecomputeAsync_PostsToCorrectEndpoint()
    {
        string? capturedUri = null;
        string? capturedBody = null;

        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .Callback<HttpRequestMessage, CancellationToken>(async (req, _) =>
            {
                capturedUri = req.RequestUri?.ToString();
                if (req.Content is not null)
                {
                    capturedBody = await req.Content.ReadAsStringAsync();
                }
            })
            .ReturnsAsync(new HttpResponseMessage(HttpStatusCode.OK));

        var request = new SignalsRecomputeRequest
        {
            SubjectKey = "pkg:maven/foo@1.0|CVE-2025-001",
            TenantId = "tenant-1"
        };

        await _client.TriggerRecomputeAsync(request);

        Assert.NotNull(capturedUri);
        Assert.Contains("signals/reachability/recompute", capturedUri);
        Assert.NotNull(capturedBody);
        Assert.Contains("subjectKey", capturedBody);
        Assert.Contains("tenantId", capturedBody);
    }

    [Fact]
    public async Task TriggerRecomputeAsync_ReturnsFalse_OnException()
    {
        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ThrowsAsync(new HttpRequestException("Connection failed"));

        var request = new SignalsRecomputeRequest
        {
            SubjectKey = "pkg:maven/foo@1.0|CVE-2025-001",
            TenantId = "tenant-1"
        };

        var result = await _client.TriggerRecomputeAsync(request);

        Assert.False(result);
    }

    // Options Tests

    [Fact]
    public void Options_HasCorrectDefaults()
    {
        var options = new ReachabilityFactsSignalsClientOptions();

        Assert.Null(options.BaseUri);
        Assert.Equal(10, options.MaxConcurrentRequests);
        Assert.Equal(TimeSpan.FromSeconds(30), options.Timeout);
        Assert.Equal(3, options.RetryCount);
    }

    [Fact]
    public void Options_SectionName_IsCorrect()
    {
        Assert.Equal("ReachabilitySignals", ReachabilityFactsSignalsClientOptions.SectionName);
    }

    private void SetupMockResponse(HttpStatusCode statusCode, SignalsReachabilityFactResponse? content = null)
    {
        var response = new HttpResponseMessage(statusCode);
        if (content is not null)
        {
            response.Content = JsonContent.Create(content);
        }

        _mockHandler.Protected()
            .Setup<Task<HttpResponseMessage>>(
                "SendAsync",
                ItExpr.IsAny<HttpRequestMessage>(),
                ItExpr.IsAny<CancellationToken>())
            .ReturnsAsync(response);
    }

    private static SignalsReachabilityFactResponse CreateSignalsResponse(string id, double score)
    {
        return new SignalsReachabilityFactResponse
        {
            Id = id,
            CallgraphId = "cg-test",
            Score = score,
            States = new List<SignalsReachabilityState>
            {
                new()
                {
                    Target = "test_method",
                    Reachable = true,
                    Confidence = 0.9,
                    Bucket = "reachable"
                }
            },
            ComputedAt = DateTimeOffset.UtcNow
        };
    }
}
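Illustration (not part of the commit): the tests above and below exercise a "<purl>|<vulnId>" subject-key convention. A hypothetical helper mirroring that convention; the real client and store presumably build the key internally.

public static class SubjectKey
{
    // Joins a package URL and vulnerability id with '|', matching the keys used in the tests.
    public static string Build(string purl, string vulnId) => $"{purl}|{vulnId}";
}

// e.g. SubjectKey.Build("pkg:maven/foo@1.0", "CVE-2025-001") == "pkg:maven/foo@1.0|CVE-2025-001"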
@@ -0,0 +1,369 @@
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Policy.Engine.ReachabilityFacts;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.ReachabilityFacts;

public sealed class SignalsBackedReachabilityFactsStoreTests
{
private readonly Mock<IReachabilityFactsSignalsClient> _mockClient;
private readonly SignalsBackedReachabilityFactsStore _store;

public SignalsBackedReachabilityFactsStoreTests()
{
_mockClient = new Mock<IReachabilityFactsSignalsClient>();
_store = new SignalsBackedReachabilityFactsStore(
_mockClient.Object,
NullLogger<SignalsBackedReachabilityFactsStore>.Instance,
TimeProvider.System);
}

[Fact]
public async Task GetAsync_ReturnsNull_WhenSignalsReturnsNull()
{
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync((SignalsReachabilityFactResponse?)null);

var result = await _store.GetAsync("tenant-1", "pkg:maven/com.example/foo@1.0.0", "CVE-2025-12345");

Assert.Null(result);
}

[Fact]
public async Task GetAsync_MapsSignalsResponse_ToReachabilityFact()
{
var signalsResponse = CreateSignalsResponse(reachable: true, confidence: 0.95);
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(signalsResponse);

var result = await _store.GetAsync("tenant-1", "pkg:maven/com.example/foo@1.0.0", "CVE-2025-12345");

Assert.NotNull(result);
Assert.Equal("tenant-1", result.TenantId);
Assert.Equal("pkg:maven/com.example/foo@1.0.0", result.ComponentPurl);
Assert.Equal("CVE-2025-12345", result.AdvisoryId);
Assert.Equal(ReachabilityState.Reachable, result.State);
Assert.Equal("signals", result.Source);
}

[Fact]
public async Task GetAsync_BuildsCorrectSubjectKey()
{
var signalsResponse = CreateSignalsResponse(reachable: true, confidence: 0.9);
string? capturedKey = null;
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Callback<string, CancellationToken>((key, _) => capturedKey = key)
.ReturnsAsync(signalsResponse);

await _store.GetAsync("tenant-1", "pkg:maven/com.example/foo@1.0.0", "CVE-2025-12345");

Assert.Equal("pkg:maven/com.example/foo@1.0.0|CVE-2025-12345", capturedKey);
}

[Fact]
public async Task GetBatchAsync_ReturnsEmptyDict_WhenNoKeysProvided()
{
var result = await _store.GetBatchAsync([]);

Assert.Empty(result);
_mockClient.Verify(c => c.GetBatchBySubjectsAsync(It.IsAny<IReadOnlyList<string>>(), It.IsAny<CancellationToken>()), Times.Never);
}

[Fact]
public async Task GetBatchAsync_MapsBatchResponse()
{
var keys = new List<ReachabilityFactKey>
{
new("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001"),
new("tenant-1", "pkg:maven/bar@2.0", "CVE-2025-002")
};

var responses = new Dictionary<string, SignalsReachabilityFactResponse>
{
["pkg:maven/foo@1.0|CVE-2025-001"] = CreateSignalsResponse(reachable: true, confidence: 0.9),
["pkg:maven/bar@2.0|CVE-2025-002"] = CreateSignalsResponse(reachable: false, confidence: 0.8)
};

_mockClient.Setup(c => c.GetBatchBySubjectsAsync(It.IsAny<IReadOnlyList<string>>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(responses);

var result = await _store.GetBatchAsync(keys);

Assert.Equal(2, result.Count);
Assert.Contains(keys[0], result.Keys);
Assert.Contains(keys[1], result.Keys);
}

[Fact]
public async Task GetBatchAsync_OnlyReturnsFound()
{
var keys = new List<ReachabilityFactKey>
{
new("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001"),
new("tenant-1", "pkg:maven/bar@2.0", "CVE-2025-002")
};

// Only return first key
var responses = new Dictionary<string, SignalsReachabilityFactResponse>
{
["pkg:maven/foo@1.0|CVE-2025-001"] = CreateSignalsResponse(reachable: true, confidence: 0.9)
};

_mockClient.Setup(c => c.GetBatchBySubjectsAsync(It.IsAny<IReadOnlyList<string>>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(responses);

var result = await _store.GetBatchAsync(keys);

Assert.Single(result);
Assert.Contains(keys[0], result.Keys);
Assert.DoesNotContain(keys[1], result.Keys);
}

// State Determination Tests

[Fact]
public async Task DeterminesState_Reachable_WhenHasReachableStates()
{
var response = CreateSignalsResponse(reachable: true, confidence: 0.9);
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(ReachabilityState.Reachable, result.State);
}

[Fact]
public async Task DeterminesState_Unreachable_WhenHighConfidenceUnreachable()
{
var response = CreateSignalsResponse(reachable: false, confidence: 0.8);
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(ReachabilityState.Unreachable, result.State);
}

[Fact]
public async Task DeterminesState_UnderInvestigation_WhenLowConfidenceUnreachable()
{
var response = CreateSignalsResponse(reachable: false, confidence: 0.5);
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(ReachabilityState.UnderInvestigation, result.State);
}

[Fact]
public async Task DeterminesState_Unknown_WhenNoStates()
{
var response = new SignalsReachabilityFactResponse
{
Id = "fact-1",
CallgraphId = "cg-1",
States = null,
Score = 0,
ComputedAt = DateTimeOffset.UtcNow
};
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(ReachabilityState.Unknown, result.State);
}

// Analysis Method Tests

[Fact]
public async Task DeterminesMethod_Hybrid_WhenBothStaticAndRuntime()
{
var response = CreateSignalsResponse(reachable: true, confidence: 0.9);
response = response with
{
RuntimeFacts = new List<SignalsRuntimeFact>
{
new() { SymbolId = "sym1", HitCount = 5 }
}
};
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(AnalysisMethod.Hybrid, result.Method);
Assert.True(result.HasRuntimeEvidence);
}

[Fact]
public async Task DeterminesMethod_Static_WhenOnlyStates()
{
var response = CreateSignalsResponse(reachable: true, confidence: 0.9);
_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.Equal(AnalysisMethod.Static, result.Method);
Assert.False(result.HasRuntimeEvidence);
}

// Metadata Extraction Tests

[Fact]
public async Task ExtractsMetadata_FromSignalsResponse()
{
var response = new SignalsReachabilityFactResponse
{
Id = "fact-1",
CallgraphId = "cg-123",
Subject = new SignalsSubject
{
ScanId = "scan-456",
ImageDigest = "sha256:abc"
},
States = new List<SignalsReachabilityState>
{
new()
{
Target = "vulnerable_method",
Reachable = true,
Confidence = 0.9,
Path = new List<string> { "main", "handler", "vulnerable_method" },
LatticeState = "CR"
}
},
EntryPoints = new List<string> { "main" },
Uncertainty = new SignalsUncertainty { AggregateTier = "T3", RiskScore = 0.2 },
UnknownsCount = 5,
UnknownsPressure = 0.1,
RiskScore = 0.3,
Score = 0.85,
ComputedAt = DateTimeOffset.UtcNow
};

_mockClient.Setup(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(response);

var result = await _store.GetAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

Assert.NotNull(result);
Assert.NotNull(result.Metadata);
Assert.Equal("cg-123", result.Metadata["callgraph_id"]);
Assert.Equal("scan-456", result.Metadata["scan_id"]);
Assert.Equal("sha256:abc", result.Metadata["image_digest"]);
Assert.Equal("T3", result.Metadata["uncertainty_tier"]);
Assert.Equal(5, result.Metadata["unknowns_count"]);
}

// Read-only Store Tests

[Fact]
public async Task SaveAsync_DoesNotCallClient()
{
var fact = CreateReachabilityFact();

await _store.SaveAsync(fact);

_mockClient.Verify(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Never);
}

[Fact]
public async Task SaveBatchAsync_DoesNotCallClient()
{
var facts = new List<ReachabilityFact> { CreateReachabilityFact() };

await _store.SaveBatchAsync(facts);

_mockClient.Verify(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Never);
}

[Fact]
public async Task DeleteAsync_DoesNotCallClient()
{
await _store.DeleteAsync("tenant-1", "pkg:maven/foo@1.0", "CVE-2025-001");

_mockClient.Verify(c => c.GetBySubjectAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Never);
}

[Fact]
public async Task CountAsync_ReturnsZero()
{
var count = await _store.CountAsync("tenant-1");

Assert.Equal(0L, count);
}

[Fact]
public async Task QueryAsync_ReturnsEmpty()
{
var query = new ReachabilityFactsQuery { TenantId = "tenant-1" };

var result = await _store.QueryAsync(query);

Assert.Empty(result);
}

// TriggerRecompute Tests

[Fact]
public async Task TriggerRecomputeAsync_DelegatesToClient()
{
_mockClient.Setup(c => c.TriggerRecomputeAsync(It.IsAny<SignalsRecomputeRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(true);

var result = await _store.TriggerRecomputeAsync("tenant-1", "pkg:maven/foo@1.0|CVE-2025-001");

Assert.True(result);
_mockClient.Verify(c => c.TriggerRecomputeAsync(
It.Is<SignalsRecomputeRequest>(r => r.SubjectKey == "pkg:maven/foo@1.0|CVE-2025-001" && r.TenantId == "tenant-1"),
It.IsAny<CancellationToken>()), Times.Once);
}

private static SignalsReachabilityFactResponse CreateSignalsResponse(bool reachable, double confidence)
{
return new SignalsReachabilityFactResponse
{
Id = $"fact-{Guid.NewGuid():N}",
CallgraphId = "cg-test",
States = new List<SignalsReachabilityState>
{
new()
{
Target = "vulnerable_method",
Reachable = reachable,
Confidence = confidence,
Bucket = reachable ? "reachable" : "unreachable"
}
},
Score = reachable ? 0.9 : 0.1,
ComputedAt = DateTimeOffset.UtcNow
};
}

private static ReachabilityFact CreateReachabilityFact()
{
return new ReachabilityFact
{
Id = "fact-1",
TenantId = "tenant-1",
ComponentPurl = "pkg:maven/foo@1.0",
AdvisoryId = "CVE-2025-001",
State = ReachabilityState.Reachable,
Confidence = 0.9m,
Source = "test",
Method = AnalysisMethod.Static,
ComputedAt = DateTimeOffset.UtcNow
};
}
}
@@ -24,6 +24,7 @@
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Moq" Version="4.20.70" />
</ItemGroup>

<ItemGroup>

@@ -0,0 +1,606 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Gates;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Vex;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Vex;

public class VexDecisionEmitterTests
{
private const string TestTenantId = "test-tenant";
private const string TestVulnId = "CVE-2021-44228";
private const string TestPurl = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1";

[Fact]
public async Task EmitAsync_WithUnreachableFact_EmitsNotAffected()
{
// Arrange
var fact = CreateFact(ReachabilityState.Unreachable, hasRuntime: true, confidence: 0.95m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.NotNull(result.Document);
Assert.Single(result.Document.Statements);
var statement = result.Document.Statements[0];
Assert.Equal("not_affected", statement.Status);
Assert.Equal(VexJustification.VulnerableCodeNotInExecutePath, statement.Justification);
Assert.Empty(result.Blocked);
}

[Fact]
public async Task EmitAsync_WithReachableFact_EmitsAffected()
{
// Arrange
var fact = CreateFact(ReachabilityState.Reachable, hasRuntime: true, confidence: 0.9m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.NotNull(result.Document);
Assert.Single(result.Document.Statements);
var statement = result.Document.Statements[0];
Assert.Equal("affected", statement.Status);
Assert.Null(statement.Justification);
Assert.Empty(result.Blocked);
}

[Fact]
public async Task EmitAsync_WithUnknownFact_EmitsUnderInvestigation()
{
// Arrange
var fact = CreateFact(ReachabilityState.Unknown, hasRuntime: false, confidence: 0.0m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.NotNull(result.Document);
Assert.Single(result.Document.Statements);
var statement = result.Document.Statements[0];
Assert.Equal("under_investigation", statement.Status);
Assert.Empty(result.Blocked);
}

[Fact]
public async Task EmitAsync_WhenGateBlocks_FallsBackToUnderInvestigation()
{
// Arrange
var fact = CreateFact(ReachabilityState.Unreachable, hasRuntime: false, confidence: 0.5m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Block, blockedBy: "EvidenceCompleteness", reason: "graphHash required");
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.Single(result.Blocked);
Assert.Equal(TestVulnId, result.Blocked[0].VulnId);
Assert.Equal("EvidenceCompleteness", result.Blocked[0].BlockedBy);

// With FallbackToUnderInvestigation=true (default), still emits under_investigation
Assert.Single(result.Document.Statements);
Assert.Equal("under_investigation", result.Document.Statements[0].Status);
}

[Fact]
public async Task EmitAsync_WithOverride_UsesOverrideStatus()
{
// Arrange
var fact = CreateFact(ReachabilityState.Reachable, hasRuntime: true, confidence: 0.9m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput
{
VulnId = TestVulnId,
Purl = TestPurl,
OverrideStatus = "not_affected",
OverrideJustification = "Manual review confirmed unreachable"
}
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.Single(result.Document.Statements);
Assert.Equal("not_affected", result.Document.Statements[0].Status);
}

[Fact]
public async Task EmitAsync_IncludesEvidenceBlock()
{
// Arrange
var fact = CreateFact(ReachabilityState.Unreachable, hasRuntime: true, confidence: 0.95m);
fact = fact with
{
EvidenceHash = "blake3:abc123",
Metadata = new Dictionary<string, object?>
{
["call_path"] = new List<object> { "main", "svc", "target" },
["entry_points"] = new List<object> { "main" },
["runtime_hits"] = new List<object> { "main", "svc" },
["uncertainty_tier"] = "T3",
["risk_score"] = 0.25
}
};
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
IncludeEvidence = true,
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
var statement = result.Document.Statements[0];
Assert.NotNull(statement.Evidence);
Assert.Equal("CU", statement.Evidence.LatticeState);
Assert.Equal(0.95, statement.Evidence.Confidence);
Assert.Equal("blake3:abc123", statement.Evidence.GraphHash);
Assert.Equal("T3", statement.Evidence.UncertaintyTier);
Assert.Equal(0.25, statement.Evidence.RiskScore);
Assert.NotNull(statement.Evidence.CallPath);
Assert.Equal(new[] { "main", "svc", "target" }, statement.Evidence.CallPath.Value.ToArray());
}

[Fact]
public async Task EmitAsync_WithMultipleFindings_EmitsMultipleStatements()
{
// Arrange
var facts = new Dictionary<ReachabilityFactKey, ReachabilityFact>
{
[new(TestTenantId, TestPurl, "CVE-2021-44228")] = CreateFact(ReachabilityState.Unreachable, hasRuntime: true, confidence: 0.95m, vulnId: "CVE-2021-44228"),
[new(TestTenantId, "pkg:npm/lodash@4.17.20", "CVE-2021-23337")] = CreateFact(ReachabilityState.Reachable, hasRuntime: false, confidence: 0.8m, vulnId: "CVE-2021-23337", purl: "pkg:npm/lodash@4.17.20")
};
var factsService = CreateMockFactsService(facts);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput { VulnId = "CVE-2021-44228", Purl = TestPurl },
new VexFindingInput { VulnId = "CVE-2021-23337", Purl = "pkg:npm/lodash@4.17.20" }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.Equal(2, result.Document.Statements.Length);
Assert.Contains(result.Document.Statements, s => s.Status == "not_affected");
Assert.Contains(result.Document.Statements, s => s.Status == "affected");
}

[Fact]
public async Task EmitAsync_DocumentHasCorrectMetadata()
{
// Arrange
var factsService = CreateMockFactsService((ReachabilityFact?)null);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "security-team@company.com",
Findings = new[]
{
new VexFindingInput { VulnId = TestVulnId, Purl = TestPurl }
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
Assert.StartsWith("urn:uuid:", result.Document.Id);
Assert.Equal("https://openvex.dev/ns/v0.2.0", result.Document.Context);
Assert.Equal("security-team@company.com", result.Document.Author);
Assert.Equal("policy_engine", result.Document.Role);
Assert.Equal("stellaops/policy-engine", result.Document.Tooling);
Assert.Equal(1, result.Document.Version);
}

[Fact]
public async Task DetermineStatusAsync_ReturnsCorrectBucket()
{
// Arrange
var fact = CreateFact(ReachabilityState.Reachable, hasRuntime: true, confidence: 0.9m);
var factsService = CreateMockFactsService(fact);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

// Act
var determination = await emitter.DetermineStatusAsync(TestTenantId, TestVulnId, TestPurl);

// Assert
Assert.Equal("affected", determination.Status);
Assert.Equal("runtime", determination.Bucket);
Assert.Equal("CR", determination.LatticeState);
Assert.Equal(0.9, determination.Confidence);
Assert.NotNull(determination.Fact);
}

[Fact]
public async Task EmitAsync_WithSymbolId_IncludesSubcomponent()
{
// Arrange
var factsService = CreateMockFactsService((ReachabilityFact?)null);
var gateEvaluator = CreateMockGateEvaluator(PolicyGateDecisionType.Allow);
var emitter = CreateEmitter(factsService, gateEvaluator);

var request = new VexDecisionEmitRequest
{
TenantId = TestTenantId,
Author = "test@example.com",
Findings = new[]
{
new VexFindingInput
{
VulnId = TestVulnId,
Purl = TestPurl,
SymbolId = "org.apache.logging.log4j.core.lookup.JndiLookup.lookup"
}
}
};

// Act
var result = await emitter.EmitAsync(request);

// Assert
var statement = result.Document.Statements[0];
Assert.NotNull(statement.Products[0].Subcomponents);
var subcomponents = statement.Products[0].Subcomponents!.Value;
Assert.Single(subcomponents);
Assert.Equal("org.apache.logging.log4j.core.lookup.JndiLookup.lookup", subcomponents[0].Id);
}

private static ReachabilityFact CreateFact(
ReachabilityState state,
bool hasRuntime,
decimal confidence,
string? vulnId = null,
string? purl = null)
{
return new ReachabilityFact
{
Id = Guid.NewGuid().ToString("N"),
TenantId = TestTenantId,
ComponentPurl = purl ?? TestPurl,
AdvisoryId = vulnId ?? TestVulnId,
State = state,
Confidence = confidence,
Score = (decimal)(hasRuntime ? 0.9 : 0.5),
HasRuntimeEvidence = hasRuntime,
Source = "stellaops/signals",
Method = hasRuntime ? AnalysisMethod.Hybrid : AnalysisMethod.Static,
ComputedAt = DateTimeOffset.UtcNow,
EvidenceRef = "cas://reachability/graphs/test"
};
}

private static ReachabilityFactsJoiningService CreateMockFactsService(ReachabilityFact? fact)
{
var facts = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
if (fact is not null)
{
facts[new(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId)] = fact;
}
return CreateMockFactsService(facts);
}

private static ReachabilityFactsJoiningService CreateMockFactsService(Dictionary<ReachabilityFactKey, ReachabilityFact> facts)
{
var store = new InMemoryReachabilityFactsStore(facts);
var cache = new InMemoryReachabilityFactsOverlayCache();
return new ReachabilityFactsJoiningService(
store,
cache,
NullLogger<ReachabilityFactsJoiningService>.Instance,
TimeProvider.System);
}

private static IPolicyGateEvaluator CreateMockGateEvaluator(
PolicyGateDecisionType decision,
string? blockedBy = null,
string? reason = null)
{
return new MockPolicyGateEvaluator(decision, blockedBy, reason);
}

private static VexDecisionEmitter CreateEmitter(
ReachabilityFactsJoiningService factsService,
IPolicyGateEvaluator gateEvaluator)
{
var options = new TestOptionsMonitor<VexDecisionEmitterOptions>(new VexDecisionEmitterOptions());
return new VexDecisionEmitter(
factsService,
gateEvaluator,
options,
TimeProvider.System,
NullLogger<VexDecisionEmitter>.Instance);
}

private sealed class TestOptionsMonitor<T> : IOptionsMonitor<T>
{
public TestOptionsMonitor(T currentValue)
{
CurrentValue = currentValue;
}

public T CurrentValue { get; }

public T Get(string? name) => CurrentValue;

public IDisposable? OnChange(Action<T, string?> listener) => null;
}

private sealed class InMemoryReachabilityFactsStore : IReachabilityFactsStore
{
private readonly Dictionary<ReachabilityFactKey, ReachabilityFact> _facts;

public InMemoryReachabilityFactsStore(Dictionary<ReachabilityFactKey, ReachabilityFact> facts)
{
_facts = facts;
}

public Task<ReachabilityFact?> GetAsync(string tenantId, string componentPurl, string advisoryId, CancellationToken cancellationToken = default)
{
var key = new ReachabilityFactKey(tenantId, componentPurl, advisoryId);
_facts.TryGetValue(key, out var fact);
return Task.FromResult(fact);
}

public Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(IReadOnlyList<ReachabilityFactKey> keys, CancellationToken cancellationToken = default)
{
var result = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
foreach (var key in keys)
{
if (_facts.TryGetValue(key, out var fact))
{
result[key] = fact;
}
}
return Task.FromResult<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>>(result);
}

public Task<IReadOnlyList<ReachabilityFact>> QueryAsync(ReachabilityFactsQuery query, CancellationToken cancellationToken = default)
{
var results = _facts.Values
.Where(f => f.TenantId == query.TenantId)
.Where(f => query.ComponentPurls == null || query.ComponentPurls.Contains(f.ComponentPurl))
.Where(f => query.AdvisoryIds == null || query.AdvisoryIds.Contains(f.AdvisoryId))
.Where(f => query.States == null || query.States.Contains(f.State))
.Where(f => !query.MinConfidence.HasValue || f.Confidence >= query.MinConfidence.Value)
.Skip(query.Skip)
.Take(query.Limit)
.ToList();

return Task.FromResult<IReadOnlyList<ReachabilityFact>>(results);
}

public Task SaveAsync(ReachabilityFact fact, CancellationToken cancellationToken = default)
{
var key = new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId);
_facts[key] = fact;
return Task.CompletedTask;
}

public Task SaveBatchAsync(IReadOnlyList<ReachabilityFact> facts, CancellationToken cancellationToken = default)
{
foreach (var fact in facts)
{
var key = new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId);
_facts[key] = fact;
}
return Task.CompletedTask;
}

public Task DeleteAsync(string tenantId, string componentPurl, string advisoryId, CancellationToken cancellationToken = default)
{
var key = new ReachabilityFactKey(tenantId, componentPurl, advisoryId);
_facts.Remove(key);
return Task.CompletedTask;
}

public Task<long> CountAsync(string tenantId, CancellationToken cancellationToken = default)
{
var count = _facts.Values.Count(f => f.TenantId == tenantId);
return Task.FromResult((long)count);
}
}

private sealed class InMemoryReachabilityFactsOverlayCache : IReachabilityFactsOverlayCache
{
private readonly Dictionary<ReachabilityFactKey, ReachabilityFact> _cache = new();

public Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(ReachabilityFactKey key, CancellationToken cancellationToken = default)
{
if (_cache.TryGetValue(key, out var fact))
{
return Task.FromResult<(ReachabilityFact?, bool)>((fact, true));
}
return Task.FromResult<(ReachabilityFact?, bool)>((null, false));
}

public Task<ReachabilityFactsBatch> GetBatchAsync(IReadOnlyList<ReachabilityFactKey> keys, CancellationToken cancellationToken = default)
{
var found = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
var notFound = new List<ReachabilityFactKey>();

foreach (var key in keys)
{
if (_cache.TryGetValue(key, out var fact))
{
found[key] = fact;
}
else
{
notFound.Add(key);
}
}

return Task.FromResult(new ReachabilityFactsBatch
{
Found = found,
NotFound = notFound,
CacheHits = found.Count,
CacheMisses = notFound.Count
});
}

public Task SetAsync(ReachabilityFactKey key, ReachabilityFact fact, CancellationToken cancellationToken = default)
{
_cache[key] = fact;
return Task.CompletedTask;
}

public Task SetBatchAsync(IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts, CancellationToken cancellationToken = default)
{
foreach (var (key, fact) in facts)
{
_cache[key] = fact;
}
return Task.CompletedTask;
}

public Task InvalidateAsync(ReachabilityFactKey key, CancellationToken cancellationToken = default)
{
_cache.Remove(key);
return Task.CompletedTask;
}

public Task InvalidateTenantAsync(string tenantId, CancellationToken cancellationToken = default)
{
var keysToRemove = _cache.Keys.Where(k => k.TenantId == tenantId).ToList();
foreach (var key in keysToRemove)
{
_cache.Remove(key);
}
return Task.CompletedTask;
}

public ReachabilityFactsCacheStats GetStats()
{
return new ReachabilityFactsCacheStats { ItemCount = _cache.Count };
}
}

private sealed class MockPolicyGateEvaluator : IPolicyGateEvaluator
{
private readonly PolicyGateDecisionType _decision;
private readonly string? _blockedBy;
private readonly string? _reason;

public MockPolicyGateEvaluator(PolicyGateDecisionType decision, string? blockedBy, string? reason)
{
_decision = decision;
_blockedBy = blockedBy;
_reason = reason;
}

public Task<PolicyGateDecision> EvaluateAsync(PolicyGateRequest request, CancellationToken cancellationToken = default)
{
return Task.FromResult(new PolicyGateDecision
{
GateId = $"gate:vex:{request.RequestedStatus}:{DateTimeOffset.UtcNow:O}",
RequestedStatus = request.RequestedStatus,
Subject = new PolicyGateSubject
{
VulnId = request.VulnId,
Purl = request.Purl
},
Evidence = new PolicyGateEvidence
{
LatticeState = request.LatticeState,
Confidence = request.Confidence
},
Gates = ImmutableArray<PolicyGateResult>.Empty,
Decision = _decision,
BlockedBy = _decision == PolicyGateDecisionType.Block ? _blockedBy : null,
BlockReason = _decision == PolicyGateDecisionType.Block ? _reason : null,
DecidedAt = DateTimeOffset.UtcNow
});
}
}
}
@@ -0,0 +1,470 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Policy.Engine.Vex;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Vex;

public sealed class VexDecisionSigningServiceTests
{
private readonly Mock<IVexSignerClient> _mockSignerClient;
private readonly Mock<IVexRekorClient> _mockRekorClient;
private readonly VexSigningOptions _options;
private readonly VexDecisionSigningService _service;

public VexDecisionSigningServiceTests()
{
_mockSignerClient = new Mock<IVexSignerClient>();
_mockRekorClient = new Mock<IVexRekorClient>();
_options = new VexSigningOptions
{
UseSignerService = true,
RekorEnabled = true
};

var optionsMonitor = new Mock<IOptionsMonitor<VexSigningOptions>>();
optionsMonitor.Setup(o => o.CurrentValue).Returns(_options);

_service = new VexDecisionSigningService(
_mockSignerClient.Object,
_mockRekorClient.Object,
optionsMonitor.Object,
TimeProvider.System,
NullLogger<VexDecisionSigningService>.Instance);
}

[Fact]
public async Task SignAsync_WithSignerService_ReturnsEnvelope()
{
var document = CreateTestDocument();
var request = CreateSigningRequest(document);

_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexSignerResult
{
Success = true,
Signature = Convert.ToBase64String(new byte[32]),
KeyId = "test-key"
});

var result = await _service.SignAsync(request);

Assert.True(result.Success);
Assert.NotNull(result.Envelope);
Assert.NotNull(result.EnvelopeDigest);
Assert.StartsWith("sha256:", result.EnvelopeDigest);
}

[Fact]
public async Task SignAsync_WithSignerServiceFailure_ReturnsFailed()
{
var document = CreateTestDocument();
var request = CreateSigningRequest(document);

_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexSignerResult
{
Success = false,
Error = "Signing failed"
});

var result = await _service.SignAsync(request);

Assert.False(result.Success);
Assert.Null(result.Envelope);
Assert.Contains("Signing failed", result.Error);
}

[Fact]
public async Task SignAsync_WithLocalSigning_ReturnsEnvelope()
{
var localOptions = new VexSigningOptions
{
UseSignerService = false,
RekorEnabled = false
};

var optionsMonitor = new Mock<IOptionsMonitor<VexSigningOptions>>();
optionsMonitor.Setup(o => o.CurrentValue).Returns(localOptions);

var service = new VexDecisionSigningService(
null,
null,
optionsMonitor.Object,
TimeProvider.System,
NullLogger<VexDecisionSigningService>.Instance);

var document = CreateTestDocument();
var request = CreateSigningRequest(document, submitToRekor: false);

var result = await service.SignAsync(request);

Assert.True(result.Success);
Assert.NotNull(result.Envelope);
Assert.Single(result.Envelope.Signatures);
Assert.Equal("local:sha256", result.Envelope.Signatures[0].KeyId);
}

[Fact]
public async Task SignAsync_WithRekorEnabled_SubmitsToRekor()
{
var document = CreateTestDocument();
var request = CreateSigningRequest(document, submitToRekor: true);

_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexSignerResult
{
Success = true,
Signature = Convert.ToBase64String(new byte[32]),
KeyId = "test-key"
});

_mockRekorClient.Setup(c => c.SubmitAsync(It.IsAny<VexRekorSubmitRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexRekorSubmitResult
{
Success = true,
Metadata = new VexRekorMetadata
{
Uuid = "rekor-uuid-123",
Index = 12345,
LogUrl = "https://rekor.sigstore.dev",
IntegratedAt = DateTimeOffset.UtcNow
}
});

var result = await _service.SignAsync(request);

Assert.True(result.Success);
Assert.NotNull(result.RekorMetadata);
Assert.Equal("rekor-uuid-123", result.RekorMetadata.Uuid);
Assert.Equal(12345, result.RekorMetadata.Index);
}

[Fact]
public async Task SignAsync_WithRekorFailure_StillSucceeds()
{
var document = CreateTestDocument();
var request = CreateSigningRequest(document, submitToRekor: true);

_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexSignerResult
{
Success = true,
Signature = Convert.ToBase64String(new byte[32]),
KeyId = "test-key"
});

_mockRekorClient.Setup(c => c.SubmitAsync(It.IsAny<VexRekorSubmitRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexRekorSubmitResult
{
Success = false,
Error = "Rekor unavailable"
});

var result = await _service.SignAsync(request);

Assert.True(result.Success);
Assert.NotNull(result.Envelope);
Assert.Null(result.RekorMetadata);
}

[Fact]
public async Task SignAsync_WithRekorDisabled_DoesNotSubmit()
{
var disabledOptions = new VexSigningOptions
{
UseSignerService = true,
RekorEnabled = false
};

var optionsMonitor = new Mock<IOptionsMonitor<VexSigningOptions>>();
optionsMonitor.Setup(o => o.CurrentValue).Returns(disabledOptions);

var service = new VexDecisionSigningService(
_mockSignerClient.Object,
_mockRekorClient.Object,
optionsMonitor.Object,
TimeProvider.System,
NullLogger<VexDecisionSigningService>.Instance);

var document = CreateTestDocument();
var request = CreateSigningRequest(document, submitToRekor: true);

_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new VexSignerResult
{
Success = true,
Signature = Convert.ToBase64String(new byte[32]),
KeyId = "test-key"
});

var result = await service.SignAsync(request);

Assert.True(result.Success);
Assert.Null(result.RekorMetadata);
_mockRekorClient.Verify(c => c.SubmitAsync(It.IsAny<VexRekorSubmitRequest>(), It.IsAny<CancellationToken>()), Times.Never);
}

[Fact]
public async Task SignAsync_SetsCorrectPayloadType()
{
var document = CreateTestDocument();
var request = CreateSigningRequest(document);

VexSignerRequest? capturedRequest = null;
_mockSignerClient.Setup(c => c.SignAsync(It.IsAny<VexSignerRequest>(), It.IsAny<CancellationToken>()))
.Callback<VexSignerRequest, CancellationToken>((req, _) => capturedRequest = req)
.ReturnsAsync(new VexSignerResult
{
Success = true,
Signature = Convert.ToBase64String(new byte[32]),
KeyId = "test-key"
});

await _service.SignAsync(request);

Assert.NotNull(capturedRequest);
Assert.Equal(VexPredicateTypes.VexDecision, capturedRequest.PayloadType);
}

// Verification Tests

[Fact]
public async Task VerifyAsync_WithValidEnvelope_ReturnsValid()
{
var document = CreateTestDocument();
var payload = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(document);
var envelope = new VexDsseEnvelope
{
PayloadType = VexPredicateTypes.VexDecision,
Payload = Convert.ToBase64String(payload),
Signatures = [new VexDsseSignature { KeyId = "test", Sig = Convert.ToBase64String(new byte[32]) }]
};

var request = new VexVerificationRequest
{
Envelope = envelope,
VerifyRekorInclusion = false
};

var result = await _service.VerifyAsync(request);

Assert.True(result.Valid);
Assert.NotNull(result.Document);
Assert.Equal(document.Id, result.Document.Id);
}

[Fact]
public async Task VerifyAsync_WithInvalidPayloadType_ReturnsInvalid()
{
var document = CreateTestDocument();
var payload = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(document);
var envelope = new VexDsseEnvelope
{
PayloadType = "invalid/type@v1",
Payload = Convert.ToBase64String(payload),
Signatures = [new VexDsseSignature { KeyId = "test", Sig = Convert.ToBase64String(new byte[32]) }]
};

var request = new VexVerificationRequest
{
Envelope = envelope,
VerifyRekorInclusion = false
};

var result = await _service.VerifyAsync(request);

Assert.False(result.Valid);
Assert.NotNull(result.Errors);
Assert.Contains(result.Errors, e => e.Contains("Invalid payload type"));
}

[Fact]
public async Task VerifyAsync_WithNoSignatures_ReturnsInvalid()
{
var document = CreateTestDocument();
var payload = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(document);
var envelope = new VexDsseEnvelope
{
PayloadType = VexPredicateTypes.VexDecision,
Payload = Convert.ToBase64String(payload),
Signatures = []
};

var request = new VexVerificationRequest
{
Envelope = envelope,
VerifyRekorInclusion = false
};

var result = await _service.VerifyAsync(request);

Assert.False(result.Valid);
Assert.NotNull(result.Errors);
Assert.Contains(result.Errors, e => e.Contains("no signatures"));
}

[Fact]
public async Task VerifyAsync_WithInvalidBase64Signature_ReturnsInvalid()
{
var document = CreateTestDocument();
var payload = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(document);
var envelope = new VexDsseEnvelope
{
PayloadType = VexPredicateTypes.VexDecision,
Payload = Convert.ToBase64String(payload),
Signatures = [new VexDsseSignature { KeyId = "test", Sig = "not-valid-base64!!!" }]
};

var request = new VexVerificationRequest
{
Envelope = envelope,
VerifyRekorInclusion = false
};

var result = await _service.VerifyAsync(request);

Assert.False(result.Valid);
Assert.NotNull(result.Errors);
Assert.Contains(result.Errors, e => e.Contains("Invalid base64"));
}

[Fact]
public async Task VerifyAsync_WithRekorVerification_CallsGetProof()
{
var document = CreateTestDocument();
var payload = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(document);
var envelope = new VexDsseEnvelope
{
PayloadType = VexPredicateTypes.VexDecision,
Payload = Convert.ToBase64String(payload),
Signatures = [new VexDsseSignature { KeyId = "test", Sig = Convert.ToBase64String(new byte[32]) }]
};

var rekorMetadata = new VexRekorMetadata
{
Uuid = "rekor-uuid-123",
Index = 12345,
LogUrl = "https://rekor.sigstore.dev",
IntegratedAt = DateTimeOffset.UtcNow
};

_mockRekorClient.Setup(c => c.GetProofAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(rekorMetadata);

var request = new VexVerificationRequest
{
Envelope = envelope,
ExpectedRekorMetadata = rekorMetadata,
VerifyRekorInclusion = true
};

var result = await _service.VerifyAsync(request);

Assert.True(result.Valid);
Assert.NotNull(result.RekorMetadata);
_mockRekorClient.Verify(c => c.GetProofAsync("rekor-uuid-123", It.IsAny<CancellationToken>()), Times.Once);
}

// Options Tests

[Fact]
public void VexSigningOptions_HasCorrectDefaults()
{
var options = new VexSigningOptions();

Assert.True(options.UseSignerService);
Assert.True(options.RekorEnabled);
Assert.Null(options.DefaultKeyId);
Assert.Null(options.RekorUrl);
Assert.Equal(TimeSpan.FromSeconds(30), options.RekorTimeout);
}

[Fact]
public void VexSigningOptions_SectionName_IsCorrect()
{
Assert.Equal("VexSigning", VexSigningOptions.SectionName);
}

// Predicate Types Tests

[Fact]
public void VexPredicateTypes_HasCorrectValues()
{
Assert.Equal("stella.ops/vexDecision@v1", VexPredicateTypes.VexDecision);
Assert.Equal("stella.ops/vex@v1", VexPredicateTypes.VexDocument);
Assert.Equal("https://openvex.dev/ns", VexPredicateTypes.OpenVex);
}

// Evidence Reference Tests

[Fact]
public async Task SignAsync_WithEvidenceRefs_IncludesInRequest()
{
var document = CreateTestDocument();
var evidenceRefs = new List<VexEvidenceReference>
{
new() { Type = "sbom", Digest = "sha256:abc123" },
new() { Type = "callgraph", Digest = "sha256:def456", CasUri = "cas://example/cg/1" }
};

var request = new VexSigningRequest
{
Document = document,
TenantId = "tenant-1",
SubmitToRekor = false,
EvidenceRefs = evidenceRefs
};

var localOptions = new VexSigningOptions { UseSignerService = false, RekorEnabled = false };
var optionsMonitor = new Mock<IOptionsMonitor<VexSigningOptions>>();
optionsMonitor.Setup(o => o.CurrentValue).Returns(localOptions);

var service = new VexDecisionSigningService(
null,
null,
optionsMonitor.Object,
TimeProvider.System,
NullLogger<VexDecisionSigningService>.Instance);

var result = await service.SignAsync(request);

Assert.True(result.Success);
Assert.NotNull(result.Envelope);
}

private static VexDecisionDocument CreateTestDocument()
{
var now = DateTimeOffset.UtcNow;
return new VexDecisionDocument
{
Id = $"https://stellaops.io/vex/{Guid.NewGuid():N}",
Author = "https://stellaops.io/policy-engine",
Timestamp = now,
Statements = ImmutableArray.Create(
new VexStatement
{
Vulnerability = new VexVulnerability { Id = "CVE-2025-12345" },
Status = "not_affected",
Justification = VexJustification.VulnerableCodeNotInExecutePath,
Timestamp = now,
Products = ImmutableArray.Create(
new VexProduct { Id = "pkg:maven/com.example/app@1.0.0" }
)
}
)
};
}

private static VexSigningRequest CreateSigningRequest(VexDecisionDocument document, bool submitToRekor = true)
{
return new VexSigningRequest
{
Document = document,
TenantId = "tenant-1",
SubmitToRekor = submitToRekor
};
}
}
@@ -0,0 +1,105 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using MicrosoftOptions = Microsoft.Extensions.Options;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.SbomService.Storage.Postgres.Tests;

[Collection(SbomServicePostgresCollection.Name)]
public sealed class PostgresEntrypointRepositoryTests : IAsyncLifetime
{
private readonly SbomServicePostgresFixture _fixture;
private readonly PostgresEntrypointRepository _repository;
private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

public PostgresEntrypointRepositoryTests(SbomServicePostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new SbomServiceDataSource(MicrosoftOptions.Options.Create(options), NullLogger<SbomServiceDataSource>.Instance);
_repository = new PostgresEntrypointRepository(dataSource, NullLogger<PostgresEntrypointRepository>.Instance);
}

public async Task InitializeAsync()
{
await _fixture.TruncateAllTablesAsync();
}

public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task UpsertAndList_RoundTripsEntrypoint()
{
// Arrange
var entrypoint = new Entrypoint(
Artifact: "ghcr.io/test/api",
Service: "web",
Path: "/api",
Scope: "runtime",
RuntimeFlag: true);

// Act
await _repository.UpsertAsync(_tenantId, entrypoint, CancellationToken.None);
var fetched = await _repository.ListAsync(_tenantId, CancellationToken.None);

// Assert
fetched.Should().HaveCount(1);
fetched[0].Artifact.Should().Be("ghcr.io/test/api");
fetched[0].Service.Should().Be("web");
fetched[0].Path.Should().Be("/api");
fetched[0].RuntimeFlag.Should().BeTrue();
}

[Fact]
public async Task UpsertAsync_UpdatesExistingEntrypoint()
{
// Arrange
var entrypoint1 = new Entrypoint("ghcr.io/test/api", "web", "/old", "runtime", false);
var entrypoint2 = new Entrypoint("ghcr.io/test/api", "web", "/new", "build", true);

// Act
await _repository.UpsertAsync(_tenantId, entrypoint1, CancellationToken.None);
await _repository.UpsertAsync(_tenantId, entrypoint2, CancellationToken.None);
var fetched = await _repository.ListAsync(_tenantId, CancellationToken.None);

// Assert
fetched.Should().HaveCount(1);
fetched[0].Path.Should().Be("/new");
fetched[0].Scope.Should().Be("build");
fetched[0].RuntimeFlag.Should().BeTrue();
}

[Fact]
public async Task ListAsync_ReturnsOrderedByArtifactServicePath()
{
// Arrange
await _repository.UpsertAsync(_tenantId, new Entrypoint("z-api", "web", "/z", "runtime", true), CancellationToken.None);
await _repository.UpsertAsync(_tenantId, new Entrypoint("a-api", "web", "/a", "runtime", true), CancellationToken.None);
await _repository.UpsertAsync(_tenantId, new Entrypoint("a-api", "worker", "/b", "runtime", true), CancellationToken.None);

// Act
var fetched = await _repository.ListAsync(_tenantId, CancellationToken.None);

// Assert
fetched.Should().HaveCount(3);
fetched[0].Artifact.Should().Be("a-api");
fetched[0].Service.Should().Be("web");
fetched[1].Artifact.Should().Be("a-api");
fetched[1].Service.Should().Be("worker");
fetched[2].Artifact.Should().Be("z-api");
}

[Fact]
public async Task ListAsync_ReturnsEmptyForUnknownTenant()
{
// Act
var fetched = await _repository.ListAsync("unknown-tenant", CancellationToken.None);

// Assert
fetched.Should().BeEmpty();
}
}
@@ -0,0 +1,102 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using MicrosoftOptions = Microsoft.Extensions.Options;
using StellaOps.SbomService.Services;
using StellaOps.SbomService.Storage.Postgres.Repositories;
using Xunit;

namespace StellaOps.SbomService.Storage.Postgres.Tests;

[Collection(SbomServicePostgresCollection.Name)]
public sealed class PostgresOrchestratorControlRepositoryTests : IAsyncLifetime
{
private readonly SbomServicePostgresFixture _fixture;
private readonly PostgresOrchestratorControlRepository _repository;
private readonly string _tenantId = "tenant-" + Guid.NewGuid().ToString("N")[..8];

public PostgresOrchestratorControlRepositoryTests(SbomServicePostgresFixture fixture)
{
_fixture = fixture;

var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new SbomServiceDataSource(MicrosoftOptions.Options.Create(options), NullLogger<SbomServiceDataSource>.Instance);
_repository = new PostgresOrchestratorControlRepository(dataSource, NullLogger<PostgresOrchestratorControlRepository>.Instance);
}

public async Task InitializeAsync()
{
await _fixture.TruncateAllTablesAsync();
}

public Task DisposeAsync() => Task.CompletedTask;

[Fact]
public async Task GetAsync_ReturnsDefaultStateForNewTenant()
{
// Act
var state = await _repository.GetAsync(_tenantId, CancellationToken.None);

// Assert
state.Should().NotBeNull();
state.TenantId.Should().Be(_tenantId);
state.Paused.Should().BeFalse();
state.ThrottlePercent.Should().Be(0);
state.Backpressure.Should().Be("normal");
}

[Fact]
public async Task SetAsync_PersistsControlState()
{
// Arrange
var state = new OrchestratorControlState(
TenantId: _tenantId,
Paused: true,
ThrottlePercent: 50,
Backpressure: "high",
UpdatedAtUtc: DateTimeOffset.UtcNow);

// Act
await _repository.SetAsync(state, CancellationToken.None);
var fetched = await _repository.GetAsync(_tenantId, CancellationToken.None);

// Assert
fetched.Paused.Should().BeTrue();
fetched.ThrottlePercent.Should().Be(50);
fetched.Backpressure.Should().Be("high");
}

[Fact]
public async Task SetAsync_UpdatesExistingState()
{
// Arrange
var state1 = new OrchestratorControlState(_tenantId, false, 10, "low", DateTimeOffset.UtcNow);
var state2 = new OrchestratorControlState(_tenantId, true, 90, "critical", DateTimeOffset.UtcNow);

// Act
await _repository.SetAsync(state1, CancellationToken.None);
await _repository.SetAsync(state2, CancellationToken.None);
var fetched = await _repository.GetAsync(_tenantId, CancellationToken.None);

// Assert
fetched.Paused.Should().BeTrue();
fetched.ThrottlePercent.Should().Be(90);
fetched.Backpressure.Should().Be("critical");
}

[Fact]
public async Task ListAsync_ReturnsAllStates()
{
// Arrange
var tenant1 = "tenant-a-" + Guid.NewGuid().ToString("N")[..4];
var tenant2 = "tenant-b-" + Guid.NewGuid().ToString("N")[..4];
await _repository.SetAsync(new OrchestratorControlState(tenant1, false, 0, "normal", DateTimeOffset.UtcNow), CancellationToken.None);
await _repository.SetAsync(new OrchestratorControlState(tenant2, true, 50, "high", DateTimeOffset.UtcNow), CancellationToken.None);

// Act
var states = await _repository.ListAsync(CancellationToken.None);

// Assert
states.Should().HaveCountGreaterOrEqualTo(2);
}
}
@@ -0,0 +1,26 @@
using System.Reflection;
using StellaOps.Infrastructure.Postgres.Testing;
using Xunit;

namespace StellaOps.SbomService.Storage.Postgres.Tests;

/// <summary>
/// PostgreSQL integration test fixture for the SbomService module.
/// </summary>
public sealed class SbomServicePostgresFixture : PostgresIntegrationFixture, ICollectionFixture<SbomServicePostgresFixture>
{
    protected override Assembly? GetMigrationAssembly()
        => typeof(SbomServiceDataSource).Assembly;

    protected override string GetModuleName() => "SbomService";
}

/// <summary>
/// Collection definition for SbomService PostgreSQL integration tests.
/// Tests in this collection share a single PostgreSQL container instance.
/// </summary>
[CollectionDefinition(Name)]
public sealed class SbomServicePostgresCollection : ICollectionFixture<SbomServicePostgresFixture>
{
    public const string Name = "SbomServicePostgres";
}
@@ -0,0 +1,34 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.SbomService.Storage.Postgres\StellaOps.SbomService.Storage.Postgres.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,181 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Repositories;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="ICatalogRepository"/>.
/// </summary>
public sealed class PostgresCatalogRepository : RepositoryBase<SbomServiceDataSource>, ICatalogRepository
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    private bool _tableInitialized;

    public PostgresCatalogRepository(SbomServiceDataSource dataSource, ILogger<PostgresCatalogRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<IReadOnlyList<CatalogRecord>> ListAsync(CancellationToken cancellationToken)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT artifact, sbom_version, digest, license, scope, asset_tags, created_at, projection_hash, evaluation_metadata
            FROM sbom.catalog
            ORDER BY created_at DESC, artifact";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<CatalogRecord>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapCatalogRecord(reader));
        }

        return results;
    }

    public async Task<(IReadOnlyList<CatalogRecord> Items, int Total)> QueryAsync(SbomCatalogQuery query, CancellationToken cancellationToken)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        var conditions = new List<string>();
        if (!string.IsNullOrWhiteSpace(query.Artifact))
        {
            conditions.Add("artifact ILIKE @artifact");
        }
        if (!string.IsNullOrWhiteSpace(query.License))
        {
            conditions.Add("LOWER(license) = LOWER(@license)");
        }
        if (!string.IsNullOrWhiteSpace(query.Scope))
        {
            conditions.Add("LOWER(scope) = LOWER(@scope)");
        }
        if (!string.IsNullOrWhiteSpace(query.AssetTag))
        {
            conditions.Add("asset_tags ? @asset_tag");
        }

        var whereClause = conditions.Count > 0 ? "WHERE " + string.Join(" AND ", conditions) : "";

        var countSql = $"SELECT COUNT(*) FROM sbom.catalog {whereClause}";
        var dataSql = $@"
            SELECT artifact, sbom_version, digest, license, scope, asset_tags, created_at, projection_hash, evaluation_metadata
            FROM sbom.catalog
            {whereClause}
            ORDER BY created_at DESC, artifact
            LIMIT @limit OFFSET @offset";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);

        // Get count
        await using var countCommand = CreateCommand(countSql, connection);
        AddQueryParameters(countCommand, query);
        var total = Convert.ToInt32(await countCommand.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false));

        // Get data
        await using var dataCommand = CreateCommand(dataSql, connection);
        AddQueryParameters(dataCommand, query);
        AddParameter(dataCommand, "@limit", query.Limit);
        AddParameter(dataCommand, "@offset", query.Offset);

        await using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<CatalogRecord>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapCatalogRecord(reader));
        }

        return (results, total);
    }

    private void AddQueryParameters(NpgsqlCommand command, SbomCatalogQuery query)
    {
        if (!string.IsNullOrWhiteSpace(query.Artifact))
        {
            AddParameter(command, "@artifact", $"%{query.Artifact}%");
        }
        if (!string.IsNullOrWhiteSpace(query.License))
        {
            AddParameter(command, "@license", query.License);
        }
        if (!string.IsNullOrWhiteSpace(query.Scope))
        {
            AddParameter(command, "@scope", query.Scope);
        }
        if (!string.IsNullOrWhiteSpace(query.AssetTag))
        {
            AddParameter(command, "@asset_tag", query.AssetTag);
        }
    }

    private static CatalogRecord MapCatalogRecord(NpgsqlDataReader reader)
    {
        var assetTagsJson = reader.IsDBNull(5) ? null : reader.GetString(5);
        var assetTags = string.IsNullOrWhiteSpace(assetTagsJson)
            ? new Dictionary<string, string>()
            : JsonSerializer.Deserialize<Dictionary<string, string>>(assetTagsJson, JsonOptions) ?? new Dictionary<string, string>();

        return new CatalogRecord(
            Artifact: reader.GetString(0),
            SbomVersion: reader.GetString(1),
            Digest: reader.GetString(2),
            License: reader.IsDBNull(3) ? null : reader.GetString(3),
            Scope: reader.GetString(4),
            AssetTags: assetTags,
            CreatedAt: reader.GetFieldValue<DateTimeOffset>(6),
            ProjectionHash: reader.GetString(7),
            EvaluationMetadata: reader.GetString(8));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.catalog (
                id TEXT PRIMARY KEY,
                artifact TEXT NOT NULL,
                sbom_version TEXT NOT NULL,
                digest TEXT NOT NULL,
                license TEXT,
                scope TEXT NOT NULL,
                asset_tags JSONB NOT NULL DEFAULT '{}',
                created_at TIMESTAMPTZ NOT NULL,
                projection_hash TEXT NOT NULL,
                evaluation_metadata TEXT NOT NULL
            );

            CREATE INDEX IF NOT EXISTS idx_catalog_artifact ON sbom.catalog (artifact);
            CREATE INDEX IF NOT EXISTS idx_catalog_license ON sbom.catalog (license);
            CREATE INDEX IF NOT EXISTS idx_catalog_scope ON sbom.catalog (scope);
            CREATE INDEX IF NOT EXISTS idx_catalog_created_at ON sbom.catalog (created_at DESC);
            CREATE INDEX IF NOT EXISTS idx_catalog_asset_tags ON sbom.catalog USING GIN (asset_tags);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
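For orientation, a minimal usage sketch of the paged catalog query above. The property names on SbomCatalogQuery (Artifact, License, Scope, AssetTag, Limit, Offset) are taken from the reads in QueryAsync, but the object-initializer shape of that type is an assumption, and the repository instance is presumed to be resolved from DI.

// Hypothetical sketch: page through catalog rows carrying a given asset tag.
// Assumes SbomCatalogQuery exposes init-settable properties with the names read above.
var query = new SbomCatalogQuery
{
    Artifact = "api",      // matched with ILIKE '%api%'
    AssetTag = "pci",      // resolved via the JSONB key-existence operator (?) backed by the GIN index
    Limit = 50,
    Offset = 0
};
var (items, total) = await catalogRepository.QueryAsync(query, CancellationToken.None);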
@@ -0,0 +1,116 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Repositories;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IComponentLookupRepository"/>.
/// </summary>
public sealed class PostgresComponentLookupRepository : RepositoryBase<SbomServiceDataSource>, IComponentLookupRepository
{
    private bool _tableInitialized;

    public PostgresComponentLookupRepository(SbomServiceDataSource dataSource, ILogger<PostgresComponentLookupRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<(IReadOnlyList<ComponentLookupRecord> Items, int Total)> QueryAsync(ComponentLookupQuery query, CancellationToken cancellationToken)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        var conditions = new List<string> { "LOWER(purl) = LOWER(@purl)" };
        if (!string.IsNullOrWhiteSpace(query.Artifact))
        {
            conditions.Add("LOWER(artifact) = LOWER(@artifact)");
        }

        var whereClause = "WHERE " + string.Join(" AND ", conditions);

        var countSql = $"SELECT COUNT(*) FROM sbom.component_lookups {whereClause}";
        var dataSql = $@"
            SELECT artifact, purl, neighbor_purl, relationship, license, scope, runtime_flag
            FROM sbom.component_lookups
            {whereClause}
            ORDER BY artifact, purl
            LIMIT @limit OFFSET @offset";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);

        // Get count
        await using var countCommand = CreateCommand(countSql, connection);
        AddParameter(countCommand, "@purl", query.Purl);
        if (!string.IsNullOrWhiteSpace(query.Artifact))
        {
            AddParameter(countCommand, "@artifact", query.Artifact);
        }
        var total = Convert.ToInt32(await countCommand.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false));

        // Get data
        await using var dataCommand = CreateCommand(dataSql, connection);
        AddParameter(dataCommand, "@purl", query.Purl);
        if (!string.IsNullOrWhiteSpace(query.Artifact))
        {
            AddParameter(dataCommand, "@artifact", query.Artifact);
        }
        AddParameter(dataCommand, "@limit", query.Limit);
        AddParameter(dataCommand, "@offset", query.Offset);

        await using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<ComponentLookupRecord>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapComponentLookupRecord(reader));
        }

        return (results, total);
    }

    private static ComponentLookupRecord MapComponentLookupRecord(NpgsqlDataReader reader)
    {
        return new ComponentLookupRecord(
            Artifact: reader.GetString(0),
            Purl: reader.GetString(1),
            NeighborPurl: reader.GetString(2),
            Relationship: reader.GetString(3),
            License: reader.IsDBNull(4) ? null : reader.GetString(4),
            Scope: reader.GetString(5),
            RuntimeFlag: reader.GetBoolean(6));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.component_lookups (
                id TEXT PRIMARY KEY,
                artifact TEXT NOT NULL,
                purl TEXT NOT NULL,
                neighbor_purl TEXT NOT NULL,
                relationship TEXT NOT NULL,
                license TEXT,
                scope TEXT NOT NULL,
                runtime_flag BOOLEAN NOT NULL DEFAULT false
            );

            CREATE INDEX IF NOT EXISTS idx_component_lookups_purl ON sbom.component_lookups (LOWER(purl));
            CREATE INDEX IF NOT EXISTS idx_component_lookups_artifact ON sbom.component_lookups (LOWER(artifact));
            CREATE INDEX IF NOT EXISTS idx_component_lookups_purl_artifact ON sbom.component_lookups (LOWER(purl), LOWER(artifact));";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
@@ -0,0 +1,113 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Repositories;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IEntrypointRepository"/>.
/// </summary>
public sealed class PostgresEntrypointRepository : RepositoryBase<SbomServiceDataSource>, IEntrypointRepository
{
    private bool _tableInitialized;

    public PostgresEntrypointRepository(SbomServiceDataSource dataSource, ILogger<PostgresEntrypointRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<IReadOnlyList<Entrypoint>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT artifact, service, path, scope, runtime_flag
            FROM sbom.entrypoints
            WHERE tenant_id = @tenant_id
            ORDER BY artifact, service, path";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@tenant_id", tenantId.Trim().ToLowerInvariant());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<Entrypoint>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapEntrypoint(reader));
        }

        return results;
    }

    public async Task UpsertAsync(string tenantId, Entrypoint entrypoint, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(entrypoint);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            INSERT INTO sbom.entrypoints (tenant_id, artifact, service, path, scope, runtime_flag)
            VALUES (@tenant_id, @artifact, @service, @path, @scope, @runtime_flag)
            ON CONFLICT (tenant_id, artifact, service) DO UPDATE SET
                path = EXCLUDED.path,
                scope = EXCLUDED.scope,
                runtime_flag = EXCLUDED.runtime_flag";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@tenant_id", tenantId.Trim().ToLowerInvariant());
        AddParameter(command, "@artifact", entrypoint.Artifact);
        AddParameter(command, "@service", entrypoint.Service);
        AddParameter(command, "@path", entrypoint.Path);
        AddParameter(command, "@scope", entrypoint.Scope);
        AddParameter(command, "@runtime_flag", entrypoint.RuntimeFlag);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    private static Entrypoint MapEntrypoint(NpgsqlDataReader reader)
    {
        return new Entrypoint(
            Artifact: reader.GetString(0),
            Service: reader.GetString(1),
            Path: reader.GetString(2),
            Scope: reader.GetString(3),
            RuntimeFlag: reader.GetBoolean(4));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.entrypoints (
                tenant_id TEXT NOT NULL,
                artifact TEXT NOT NULL,
                service TEXT NOT NULL,
                path TEXT NOT NULL,
                scope TEXT NOT NULL,
                runtime_flag BOOLEAN NOT NULL DEFAULT false,
                PRIMARY KEY (tenant_id, artifact, service)
            );

            CREATE INDEX IF NOT EXISTS idx_entrypoints_tenant_id ON sbom.entrypoints (tenant_id);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
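A short usage sketch for the upsert-then-list flow above; the positional Entrypoint shape (Artifact, Service, Path, Scope, RuntimeFlag) mirrors the mapper and the tests in this change, while the repository variable is assumed to be resolved from DI.

// Sketch: register an entrypoint, then read the tenant's ordered list back.
var entrypoint = new Entrypoint("ghcr.io/acme/api", "web", "/srv/app", "runtime", true);
await entrypointRepository.UpsertAsync("tenant-a", entrypoint, CancellationToken.None);

// A repeated upsert for the same (tenant, artifact, service) key only overwrites
// path, scope and runtime_flag, so ListAsync yields one row per service.
var entrypoints = await entrypointRepository.ListAsync("tenant-a", CancellationToken.None);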
@@ -0,0 +1,134 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Repositories;
using StellaOps.SbomService.Services;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IOrchestratorControlRepository"/>.
/// </summary>
public sealed class PostgresOrchestratorControlRepository : RepositoryBase<SbomServiceDataSource>, IOrchestratorControlRepository
{
    private bool _tableInitialized;

    public PostgresOrchestratorControlRepository(SbomServiceDataSource dataSource, ILogger<PostgresOrchestratorControlRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<OrchestratorControlState> GetAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT tenant_id, paused, throttle_percent, backpressure, updated_at
            FROM sbom.orchestrator_control
            WHERE tenant_id = @tenant_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@tenant_id", tenantId.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return MapOrchestratorControlState(reader);
        }

        // Return default state and persist it
        var defaultState = OrchestratorControlState.Default(tenantId);
        await reader.CloseAsync().ConfigureAwait(false);
        await SetAsync(defaultState, cancellationToken).ConfigureAwait(false);
        return defaultState;
    }

    public async Task<OrchestratorControlState> SetAsync(OrchestratorControlState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            INSERT INTO sbom.orchestrator_control (tenant_id, paused, throttle_percent, backpressure, updated_at)
            VALUES (@tenant_id, @paused, @throttle_percent, @backpressure, @updated_at)
            ON CONFLICT (tenant_id) DO UPDATE SET
                paused = EXCLUDED.paused,
                throttle_percent = EXCLUDED.throttle_percent,
                backpressure = EXCLUDED.backpressure,
                updated_at = EXCLUDED.updated_at";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@tenant_id", state.TenantId.Trim());
        AddParameter(command, "@paused", state.Paused);
        AddParameter(command, "@throttle_percent", state.ThrottlePercent);
        AddParameter(command, "@backpressure", state.Backpressure);
        AddParameter(command, "@updated_at", state.UpdatedAtUtc);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        return state;
    }

    public async Task<IReadOnlyList<OrchestratorControlState>> ListAsync(CancellationToken cancellationToken)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT tenant_id, paused, throttle_percent, backpressure, updated_at
            FROM sbom.orchestrator_control
            ORDER BY tenant_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<OrchestratorControlState>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapOrchestratorControlState(reader));
        }

        return results;
    }

    private static OrchestratorControlState MapOrchestratorControlState(NpgsqlDataReader reader)
    {
        return new OrchestratorControlState(
            TenantId: reader.GetString(0),
            Paused: reader.GetBoolean(1),
            ThrottlePercent: reader.GetInt32(2),
            Backpressure: reader.GetString(3),
            UpdatedAtUtc: reader.GetFieldValue<DateTimeOffset>(4));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.orchestrator_control (
                tenant_id TEXT PRIMARY KEY,
                paused BOOLEAN NOT NULL DEFAULT false,
                throttle_percent INTEGER NOT NULL DEFAULT 0,
                backpressure TEXT NOT NULL DEFAULT 'normal',
                updated_at TIMESTAMPTZ NOT NULL
            );";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
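A sketch of the read-modify-write flow the repository above supports. GetAsync persists OrchestratorControlState.Default(tenantId) on the first read, so callers can assume a row exists afterwards; the `with` expression assumes the state type is a positional record, as the named-argument construction elsewhere in this change suggests.

// Sketch: pause ingestion for one tenant.
var current = await controlRepository.GetAsync("tenant-a", CancellationToken.None);
var paused = current with
{
    Paused = true,
    ThrottlePercent = 100,
    UpdatedAtUtc = DateTimeOffset.UtcNow
};
await controlRepository.SetAsync(paused, CancellationToken.None);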
@@ -0,0 +1,149 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Repositories;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IOrchestratorRepository"/>.
/// </summary>
public sealed class PostgresOrchestratorRepository : RepositoryBase<SbomServiceDataSource>, IOrchestratorRepository
{
    private bool _tableInitialized;

    public PostgresOrchestratorRepository(SbomServiceDataSource dataSource, ILogger<PostgresOrchestratorRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<IReadOnlyList<OrchestratorSource>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT tenant_id, source_id, artifact_digest, source_type, created_at, metadata
            FROM sbom.orchestrator_sources
            WHERE tenant_id = @tenant_id
            ORDER BY artifact_digest, source_type, source_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@tenant_id", tenantId.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<OrchestratorSource>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapOrchestratorSource(reader));
        }

        return results;
    }

    public async Task<OrchestratorSource> RegisterAsync(RegisterOrchestratorSourceRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        // Check for existing record (idempotent on tenant, artifactDigest, sourceType)
        const string checkSql = @"
            SELECT tenant_id, source_id, artifact_digest, source_type, created_at, metadata
            FROM sbom.orchestrator_sources
            WHERE tenant_id = @tenant_id
              AND artifact_digest = @artifact_digest
              AND source_type = @source_type";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var checkCommand = CreateCommand(checkSql, connection);
        AddParameter(checkCommand, "@tenant_id", request.TenantId.Trim());
        AddParameter(checkCommand, "@artifact_digest", request.ArtifactDigest.Trim());
        AddParameter(checkCommand, "@source_type", request.SourceType.Trim());

        await using var checkReader = await checkCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (await checkReader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return MapOrchestratorSource(checkReader);
        }
        await checkReader.CloseAsync().ConfigureAwait(false);

        // Generate new source ID
        const string countSql = "SELECT COUNT(*) FROM sbom.orchestrator_sources WHERE tenant_id = @tenant_id";
        await using var countCommand = CreateCommand(countSql, connection);
        AddParameter(countCommand, "@tenant_id", request.TenantId.Trim());
        var count = Convert.ToInt32(await countCommand.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false));
        var sourceId = $"src-{count + 1:D3}";

        var now = DateTimeOffset.UtcNow;

        const string insertSql = @"
            INSERT INTO sbom.orchestrator_sources (tenant_id, source_id, artifact_digest, source_type, created_at, metadata)
            VALUES (@tenant_id, @source_id, @artifact_digest, @source_type, @created_at, @metadata)";

        await using var insertCommand = CreateCommand(insertSql, connection);
        AddParameter(insertCommand, "@tenant_id", request.TenantId.Trim());
        AddParameter(insertCommand, "@source_id", sourceId);
        AddParameter(insertCommand, "@artifact_digest", request.ArtifactDigest.Trim());
        AddParameter(insertCommand, "@source_type", request.SourceType.Trim());
        AddParameter(insertCommand, "@created_at", now);
        AddParameter(insertCommand, "@metadata", request.Metadata.Trim());

        await insertCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        return new OrchestratorSource(
            request.TenantId.Trim(),
            sourceId,
            request.ArtifactDigest.Trim(),
            request.SourceType.Trim(),
            now,
            request.Metadata.Trim());
    }

    private static OrchestratorSource MapOrchestratorSource(NpgsqlDataReader reader)
    {
        return new OrchestratorSource(
            TenantId: reader.GetString(0),
            SourceId: reader.GetString(1),
            ArtifactDigest: reader.GetString(2),
            SourceType: reader.GetString(3),
            CreatedAtUtc: reader.GetFieldValue<DateTimeOffset>(4),
            Metadata: reader.GetString(5));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.orchestrator_sources (
                tenant_id TEXT NOT NULL,
                source_id TEXT NOT NULL,
                artifact_digest TEXT NOT NULL,
                source_type TEXT NOT NULL,
                created_at TIMESTAMPTZ NOT NULL,
                metadata TEXT NOT NULL,
                PRIMARY KEY (tenant_id, source_id)
            );

            CREATE UNIQUE INDEX IF NOT EXISTS idx_orchestrator_sources_unique
                ON sbom.orchestrator_sources (tenant_id, artifact_digest, source_type);
            CREATE INDEX IF NOT EXISTS idx_orchestrator_sources_tenant_id
                ON sbom.orchestrator_sources (tenant_id);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
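A sketch of the idempotent registration contract above: registering the same (tenant, artifact digest, source type) twice returns the existing row rather than inserting a duplicate. The object-initializer shape of RegisterOrchestratorSourceRequest is an assumption inferred from the property reads in RegisterAsync.

// Sketch: idempotent source registration (hypothetical request shape).
var request = new RegisterOrchestratorSourceRequest
{
    TenantId = "tenant-a",
    ArtifactDigest = "sha256:0123abcd",
    SourceType = "registry",
    Metadata = "{}"
};
var first = await orchestratorRepository.RegisterAsync(request, CancellationToken.None);
var second = await orchestratorRepository.RegisterAsync(request, CancellationToken.None);
// first.SourceId == second.SourceId (e.g. "src-001"); no second row is written.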
@@ -0,0 +1,114 @@
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.SbomService.Models;
using StellaOps.SbomService.Repositories;

namespace StellaOps.SbomService.Storage.Postgres.Repositories;

/// <summary>
/// PostgreSQL implementation of <see cref="IProjectionRepository"/>.
/// </summary>
public sealed class PostgresProjectionRepository : RepositoryBase<SbomServiceDataSource>, IProjectionRepository
{
    private bool _tableInitialized;

    public PostgresProjectionRepository(SbomServiceDataSource dataSource, ILogger<PostgresProjectionRepository> logger)
        : base(dataSource, logger)
    {
    }

    public async Task<SbomProjectionResult?> GetAsync(string snapshotId, string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(snapshotId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT snapshot_id, tenant_id, projection_json, projection_hash, schema_version
            FROM sbom.projections
            WHERE snapshot_id = @snapshot_id AND tenant_id = @tenant_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@snapshot_id", snapshotId.Trim());
        AddParameter(command, "@tenant_id", tenantId.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return null;
        }

        return MapSbomProjectionResult(reader);
    }

    public async Task<IReadOnlyList<SbomProjectionResult>> ListAsync(CancellationToken cancellationToken)
    {
        await EnsureTableAsync(cancellationToken).ConfigureAwait(false);

        const string sql = @"
            SELECT snapshot_id, tenant_id, projection_json, projection_hash, schema_version
            FROM sbom.projections
            ORDER BY snapshot_id, tenant_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<SbomProjectionResult>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapSbomProjectionResult(reader));
        }

        return results;
    }

    private static SbomProjectionResult MapSbomProjectionResult(NpgsqlDataReader reader)
    {
        var projectionJson = reader.GetString(2);
        using var doc = JsonDocument.Parse(projectionJson);
        var projection = doc.RootElement.Clone();

        return new SbomProjectionResult(
            SnapshotId: reader.GetString(0),
            TenantId: reader.GetString(1),
            Projection: projection,
            ProjectionHash: reader.GetString(3),
            SchemaVersion: reader.GetString(4));
    }

    private async Task EnsureTableAsync(CancellationToken cancellationToken)
    {
        if (_tableInitialized)
        {
            return;
        }

        const string ddl = @"
            CREATE SCHEMA IF NOT EXISTS sbom;

            CREATE TABLE IF NOT EXISTS sbom.projections (
                snapshot_id TEXT NOT NULL,
                tenant_id TEXT NOT NULL,
                projection_json JSONB NOT NULL,
                projection_hash TEXT NOT NULL,
                schema_version TEXT NOT NULL,
                PRIMARY KEY (snapshot_id, tenant_id)
            );

            CREATE INDEX IF NOT EXISTS idx_projections_tenant_id ON sbom.projections (tenant_id);
            CREATE INDEX IF NOT EXISTS idx_projections_schema_version ON sbom.projections (schema_version);";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(ddl, connection);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

        _tableInitialized = true;
    }
}
@@ -0,0 +1,44 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Options;

namespace StellaOps.SbomService.Storage.Postgres;

/// <summary>
/// PostgreSQL data source for the SbomService module.
/// </summary>
public sealed class SbomServiceDataSource : DataSourceBase
{
    /// <summary>
    /// Default schema name for SbomService tables.
    /// </summary>
    public const string DefaultSchemaName = "sbom";

    /// <summary>
    /// Creates a new SbomService data source.
    /// </summary>
    public SbomServiceDataSource(IOptions<PostgresOptions> options, ILogger<SbomServiceDataSource> logger)
        : base(CreateOptions(options.Value), logger)
    {
    }

    /// <inheritdoc />
    protected override string ModuleName => "SbomService";

    /// <inheritdoc />
    protected override void ConfigureDataSourceBuilder(NpgsqlDataSourceBuilder builder)
    {
        base.ConfigureDataSourceBuilder(builder);
    }

    private static PostgresOptions CreateOptions(PostgresOptions baseOptions)
    {
        if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
        {
            baseOptions.SchemaName = DefaultSchemaName;
        }
        return baseOptions;
    }
}
@@ -0,0 +1,63 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.SbomService.Repositories;
using StellaOps.SbomService.Storage.Postgres.Repositories;

namespace StellaOps.SbomService.Storage.Postgres;

/// <summary>
/// Extension methods for configuring SbomService PostgreSQL storage services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds SbomService PostgreSQL storage services.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root.</param>
    /// <param name="sectionName">Configuration section name for PostgreSQL options.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddSbomServicePostgresStorage(
        this IServiceCollection services,
        IConfiguration configuration,
        string sectionName = "Postgres:SbomService")
    {
        services.Configure<PostgresOptions>(configuration.GetSection(sectionName));
        services.AddSingleton<SbomServiceDataSource>();

        // Register repositories
        services.AddSingleton<ICatalogRepository, PostgresCatalogRepository>();
        services.AddSingleton<IComponentLookupRepository, PostgresComponentLookupRepository>();
        services.AddSingleton<IEntrypointRepository, PostgresEntrypointRepository>();
        services.AddSingleton<IOrchestratorRepository, PostgresOrchestratorRepository>();
        services.AddSingleton<IOrchestratorControlRepository, PostgresOrchestratorControlRepository>();
        services.AddSingleton<IProjectionRepository, PostgresProjectionRepository>();

        return services;
    }

    /// <summary>
    /// Adds SbomService PostgreSQL storage services with explicit options.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configureOptions">Options configuration action.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddSbomServicePostgresStorage(
        this IServiceCollection services,
        Action<PostgresOptions> configureOptions)
    {
        services.Configure(configureOptions);
        services.AddSingleton<SbomServiceDataSource>();

        // Register repositories
        services.AddSingleton<ICatalogRepository, PostgresCatalogRepository>();
        services.AddSingleton<IComponentLookupRepository, PostgresComponentLookupRepository>();
        services.AddSingleton<IEntrypointRepository, PostgresEntrypointRepository>();
        services.AddSingleton<IOrchestratorRepository, PostgresOrchestratorRepository>();
        services.AddSingleton<IOrchestratorControlRepository, PostgresOrchestratorControlRepository>();
        services.AddSingleton<IProjectionRepository, PostgresProjectionRepository>();

        return services;
    }
}
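A wiring sketch for the two overloads above, as they might be called from a host's composition root. The "Postgres:SbomService" section name is the declared default; the ConnectionString property on PostgresOptions is an assumption (only SchemaName is used elsewhere in this change).

// Sketch: configuration-bound registration (reads the Postgres:SbomService section).
builder.Services.AddSbomServicePostgresStorage(builder.Configuration);

// Or explicit options (property names hedged as noted above).
builder.Services.AddSbomServicePostgresStorage(options =>
{
    options.ConnectionString = "Host=localhost;Database=stellaops;Username=sbom;Password=change-me";
    options.SchemaName = "sbom";
});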
@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <RootNamespace>StellaOps.SbomService.Storage.Postgres</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.SbomService/StellaOps.SbomService.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
  </ItemGroup>
</Project>
@@ -20,3 +20,91 @@ public sealed record RuntimeEventsIngestResponseDto
    [JsonPropertyName("duplicates")]
    public int Duplicates { get; init; }
}

public sealed record RuntimeReconcileRequestDto
{
    [JsonPropertyName("imageDigest")]
    public required string ImageDigest { get; init; }

    [JsonPropertyName("runtimeEventId")]
    public string? RuntimeEventId { get; init; }

    [JsonPropertyName("maxMisses")]
    public int MaxMisses { get; init; } = 100;
}

public sealed record RuntimeReconcileResponseDto
{
    [JsonPropertyName("imageDigest")]
    public required string ImageDigest { get; init; }

    [JsonPropertyName("runtimeEventId")]
    public string? RuntimeEventId { get; init; }

    [JsonPropertyName("sbomArtifactId")]
    public string? SbomArtifactId { get; init; }

    [JsonPropertyName("totalRuntimeLibraries")]
    public int TotalRuntimeLibraries { get; init; }

    [JsonPropertyName("totalSbomComponents")]
    public int TotalSbomComponents { get; init; }

    [JsonPropertyName("matchCount")]
    public int MatchCount { get; init; }

    [JsonPropertyName("missCount")]
    public int MissCount { get; init; }

    [JsonPropertyName("misses")]
    public IReadOnlyList<RuntimeLibraryMissDto> Misses { get; init; } = [];

    [JsonPropertyName("matches")]
    public IReadOnlyList<RuntimeLibraryMatchDto> Matches { get; init; } = [];

    [JsonPropertyName("reconciledAt")]
    public DateTimeOffset ReconciledAt { get; init; }

    [JsonPropertyName("errorCode")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ErrorCode { get; init; }

    [JsonPropertyName("errorMessage")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ErrorMessage { get; init; }
}

public sealed record RuntimeLibraryMissDto
{
    [JsonPropertyName("path")]
    public required string Path { get; init; }

    [JsonPropertyName("sha256")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Sha256 { get; init; }

    [JsonPropertyName("inode")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public long? Inode { get; init; }
}

public sealed record RuntimeLibraryMatchDto
{
    [JsonPropertyName("runtimePath")]
    public required string RuntimePath { get; init; }

    [JsonPropertyName("runtimeSha256")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? RuntimeSha256 { get; init; }

    [JsonPropertyName("sbomComponentKey")]
    public required string SbomComponentKey { get; init; }

    [JsonPropertyName("sbomComponentName")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SbomComponentName { get; init; }

    [JsonPropertyName("matchType")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? MatchType { get; init; }
}
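For reference, a minimal request built against the DTOs above. Only ImageDigest is required; MaxMisses defaults to 100 and RuntimeEventId is optional. The route prefix for the runtime endpoint group is not shown in this change, so it is left unspecified here.

// Sketch: build and serialize a reconcile request for the POST /reconcile endpoint mapped below.
var reconcileRequest = new RuntimeReconcileRequestDto
{
    ImageDigest = "sha256:0123456789abcdef",
    MaxMisses = 50
};
var payload = System.Text.Json.JsonSerializer.Serialize(reconcileRequest);
// Property names serialize per the [JsonPropertyName] attributes above
// (imageDigest, runtimeEventId, maxMisses).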
@@ -37,6 +37,15 @@ internal static class RuntimeEndpoints
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status429TooManyRequests)
            .RequireAuthorization(ScannerPolicies.RuntimeIngest);

        runtime.MapPost("/reconcile", HandleRuntimeReconcileAsync)
            .WithName("scanner.runtime.reconcile")
            .WithSummary("Reconcile runtime-observed libraries against SBOM inventory")
            .WithDescription("Compares libraries observed at runtime against the static SBOM to identify discrepancies")
            .Produces<RuntimeReconcileResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.RuntimeIngest);
    }

    private static async Task<IResult> HandleRuntimeEventsAsync(
@@ -234,6 +243,75 @@ internal static class RuntimeEndpoints
        return null;
    }

    private static async Task<IResult> HandleRuntimeReconcileAsync(
        RuntimeReconcileRequestDto request,
        IRuntimeInventoryReconciler reconciler,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(reconciler);

        if (string.IsNullOrWhiteSpace(request.ImageDigest))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid reconciliation request",
                StatusCodes.Status400BadRequest,
                detail: "imageDigest is required.");
        }

        var reconcileRequest = new RuntimeReconciliationRequest
        {
            ImageDigest = request.ImageDigest,
            RuntimeEventId = request.RuntimeEventId,
            MaxMisses = request.MaxMisses > 0 ? request.MaxMisses : 100
        };

        var result = await reconciler.ReconcileAsync(reconcileRequest, cancellationToken).ConfigureAwait(false);

        var responseDto = new RuntimeReconcileResponseDto
        {
            ImageDigest = result.ImageDigest,
            RuntimeEventId = result.RuntimeEventId,
            SbomArtifactId = result.SbomArtifactId,
            TotalRuntimeLibraries = result.TotalRuntimeLibraries,
            TotalSbomComponents = result.TotalSbomComponents,
            MatchCount = result.MatchCount,
            MissCount = result.MissCount,
            Misses = result.Misses
                .Select(m => new RuntimeLibraryMissDto
                {
                    Path = m.Path,
                    Sha256 = m.Sha256,
                    Inode = m.Inode
                })
                .ToList(),
            Matches = result.Matches
                .Select(m => new RuntimeLibraryMatchDto
                {
                    RuntimePath = m.RuntimePath,
                    RuntimeSha256 = m.RuntimeSha256,
                    SbomComponentKey = m.SbomComponentKey,
                    SbomComponentName = m.SbomComponentName,
                    MatchType = m.MatchType
                })
                .ToList(),
            ReconciledAt = result.ReconciledAt,
            ErrorCode = result.ErrorCode,
            ErrorMessage = result.ErrorMessage
        };

        if (!string.IsNullOrEmpty(result.ErrorCode) &&
            result.ErrorCode is "RUNTIME_EVENT_NOT_FOUND" or "NO_RUNTIME_EVENTS")
        {
            return Json(responseDto, StatusCodes.Status404NotFound);
        }

        return Json(responseDto, StatusCodes.Status200OK);
    }

    private static string NormalizeSegment(string segment)
    {
        if (string.IsNullOrWhiteSpace(segment))
@@ -367,6 +367,20 @@ public sealed class ScannerWebServiceOptions
        public int PerTenantBurst { get; set; } = 1000;

        public int PolicyCacheTtlSeconds { get; set; } = 300;

        /// <summary>
        /// Enable automatic scanning when DRIFT events are detected.
        /// When true, DRIFT events will trigger a new scan of the affected image.
        /// Default: false (opt-in).
        /// </summary>
        public bool AutoScanEnabled { get; set; } = false;

        /// <summary>
        /// Cooldown period in seconds before the same image can be scanned again due to DRIFT.
        /// Prevents scan storms from repeated DRIFT events.
        /// Default: 300 seconds (5 minutes).
        /// </summary>
        public int AutoScanCooldownSeconds { get; set; } = 300;
    }

    public sealed class DeterminismOptions
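A configuration sketch for opting in to DRIFT-triggered rescans from code. The nested Runtime options object is an assumption based on how DeltaScanRequestHandler reads optionsMonitor.CurrentValue.Runtime later in this commit.

// Sketch: enable auto-scan with a longer cooldown.
builder.Services.PostConfigure<ScannerWebServiceOptions>(options =>
{
    options.Runtime.AutoScanEnabled = true;         // default is false (opt-in)
    options.Runtime.AutoScanCooldownSeconds = 600;  // one DRIFT-triggered scan per image per 10 minutes
});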
@@ -202,7 +202,9 @@ builder.Services.AddScannerStorage(storageOptions =>
});
builder.Services.AddSingleton<IPostConfigureOptions<ScannerStorageOptions>, ScannerStorageOptionsPostConfigurator>();
builder.Services.AddSingleton<RuntimeEventRateLimiter>();
builder.Services.AddSingleton<IDeltaScanRequestHandler, DeltaScanRequestHandler>();
builder.Services.AddSingleton<IRuntimeEventIngestionService, RuntimeEventIngestionService>();
builder.Services.AddSingleton<IRuntimeInventoryReconciler, RuntimeInventoryReconciler>();
builder.Services.AddSingleton<IRuntimeAttestationVerifier, RuntimeAttestationVerifier>();
builder.Services.AddSingleton<ILinksetResolver, LinksetResolver>();
builder.Services.AddSingleton<IRuntimePolicyService, RuntimePolicyService>();
@@ -0,0 +1,260 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Options;
using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Scanner.WebService.Services;

/// <summary>
/// Handles delta scan requests triggered by runtime DRIFT events.
/// </summary>
internal interface IDeltaScanRequestHandler
{
    /// <summary>
    /// Processes a batch of runtime events and triggers scans for DRIFT events when enabled.
    /// </summary>
    Task<DeltaScanResult> ProcessAsync(
        IReadOnlyList<RuntimeEventEnvelope> envelopes,
        CancellationToken cancellationToken);
}

/// <summary>
/// Result of delta scan processing.
/// </summary>
internal readonly record struct DeltaScanResult(
    int DriftEventsDetected,
    int ScansTriggered,
    int ScansSkipped,
    int ScansDeduped);

internal sealed class DeltaScanRequestHandler : IDeltaScanRequestHandler
{
    private static readonly Meter DeltaScanMeter = new("StellaOps.Scanner.DeltaScan", "1.0.0");
    private static readonly Counter<long> DeltaScanTriggered = DeltaScanMeter.CreateCounter<long>(
        "scanner_delta_scan_triggered_total",
        unit: "1",
        description: "Total delta scans triggered from runtime DRIFT events.");
    private static readonly Counter<long> DeltaScanSkipped = DeltaScanMeter.CreateCounter<long>(
        "scanner_delta_scan_skipped_total",
        unit: "1",
        description: "Total delta scans skipped (feature disabled, rate limited, or missing data).");
    private static readonly Counter<long> DeltaScanDeduped = DeltaScanMeter.CreateCounter<long>(
        "scanner_delta_scan_deduped_total",
        unit: "1",
        description: "Total delta scans deduplicated within cooldown window.");
    private static readonly Histogram<double> DeltaScanLatencyMs = DeltaScanMeter.CreateHistogram<double>(
        "scanner_delta_scan_latency_ms",
        unit: "ms",
        description: "Latency for delta scan trigger processing.");

    // Deduplication cache: imageDigest -> last trigger time
    private readonly ConcurrentDictionary<string, DateTimeOffset> _recentTriggers = new(StringComparer.OrdinalIgnoreCase);

    private readonly IScanCoordinator _scanCoordinator;
    private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<DeltaScanRequestHandler> _logger;

    public DeltaScanRequestHandler(
        IScanCoordinator scanCoordinator,
        IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor,
        TimeProvider timeProvider,
        ILogger<DeltaScanRequestHandler> logger)
    {
        _scanCoordinator = scanCoordinator ?? throw new ArgumentNullException(nameof(scanCoordinator));
        _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<DeltaScanResult> ProcessAsync(
        IReadOnlyList<RuntimeEventEnvelope> envelopes,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(envelopes);

        var stopwatch = Stopwatch.StartNew();
        var options = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions();

        // Check if autoscan is enabled
        if (!options.AutoScanEnabled)
        {
            var driftCount = envelopes.Count(e => e.Event.Kind == RuntimeEventKind.Drift);
            if (driftCount > 0)
            {
                DeltaScanSkipped.Add(driftCount);
                _logger.LogDebug(
                    "Delta scan disabled, skipping {DriftCount} DRIFT events",
                    driftCount);
            }
            return new DeltaScanResult(driftCount, 0, driftCount, 0);
        }

        var driftEvents = envelopes
            .Where(e => e.Event.Kind == RuntimeEventKind.Drift)
            .ToList();

        if (driftEvents.Count == 0)
        {
            return new DeltaScanResult(0, 0, 0, 0);
        }

        var now = _timeProvider.GetUtcNow();
        var cooldownWindow = TimeSpan.FromSeconds(options.AutoScanCooldownSeconds);
        var triggered = 0;
        var skipped = 0;
        var deduped = 0;

        // Cleanup old entries from dedup cache
        CleanupDeduplicationCache(now, cooldownWindow);

        foreach (var envelope in driftEvents)
        {
            var runtimeEvent = envelope.Event;
            var imageDigest = ExtractImageDigest(runtimeEvent);

            if (string.IsNullOrWhiteSpace(imageDigest))
            {
                _logger.LogWarning(
                    "DRIFT event {EventId} has no image digest, skipping auto-scan",
                    runtimeEvent.EventId);
                DeltaScanSkipped.Add(1);
                skipped++;
                continue;
            }

            // Check deduplication
            if (_recentTriggers.TryGetValue(imageDigest, out var lastTrigger))
            {
                if (now - lastTrigger < cooldownWindow)
                {
                    _logger.LogDebug(
                        "DRIFT event {EventId} for image {ImageDigest} within cooldown window, deduplicating",
                        runtimeEvent.EventId,
                        imageDigest);
                    DeltaScanDeduped.Add(1);
                    deduped++;
                    continue;
                }
            }

            // Trigger scan
            var scanTarget = new ScanTarget(
                runtimeEvent.Workload.ImageRef,
                imageDigest);

            var metadata = new Dictionary<string, string>
            {
                ["stellaops:trigger"] = "drift",
                ["stellaops:drift.eventId"] = runtimeEvent.EventId,
                ["stellaops:drift.tenant"] = runtimeEvent.Tenant,
                ["stellaops:drift.node"] = runtimeEvent.Node
            };

            if (runtimeEvent.Delta?.BaselineImageDigest is { } baseline)
            {
                metadata["stellaops:drift.baselineDigest"] = baseline;
            }

            if (runtimeEvent.Delta?.ChangedFiles is { Count: > 0 } changedFiles)
            {
                metadata["stellaops:drift.changedFilesCount"] = changedFiles.Count.ToString();
            }

            if (runtimeEvent.Delta?.NewBinaries is { Count: > 0 } newBinaries)
            {
                metadata["stellaops:drift.newBinariesCount"] = newBinaries.Count.ToString();
            }

            var submission = new ScanSubmission(
                scanTarget.Normalize(),
                Force: false,
                ClientRequestId: $"drift:{runtimeEvent.EventId}",
                Metadata: metadata);

            try
            {
                var result = await _scanCoordinator.SubmitAsync(submission, cancellationToken).ConfigureAwait(false);

                _recentTriggers[imageDigest] = now;
                DeltaScanTriggered.Add(1);
                triggered++;

                _logger.LogInformation(
                    "Delta scan triggered for DRIFT event {EventId}: scanId={ScanId}, created={Created}",
                    runtimeEvent.EventId,
                    result.Snapshot.Id,
                    result.Created);
            }
            catch (Exception ex)
            {
                _logger.LogError(
                    ex,
                    "Failed to trigger delta scan for DRIFT event {EventId}, image {ImageDigest}",
                    runtimeEvent.EventId,
                    imageDigest);
                DeltaScanSkipped.Add(1);
                skipped++;
            }
        }

        stopwatch.Stop();
        DeltaScanLatencyMs.Record(stopwatch.Elapsed.TotalMilliseconds);

        _logger.LogInformation(
            "Delta scan processing complete: {DriftCount} DRIFT events, {Triggered} triggered, {Skipped} skipped, {Deduped} deduped",
            driftEvents.Count,
            triggered,
            skipped,
            deduped);

        return new DeltaScanResult(driftEvents.Count, triggered, skipped, deduped);
    }

    private void CleanupDeduplicationCache(DateTimeOffset now, TimeSpan cooldownWindow)
    {
        var expiredKeys = _recentTriggers
            .Where(kvp => now - kvp.Value > cooldownWindow * 2)
            .Select(kvp => kvp.Key)
.ToList();
|
||||
|
||||
foreach (var key in expiredKeys)
|
||||
{
|
||||
_recentTriggers.TryRemove(key, out _);
|
||||
}
|
||||
}
|
||||
|
||||
private static string? ExtractImageDigest(RuntimeEvent runtimeEvent)
|
||||
{
|
||||
// Prefer baseline digest from Delta for DRIFT events
|
||||
var digest = runtimeEvent.Delta?.BaselineImageDigest?.Trim().ToLowerInvariant();
|
||||
if (!string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
return digest;
|
||||
}
|
||||
|
||||
// Fall back to extracting from ImageRef
|
||||
var imageRef = runtimeEvent.Workload.ImageRef;
|
||||
if (string.IsNullOrWhiteSpace(imageRef))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var trimmed = imageRef.Trim();
|
||||
var atIndex = trimmed.LastIndexOf('@');
|
||||
if (atIndex >= 0 && atIndex < trimmed.Length - 1)
|
||||
{
|
||||
var candidate = trimmed[(atIndex + 1)..].Trim().ToLowerInvariant();
|
||||
if (candidate.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
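The deduplication above reduces to one comparison: a digest may trigger again only when now minus its last trigger is at least the cooldown window, and entries older than twice the window are pruned. A stand-alone sketch of that pattern using only BCL types; the CooldownGate name and demo values are illustrative, not part of this commit:

// --- illustrative sketch, not part of this commit ---
using System;
using System.Collections.Concurrent;

sealed class CooldownGate
{
    private readonly ConcurrentDictionary<string, DateTimeOffset> _lastTrigger = new(StringComparer.OrdinalIgnoreCase);
    private readonly TimeSpan _window;
    private readonly TimeProvider _time;

    public CooldownGate(TimeSpan window, TimeProvider time)
    {
        _window = window;
        _time = time;
    }

    // True when the key may trigger; false while it is still inside the cooldown window.
    public bool TryEnter(string key)
    {
        var now = _time.GetUtcNow();
        if (_lastTrigger.TryGetValue(key, out var last) && now - last < _window)
        {
            return false; // deduplicated
        }

        _lastTrigger[key] = now;
        return true;
    }
}

static class CooldownGateDemo
{
    public static void Main()
    {
        var gate = new CooldownGate(TimeSpan.FromMinutes(5), TimeProvider.System);
        Console.WriteLine(gate.TryEnter("sha256:abc"));  // True  - first trigger
        Console.WriteLine(gate.TryEnter("sha256:abc"));  // False - within cooldown
    }
}
// --- end sketch ---

Taking TimeProvider as a dependency, as the handler does, is what keeps the window testable with a fake clock.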
@@ -23,6 +23,7 @@ internal sealed class RuntimeEventIngestionService : IRuntimeEventIngestionServi

    private readonly RuntimeEventRepository _repository;
    private readonly RuntimeEventRateLimiter _rateLimiter;
    private readonly IDeltaScanRequestHandler _deltaScanHandler;
    private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<RuntimeEventIngestionService> _logger;
@@ -30,12 +31,14 @@ internal sealed class RuntimeEventIngestionService : IRuntimeEventIngestionServi
    public RuntimeEventIngestionService(
        RuntimeEventRepository repository,
        RuntimeEventRateLimiter rateLimiter,
        IDeltaScanRequestHandler deltaScanHandler,
        IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor,
        TimeProvider timeProvider,
        ILogger<RuntimeEventIngestionService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _rateLimiter = rateLimiter ?? throw new ArgumentNullException(nameof(rateLimiter));
        _deltaScanHandler = deltaScanHandler ?? throw new ArgumentNullException(nameof(deltaScanHandler));
        _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -126,9 +129,26 @@ internal sealed class RuntimeEventIngestionService : IRuntimeEventIngestionServi
            insertResult.DuplicateCount,
            totalPayloadBytes);

        // Process DRIFT events for auto-scan (fire and forget, don't block ingestion)
        _ = ProcessDriftEventsAsync(envelopes, cancellationToken);

        return RuntimeEventIngestionResult.Success(insertResult.InsertedCount, insertResult.DuplicateCount, totalPayloadBytes);
    }

    private async Task ProcessDriftEventsAsync(
        IReadOnlyList<RuntimeEventEnvelope> envelopes,
        CancellationToken cancellationToken)
    {
        try
        {
            await _deltaScanHandler.ProcessAsync(envelopes, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error processing DRIFT events for auto-scan");
        }
    }

    private static string? ExtractImageDigest(RuntimeEvent runtimeEvent)
    {
        var digest = NormalizeDigest(runtimeEvent.Delta?.BaselineImageDigest);

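The ingestion change above hands DRIFT processing off with a discarded task so the write path never waits on scan triggering, and the awaited call sits inside try/catch so failures are logged rather than surfacing as unobserved task exceptions. A minimal stand-alone sketch of that shape (helper names are hypothetical):

// --- illustrative sketch, not part of this commit ---
using System;
using System.Threading;
using System.Threading.Tasks;

static class FireAndForget
{
    public static void Run(Func<CancellationToken, Task> work, CancellationToken cancellationToken, Action<Exception> onError)
    {
        _ = RunCoreAsync(work, cancellationToken, onError);
    }

    private static async Task RunCoreAsync(Func<CancellationToken, Task> work, CancellationToken cancellationToken, Action<Exception> onError)
    {
        try
        {
            // Exceptions are observed here, so the discard above is safe.
            await work(cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            onError(ex);
        }
    }
}
// --- end sketch ---

Like the service, the sketch reuses the caller's token, so background work is cancelled if the ingestion request aborts; passing a linked application-lifetime token instead would let triggers outlive the request, a trade-off either way.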
@@ -0,0 +1,613 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Text.Json;
using CycloneDX.Json;
using CycloneDX.Models;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Scanner.WebService.Services;

/// <summary>
/// Service responsible for reconciling runtime-observed libraries against static SBOM inventory.
/// </summary>
internal interface IRuntimeInventoryReconciler
{
    /// <summary>
    /// Reconciles runtime libraries from a runtime event against the SBOM for the associated image.
    /// </summary>
    Task<RuntimeReconciliationResult> ReconcileAsync(
        RuntimeReconciliationRequest request,
        CancellationToken cancellationToken);
}

/// <summary>
/// Request for runtime-static reconciliation.
/// </summary>
internal sealed record RuntimeReconciliationRequest
{
    /// <summary>
    /// Image digest to reconcile (e.g., sha256:abc123...).
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Optional runtime event ID to use for library data.
    /// If not provided, the most recent event for the image will be used.
    /// </summary>
    public string? RuntimeEventId { get; init; }

    /// <summary>
    /// Maximum number of misses to return.
    /// </summary>
    public int MaxMisses { get; init; } = 100;
}

/// <summary>
/// Result of runtime-static reconciliation.
/// </summary>
internal sealed record RuntimeReconciliationResult
{
    public required string ImageDigest { get; init; }

    public string? RuntimeEventId { get; init; }

    public string? SbomArtifactId { get; init; }

    public int TotalRuntimeLibraries { get; init; }

    public int TotalSbomComponents { get; init; }

    public int MatchCount { get; init; }

    public int MissCount { get; init; }

    /// <summary>
    /// Libraries observed at runtime but not found in SBOM.
    /// </summary>
    public ImmutableArray<RuntimeLibraryMiss> Misses { get; init; } = [];

    /// <summary>
    /// Libraries matched between runtime and SBOM.
    /// </summary>
    public ImmutableArray<RuntimeLibraryMatch> Matches { get; init; } = [];

    public DateTimeOffset ReconciledAt { get; init; }

    public string? ErrorCode { get; init; }

    public string? ErrorMessage { get; init; }

    public static RuntimeReconciliationResult Error(string imageDigest, string code, string message)
        => new()
        {
            ImageDigest = imageDigest,
            ErrorCode = code,
            ErrorMessage = message,
            ReconciledAt = DateTimeOffset.UtcNow
        };
}

/// <summary>
/// A runtime library not found in the SBOM.
/// </summary>
internal sealed record RuntimeLibraryMiss
{
    public required string Path { get; init; }

    public string? Sha256 { get; init; }

    public long? Inode { get; init; }
}

/// <summary>
/// A runtime library matched in the SBOM.
/// </summary>
internal sealed record RuntimeLibraryMatch
{
    public required string RuntimePath { get; init; }

    public string? RuntimeSha256 { get; init; }

    public required string SbomComponentKey { get; init; }

    public string? SbomComponentName { get; init; }

    public string? MatchType { get; init; }
}

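Before the implementation, a sketch of how a caller might drive the interface declared above, for example from a hypothetical diagnostics handler in the same assembly; only the request/result members come from this file, everything else is an assumption:

// --- illustrative sketch, not part of this commit; assumes the usual System.Linq/System.Threading usings ---
internal static class ReconcileDebugHandler
{
    public static async Task<string> DescribeAsync(
        IRuntimeInventoryReconciler reconciler,
        string imageDigest,
        CancellationToken cancellationToken)
    {
        var result = await reconciler.ReconcileAsync(
            new RuntimeReconciliationRequest { ImageDigest = imageDigest, MaxMisses = 25 },
            cancellationToken).ConfigureAwait(false);

        if (result.ErrorCode is not null)
        {
            return $"{result.ErrorCode}: {result.ErrorMessage}";
        }

        // Misses are the interesting part: libraries observed at runtime with no SBOM counterpart.
        var missSummary = string.Join(", ", result.Misses.Select(m => m.Path));
        return $"{result.MatchCount} matched, {result.MissCount} missing ({missSummary})";
    }
}
// --- end sketch ---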
internal sealed class RuntimeInventoryReconciler : IRuntimeInventoryReconciler
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);

    private static readonly Meter ReconcileMeter = new("StellaOps.Scanner.RuntimeReconcile", "1.0.0");
    private static readonly Counter<long> ReconcileRequests = ReconcileMeter.CreateCounter<long>(
        "scanner_runtime_reconcile_requests_total",
        unit: "1",
        description: "Total runtime-static reconciliation requests processed.");
    private static readonly Counter<long> ReconcileMatches = ReconcileMeter.CreateCounter<long>(
        "scanner_runtime_reconcile_matches_total",
        unit: "1",
        description: "Total library matches between runtime and SBOM.");
    private static readonly Counter<long> ReconcileMisses = ReconcileMeter.CreateCounter<long>(
        "scanner_runtime_reconcile_misses_total",
        unit: "1",
        description: "Total runtime libraries not found in SBOM.");
    private static readonly Counter<long> ReconcileErrors = ReconcileMeter.CreateCounter<long>(
        "scanner_runtime_reconcile_errors_total",
        unit: "1",
        description: "Total reconciliation errors (no SBOM, no events, etc.).");
    private static readonly Histogram<double> ReconcileLatencyMs = ReconcileMeter.CreateHistogram<double>(
        "scanner_runtime_reconcile_latency_ms",
        unit: "ms",
        description: "Latency for runtime-static reconciliation operations.");

    private readonly RuntimeEventRepository _runtimeEventRepository;
    private readonly LinkRepository _linkRepository;
    private readonly ArtifactRepository _artifactRepository;
    private readonly IArtifactObjectStore _objectStore;
    private readonly IOptionsMonitor<ScannerStorageOptions> _storageOptions;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<RuntimeInventoryReconciler> _logger;

    public RuntimeInventoryReconciler(
        RuntimeEventRepository runtimeEventRepository,
        LinkRepository linkRepository,
        ArtifactRepository artifactRepository,
        IArtifactObjectStore objectStore,
        IOptionsMonitor<ScannerStorageOptions> storageOptions,
        TimeProvider timeProvider,
        ILogger<RuntimeInventoryReconciler> logger)
    {
        _runtimeEventRepository = runtimeEventRepository ?? throw new ArgumentNullException(nameof(runtimeEventRepository));
        _linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository));
        _artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository));
        _objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
        _storageOptions = storageOptions ?? throw new ArgumentNullException(nameof(storageOptions));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<RuntimeReconciliationResult> ReconcileAsync(
        RuntimeReconciliationRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.ImageDigest);

        var stopwatch = Stopwatch.StartNew();
        ReconcileRequests.Add(1);

        var normalizedDigest = NormalizeDigest(request.ImageDigest);
        var reconciledAt = _timeProvider.GetUtcNow();

        // Step 1: Get runtime event
        RuntimeEventDocument? runtimeEventDoc;
        if (!string.IsNullOrWhiteSpace(request.RuntimeEventId))
        {
            runtimeEventDoc = await _runtimeEventRepository.GetByEventIdAsync(
                request.RuntimeEventId,
                cancellationToken).ConfigureAwait(false);

            if (runtimeEventDoc is null)
            {
                ReconcileErrors.Add(1);
                RecordLatency(stopwatch);
                return RuntimeReconciliationResult.Error(
                    normalizedDigest,
                    "RUNTIME_EVENT_NOT_FOUND",
                    $"Runtime event '{request.RuntimeEventId}' not found.");
            }
        }
        else
        {
            var recentEvents = await _runtimeEventRepository.GetByImageDigestAsync(
                normalizedDigest,
                1,
                cancellationToken).ConfigureAwait(false);

            runtimeEventDoc = recentEvents.FirstOrDefault();
            if (runtimeEventDoc is null)
            {
                ReconcileErrors.Add(1);
                RecordLatency(stopwatch);
                return RuntimeReconciliationResult.Error(
                    normalizedDigest,
                    "NO_RUNTIME_EVENTS",
                    $"No runtime events found for image '{normalizedDigest}'.");
            }
        }

        // Step 2: Parse runtime event payload to get LoadedLibraries
        var runtimeLibraries = ParseLoadedLibraries(runtimeEventDoc.PayloadJson);
        if (runtimeLibraries.Count == 0)
        {
            _logger.LogInformation(
                "No loaded libraries in runtime event {EventId} for image {ImageDigest}",
                runtimeEventDoc.EventId,
                normalizedDigest);

            RecordLatency(stopwatch);
            return new RuntimeReconciliationResult
            {
                ImageDigest = normalizedDigest,
                RuntimeEventId = runtimeEventDoc.EventId,
                TotalRuntimeLibraries = 0,
                TotalSbomComponents = 0,
                MatchCount = 0,
                MissCount = 0,
                ReconciledAt = reconciledAt
            };
        }

        // Step 3: Get SBOM artifact for the image
        var links = await _linkRepository.ListBySourceAsync(
            LinkSourceType.Image,
            normalizedDigest,
            cancellationToken).ConfigureAwait(false);

        var sbomLink = links.FirstOrDefault(l =>
            l.ArtifactId.Contains("imagebom", StringComparison.OrdinalIgnoreCase));

        if (sbomLink is null)
        {
            _logger.LogWarning(
                "No SBOM artifact linked to image {ImageDigest}",
                normalizedDigest);

            ReconcileMisses.Add(runtimeLibraries.Count);
            ReconcileErrors.Add(1);
            RecordLatency(stopwatch);

            // Return all runtime libraries as misses since no SBOM exists
            return new RuntimeReconciliationResult
            {
                ImageDigest = normalizedDigest,
                RuntimeEventId = runtimeEventDoc.EventId,
                TotalRuntimeLibraries = runtimeLibraries.Count,
                TotalSbomComponents = 0,
                MatchCount = 0,
                MissCount = runtimeLibraries.Count,
                Misses = runtimeLibraries
                    .Take(request.MaxMisses)
                    .Select(lib => new RuntimeLibraryMiss
                    {
                        Path = lib.Path,
                        Sha256 = lib.Sha256,
                        Inode = lib.Inode
                    })
                    .ToImmutableArray(),
                ReconciledAt = reconciledAt,
                ErrorCode = "NO_SBOM",
                ErrorMessage = "No SBOM artifact linked to this image."
            };
        }

        // Step 4: Get SBOM content
        var sbomArtifact = await _artifactRepository.GetAsync(sbomLink.ArtifactId, cancellationToken).ConfigureAwait(false);
        if (sbomArtifact is null)
        {
            ReconcileErrors.Add(1);
            RecordLatency(stopwatch);
            return RuntimeReconciliationResult.Error(
                normalizedDigest,
                "SBOM_ARTIFACT_NOT_FOUND",
                $"SBOM artifact '{sbomLink.ArtifactId}' metadata not found.");
        }

        var sbomComponents = await LoadSbomComponentsAsync(sbomArtifact, cancellationToken).ConfigureAwait(false);

        // Step 5: Build lookup indexes for matching
        var sbomByPath = BuildPathIndex(sbomComponents);
        var sbomByHash = BuildHashIndex(sbomComponents);

        // Step 6: Reconcile
        var matches = new List<RuntimeLibraryMatch>();
        var misses = new List<RuntimeLibraryMiss>();

        foreach (var runtimeLib in runtimeLibraries)
        {
            var matched = TryMatchLibrary(runtimeLib, sbomByPath, sbomByHash, out var match);
            if (matched && match is not null)
            {
                matches.Add(match);
            }
            else
            {
                misses.Add(new RuntimeLibraryMiss
                {
                    Path = runtimeLib.Path,
                    Sha256 = runtimeLib.Sha256,
                    Inode = runtimeLib.Inode
                });
            }
        }

        _logger.LogInformation(
            "Reconciliation complete for image {ImageDigest}: {MatchCount} matches, {MissCount} misses out of {TotalRuntime} runtime libs",
            normalizedDigest,
            matches.Count,
            misses.Count,
            runtimeLibraries.Count);

        // Record metrics
        ReconcileMatches.Add(matches.Count);
        ReconcileMisses.Add(misses.Count);
        RecordLatency(stopwatch);

        return new RuntimeReconciliationResult
        {
            ImageDigest = normalizedDigest,
            RuntimeEventId = runtimeEventDoc.EventId,
            SbomArtifactId = sbomArtifact.Id,
            TotalRuntimeLibraries = runtimeLibraries.Count,
            TotalSbomComponents = sbomComponents.Count,
            MatchCount = matches.Count,
            MissCount = misses.Count,
            Matches = matches.ToImmutableArray(),
            Misses = misses.Take(request.MaxMisses).ToImmutableArray(),
            ReconciledAt = reconciledAt
        };
    }

    private IReadOnlyList<RuntimeLoadedLibrary> ParseLoadedLibraries(string payloadJson)
    {
        try
        {
            using var doc = JsonDocument.Parse(payloadJson);
            var root = doc.RootElement;

            // Navigate to event.loadedLibs
            if (root.TryGetProperty("event", out var eventElement) &&
                eventElement.TryGetProperty("loadedLibs", out var loadedLibsElement))
            {
                return JsonSerializer.Deserialize<List<RuntimeLoadedLibrary>>(
                    loadedLibsElement.GetRawText(),
                    JsonOptions) ?? [];
            }

            // Fallback: try loadedLibs at root level
            if (root.TryGetProperty("loadedLibs", out loadedLibsElement))
            {
                return JsonSerializer.Deserialize<List<RuntimeLoadedLibrary>>(
                    loadedLibsElement.GetRawText(),
                    JsonOptions) ?? [];
            }

            return [];
        }
        catch (JsonException ex)
        {
            _logger.LogWarning(ex, "Failed to parse loadedLibraries from runtime event payload");
            return [];
        }
    }

    private async Task<IReadOnlyList<SbomComponent>> LoadSbomComponentsAsync(
        ArtifactDocument artifact,
        CancellationToken cancellationToken)
    {
        var options = _storageOptions.CurrentValue;
        var key = ArtifactObjectKeyBuilder.Build(
            artifact.Type,
            artifact.Format,
            artifact.BytesSha256,
            options.ObjectStore.RootPrefix);

        var descriptor = new ArtifactObjectDescriptor(
            options.ObjectStore.BucketName,
            key,
            artifact.Immutable);

        await using var stream = await _objectStore.GetAsync(descriptor, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            _logger.LogWarning("SBOM artifact content not found at {Key}", key);
            return [];
        }

        try
        {
            var bom = await Serializer.DeserializeAsync(stream).ConfigureAwait(false);
            if (bom?.Components is null)
            {
                return [];
            }

            return bom.Components
                .Select(c => new SbomComponent
                {
                    BomRef = c.BomRef ?? string.Empty,
                    Name = c.Name ?? string.Empty,
                    Version = c.Version,
                    Purl = c.Purl,
                    Hashes = c.Hashes?
                        .Where(h => h.Alg == Hash.HashAlgorithm.SHA_256)
                        .Select(h => h.Content)
                        .Where(content => !string.IsNullOrWhiteSpace(content))
                        .ToList() ?? [],
                    FilePaths = ExtractFilePaths(c)
                })
                .ToList();
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to deserialize SBOM from artifact {ArtifactId}", artifact.Id);
            return [];
        }
    }

    private static IReadOnlyList<string> ExtractFilePaths(Component component)
    {
        var paths = new List<string>();

        // Extract from evidence.occurrences
        if (component.Evidence?.Occurrences is { } occurrences)
        {
            foreach (var occurrence in occurrences)
            {
                if (!string.IsNullOrWhiteSpace(occurrence.Location))
                {
                    paths.Add(occurrence.Location);
                }
            }
        }

        // Extract from properties with specific names
        if (component.Properties is { } props)
        {
            foreach (var prop in props)
            {
                if (prop.Name is "stellaops:file.path" or "cdx:file:path" &&
                    !string.IsNullOrWhiteSpace(prop.Value))
                {
                    paths.Add(prop.Value);
                }
            }
        }

        return paths;
    }

    private static Dictionary<string, SbomComponent> BuildPathIndex(IReadOnlyList<SbomComponent> components)
    {
        var index = new Dictionary<string, SbomComponent>(StringComparer.Ordinal);

        foreach (var component in components)
        {
            foreach (var path in component.FilePaths)
            {
                var normalizedPath = NormalizePath(path);
                index.TryAdd(normalizedPath, component);
            }
        }

        return index;
    }

    private static Dictionary<string, SbomComponent> BuildHashIndex(IReadOnlyList<SbomComponent> components)
    {
        var index = new Dictionary<string, SbomComponent>(StringComparer.OrdinalIgnoreCase);

        foreach (var component in components)
        {
            foreach (var hash in component.Hashes)
            {
                var normalizedHash = NormalizeHash(hash);
                index.TryAdd(normalizedHash, component);
            }
        }

        return index;
    }

    private static bool TryMatchLibrary(
        RuntimeLoadedLibrary runtimeLib,
        Dictionary<string, SbomComponent> pathIndex,
        Dictionary<string, SbomComponent> hashIndex,
        out RuntimeLibraryMatch? match)
    {
        match = null;

        // Try hash match first (most reliable)
        if (!string.IsNullOrWhiteSpace(runtimeLib.Sha256))
        {
            var normalizedHash = NormalizeHash(runtimeLib.Sha256);
            if (hashIndex.TryGetValue(normalizedHash, out var componentByHash))
            {
                match = new RuntimeLibraryMatch
                {
                    RuntimePath = runtimeLib.Path,
                    RuntimeSha256 = runtimeLib.Sha256,
                    SbomComponentKey = componentByHash.BomRef,
                    SbomComponentName = componentByHash.Name,
                    MatchType = "sha256"
                };
                return true;
            }
        }

        // Try path match
        var normalizedPath = NormalizePath(runtimeLib.Path);
        if (pathIndex.TryGetValue(normalizedPath, out var componentByPath))
        {
            match = new RuntimeLibraryMatch
            {
                RuntimePath = runtimeLib.Path,
                RuntimeSha256 = runtimeLib.Sha256,
                SbomComponentKey = componentByPath.BomRef,
                SbomComponentName = componentByPath.Name,
                MatchType = "path"
            };
            return true;
        }

        // Try matching by filename only (less strict)
        var fileName = Path.GetFileName(runtimeLib.Path);
        if (!string.IsNullOrWhiteSpace(fileName))
        {
            foreach (var component in pathIndex.Values)
            {
                if (component.FilePaths.Any(p => Path.GetFileName(p).Equals(fileName, StringComparison.Ordinal)))
                {
                    match = new RuntimeLibraryMatch
                    {
                        RuntimePath = runtimeLib.Path,
                        RuntimeSha256 = runtimeLib.Sha256,
                        SbomComponentKey = component.BomRef,
                        SbomComponentName = component.Name,
                        MatchType = "filename"
                    };
                    return true;
                }
            }
        }

        return false;
    }

    private static string NormalizeDigest(string digest)
    {
        var trimmed = digest.Trim();
        return trimmed.ToLowerInvariant();
    }

    private static string NormalizePath(string path)
    {
        // Normalize to forward slashes and trim
        return path.Trim().Replace('\\', '/');
    }

    private static string NormalizeHash(string hash)
    {
        // Remove "sha256:" prefix if present and normalize to lowercase
        var trimmed = hash.Trim();
        if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            trimmed = trimmed["sha256:".Length..];
        }
        return trimmed.ToLowerInvariant();
    }

    private static void RecordLatency(Stopwatch stopwatch)
    {
        stopwatch.Stop();
        ReconcileLatencyMs.Record(stopwatch.Elapsed.TotalMilliseconds);
    }

    private sealed record SbomComponent
    {
        public required string BomRef { get; init; }
        public required string Name { get; init; }
        public string? Version { get; init; }
        public string? Purl { get; init; }
        public IReadOnlyList<string> Hashes { get; init; } = [];
        public IReadOnlyList<string> FilePaths { get; init; } = [];
    }
}
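TryMatchLibrary above applies a fixed precedence: SHA-256 first, then normalized path, then filename only. The stand-alone snippet below replays the first two tiers with the same normalization rules (lower-case hex without the sha256: prefix, forward slashes); the dictionaries and package names are made up:

// --- illustrative sketch, not part of this commit ---
using System;
using System.Collections.Generic;

static class MatchPrecedenceDemo
{
    static string NormalizeHash(string hash)
    {
        var h = hash.Trim();
        if (h.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            h = h["sha256:".Length..];
        }
        return h.ToLowerInvariant();
    }

    static string NormalizePath(string path) => path.Trim().Replace('\\', '/');

    public static void Main()
    {
        var byHash = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            [NormalizeHash("sha256:AB12")] = "pkg:deb/openssl@3.0.13",   // made-up component
        };
        var byPath = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            [NormalizePath(@"usr\lib\libssl.so.3")] = "pkg:deb/openssl@3.0.13",
        };

        // 1. A hash match wins when the runtime probe captured a digest...
        Console.WriteLine(byHash.TryGetValue(NormalizeHash("AB12"), out var m1) ? $"sha256 -> {m1}" : "miss");
        // 2. ...otherwise fall back to the normalized path...
        Console.WriteLine(byPath.TryGetValue(NormalizePath("usr/lib/libssl.so.3"), out var m2) ? $"path   -> {m2}" : "miss");
        // 3. ...and only then to the filename-only comparison, the least strict tier (not shown).
    }
}
// --- end sketch ---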
@@ -9,6 +9,7 @@
    <RootNamespace>StellaOps.Scanner.WebService</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="CycloneDX.Core" Version="10.0.1" />
    <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
    <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
    <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />

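The new CycloneDX.Core reference is what backs LoadSbomComponentsAsync above. A small stand-alone probe using the same DeserializeAsync call the service makes, just against a local file; the path is an example and the filtering mirrors the SHA-256 hash selection:

// --- illustrative sketch, not part of this commit ---
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using CycloneDX.Json;
using CycloneDX.Models;

static class BomProbe
{
    public static async Task Main()
    {
        // Hypothetical input path; any CycloneDX JSON SBOM works.
        await using var stream = File.OpenRead("imagebom.cdx.json");
        var bom = await Serializer.DeserializeAsync(stream);

        var withSha256 = bom?.Components?
            .Count(c => c.Hashes?.Any(h => h.Alg == Hash.HashAlgorithm.SHA_256) == true) ?? 0;

        Console.WriteLine($"components: {bom?.Components?.Count ?? 0}, with sha256: {withSha256}");
    }
}
// --- end sketch ---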
@@ -1,9 +1,15 @@
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;
using System.Security.Cryptography;
using System.Text.Json;
using StellaOps.Scanner.Analyzers.Lang.DotNet.Internal.Bundling;

namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal;

/// <summary>
/// Resolves publish artifacts (deps/runtimeconfig) into deterministic entrypoint identities.
/// Per SCANNER-ANALYZERS-LANG-11-001: maps project/publish artifacts to normalized entrypoint records
/// with assembly name, MVID, TFM, RID, host kind, publish mode, ALC hints, and probing paths.
/// </summary>
public static class DotNetEntrypointResolver
{
@@ -46,6 +52,7 @@ public static class DotNetEntrypointResolver
            }

            var name = GetEntrypointName(depsPath);
            var directory = Path.GetDirectoryName(depsPath) ?? ".";

            DotNetRuntimeConfig? runtimeConfig = null;
            var runtimeConfigPath = GetRuntimeConfigPath(depsPath, name);
@@ -61,16 +68,51 @@ public static class DotNetEntrypointResolver
            var rids = CollectRuntimeIdentifiers(depsFile, runtimeConfig);
            var publishKind = DeterminePublishKind(depsFile);

            var id = BuildDeterministicId(name, tfms, rids, publishKind);
            // Resolve assembly and apphost paths
            var (assemblyPath, apphostPath) = ResolveEntrypointPaths(directory, name);

            // Extract MVID from PE header (11-001 requirement)
            var mvid = ExtractMvid(assemblyPath);

            // Compute SHA-256 hash over assembly bytes (11-001 requirement)
            var (hash, fileSize) = ComputeHashAndSize(assemblyPath);

            // Determine host kind: apphost, framework-dependent, self-contained (11-001 requirement)
            var hostKind = DetermineHostKind(apphostPath, publishKind);

            // Determine publish mode: single-file, trimmed, normal (11-001 requirement)
            var publishMode = DeterminePublishMode(apphostPath, depsFile, directory);

            // Collect ALC hints from runtimeconfig.dev.json (11-001 requirement)
            var alcHints = CollectAlcHints(directory, name);

            // Collect probing paths from runtimeconfig files (11-001 requirement)
            var probingPaths = CollectProbingPaths(directory, name);

            // Collect native dependencies for apphost bundles (11-001 requirement)
            var nativeDeps = CollectNativeDependencies(apphostPath, publishMode);

            var id = BuildDeterministicId(name, tfms, rids, publishKind, mvid);

            results.Add(new DotNetEntrypoint(
                Id: id,
                Name: name,
                AssemblyName: Path.GetFileName(assemblyPath ?? $"{name}.dll"),
                Mvid: mvid,
                TargetFrameworks: tfms,
                RuntimeIdentifiers: rids,
                HostKind: hostKind,
                PublishKind: publishKind,
                PublishMode: publishMode,
                AlcHints: alcHints,
                ProbingPaths: probingPaths,
                NativeDependencies: nativeDeps,
                Hash: hash,
                FileSizeBytes: fileSize,
                RelativeDepsPath: relativeDepsPath,
                RelativeRuntimeConfigPath: relativeRuntimeConfig,
                PublishKind: publishKind));
                RelativeAssemblyPath: assemblyPath is not null ? NormalizeRelative(context.GetRelativePath(assemblyPath)) : null,
                RelativeApphostPath: apphostPath is not null ? NormalizeRelative(context.GetRelativePath(apphostPath)) : null));
        }
        catch (IOException)
        {
@@ -89,6 +131,292 @@ public static class DotNetEntrypointResolver
        return ValueTask.FromResult<IReadOnlyList<DotNetEntrypoint>>(results);
    }

    private static (string? assemblyPath, string? apphostPath) ResolveEntrypointPaths(string directory, string name)
    {
        string? assemblyPath = null;
        string? apphostPath = null;

        // Look for main assembly (.dll)
        var dllPath = Path.Combine(directory, $"{name}.dll");
        if (File.Exists(dllPath))
        {
            assemblyPath = dllPath;
        }

        // Look for apphost executable (.exe on Windows, no extension on Unix)
        var exePath = Path.Combine(directory, $"{name}.exe");
        if (File.Exists(exePath))
        {
            apphostPath = exePath;
        }
        else
        {
            // Check for Unix-style executable (no extension)
            var unixExePath = Path.Combine(directory, name);
            if (File.Exists(unixExePath) && !unixExePath.EndsWith(".dll", StringComparison.OrdinalIgnoreCase))
            {
                apphostPath = unixExePath;
            }
        }

        return (assemblyPath, apphostPath);
    }

    private static Guid? ExtractMvid(string? assemblyPath)
    {
        if (string.IsNullOrEmpty(assemblyPath) || !File.Exists(assemblyPath))
        {
            return null;
        }

        try
        {
            using var stream = File.OpenRead(assemblyPath);
            using var peReader = new PEReader(stream);

            if (!peReader.HasMetadata)
            {
                return null;
            }

            var metadataReader = peReader.GetMetadataReader();
            var moduleDefinition = metadataReader.GetModuleDefinition();
            return metadataReader.GetGuid(moduleDefinition.Mvid);
        }
        catch (BadImageFormatException)
        {
            return null;
        }
        catch (InvalidOperationException)
        {
            return null;
        }
    }

    private static (string? hash, long fileSize) ComputeHashAndSize(string? assemblyPath)
    {
        if (string.IsNullOrEmpty(assemblyPath) || !File.Exists(assemblyPath))
        {
            return (null, 0);
        }

        try
        {
            using var stream = File.OpenRead(assemblyPath);
            var fileSize = stream.Length;
            var hashBytes = SHA256.HashData(stream);
            var hash = $"sha256:{Convert.ToHexStringLower(hashBytes)}";
            return (hash, fileSize);
        }
        catch (IOException)
        {
            return (null, 0);
        }
    }

    private static DotNetHostKind DetermineHostKind(string? apphostPath, DotNetPublishKind publishKind)
    {
        if (!string.IsNullOrEmpty(apphostPath) && File.Exists(apphostPath))
        {
            return DotNetHostKind.Apphost;
        }

        return publishKind switch
        {
            DotNetPublishKind.SelfContained => DotNetHostKind.SelfContained,
            DotNetPublishKind.FrameworkDependent => DotNetHostKind.FrameworkDependent,
            _ => DotNetHostKind.Unknown
        };
    }

    private static DotNetPublishMode DeterminePublishMode(string? apphostPath, DotNetDepsFile depsFile, string directory)
    {
        // Check for single-file bundle
        if (!string.IsNullOrEmpty(apphostPath) && File.Exists(apphostPath))
        {
            var singleFileResult = SingleFileAppDetector.Analyze(apphostPath);
            if (singleFileResult.IsSingleFile)
            {
                return DotNetPublishMode.SingleFile;
            }
        }

        // Check for trimmed publish (look for trim markers or reduced dependency count)
        var trimmedMarkerPath = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(apphostPath ?? "app")}.staticwebassets.runtime.json");
        if (File.Exists(trimmedMarkerPath))
        {
            return DotNetPublishMode.Trimmed;
        }

        // Check deps.json for trimmed indicators
        foreach (var library in depsFile.Libraries.Values)
        {
            if (library.Id.Contains("ILLink", StringComparison.OrdinalIgnoreCase) ||
                library.Id.Contains("Trimmer", StringComparison.OrdinalIgnoreCase))
            {
                return DotNetPublishMode.Trimmed;
            }
        }

        return DotNetPublishMode.Normal;
    }

    private static IReadOnlyCollection<string> CollectAlcHints(string directory, string name)
    {
        var hints = new SortedSet<string>(StringComparer.Ordinal);

        // Check runtimeconfig.dev.json for ALC hints
        var devConfigPath = Path.Combine(directory, $"{name}.runtimeconfig.dev.json");
        if (File.Exists(devConfigPath))
        {
            try
            {
                var json = File.ReadAllText(devConfigPath);
                using var doc = JsonDocument.Parse(json, new JsonDocumentOptions
                {
                    AllowTrailingCommas = true,
                    CommentHandling = JsonCommentHandling.Skip
                });

                if (doc.RootElement.TryGetProperty("runtimeOptions", out var runtimeOptions))
                {
                    // Look for additionalProbingPaths which indicate ALC usage
                    if (runtimeOptions.TryGetProperty("additionalProbingPaths", out var probingPaths) &&
                        probingPaths.ValueKind == JsonValueKind.Array)
                    {
                        foreach (var path in probingPaths.EnumerateArray())
                        {
                            if (path.ValueKind == JsonValueKind.String)
                            {
                                var pathValue = path.GetString();
                                if (!string.IsNullOrWhiteSpace(pathValue))
                                {
                                    // Extract ALC hint from path pattern
                                    if (pathValue.Contains(".nuget", StringComparison.OrdinalIgnoreCase))
                                    {
                                        hints.Add("NuGetAssemblyLoadContext");
                                    }
                                    else if (pathValue.Contains("sdk", StringComparison.OrdinalIgnoreCase))
                                    {
                                        hints.Add("SdkAssemblyLoadContext");
                                    }
                                }
                            }
                        }
                    }
                }
            }
            catch (JsonException)
            {
                // Ignore malformed dev config
            }
            catch (IOException)
            {
                // Ignore read errors
            }
        }

        // Add default ALC hint
        if (hints.Count == 0)
        {
            hints.Add("Default");
        }

        return hints;
    }

    private static IReadOnlyCollection<string> CollectProbingPaths(string directory, string name)
    {
        var paths = new SortedSet<string>(StringComparer.Ordinal);

        // Check runtimeconfig.dev.json for probing paths
        var devConfigPath = Path.Combine(directory, $"{name}.runtimeconfig.dev.json");
        if (File.Exists(devConfigPath))
        {
            try
            {
                var json = File.ReadAllText(devConfigPath);
                using var doc = JsonDocument.Parse(json, new JsonDocumentOptions
                {
                    AllowTrailingCommas = true,
                    CommentHandling = JsonCommentHandling.Skip
                });

                if (doc.RootElement.TryGetProperty("runtimeOptions", out var runtimeOptions) &&
                    runtimeOptions.TryGetProperty("additionalProbingPaths", out var probingPaths) &&
                    probingPaths.ValueKind == JsonValueKind.Array)
                {
                    foreach (var path in probingPaths.EnumerateArray())
                    {
                        if (path.ValueKind == JsonValueKind.String)
                        {
                            var pathValue = path.GetString();
                            if (!string.IsNullOrWhiteSpace(pathValue))
                            {
                                // Normalize and add the probing path
                                paths.Add(NormalizeRelative(pathValue));
                            }
                        }
                    }
                }
            }
            catch (JsonException)
            {
                // Ignore malformed dev config
            }
            catch (IOException)
            {
                // Ignore read errors
            }
        }

        return paths;
    }

    private static IReadOnlyCollection<string> CollectNativeDependencies(string? apphostPath, DotNetPublishMode publishMode)
    {
        var nativeDeps = new SortedSet<string>(StringComparer.Ordinal);

        if (publishMode != DotNetPublishMode.SingleFile || string.IsNullOrEmpty(apphostPath))
        {
            return nativeDeps;
        }

        // For single-file apps, try to extract bundled native library names
        // This is a simplified detection - full extraction would require parsing the bundle manifest
        var directory = Path.GetDirectoryName(apphostPath);
        if (string.IsNullOrEmpty(directory))
        {
            return nativeDeps;
        }

        // Look for extracted native libraries (some single-file apps extract natives at runtime)
        var nativePatterns = new[] { "*.so", "*.dylib", "*.dll" };
        foreach (var pattern in nativePatterns)
        {
            try
            {
                foreach (var nativePath in Directory.EnumerateFiles(directory, pattern))
                {
                    var fileName = Path.GetFileName(nativePath);
                    // Filter out managed assemblies
                    if (!fileName.Equals(Path.GetFileName(apphostPath), StringComparison.OrdinalIgnoreCase) &&
                        !fileName.EndsWith(".deps.json", StringComparison.OrdinalIgnoreCase) &&
                        !fileName.EndsWith(".runtimeconfig.json", StringComparison.OrdinalIgnoreCase))
                    {
                        nativeDeps.Add(fileName);
                    }
                }
            }
            catch (IOException)
            {
                // Ignore enumeration errors
            }
        }

        return nativeDeps;
    }

    private static string GetEntrypointName(string depsPath)
    {
        // Strip .json then any trailing .deps suffix to yield a logical entrypoint name.
@@ -273,12 +601,14 @@ public static class DotNetEntrypointResolver
        string name,
        IReadOnlyCollection<string> tfms,
        IReadOnlyCollection<string> rids,
        DotNetPublishKind publishKind)
        DotNetPublishKind publishKind,
        Guid? mvid)
    {
        var tfmPart = tfms.Count == 0 ? "unknown" : string.Join('+', tfms.OrderBy(t => t, StringComparer.OrdinalIgnoreCase));
        var ridPart = rids.Count == 0 ? "none" : string.Join('+', rids.OrderBy(r => r, StringComparer.OrdinalIgnoreCase));
        var publishPart = publishKind.ToString().ToLowerInvariant();
        return $"{name}:{tfmPart}:{ridPart}:{publishPart}";
        var mvidPart = mvid?.ToString("N") ?? "no-mvid";
        return $"{name}:{tfmPart}:{ridPart}:{publishPart}:{mvidPart}";
    }

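For reference, the identity produced after this change is name:tfmPart:ridPart:publishPart:mvidPart. The throwaway snippet below recomputes it for made-up inputs so the ordering, the no-mvid fallback, and the "N" GUID formatting are easy to see:

// --- illustrative sketch, not part of this commit ---
using System;
using System.Linq;

static class EntrypointIdDemo
{
    public static void Main()
    {
        var name = "MyApp";                         // hypothetical entrypoint name
        var tfms = new[] { "net8.0" };
        var rids = new[] { "linux-x64" };
        var publishPart = "frameworkdependent";
        Guid? mvid = Guid.Parse("0d9c7a3f-0e4b-4c8e-9a51-2f6d1c3b7e90"); // hypothetical MVID

        var tfmPart = tfms.Length == 0 ? "unknown" : string.Join('+', tfms.OrderBy(t => t, StringComparer.OrdinalIgnoreCase));
        var ridPart = rids.Length == 0 ? "none" : string.Join('+', rids.OrderBy(r => r, StringComparer.OrdinalIgnoreCase));
        var mvidPart = mvid?.ToString("N") ?? "no-mvid";

        Console.WriteLine($"{name}:{tfmPart}:{ridPart}:{publishPart}:{mvidPart}");
        // MyApp:net8.0:linux-x64:frameworkdependent:0d9c7a3f0e4b4c8e9a512f6d1c3b7e90
    }
}
// --- end sketch ---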
    private static string NormalizeRelative(string path)
@@ -293,18 +623,84 @@ public static class DotNetEntrypointResolver
    }
}

/// <summary>
/// Represents a resolved .NET entrypoint with deterministic identity per SCANNER-ANALYZERS-LANG-11-001.
/// </summary>
public sealed record DotNetEntrypoint(
    /// <summary>Deterministic identifier: name:tfms:rids:publishKind:mvid</summary>
    string Id,
    /// <summary>Logical entrypoint name derived from deps.json</summary>
    string Name,
    /// <summary>Assembly file name (e.g., "MyApp.dll")</summary>
    string AssemblyName,
    /// <summary>Module Version ID from PE metadata (deterministic per build)</summary>
    Guid? Mvid,
    /// <summary>Target frameworks (normalized, e.g., "net8.0")</summary>
    IReadOnlyCollection<string> TargetFrameworks,
    /// <summary>Runtime identifiers (e.g., "linux-x64", "win-x64")</summary>
    IReadOnlyCollection<string> RuntimeIdentifiers,
    /// <summary>Host kind: apphost, framework-dependent, self-contained</summary>
    DotNetHostKind HostKind,
    /// <summary>Publish kind from deps.json analysis</summary>
    DotNetPublishKind PublishKind,
    /// <summary>Publish mode: normal, single-file, trimmed</summary>
    DotNetPublishMode PublishMode,
    /// <summary>AssemblyLoadContext hints from runtimeconfig.dev.json</summary>
    IReadOnlyCollection<string> AlcHints,
    /// <summary>Additional probing paths from runtimeconfig.dev.json</summary>
    IReadOnlyCollection<string> ProbingPaths,
    /// <summary>Native dependencies for single-file bundles</summary>
    IReadOnlyCollection<string> NativeDependencies,
    /// <summary>SHA-256 hash of assembly bytes (sha256:hex)</summary>
    string? Hash,
    /// <summary>Assembly file size in bytes</summary>
    long FileSizeBytes,
    /// <summary>Relative path to deps.json</summary>
    string RelativeDepsPath,
    /// <summary>Relative path to runtimeconfig.json</summary>
    string? RelativeRuntimeConfigPath,
    DotNetPublishKind PublishKind);
    /// <summary>Relative path to main assembly (.dll)</summary>
    string? RelativeAssemblyPath,
    /// <summary>Relative path to apphost executable</summary>
    string? RelativeApphostPath);

/// <summary>
/// .NET host kind classification per SCANNER-ANALYZERS-LANG-11-001.
/// </summary>
public enum DotNetHostKind
{
    /// <summary>Host kind could not be determined</summary>
    Unknown = 0,
    /// <summary>Application uses apphost executable</summary>
    Apphost = 1,
    /// <summary>Framework-dependent deployment (requires shared runtime)</summary>
    FrameworkDependent = 2,
    /// <summary>Self-contained deployment (includes runtime)</summary>
    SelfContained = 3
}

/// <summary>
/// .NET publish kind from deps.json analysis.
/// </summary>
public enum DotNetPublishKind
{
    /// <summary>Publish kind could not be determined</summary>
    Unknown = 0,
    /// <summary>Framework-dependent (relies on shared .NET runtime)</summary>
    FrameworkDependent = 1,
    /// <summary>Self-contained (includes .NET runtime)</summary>
    SelfContained = 2
}

/// <summary>
/// .NET publish mode per SCANNER-ANALYZERS-LANG-11-001.
/// </summary>
public enum DotNetPublishMode
{
    /// <summary>Normal publish (separate files)</summary>
    Normal = 0,
    /// <summary>Single-file publish (assemblies bundled into executable)</summary>
    SingleFile = 1,
    /// <summary>Trimmed publish (unused code removed)</summary>
    Trimmed = 2
}

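The MVID that anchors the new deterministic id can be spot-checked outside the analyzer with the same System.Reflection.Metadata calls ExtractMvid uses; a minimal command-line sketch (the default path is an example):

// --- illustrative sketch, not part of this commit ---
using System;
using System.IO;
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;

static class MvidProbe
{
    public static void Main(string[] args)
    {
        var path = args.Length > 0 ? args[0] : "MyApp.dll"; // hypothetical input
        using var stream = File.OpenRead(path);
        using var pe = new PEReader(stream);

        if (!pe.HasMetadata)
        {
            Console.WriteLine("no CLI metadata (native or resource-only file)");
            return;
        }

        MetadataReader md = pe.GetMetadataReader();
        Guid mvid = md.GetGuid(md.GetModuleDefinition().Mvid);
        Console.WriteLine($"{Path.GetFileName(path)} MVID: {mvid:N}");
    }
}
// --- end sketch ---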